diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index 09f3bb8d07a..bd497967491 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -130,7 +130,7 @@ jobs: - name: Check formatting run: | - ruff format --exclude posthog/hogql/grammar --check --diff . + ruff format --check --diff . - name: Add Problem Matcher run: echo "::add-matcher::.github/mypy-problem-matcher.json" diff --git a/ee/clickhouse/queries/event_query.py b/ee/clickhouse/queries/event_query.py index 259b4c48947..b1b4dbb695e 100644 --- a/ee/clickhouse/queries/event_query.py +++ b/ee/clickhouse/queries/event_query.py @@ -33,13 +33,19 @@ class EnterpriseEventQuery(EventQuery): should_join_distinct_ids=False, should_join_persons=False, # Extra events/person table columns to fetch since parent query needs them - extra_fields: List[ColumnName] = [], - extra_event_properties: List[PropertyName] = [], - extra_person_fields: List[ColumnName] = [], + extra_fields: Optional[List[ColumnName]] = None, + extra_event_properties: Optional[List[PropertyName]] = None, + extra_person_fields: Optional[List[ColumnName]] = None, override_aggregate_users_by_distinct_id: Optional[bool] = None, person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled, **kwargs, ) -> None: + if extra_person_fields is None: + extra_person_fields = [] + if extra_event_properties is None: + extra_event_properties = [] + if extra_fields is None: + extra_fields = [] super().__init__( filter=filter, team=team, diff --git a/ee/clickhouse/queries/funnels/funnel_correlation.py b/ee/clickhouse/queries/funnels/funnel_correlation.py index 3ca6801ee6a..ed3995968a0 100644 --- a/ee/clickhouse/queries/funnels/funnel_correlation.py +++ b/ee/clickhouse/queries/funnels/funnel_correlation.py @@ -868,9 +868,9 @@ class FunnelCorrelation: # Get the total success/failure counts from the results results = [result for result in results_with_total if result[0] != self.TOTAL_IDENTIFIER] - 
_, success_total, failure_total = [ + _, success_total, failure_total = next( result for result in results_with_total if result[0] == self.TOTAL_IDENTIFIER - ][0] + ) # Add a little structure, and keep it close to the query definition so it's # obvious what's going on with result indices. diff --git a/ee/clickhouse/queries/test/test_cohort_query.py b/ee/clickhouse/queries/test/test_cohort_query.py index 25d0b92ed86..95c1e6837b3 100644 --- a/ee/clickhouse/queries/test/test_cohort_query.py +++ b/ee/clickhouse/queries/test/test_cohort_query.py @@ -27,8 +27,10 @@ def _make_event_sequence( interval_days, period_event_counts, event="$pageview", - properties={}, + properties=None, ): + if properties is None: + properties = {} for period_index, event_count in enumerate(period_event_counts): for i in range(event_count): _create_event( diff --git a/ee/clickhouse/views/test/test_clickhouse_retention.py b/ee/clickhouse/views/test/test_clickhouse_retention.py index f64aa17ca58..0e5a8ad0faf 100644 --- a/ee/clickhouse/views/test/test_clickhouse_retention.py +++ b/ee/clickhouse/views/test/test_clickhouse_retention.py @@ -592,7 +592,7 @@ class BreakdownTests(APIBaseTest, ClickhouseTestMixin): ), ) - chrome_cohort = [cohort for cohort in retention["result"] if cohort["label"] == "Chrome"][0] + chrome_cohort = next(cohort for cohort in retention["result"] if cohort["label"] == "Chrome") people_url = chrome_cohort["values"][0]["people_url"] people_response = self.client.get(people_url) assert people_response.status_code == 200 diff --git a/ee/models/license.py b/ee/models/license.py index d1b575ec801..f0e12d3d2f4 100644 --- a/ee/models/license.py +++ b/ee/models/license.py @@ -72,7 +72,8 @@ class License(models.Model): ] ENTERPRISE_PLAN = "enterprise" - ENTERPRISE_FEATURES = SCALE_FEATURES + [ + ENTERPRISE_FEATURES = [ + *SCALE_FEATURES, AvailableFeature.ADVANCED_PERMISSIONS, AvailableFeature.PROJECT_BASED_PERMISSIONING, AvailableFeature.SAML, diff --git a/ee/settings.py 
b/ee/settings.py index 448c9ef67aa..7342bdf98f9 100644 --- a/ee/settings.py +++ b/ee/settings.py @@ -1,6 +1,7 @@ """ Django settings for PostHog Enterprise Edition. """ + import os from typing import Dict, List @@ -15,7 +16,8 @@ HOOK_EVENTS: Dict[str, str] = { } # SSO -AUTHENTICATION_BACKENDS = AUTHENTICATION_BACKENDS + [ +AUTHENTICATION_BACKENDS = [ + *AUTHENTICATION_BACKENDS, "ee.api.authentication.MultitenantSAMLAuth", "social_core.backends.google.GoogleOAuth2", ] diff --git a/manage.py b/manage.py index 80de7377615..09efd7a625a 100755 --- a/manage.py +++ b/manage.py @@ -1,5 +1,6 @@ #!/usr/bin/env python """Django's command-line utility for administrative tasks.""" + import os import sys diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 58a2acbea7c..5a2ab24ae12 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -2,10 +2,6 @@ posthog/temporal/common/utils.py:0: error: Argument 1 to "abstractclassmethod" h posthog/temporal/common/utils.py:0: note: This is likely because "from_activity" has named arguments: "cls". 
Consider marking them positional-only posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type] posthog/temporal/data_imports/pipelines/zendesk/talk_api.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "str") [assignment] -posthog/hogql/database/argmax.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql/database/argmax.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/argmax.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/database/argmax.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator] posthog/hogql/database/schema/numbers.py:0: error: Incompatible types in assignment (expression has type "dict[str, IntegerDatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] posthog/hogql/database/schema/numbers.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/numbers.py:0: note: Consider using "Mapping" instead, which is covariant in the value type @@ -51,14 +47,6 @@ posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incomp posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incompatible type "Expr | None"; expected "AST" [arg-type] posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incompatible type "WindowFrameExpr | None"; expected "AST" [arg-type] posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incompatible type "WindowFrameExpr | None"; expected "AST" [arg-type] -posthog/hogql/database/schema/log_entries.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | 
int]" [arg-type] -posthog/hogql/database/schema/log_entries.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/schema/log_entries.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/database/schema/log_entries.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator] -posthog/hogql/database/schema/log_entries.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql/database/schema/log_entries.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/schema/log_entries.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/database/schema/log_entries.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator] posthog/hogql/database/schema/groups.py:0: error: Incompatible types in assignment (expression has type "dict[str, DatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] posthog/hogql/database/schema/groups.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instead, which is covariant in the value type @@ -76,18 +64,6 @@ posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] posthog/hogql/parser.py:0: error: Statement is unreachable [unreachable] posthog/hogql/database/schema/person_distinct_ids.py:0: error: Argument 1 to "select_from_person_distinct_ids_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, list[str | int]]" [arg-type] posthog/hogql/database/schema/person_distinct_id_overrides.py:0: error: Argument 1 to "select_from_person_distinct_id_overrides_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, 
list[str | int]]" [arg-type] -posthog/hogql/database/schema/cohort_people.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql/database/schema/cohort_people.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/schema/cohort_people.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/database/schema/cohort_people.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator] -posthog/hogql/database/schema/session_replay_events.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql/database/schema/session_replay_events.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/schema/session_replay_events.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/database/schema/session_replay_events.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator] -posthog/hogql/database/schema/session_replay_events.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql/database/schema/session_replay_events.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/schema/session_replay_events.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/database/schema/session_replay_events.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator] posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable] posthog/plugins/utils.py:0: error: Statement is unreachable [unreachable] 
posthog/models/filters/base_filter.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined] @@ -292,9 +268,6 @@ posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has inc posthog/hogql/property.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] posthog/hogql/property.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/property.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql/property.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql/property.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/property.py:0: note: Consider using "Sequence" instead, which is covariant posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc] posthog/hogql/filters.py:0: error: Incompatible default for argument "team" (default has type "None", argument has type "Team") [assignment] posthog/hogql/filters.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True @@ -329,9 +302,11 @@ posthog/queries/funnels/base.py:0: error: "HogQLContext" has no attribute "perso posthog/queries/funnels/base.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type] ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unreachable [unreachable] posthog/caching/calculate_results.py:0: error: Argument 3 to "process_query" has incompatible type "bool"; expected "LimitContext | None" [arg-type] +posthog/api/person.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/api/person.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] +posthog/api/person.py:0: error: Cannot determine type of "group_properties_filter_group" [has-type] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Argument 1 to "append" of "list" has incompatible type "EventPropertyFilter"; expected "Expr" [arg-type] posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Signature of "to_actors_query" incompatible with supertype "QueryRunner" [override] posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: Superclass: @@ -373,6 +348,7 @@ posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompat posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompatible types in assignment (expression has type "Filter", variable has type "RetentionFilter") [assignment] posthog/api/insight.py:0: error: Argument 1 to 
"is_insight_with_hogql_support" has incompatible type "Insight | DashboardTile"; expected "Insight" [arg-type] posthog/api/insight.py:0: error: Argument 1 to "process_insight" has incompatible type "Insight | DashboardTile"; expected "Insight" [arg-type] +posthog/api/insight.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/api/dashboards/dashboard.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] posthog/api/feature_flag.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] posthog/api/feature_flag.py:0: error: Item "Sequence[Any]" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr] @@ -504,9 +480,6 @@ posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "f posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr] posthog/hogql/test/test_resolver.py:0: error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | Any | None"; expected "Expr" [arg-type] posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr] -posthog/hogql/test/test_property.py:0: error: Incompatible default for argument "placeholders" (default has type "None", argument has type "dict[str, Any]") [assignment] -posthog/hogql/test/test_property.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True -posthog/hogql/test/test_property.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase posthog/hogql/test/test_property.py:0: error: Argument 1 to "_property_to_expr" of "TestProperty" has incompatible type "HogQLPropertyFilter"; expected "PropertyGroup | Property | dict[Any, Any] | list[Any]" [arg-type] posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type] posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type] @@ -526,12 +499,6 @@ posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type fo posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type for in ("str | None") [operator] posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type for in ("str | None") [operator] posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type for in ("str | None") [operator] -posthog/hogql/test/test_filters.py:0: error: Incompatible default for argument "placeholders" (default has type "None", argument has type "dict[str, Any]") [assignment] -posthog/hogql/test/test_filters.py:0: note: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True -posthog/hogql/test/test_filters.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase -posthog/hogql/test/test_filters.py:0: error: Incompatible default for argument "placeholders" (default has type "None", argument has type "dict[str, Any]") [assignment] -posthog/hogql/test/test_filters.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True -posthog/hogql/test/test_filters.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase posthog/hogql/test/_test_parser.py:0: error: Invalid base class [misc] posthog/hogql/test/_test_parser.py:0: error: Argument "table" to "JoinExpr" has incompatible type "Placeholder"; expected "SelectQuery | SelectUnionQuery | Field | None" [arg-type] posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr] @@ -551,6 +518,7 @@ posthog/hogql/database/schema/test/test_channel_type.py:0: error: Value of type posthog/hogql/database/schema/test/test_channel_type.py:0: error: Value of type "list[Any] | None" is not indexable [index] posthog/hogql/database/schema/event_sessions.py:0: error: Statement is unreachable [unreachable] posthog/api/organization_member.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] +posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] ee/api/role.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] ee/clickhouse/views/insights.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 6 has incompatible type "ExternalDataSchema"; expected "str" [arg-type] @@ -663,6 +631,7 @@ posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any posthog/api/property_definition.py:0: error: Incompatible types in assignment (expression has type "type[EnterprisePropertyDefinitionSerializer]", variable has type "type[PropertyDefinitionSerializer]") 
[assignment] posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr] posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr] +posthog/api/event.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/api/dashboards/dashboard_templates.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] ee/api/feature_flag_role_access.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] diff --git a/package.json b/package.json index 5c160594e4a..1edf84a1a22 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,7 @@ "build:esbuild": "node frontend/build.mjs",
"schema:build": "pnpm run schema:build:json && pnpm run schema:build:python", "schema:build:json": "ts-node bin/build-schema.mjs && prettier --write frontend/src/queries/schema.json", - "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py", + "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py && ruff check --fix posthog/schema.py", "grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp", "grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4", "grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o ../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4", @@ -47,7 +47,7 @@ "typescript:check": "tsc --noEmit && echo \"No errors reported by tsc.\"", "lint:js": "eslint frontend/src", "lint:css": "stylelint \"frontend/**/*.{css,scss}\"", - "format:backend": "ruff --exclude posthog/hogql/grammar .", + "format:backend": "ruff .", "format:frontend": "pnpm lint:js --fix && pnpm lint:css --fix && pnpm prettier", "format": "pnpm format:backend && pnpm format:frontend", "typegen:write": "kea-typegen write --delete --show-ts-errors", @@ -337,8 +337,8 @@ "pnpm --dir plugin-server exec prettier --write" 
], "!(posthog/hogql/grammar/*)*.{py,pyi}": [ - "ruff format", - "ruff check --fix" + "ruff check --fix", + "ruff format" ] }, "browserslist": { diff --git a/posthog/api/action.py b/posthog/api/action.py index 8c3caf435e3..437f0227c81 100644 --- a/posthog/api/action.py +++ b/posthog/api/action.py @@ -165,7 +165,7 @@ class ActionViewSet( viewsets.ModelViewSet, ): scope_object = "action" - renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,) + renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer) queryset = Action.objects.all() serializer_class = ActionSerializer authentication_classes = [TemporaryTokenAuthentication] diff --git a/posthog/api/capture.py b/posthog/api/capture.py index 6b921dd27ea..31592e90e79 100644 --- a/posthog/api/capture.py +++ b/posthog/api/capture.py @@ -59,10 +59,7 @@ LOG_RATE_LIMITER = Limiter( # events that are ingested via a separate path than analytics events. They have # fewer restrictions on e.g. the order they need to be processed in. 
SESSION_RECORDING_DEDICATED_KAFKA_EVENTS = ("$snapshot_items",) -SESSION_RECORDING_EVENT_NAMES = ( - "$snapshot", - "$performance_event", -) + SESSION_RECORDING_DEDICATED_KAFKA_EVENTS +SESSION_RECORDING_EVENT_NAMES = ("$snapshot", "$performance_event", *SESSION_RECORDING_DEDICATED_KAFKA_EVENTS) EVENTS_RECEIVED_COUNTER = Counter( "capture_events_received_total", @@ -604,9 +601,7 @@ def capture_internal( if event["event"] in SESSION_RECORDING_EVENT_NAMES: session_id = event["properties"]["$session_id"] - headers = [ - ("token", token), - ] + extra_headers + headers = [("token", token), *extra_headers] overflowing = False if token in settings.REPLAY_OVERFLOW_FORCED_TOKENS: diff --git a/posthog/api/event.py b/posthog/api/event.py index 1d251572be8..6366ee866f6 100644 --- a/posthog/api/event.py +++ b/posthog/api/event.py @@ -85,7 +85,7 @@ class EventViewSet( viewsets.GenericViewSet, ): scope_object = "query" - renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,) + renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer) serializer_class = ClickhouseEventSerializer throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] pagination_class = UncountedLimitOffsetPagination diff --git a/posthog/api/insight.py b/posthog/api/insight.py index 9e4e7c3af64..528dc537679 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -572,7 +572,7 @@ class InsightViewSet( ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle, ] - renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.CSVRenderer,) + renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.CSVRenderer) filter_backends = [DjangoFilterBackend] filterset_fields = ["short_id", "created_by"] sharing_enabled_actions = ["retrieve", "list"] @@ -838,12 +838,12 @@ Using the correct cache and enriching the response with dashboard specific confi export 
= "{}/insights/{}/\n".format(SITE_URL, request.GET["export_insight_id"]).encode() + export response = HttpResponse(export) - response[ - "Content-Disposition" - ] = 'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format( - name=slugify(request.GET.get("export_name", "export")), - date_from=filter.date_from.strftime("%Y-%m-%d -") if filter.date_from else "up until", - date_to=filter.date_to.strftime("%Y-%m-%d"), + response["Content-Disposition"] = ( + 'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format( + name=slugify(request.GET.get("export_name", "export")), + date_from=filter.date_from.strftime("%Y-%m-%d -") if filter.date_from else "up until", + date_to=filter.date_to.strftime("%Y-%m-%d"), + ) ) return response diff --git a/posthog/api/person.py b/posthog/api/person.py index 585fcc33cb8..942f07e9a9e 100644 --- a/posthog/api/person.py +++ b/posthog/api/person.py @@ -224,7 +224,7 @@ class PersonViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): """ scope_object = "person" - renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,) + renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer) queryset = Person.objects.all() serializer_class = PersonSerializer pagination_class = PersonLimitOffsetPagination @@ -932,21 +932,11 @@ def prepare_actor_query_filter(filter: T) -> T: new_group = { "type": "OR", "values": [ - { - "key": "email", - "type": "person", - "value": search, - "operator": "icontains", - }, + {"key": "email", "type": "person", "value": search, "operator": "icontains"}, {"key": "name", "type": "person", "value": search, "operator": "icontains"}, - { - "key": "distinct_id", - "type": "event", - "value": search, - "operator": "icontains", - }, - ] - + group_properties_filter_group, + {"key": "distinct_id", "type": "event", "value": search, "operator": "icontains"}, + *group_properties_filter_group, + ], } 
prop_group = ( {"type": "AND", "values": [new_group, filter.property_groups.to_dict()]} diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py index 468da9d5ccf..2a6e00f3254 100644 --- a/posthog/api/plugin.py +++ b/posthog/api/plugin.py @@ -63,7 +63,11 @@ def _update_plugin_attachments(request: request.Request, plugin_config: PluginCo _update_plugin_attachment(request, plugin_config, match.group(1), None, user) -def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=[]) -> List[Change]: +def get_plugin_config_changes( + old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=None +) -> List[Change]: + if secret_fields is None: + secret_fields = [] config_changes = dict_changes_between("Plugin", old_config, new_config) for i, change in enumerate(config_changes): @@ -79,8 +83,10 @@ def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str, def log_enabled_change_activity( - new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=[] + new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=None ): + if changes is None: + changes = [] if old_enabled != new_plugin_config.enabled: log_activity( organization_id=new_plugin_config.team.organization.id, @@ -864,7 +870,7 @@ class PluginConfigViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): def _get_secret_fields_for_plugin(plugin: Plugin) -> Set[str]: # A set of keys for config fields that have secret = true - secret_fields = {field["key"] for field in plugin.config_schema if "secret" in field and field["secret"]} + secret_fields = {field["key"] for field in plugin.config_schema if isinstance(field, dict) and field.get("secret")} return secret_fields diff --git a/posthog/api/signup.py b/posthog/api/signup.py index b8c3db86c33..c31f37b891e 100644 --- a/posthog/api/signup.py +++ b/posthog/api/signup.py @@ -503,9 +503,7 @@ def social_create_user( user=user.id 
if user else None, ) if user: - backend_processor = ( - "domain_whitelist" - ) # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes) + backend_processor = "domain_whitelist" # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes) from_invite = True # jit_provisioning means they're definitely not organization_first_user if not user: diff --git a/posthog/api/team.py b/posthog/api/team.py index 1b615bd6926..c8b2513b679 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -421,7 +421,8 @@ class TeamViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): IsAuthenticated, APIScopePermission, PremiumMultiProjectPermissions, - ] + self.permission_classes + *self.permission_classes, + ] base_permissions = [permission() for permission in common_permissions] diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py index a0fc8826c95..f771aca99b3 100644 --- a/posthog/api/test/test_capture.py +++ b/posthog/api/test/test_capture.py @@ -63,7 +63,7 @@ parser = ResolvingParser( openapi_spec = cast(Dict[str, Any], parser.specification) large_data_array = [ - {"key": random.choice(string.ascii_letters) for _ in range(512 * 1024)} + {"key": "".join(random.choice(string.ascii_letters) for _ in range(512 * 1024))} ] # 512 * 1024 is the max size of a single message and random letters shouldn't be compressible, so this should be at least 2 messages android_json = { @@ -188,7 +188,7 @@ class TestCapture(BaseTest): def _send_original_version_session_recording_event( self, number_of_events: int = 1, - event_data: Dict | None = {}, + event_data: Dict | None = None, snapshot_source=3, snapshot_type=1, session_id="abc123", @@ -1525,8 +1525,8 @@ class TestCapture(BaseTest): ] ) def test_cors_allows_tracing_headers(self, _: str,
path: str, headers: List[str]) -> None: - expected_headers = ",".join(["X-Requested-With", "Content-Type"] + headers) - presented_headers = ",".join(headers + ["someotherrandomheader"]) + expected_headers = ",".join(["X-Requested-With", "Content-Type", *headers]) + presented_headers = ",".join([*headers, "someotherrandomheader"]) response = self.client.options( path, HTTP_ORIGIN="https://localhost", diff --git a/posthog/api/test/test_comments.py b/posthog/api/test/test_comments.py index 42ede7a5658..6807c924cbb 100644 --- a/posthog/api/test/test_comments.py +++ b/posthog/api/test/test_comments.py @@ -7,7 +7,9 @@ from posthog.test.base import APIBaseTest, QueryMatchingTest class TestComments(APIBaseTest, QueryMatchingTest): - def _create_comment(self, data={}) -> Any: + def _create_comment(self, data=None) -> Any: + if data is None: + data = {} payload = { "content": "my content", "scope": "Notebook", diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index 05b8f11d78d..e89fb0b3c12 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -73,12 +73,14 @@ class TestDecide(BaseTest, QueryMatchingTest): origin="http://127.0.0.1:8000", api_version=1, distinct_id="example_id", - groups={}, + groups=None, geoip_disable=False, ip="127.0.0.1", disable_flags=False, user_agent: Optional[str] = None, ): + if groups is None: + groups = {} return self.client.post( f"/decide/?v={api_version}", { @@ -3336,10 +3338,12 @@ class TestDatabaseCheckForDecide(BaseTest, QueryMatchingTest): origin="http://127.0.0.1:8000", api_version=1, distinct_id="example_id", - groups={}, + groups=None, geoip_disable=False, ip="127.0.0.1", ): + if groups is None: + groups = {} return self.client.post( f"/decide/?v={api_version}", { @@ -3571,11 +3575,15 @@ class TestDecideUsesReadReplica(TransactionTestCase): origin="http://127.0.0.1:8000", api_version=3, distinct_id="example_id", - groups={}, - person_props={}, + groups=None, + person_props=None, 
geoip_disable=False, ip="127.0.0.1", ): + if person_props is None: + person_props = {} + if groups is None: + groups = {} return self.client.post( f"/decide/?v={api_version}", { diff --git a/posthog/api/test/test_preflight.py b/posthog/api/test/test_preflight.py index 9d82e512814..9d8b59d09a6 100644 --- a/posthog/api/test/test_preflight.py +++ b/posthog/api/test/test_preflight.py @@ -19,7 +19,9 @@ class TestPreflight(APIBaseTest, QueryMatchingTest): def instance_preferences(self, **kwargs): return {"debug_queries": False, "disable_paid_fs": False, **kwargs} - def preflight_dict(self, options={}): + def preflight_dict(self, options=None): + if options is None: + options = {} return { "django": True, "redis": True, @@ -47,7 +49,9 @@ class TestPreflight(APIBaseTest, QueryMatchingTest): **options, } - def preflight_authenticated_dict(self, options={}): + def preflight_authenticated_dict(self, options=None): + if options is None: + options = {} preflight = { "opt_out_capture": False, "licensed_users_available": None, diff --git a/posthog/api/utils.py b/posthog/api/utils.py index 75373856ccd..d34530cda14 100644 --- a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -251,8 +251,10 @@ def create_event_definitions_sql( event_type: EventDefinitionType, is_enterprise: bool = False, conditions: str = "", - order_expressions: List[Tuple[str, Literal["ASC", "DESC"]]] = [], + order_expressions: Optional[List[Tuple[str, Literal["ASC", "DESC"]]]] = None, ) -> str: + if order_expressions is None: + order_expressions = [] if is_enterprise: from ee.models import EnterpriseEventDefinition diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py index db51865560a..615b0870796 100644 --- a/posthog/batch_exports/models.py +++ b/posthog/batch_exports/models.py @@ -230,9 +230,11 @@ def fetch_batch_export_log_entries( before: dt.datetime | None = None, search: str | None = None, limit: int | None = None, - level_filter: list[BatchExportLogEntryLevel] = [], + 
level_filter: typing.Optional[list[BatchExportLogEntryLevel]] = None, ) -> list[BatchExportLogEntry]: """Fetch a list of batch export log entries from ClickHouse.""" + if level_filter is None: + level_filter = [] clickhouse_where_parts: list[str] = [] clickhouse_kwargs: dict[str, typing.Any] = {} diff --git a/posthog/clickhouse/client/migration_tools.py b/posthog/clickhouse/client/migration_tools.py index 0d105b04239..f71abd489fd 100644 --- a/posthog/clickhouse/client/migration_tools.py +++ b/posthog/clickhouse/client/migration_tools.py @@ -5,11 +5,14 @@ from infi.clickhouse_orm import migrations from posthog.clickhouse.client.execute import sync_execute -def run_sql_with_exceptions(sql: Union[str, Callable[[], str]], settings={}): +def run_sql_with_exceptions(sql: Union[str, Callable[[], str]], settings=None): """ migrations.RunSQL does not raise exceptions, so we need to wrap it in a function that does. """ + if settings is None: + settings = {} + def run_sql(database): nonlocal sql if callable(sql): diff --git a/posthog/email.py b/posthog/email.py index 99edbddc717..61edb7ae593 100644 --- a/posthog/email.py +++ b/posthog/email.py @@ -135,10 +135,12 @@ class EmailMessage: campaign_key: str, subject: str, template_name: str, - template_context: Dict = {}, + template_context: Optional[Dict] = None, headers: Optional[Dict] = None, reply_to: Optional[str] = None, ): + if template_context is None: + template_context = {} if not is_email_available(): raise exceptions.ImproperlyConfigured("Email is not enabled in this instance.") diff --git a/posthog/event_usage.py b/posthog/event_usage.py index e1f7f48dcb4..ae8432c6b27 100644 --- a/posthog/event_usage.py +++ b/posthog/event_usage.py @@ -217,7 +217,9 @@ def report_user_organization_membership_level_changed( ) -def report_user_action(user: User, event: str, properties: Dict = {}, team: Optional[Team] = None): +def report_user_action(user: User, event: str, properties: Optional[Dict] = None, team: Optional[Team] = None): 
+ if properties is None: + properties = {} posthoganalytics.capture( user.distinct_id, event, @@ -252,12 +254,14 @@ def groups(organization: Optional[Organization] = None, team: Optional[Team] = N def report_team_action( team: Team, event: str, - properties: Dict = {}, + properties: Optional[Dict] = None, group_properties: Optional[Dict] = None, ): """ For capturing events where it is unclear which user was the core actor we can use the team instead """ + if properties is None: + properties = {} posthoganalytics.capture(str(team.uuid), event, properties=properties, groups=groups(team=team)) if group_properties: @@ -267,12 +271,14 @@ def report_team_action( def report_organization_action( organization: Organization, event: str, - properties: Dict = {}, + properties: Optional[Dict] = None, group_properties: Optional[Dict] = None, ): """ For capturing events where it is unclear which user was the core actor we can use the organization instead """ + if properties is None: + properties = {} posthoganalytics.capture( str(organization.id), event, diff --git a/posthog/hogql/ast.py b/posthog/hogql/ast.py index d5369dd30d4..ccb3f9f3457 100644 --- a/posthog/hogql/ast.py +++ b/posthog/hogql/ast.py @@ -408,7 +408,7 @@ class PropertyType(Type): joined_subquery_field_name: Optional[str] = field(default=None, init=False) def get_child(self, name: str | int, context: HogQLContext) -> "Type": - return PropertyType(chain=self.chain + [name], field_type=self.field_type) + return PropertyType(chain=[*self.chain, name], field_type=self.field_type) def has_child(self, name: str | int, context: HogQLContext) -> bool: return True diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 46d3f36a042..45e362c8f8e 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -25,7 +25,7 @@ ConstantSupportedData: TypeAlias = ( KEYWORDS = ["true", "false", "null"] # Keywords you can't alias to -RESERVED_KEYWORDS = KEYWORDS + ["team_id"] +RESERVED_KEYWORDS = 
[*KEYWORDS, "team_id"] # Limit applied to SELECT statements without LIMIT clause when queried via the API DEFAULT_RETURNED_ROWS = 100 diff --git a/posthog/hogql/database/argmax.py b/posthog/hogql/database/argmax.py index c6e479db079..5872dc77d8b 100644 --- a/posthog/hogql/database/argmax.py +++ b/posthog/hogql/database/argmax.py @@ -21,7 +21,7 @@ def argmax_select( fields_to_select.append( ast.Alias( alias=name, - expr=argmax_version(ast.Field(chain=[table_name] + chain)), + expr=argmax_version(ast.Field(chain=[table_name, *chain])), ) ) for key in group_fields: diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py index 9752fc5f061..f6e985d92b4 100644 --- a/posthog/hogql/database/models.py +++ b/posthog/hogql/database/models.py @@ -91,7 +91,7 @@ class Table(FieldOrTable): return [] def get_asterisk(self): - fields_to_avoid = self.avoid_asterisk_fields() + ["team_id"] + fields_to_avoid = [*self.avoid_asterisk_fields(), "team_id"] asterisk: Dict[str, FieldOrTable] = {} for key, field in self.fields.items(): if key in fields_to_avoid: diff --git a/posthog/hogql/database/schema/cohort_people.py b/posthog/hogql/database/schema/cohort_people.py index f98b5226726..c556903d40c 100644 --- a/posthog/hogql/database/schema/cohort_people.py +++ b/posthog/hogql/database/schema/cohort_people.py @@ -40,7 +40,7 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str | int]] requested_fields = {**requested_fields, "cohort_id": ["cohort_id"]} fields: List[ast.Expr] = [ - ast.Alias(alias=name, expr=ast.Field(chain=[table_name] + chain)) for name, chain in requested_fields.items() + ast.Alias(alias=name, expr=ast.Field(chain=[table_name, *chain])) for name, chain in requested_fields.items() ] return ast.SelectQuery( diff --git a/posthog/hogql/database/schema/log_entries.py b/posthog/hogql/database/schema/log_entries.py index 9f5dc816ac4..14efaff09ce 100644 --- a/posthog/hogql/database/schema/log_entries.py +++ 
b/posthog/hogql/database/schema/log_entries.py @@ -35,7 +35,7 @@ class ReplayConsoleLogsLogEntriesTable(LazyTable): fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): - fields: List[ast.Expr] = [ast.Field(chain=["log_entries"] + chain) for name, chain in requested_fields.items()] + fields: List[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()] return ast.SelectQuery( select=fields, @@ -58,7 +58,7 @@ class BatchExportLogEntriesTable(LazyTable): fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): - fields: List[ast.Expr] = [ast.Field(chain=["log_entries"] + chain) for name, chain in requested_fields.items()] + fields: List[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()] return ast.SelectQuery( select=fields, diff --git a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py index baaecef89e0..a6f0fbed3bc 100644 --- a/posthog/hogql/database/schema/session_replay_events.py +++ b/posthog/hogql/database/schema/session_replay_events.py @@ -96,8 +96,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str if name in aggregate_fields: select_fields.append(ast.Alias(alias=name, expr=aggregate_fields[name])) else: - select_fields.append(ast.Alias(alias=name, expr=ast.Field(chain=[table_name] + chain))) - group_by_fields.append(ast.Field(chain=[table_name] + chain)) + select_fields.append(ast.Alias(alias=name, expr=ast.Field(chain=[table_name, *chain]))) + group_by_fields.append(ast.Field(chain=[table_name, *chain])) return ast.SelectQuery( select=select_fields, diff --git a/posthog/hogql/parser.py b/posthog/hogql/parser.py index f374d70c8cf..0ec619f3389 100644 --- a/posthog/hogql/parser.py +++ b/posthog/hogql/parser.py @@ 
-752,7 +752,7 @@ class HogQLParseTreeConverter(ParseTreeVisitor): def visitColumnExprAsterisk(self, ctx: HogQLParser.ColumnExprAsteriskContext): if ctx.tableIdentifier(): table = self.visit(ctx.tableIdentifier()) - return ast.Field(chain=table + ["*"]) + return ast.Field(chain=[*table, "*"]) return ast.Field(chain=["*"]) def visitColumnExprTagElement(self, ctx: HogQLParser.ColumnExprTagElementContext): diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index 3f5be7cc42b..ff4766f8607 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -235,7 +235,7 @@ class _Printer(Visitor): if where is None: where = extra_where elif isinstance(where, ast.And): - where = ast.And(exprs=[extra_where] + where.exprs) + where = ast.And(exprs=[extra_where, *where.exprs]) else: where = ast.And(exprs=[extra_where, where]) else: @@ -1169,7 +1169,7 @@ class _Printer(Visitor): return escape_hogql_string(name, timezone=self._get_timezone()) def _unsafe_json_extract_trim_quotes(self, unsafe_field: str, unsafe_args: List[str]) -> str: - return f"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw({', '.join([unsafe_field] + unsafe_args)}), ''), 'null'), '^\"|\"$', '')" + return f"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw({', '.join([unsafe_field, *unsafe_args])}), ''), 'null'), '^\"|\"$', '')" def _get_materialized_column( self, table_name: str, property_name: PropertyName, field_name: TableColumn diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index 821a8db5a23..501bc613bd5 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -163,7 +163,7 @@ def property_to_expr( chain = ["properties"] properties_field = ast.Field(chain=chain) - field = ast.Field(chain=chain + [property.key]) + field = ast.Field(chain=[*chain, property.key]) if isinstance(value, list): if len(value) == 0: diff --git a/posthog/hogql/resolver.py b/posthog/hogql/resolver.py index cbcd3174730..fce251dc8a0 100644 --- a/posthog/hogql/resolver.py +++ 
b/posthog/hogql/resolver.py @@ -464,7 +464,7 @@ class Resolver(CloningVisitor): if table_count > 1: raise QueryError("Cannot use '*' without table name when there are multiple tables in the query") table_type = ( - scope.anonymous_tables[0] if len(scope.anonymous_tables) > 0 else list(scope.tables.values())[0] + scope.anonymous_tables[0] if len(scope.anonymous_tables) > 0 else next(iter(scope.tables.values())) ) type = ast.AsteriskType(table_type=table_type) diff --git a/posthog/hogql/test/test_filters.py b/posthog/hogql/test/test_filters.py index 951d5814f21..4377f9e12b9 100644 --- a/posthog/hogql/test/test_filters.py +++ b/posthog/hogql/test/test_filters.py @@ -1,4 +1,4 @@ -from typing import Dict, Any +from typing import Dict, Any, Optional from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -18,10 +18,10 @@ from posthog.test.base import BaseTest class TestFilters(BaseTest): maxDiff = None - def _parse_expr(self, expr: str, placeholders: Dict[str, Any] = None): + def _parse_expr(self, expr: str, placeholders: Optional[Dict[str, Any]] = None): return clear_locations(parse_expr(expr, placeholders=placeholders)) - def _parse_select(self, select: str, placeholders: Dict[str, Any] = None): + def _parse_select(self, select: str, placeholders: Optional[Dict[str, Any]] = None): return clear_locations(parse_select(select, placeholders=placeholders)) def _print_ast(self, node: ast.Expr): diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index 44cbf6a5b09..44b740552d8 100644 --- a/posthog/hogql/test/test_property.py +++ b/posthog/hogql/test/test_property.py @@ -46,7 +46,7 @@ class TestProperty(BaseTest): def _selector_to_expr(self, selector: str): return clear_locations(selector_to_expr(selector)) - def _parse_expr(self, expr: str, placeholders: Dict[str, Any] = None): + def _parse_expr(self, expr: str, placeholders: Optional[Dict[str, Any]] = None): return clear_locations(parse_expr(expr, 
placeholders=placeholders)) def test_has_aggregation(self): diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 82b0161b8c8..1dade0de4b0 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -729,7 +729,7 @@ class FunnelBase(ABC): ): events = [] for i in range(0, max_steps): - event_fields = ["latest"] + self.extra_event_fields_and_properties + event_fields = ["latest", *self.extra_event_fields_and_properties] event_fields_with_step = ", ".join([f"{field}_{i}" for field in event_fields]) event_clause = f"({event_fields_with_step}) as step_{i}_matching_event" events.append(parse_expr(event_clause)) diff --git a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py index 72dcf1993e1..04b1115fd38 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py @@ -245,9 +245,9 @@ class FunnelCorrelationQueryRunner(QueryRunner): # Get the total success/failure counts from the results results = [result for result in response.results if result[0] != self.TOTAL_IDENTIFIER] - _, success_total, failure_total = [result for result in response.results if result[0] == self.TOTAL_IDENTIFIER][ - 0 - ] + _, success_total, failure_total = next( + result for result in response.results if result[0] == self.TOTAL_IDENTIFIER + ) # Add a little structure, and keep it close to the query definition so it's # obvious what's going on with result indices. 
diff --git a/posthog/hogql_queries/insights/funnels/funnel_event_query.py b/posthog/hogql_queries/insights/funnels/funnel_event_query.py index f2d0e115e2d..b2fd19083ed 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_event_query.py +++ b/posthog/hogql_queries/insights/funnels/funnel_event_query.py @@ -1,4 +1,4 @@ -from typing import List, Set, Union +from typing import List, Set, Union, Optional from posthog.clickhouse.materialized_columns.column import ColumnName from posthog.hogql import ast from posthog.hogql.parser import parse_expr @@ -21,9 +21,13 @@ class FunnelEventQuery: def __init__( self, context: FunnelQueryContext, - extra_fields: List[ColumnName] = [], - extra_event_properties: List[PropertyName] = [], + extra_fields: Optional[List[ColumnName]] = None, + extra_event_properties: Optional[List[PropertyName]] = None, ): + if extra_event_properties is None: + extra_event_properties = [] + if extra_fields is None: + extra_fields = [] self.context = context self._extra_fields = extra_fields diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py index b745ea87761..859f3e627aa 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, cast +from typing import Dict, cast, Optional from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query @@ -116,7 +116,11 @@ class TestFunnelBreakdownsByCurrentURL(ClickhouseTestMixin, APIBaseTest): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Dict = {}, events_extra: Dict = {}): + def _run(self, extra: Optional[Dict] = None, events_extra: 
Optional[Dict] = None): + if events_extra is None: + events_extra = {} + if extra is None: + extra = {} filters = { "events": [ { diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py index 54a8b4cf063..9aac61f1d05 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py @@ -74,7 +74,7 @@ class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest): self.assertEqual(results[0][0], persons["user_one"].uuid) self.assertEqual( # [person["matched_recordings"][0]["session_id"] for person in results], - [list(results[0][2])[0]["session_id"]], + [next(iter(results[0][2]))["session_id"]], ["s1b"], ) @@ -124,7 +124,7 @@ class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest): self.assertEqual(results[0][0], persons["user_one"].uuid) self.assertEqual( # [person["matched_recordings"][0]["session_id"] for person in results], - [list(results[0][2])[0]["session_id"]], + [next(iter(results[0][2]))["session_id"]], ["s1c"], ) @@ -163,6 +163,6 @@ class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest): self.assertEqual(results[0][0], persons["user_one"].uuid) self.assertEqual( # [person["matched_recordings"][0].get("session_id") for person in results], - [list(results[0][2])[0]["session_id"]], + [next(iter(results[0][2]))["session_id"]], ["s1a"], ) diff --git a/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py b/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py index 1dad592a244..bb963cf1f8b 100644 --- a/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py @@ -1,4 +1,4 @@ -from typing import Dict, Any +from typing import Dict, Any, Optional from freezegun import freeze_time @@ -69,7 +69,9 @@ class 
TestInsightActorsQueryRunner(ClickhouseTestMixin, APIBaseTest): ] ) - def select(self, query: str, placeholders: Dict[str, Any] = {}): + def select(self, query: str, placeholders: Optional[Dict[str, Any]] = None): + if placeholders is None: + placeholders = {} return execute_hogql_query( query=query, team=self.team, diff --git a/posthog/hogql_queries/insights/trends/breakdown_values.py b/posthog/hogql_queries/insights/trends/breakdown_values.py index fb349f279d1..6a9b9a24a22 100644 --- a/posthog/hogql_queries/insights/trends/breakdown_values.py +++ b/posthog/hogql_queries/insights/trends/breakdown_values.py @@ -228,7 +228,7 @@ class BreakdownValues: if self.hide_other_aggregation is not True and self.histogram_bin_count is None: values = [BREAKDOWN_NULL_STRING_LABEL if value in (None, "") else value for value in values] if needs_other: - values = [BREAKDOWN_OTHER_STRING_LABEL] + values + values = [BREAKDOWN_OTHER_STRING_LABEL, *values] if len(values) == 0: values.insert(0, None) diff --git a/posthog/hogql_queries/legacy_compatibility/clean_properties.py b/posthog/hogql_queries/legacy_compatibility/clean_properties.py index a6e8e8663bb..e77cf1ee1f9 100644 --- a/posthog/hogql_queries/legacy_compatibility/clean_properties.py +++ b/posthog/hogql_queries/legacy_compatibility/clean_properties.py @@ -121,8 +121,8 @@ def is_old_style_properties(properties): def transform_old_style_properties(properties): - key = list(properties.keys())[0] - value = list(properties.values())[0] + key = next(iter(properties.keys())) + value = next(iter(properties.values())) key_split = key.split("__") return [ { diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index a16991a2f1a..382b37fa56d 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -381,7 +381,7 @@ def _insight_filter(filter: Dict): else: raise 
Exception(f"Invalid insight type {filter.get('insight')}.") - if len(list(insight_filter.values())[0].model_dump(exclude_defaults=True)) == 0: + if len(next(iter(insight_filter.values())).model_dump(exclude_defaults=True)) == 0: return {} return insight_filter diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index 12ef703271c..ffb758858d1 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -55,21 +55,19 @@ class WebAnalyticsQueryRunner(QueryRunner, ABC): return [p for p in self.query.properties if p.key != "$pathname"] def session_where(self, include_previous_period: Optional[bool] = None): - properties = ( - [ - parse_expr( - "events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))", - placeholders={ - "date_from": self.query_date_range.previous_period_date_from_as_hogql() - if include_previous_period - else self.query_date_range.date_from_as_hogql(), - "date_to": self.query_date_range.date_to_as_hogql(), - }, - ) - ] - + self.property_filters_without_pathname - + self._test_account_filters - ) + properties = [ + parse_expr( + "events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))", + placeholders={ + "date_from": self.query_date_range.previous_period_date_from_as_hogql() + if include_previous_period + else self.query_date_range.date_from_as_hogql(), + "date_to": self.query_date_range.date_to_as_hogql(), + }, + ), + *self.property_filters_without_pathname, + *self._test_account_filters, + ] return property_to_expr( properties, self.team, diff --git a/posthog/management/commands/backfill_persons_and_groups_on_events.py b/posthog/management/commands/backfill_persons_and_groups_on_events.py index b7fb2fcbc46..0e90461a701 100644 --- 
a/posthog/management/commands/backfill_persons_and_groups_on_events.py +++ b/posthog/management/commands/backfill_persons_and_groups_on_events.py @@ -120,7 +120,9 @@ LIMIT 1 query_number = 0 -def print_and_execute_query(sql: str, name: str, dry_run: bool, timeout=180, query_args={}) -> Any: +def print_and_execute_query(sql: str, name: str, dry_run: bool, timeout=180, query_args=None) -> Any: + if query_args is None: + query_args = {} global query_number if not settings.TEST: diff --git a/posthog/management/commands/create_channel_definitions_file.py b/posthog/management/commands/create_channel_definitions_file.py index 5ff198a7334..859bbe3c631 100644 --- a/posthog/management/commands/create_channel_definitions_file.py +++ b/posthog/management/commands/create_channel_definitions_file.py @@ -62,8 +62,8 @@ class Command(BaseCommand): entries: OrderedDict[Tuple[str, str], SourceEntry] = OrderedDict(map(handle_entry, split_items)) # add google domains to this, from https://www.google.com/supported_domains - for google_domain in ( - ".google.com .google.ad .google.ae .google.com.af .google.com.ag .google.al .google.am .google.co.ao " + for google_domain in [ + *".google.com .google.ad .google.ae .google.com.af .google.com.ag .google.al .google.am .google.co.ao " ".google.com.ar .google.as .google.at .google.com.au .google.az .google.ba .google.com.bd .google.be " ".google.bf .google.bg .google.com.bh .google.bi .google.bj .google.com.bn .google.com.bo " ".google.com.br .google.bs .google.bt .google.co.bw .google.by .google.com.bz .google.ca .google.cd " @@ -87,8 +87,9 @@ class Command(BaseCommand): ".google.co.th .google.com.tj .google.tl .google.tm .google.tn .google.to .google.com.tr .google.tt " ".google.com.tw .google.co.tz .google.com.ua .google.co.ug .google.co.uk .google.com.uy .google.co.uz " ".google.com.vc .google.co.ve .google.co.vi .google.com.vn .google.vu .google.ws .google.rs " - ".google.co.za .google.co.zm .google.co.zw .google.cat" - ).split(" ") + 
["google"]: + ".google.co.za .google.co.zm .google.co.zw .google.cat".split(" "), + "google", + ]: google_domain = google_domain.strip() if google_domain[0] == ".": google_domain = google_domain[1:] diff --git a/posthog/management/commands/execute_temporal_workflow.py b/posthog/management/commands/execute_temporal_workflow.py index e5957496907..61c257cecc5 100644 --- a/posthog/management/commands/execute_temporal_workflow.py +++ b/posthog/management/commands/execute_temporal_workflow.py @@ -99,7 +99,7 @@ class Command(BaseCommand): retry_policy = RetryPolicy(maximum_attempts=int(options["max_attempts"])) try: - workflow = [workflow for workflow in WORKFLOWS if workflow.is_named(workflow_name)][0] + workflow = next(workflow for workflow in WORKFLOWS if workflow.is_named(workflow_name)) - except IndexError: + except StopIteration: raise ValueError(f"No workflow with name '{workflow_name}'") except AttributeError: diff --git a/posthog/middleware.py b/posthog/middleware.py index 281723f460f..e43ef3a620f 100644 --- a/posthog/middleware.py +++ b/posthog/middleware.py @@ -94,7 +94,7 @@ class AllowIPMiddleware: client_ip = forwarded_for.pop(0) if settings.TRUST_ALL_PROXIES: return client_ip - proxies = [closest_proxy] + forwarded_for + proxies = [closest_proxy, *forwarded_for] for proxy in proxies: if proxy not in self.trusted_proxies: return None @@ -486,7 +486,7 @@ class CaptureMiddleware: def per_request_logging_context_middleware( - get_response: Callable[[HttpRequest], HttpResponse] + get_response: Callable[[HttpRequest], HttpResponse], ) -> Callable[[HttpRequest], HttpResponse]: """ We get some default logging context from the django-structlog middleware, @@ -517,7 +517,7 @@ def per_request_logging_context_middleware( def user_logging_context_middleware( - get_response: Callable[[HttpRequest], HttpResponse] + get_response: Callable[[HttpRequest], HttpResponse], ) -> Callable[[HttpRequest], HttpResponse]: """ This middleware adds the team_id to the logging context if it exists. 
Note diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py index 1d8357b855b..c5509489801 100644 --- a/posthog/models/event/util.py +++ b/posthog/models/event/util.py @@ -31,7 +31,7 @@ def create_event( team: Team, distinct_id: str, timestamp: Optional[Union[timezone.datetime, str]] = None, - properties: Optional[Dict] = {}, + properties: Optional[Dict] = None, elements: Optional[List[Element]] = None, person_id: Optional[uuid.UUID] = None, person_properties: Optional[Dict] = None, @@ -48,6 +48,8 @@ def create_event( group4_created_at: Optional[Union[timezone.datetime, str]] = None, person_mode: Literal["full", "propertyless"] = "full", ) -> str: + if properties is None: + properties = {} if not timestamp: timestamp = timezone.now() assert timestamp is not None @@ -285,9 +287,11 @@ class ElementSerializer(serializers.ModelSerializer): ] -def parse_properties(properties: str, allow_list: Set[str] = set()) -> Dict: +def parse_properties(properties: str, allow_list: Optional[Set[str]] = None) -> Dict: # parse_constants gets called for any NaN, Infinity etc values # we just want those to be returned as None + if allow_list is None: + allow_list = set() props = json.loads(properties or "{}", parse_constant=lambda x: None) return { key: value.strip('"') if isinstance(value, str) else value diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py index e9faf83effa..134af65dfda 100644 --- a/posthog/models/feature_flag/flag_matching.py +++ b/posthog/models/feature_flag/flag_matching.py @@ -135,14 +135,22 @@ class FeatureFlagMatcher: self, feature_flags: List[FeatureFlag], distinct_id: str, - groups: Dict[GroupTypeName, str] = {}, + groups: Optional[Dict[GroupTypeName, str]] = None, cache: Optional[FlagsMatcherCache] = None, - hash_key_overrides: Dict[str, str] = {}, - property_value_overrides: Dict[str, Union[str, int]] = {}, - group_property_value_overrides: Dict[str, Dict[str, Union[str, int]]] = {}, + 
hash_key_overrides: Optional[Dict[str, str]] = None, + property_value_overrides: Optional[Dict[str, Union[str, int]]] = None, + group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None, skip_database_flags: bool = False, cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, ): + if group_property_value_overrides is None: + group_property_value_overrides = {} + if property_value_overrides is None: + property_value_overrides = {} + if hash_key_overrides is None: + hash_key_overrides = {} + if groups is None: + groups = {} self.feature_flags = feature_flags self.distinct_id = distinct_id self.groups = groups @@ -712,11 +720,17 @@ def _get_all_feature_flags( team_id: int, distinct_id: str, person_overrides: Optional[Dict[str, str]] = None, - groups: Dict[GroupTypeName, str] = {}, - property_value_overrides: Dict[str, Union[str, int]] = {}, - group_property_value_overrides: Dict[str, Dict[str, Union[str, int]]] = {}, + groups: Optional[Dict[GroupTypeName, str]] = None, + property_value_overrides: Optional[Dict[str, Union[str, int]]] = None, + group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None, skip_database_flags: bool = False, ) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dict[str, object], bool]: + if group_property_value_overrides is None: + group_property_value_overrides = {} + if property_value_overrides is None: + property_value_overrides = {} + if groups is None: + groups = {} cache = FlagsMatcherCache(team_id) if feature_flags: @@ -738,11 +752,17 @@ def _get_all_feature_flags( def get_all_feature_flags( team_id: int, distinct_id: str, - groups: Dict[GroupTypeName, str] = {}, + groups: Optional[Dict[GroupTypeName, str]] = None, hash_key_override: Optional[str] = None, - property_value_overrides: Dict[str, Union[str, int]] = {}, - group_property_value_overrides: Dict[str, Dict[str, Union[str, int]]] = {}, + property_value_overrides: Optional[Dict[str, Union[str, int]]] = None, + 
group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None, ) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dict[str, object], bool]: + if group_property_value_overrides is None: + group_property_value_overrides = {} + if property_value_overrides is None: + property_value_overrides = {} + if groups is None: + groups = {} property_value_overrides, group_property_value_overrides = add_local_person_and_group_properties( distinct_id, groups, property_value_overrides, group_property_value_overrides ) diff --git a/posthog/models/filters/retention_filter.py b/posthog/models/filters/retention_filter.py index 9cc3e8d0c7a..338d3d87e3e 100644 --- a/posthog/models/filters/retention_filter.py +++ b/posthog/models/filters/retention_filter.py @@ -48,7 +48,9 @@ class RetentionFilter( SampleMixin, BaseFilter, ): - def __init__(self, data: Dict[str, Any] = {}, request: Optional[Request] = None, **kwargs) -> None: + def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None: + if data is None: + data = {} if data: data["insight"] = INSIGHT_RETENTION else: diff --git a/posthog/models/filters/test/test_filter.py b/posthog/models/filters/test/test_filter.py index a584bbd4159..63a947bca67 100644 --- a/posthog/models/filters/test/test_filter.py +++ b/posthog/models/filters/test/test_filter.py @@ -993,8 +993,10 @@ class TestDjangoPropertiesToQ(property_to_Q_test_factory(_filter_persons, _creat def filter_persons_with_property_group( - filter: Filter, team: Team, property_overrides: Dict[str, Any] = {} + filter: Filter, team: Team, property_overrides: Optional[Dict[str, Any]] = None ) -> List[str]: + if property_overrides is None: + property_overrides = {} flush_persons_and_events() persons = Person.objects.filter(property_group_to_Q(team.pk, filter.property_groups, property_overrides)) persons = persons.filter(team_id=team.pk) diff --git a/posthog/models/filters/utils.py 
b/posthog/models/filters/utils.py index 0b31b209afa..d91b49b3e05 100644 --- a/posthog/models/filters/utils.py +++ b/posthog/models/filters/utils.py @@ -21,12 +21,14 @@ def earliest_timestamp_func(team_id: int): return get_earliest_timestamp(team_id) -def get_filter(team, data: dict = {}, request: Optional[Request] = None): +def get_filter(team, data: Optional[dict] = None, request: Optional[Request] = None): from .filter import Filter from .path_filter import PathFilter from .retention_filter import RetentionFilter from .stickiness_filter import StickinessFilter + if data is None: + data = {} insight = data.get("insight") if not insight and request: insight = request.GET.get("insight") or request.data.get("insight") diff --git a/posthog/models/performance/sql.py b/posthog/models/performance/sql.py index 4c6a97f34a6..26f184f6cba 100644 --- a/posthog/models/performance/sql.py +++ b/posthog/models/performance/sql.py @@ -1,4 +1,5 @@ """https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry""" + from posthog import settings from posthog.clickhouse.kafka_engine import ( KAFKA_COLUMNS_WITH_PARTITION, diff --git a/posthog/models/person/util.py b/posthog/models/person/util.py index 7e8afc3db5e..f6bcc60ebc3 100644 --- a/posthog/models/person/util.py +++ b/posthog/models/person/util.py @@ -127,13 +127,15 @@ def create_person( team_id: int, version: int, uuid: Optional[str] = None, - properties: Optional[Dict] = {}, + properties: Optional[Dict] = None, sync: bool = False, is_identified: bool = False, is_deleted: bool = False, timestamp: Optional[Union[datetime.datetime, str]] = None, created_at: Optional[datetime.datetime] = None, ) -> str: + if properties is None: + properties = {} if uuid: uuid = str(uuid) else: diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py index bdd1a5f8f49..30ea323a31d 100644 --- a/posthog/models/plugin.py +++ b/posthog/models/plugin.py @@ -421,8 +421,10 @@ def fetch_plugin_log_entries( before: Optional[timezone.datetime] = 
None, search: Optional[str] = None, limit: Optional[int] = None, - type_filter: List[PluginLogEntryType] = [], + type_filter: Optional[List[PluginLogEntryType]] = None, ) -> List[PluginLogEntry]: + if type_filter is None: + type_filter = [] clickhouse_where_parts: List[str] = [] clickhouse_kwargs: Dict[str, Any] = {} if team_id is not None: diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py index 2efc8f20319..0a6f89354a6 100644 --- a/posthog/models/property_definition.py +++ b/posthog/models/property_definition.py @@ -80,12 +80,11 @@ class PropertyDefinition(UUIDModel): # creates an index pganalyze identified as missing # https://app.pganalyze.com/servers/i35ydkosi5cy5n7tly45vkjcqa/checks/index_advisor/missing_index/15282978 models.Index(fields=["team_id", "type", "is_numerical"]), - ] + [ GinIndex( name="index_property_definition_name", fields=["name"], opclasses=["gin_trgm_ops"], - ) # To speed up DB-based fuzzy searching + ), # To speed up DB-based fuzzy searching ] constraints = [ models.CheckConstraint( diff --git a/posthog/models/tagged_item.py b/posthog/models/tagged_item.py index 4c55c4a6637..612f2f39399 100644 --- a/posthog/models/tagged_item.py +++ b/posthog/models/tagged_item.py @@ -102,7 +102,7 @@ class TaggedItem(UUIDModel): ) class Meta: - unique_together = ("tag",) + RELATED_OBJECTS + unique_together = ("tag", *RELATED_OBJECTS) # Make sure to add new key to uniqueness constraint when extending tag functionality to new model constraints = [ *[build_partial_uniqueness_constraint(field=field) for field in RELATED_OBJECTS], diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 19cb99cf677..6f5f927fe00 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -81,13 +81,9 @@ class TeamManager(models.Manager): example_email = re.search(r"@[\w.]+", example_emails[0]) if example_email: return [ - { - "key": "email", - "operator": "not_icontains", - "value": 
example_email.group(), - "type": "person", - } - ] + filters + {"key": "email", "operator": "not_icontains", "value": example_email.group(), "type": "person"}, + *filters, + ] return filters def create_with_data(self, user: Any = None, default_dashboards: bool = True, **kwargs) -> "Team": diff --git a/posthog/models/utils.py b/posthog/models/utils.py index b00a87eb881..a093cf1e4eb 100644 --- a/posthog/models/utils.py +++ b/posthog/models/utils.py @@ -122,7 +122,7 @@ class UUIDClassicModel(models.Model): def sane_repr(*attrs: str, include_id=True) -> Callable[[object], str]: if "id" not in attrs and "pk" not in attrs and include_id: - attrs = ("id",) + attrs + attrs = ("id", *attrs) def _repr(self): pairs = (f"{attr}={repr(getattr(self, attr))}" for attr in attrs) @@ -206,7 +206,7 @@ def create_with_slug(create_func: Callable[..., T], default_slug: str = "", *arg def get_deferred_field_set_for_model( model: Type[models.Model], - fields_not_deferred: Set[str] = set(), + fields_not_deferred: Optional[Set[str]] = None, field_prefix: str = "", ) -> Set[str]: """Return a set of field names to be deferred for a given model. Used with `.defer()` after `select_related` @@ -225,6 +225,8 @@ def get_deferred_field_set_for_model( fields_not_deferred: the models fields to exclude from the deferred field set field_prefix: a prefix to add to the field names e.g. 
("team__organization__") to work in the query set """ + if fields_not_deferred is None: + fields_not_deferred = set() return {f"{field_prefix}{x.name}" for x in model._meta.fields if x.name not in fields_not_deferred} diff --git a/posthog/queries/base.py b/posthog/queries/base.py index 393c14e3042..7dff88f6020 100644 --- a/posthog/queries/base.py +++ b/posthog/queries/base.py @@ -276,10 +276,12 @@ def lookup_q(key: str, value: Any) -> Q: def property_to_Q( team_id: int, property: Property, - override_property_values: Dict[str, Any] = {}, + override_property_values: Optional[Dict[str, Any]] = None, cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, using_database: str = "default", ) -> Q: + if override_property_values is None: + override_property_values = {} if property.type not in ["person", "group", "cohort", "event"]: # We need to support event type for backwards compatibility, even though it's treated as a person property type raise ValueError(f"property_to_Q: type is not supported: {repr(property.type)}") @@ -380,10 +382,12 @@ def property_to_Q( def property_group_to_Q( team_id: int, property_group: PropertyGroup, - override_property_values: Dict[str, Any] = {}, + override_property_values: Optional[Dict[str, Any]] = None, cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, using_database: str = "default", ) -> Q: + if override_property_values is None: + override_property_values = {} filters = Q() if not property_group or len(property_group.values) == 0: @@ -423,7 +427,7 @@ def property_group_to_Q( def properties_to_Q( team_id: int, properties: List[Property], - override_property_values: Dict[str, Any] = {}, + override_property_values: Optional[Dict[str, Any]] = None, cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, using_database: str = "default", ) -> Q: @@ -431,6 +435,8 @@ def properties_to_Q( Converts a filter to Q, for use in Django ORM .filter() If you're filtering a Person/Group QuerySet, use is_direct_query to avoid doing an 
unnecessary nested loop """ + if override_property_values is None: + override_property_values = {} filters = Q() if len(properties) == 0: diff --git a/posthog/queries/breakdown_props.py b/posthog/queries/breakdown_props.py index 397ee061332..fffb0aef0f2 100644 --- a/posthog/queries/breakdown_props.py +++ b/posthog/queries/breakdown_props.py @@ -46,7 +46,7 @@ def get_breakdown_prop_values( entity: Entity, aggregate_operation: str, team: Team, - extra_params={}, + extra_params=None, column_optimizer: Optional[ColumnOptimizer] = None, person_properties_mode: PersonPropertiesMode = PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN, use_all_funnel_entities: bool = False, @@ -58,6 +58,8 @@ def get_breakdown_prop_values( When dealing with a histogram though, buckets are returned instead of values. """ + if extra_params is None: + extra_params = {} column_optimizer = column_optimizer or ColumnOptimizer(filter, team.id) date_params = {} diff --git a/posthog/queries/event_query/event_query.py b/posthog/queries/event_query/event_query.py index bcd7002e66f..8737876d001 100644 --- a/posthog/queries/event_query/event_query.py +++ b/posthog/queries/event_query/event_query.py @@ -60,13 +60,19 @@ class EventQuery(metaclass=ABCMeta): should_join_persons=False, should_join_sessions=False, # Extra events/person table columns to fetch since parent query needs them - extra_fields: List[ColumnName] = [], - extra_event_properties: List[PropertyName] = [], - extra_person_fields: List[ColumnName] = [], + extra_fields: Optional[List[ColumnName]] = None, + extra_event_properties: Optional[List[PropertyName]] = None, + extra_person_fields: Optional[List[ColumnName]] = None, override_aggregate_users_by_distinct_id: Optional[bool] = None, person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled, **kwargs, ) -> None: + if extra_person_fields is None: + extra_person_fields = [] + if extra_event_properties is None: + extra_event_properties = [] + if extra_fields is None: + 
extra_fields = [] self._filter = filter self._team_id = team.pk self._team = team diff --git a/posthog/queries/foss_cohort_query.py b/posthog/queries/foss_cohort_query.py index 91d16ec3ec5..352fc19ee13 100644 --- a/posthog/queries/foss_cohort_query.py +++ b/posthog/queries/foss_cohort_query.py @@ -139,12 +139,18 @@ class FOSSCohortQuery(EventQuery): should_join_distinct_ids=False, should_join_persons=False, # Extra events/person table columns to fetch since parent query needs them - extra_fields: List[ColumnName] = [], - extra_event_properties: List[PropertyName] = [], - extra_person_fields: List[ColumnName] = [], + extra_fields: Optional[List[ColumnName]] = None, + extra_event_properties: Optional[List[PropertyName]] = None, + extra_person_fields: Optional[List[ColumnName]] = None, override_aggregate_users_by_distinct_id: Optional[bool] = None, **kwargs, ) -> None: + if extra_person_fields is None: + extra_person_fields = [] + if extra_event_properties is None: + extra_event_properties = [] + if extra_fields is None: + extra_fields = [] self._fields = [] self._events = [] self._earliest_time_for_event_query = None diff --git a/posthog/queries/funnels/base.py b/posthog/queries/funnels/base.py index a96ba9b9f7f..c4258c6f6eb 100644 --- a/posthog/queries/funnels/base.py +++ b/posthog/queries/funnels/base.py @@ -667,7 +667,7 @@ class ClickhouseFunnelBase(ABC): if self._filter.include_recordings: events = [] for i in range(0, max_steps): - event_fields = ["latest"] + self.extra_event_fields_and_properties + event_fields = ["latest", *self.extra_event_fields_and_properties] event_fields_with_step = ", ".join([f'"{field}_{i}"' for field in event_fields]) event_clause = f"({event_fields_with_step}) as step_{i}_matching_event" events.append(event_clause) diff --git a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py index bb6673387b6..7994b195fca 100644 --- 
a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py +++ b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict +from typing import Dict, Optional from posthog.models import Filter from posthog.queries.funnels import ClickhouseFunnel @@ -115,7 +115,11 @@ class TestBreakdownsByCurrentURL(ClickhouseTestMixin, APIBaseTest): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Dict = {}, events_extra: Dict = {}): + def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + if events_extra is None: + events_extra = {} + if extra is None: + extra = {} response = ClickhouseFunnel( Filter( data={ diff --git a/posthog/queries/trends/test/test_breakdowns.py b/posthog/queries/trends/test/test_breakdowns.py index 48ed9033c04..78b5a01e45a 100644 --- a/posthog/queries/trends/test/test_breakdowns.py +++ b/posthog/queries/trends/test/test_breakdowns.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict +from typing import Dict, Optional from posthog.constants import TRENDS_TABLE from posthog.models import Filter @@ -104,7 +104,11 @@ class TestBreakdowns(ClickhouseTestMixin, APIBaseTest): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Dict = {}, events_extra: Dict = {}): + def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + if events_extra is None: + events_extra = {} + if extra is None: + extra = {} response = Trends().run( Filter( data={ diff --git a/posthog/queries/trends/test/test_breakdowns_by_current_url.py b/posthog/queries/trends/test/test_breakdowns_by_current_url.py index bc7a8159584..26e0c40ae64 100644 --- a/posthog/queries/trends/test/test_breakdowns_by_current_url.py +++ b/posthog/queries/trends/test/test_breakdowns_by_current_url.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict +from typing import Dict, Optional 
from posthog.models import Filter from posthog.queries.trends.trends import Trends @@ -99,7 +99,11 @@ class TestBreakdownsByCurrentURL(ClickhouseTestMixin, APIBaseTest): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Dict = {}, events_extra: Dict = {}): + def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + if events_extra is None: + events_extra = {} + if extra is None: + extra = {} response = Trends().run( Filter( data={ diff --git a/posthog/queries/trends/test/test_formula.py b/posthog/queries/trends/test/test_formula.py index adbf54fa05f..01e838336e5 100644 --- a/posthog/queries/trends/test/test_formula.py +++ b/posthog/queries/trends/test/test_formula.py @@ -129,7 +129,9 @@ class TestFormula(ClickhouseTestMixin, APIBaseTest): }, ) - def _run(self, extra: Dict = {}, run_at: Optional[str] = None): + def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None): + if extra is None: + extra = {} with freeze_time(run_at or "2020-01-04T13:01:01Z"): action_response = Trends().run( Filter( diff --git a/posthog/queries/trends/test/test_paging_breakdowns.py b/posthog/queries/trends/test/test_paging_breakdowns.py index 31db69f75b5..b4040fee618 100644 --- a/posthog/queries/trends/test/test_paging_breakdowns.py +++ b/posthog/queries/trends/test/test_paging_breakdowns.py @@ -38,7 +38,9 @@ class TestPagingBreakdowns(APIBaseTest): create_people=True, ) - def _run(self, extra: Dict = {}, run_at: Optional[str] = None): + def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None): + if extra is None: + extra = {} with freeze_time(run_at or "2020-01-04T13:01:01Z"): action_response = Trends().run( Filter( diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py index bb11f0c3829..e002145de99 100644 --- a/posthog/queries/trends/util.py +++ b/posthog/queries/trends/util.py @@ -102,9 +102,11 @@ def process_math( def parse_response( stats: Dict, filter: Filter, - 
additional_values: Dict = {}, + additional_values: Optional[Dict] = None, entity: Optional[Entity] = None, ) -> Dict[str, Any]: + if additional_values is None: + additional_values = {} counts = stats[1] labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if filter.interval == "hour" else "")) for item in stats[0]] days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if filter.interval == "hour" else "")) for item in stats[0]] diff --git a/posthog/session_recordings/queries/test/test_session_recording_properties.py b/posthog/session_recordings/queries/test/test_session_recording_properties.py index aa152b0b2fa..7972eb742ab 100644 --- a/posthog/session_recordings/queries/test/test_session_recording_properties.py +++ b/posthog/session_recordings/queries/test/test_session_recording_properties.py @@ -25,8 +25,10 @@ class TestSessionRecordingProperties(BaseTest, ClickhouseTestMixin): timestamp, team=None, event_name="$pageview", - properties={"$os": "Windows 95", "$current_url": "aloha.com/2"}, + properties=None, ): + if properties is None: + properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"} if team is None: team = self.team _create_event( diff --git a/posthog/session_recordings/test/test_session_recording_helpers.py b/posthog/session_recordings/test/test_session_recording_helpers.py index 1fd6bb31919..b6b83e02c28 100644 --- a/posthog/session_recordings/test/test_session_recording_helpers.py +++ b/posthog/session_recordings/test/test_session_recording_helpers.py @@ -280,7 +280,6 @@ def test_new_ingestion_large_full_snapshot_is_separated(raw_snapshot_events, moc "distinct_id": "abc123", }, }, - ] + [ { "event": "$snapshot", "properties": { diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index 03e73aabe05..12085f55925 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -780,7 +780,7 
@@ class TestSessionRecordings(APIBaseTest, ClickhouseTestMixin, QueryMatchingTest) # by default a session recording is deleted, so we have to explicitly mark the mock as not deleted mock_get_session_recording.return_value = SessionRecording(session_id=session_id, team=self.team, deleted=False) - annoying_data_from_javascript = "\uD801\uDC37 probably from console logs" + annoying_data_from_javascript = "\ud801\udc37 probably from console logs" mock_realtime_snapshots.return_value = [ {"some": annoying_data_from_javascript}, diff --git a/posthog/settings/feature_flags.py b/posthog/settings/feature_flags.py index 371f4973766..8b1f5b3e3f9 100644 --- a/posthog/settings/feature_flags.py +++ b/posthog/settings/feature_flags.py @@ -4,7 +4,8 @@ from posthog.settings.utils import get_list # These flags will be force-enabled on the frontend # The features here are released, but the flags are just not yet removed from the code -PERSISTED_FEATURE_FLAGS = get_list(os.getenv("PERSISTED_FEATURE_FLAGS", "")) + [ +PERSISTED_FEATURE_FLAGS = [ + *get_list(os.getenv("PERSISTED_FEATURE_FLAGS", "")), "simplify-actions", "historical-exports-v2", "ingestion-warnings-enabled", diff --git a/posthog/settings/web.py b/posthog/settings/web.py index f54c2e32fc2..ee6961de70e 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -341,7 +341,7 @@ KAFKA_PRODUCE_ACK_TIMEOUT_SECONDS = int(os.getenv("KAFKA_PRODUCE_ACK_TIMEOUT_SEC # https://github.com/korfuri/django-prometheus for more details # We keep the number of buckets low to reduce resource usage on the Prometheus -PROMETHEUS_LATENCY_BUCKETS = [0.1, 0.3, 0.9, 2.7, 8.1] + [float("inf")] +PROMETHEUS_LATENCY_BUCKETS = [0.1, 0.3, 0.9, 2.7, 8.1, float("inf")] SALT_KEY = os.getenv("SALT_KEY", "0123456789abcdefghijklmnopqrstuvwxyz") diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py index c02c87eaac2..d06d15ee12a 100644 --- a/posthog/tasks/email.py +++ b/posthog/tasks/email.py @@ -174,9 +174,9 @@ async def 
send_batch_export_run_failure( # NOTE: We are taking only the date component to cap the number of emails at one per day per batch export. last_updated_at_date = batch_export_run.last_updated_at.strftime("%Y-%m-%d") - campaign_key: ( - str - ) = f"batch_export_run_email_batch_export_{batch_export_run.batch_export.id}_last_updated_at_{last_updated_at_date}" + campaign_key: str = ( + f"batch_export_run_email_batch_export_{batch_export_run.batch_export.id}_last_updated_at_{last_updated_at_date}" + ) message = await sync_to_async(EmailMessage)( campaign_key=campaign_key, diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index 055629bf055..d977f27560b 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -325,7 +325,7 @@ class UsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin flush_persons_and_events() def _select_report_by_org_id(self, org_id: str, reports: List[Dict]) -> Dict: - return [report for report in reports if report["organization_id"] == org_id][0] + return next(report for report in reports if report["organization_id"] == org_id) def _create_plugin(self, name: str, enabled: bool) -> None: plugin = Plugin.objects.create(organization_id=self.team.organization.pk, name=name) diff --git a/posthog/temporal/batch_exports/utils.py b/posthog/temporal/batch_exports/utils.py index a097776389c..f165ae070a8 100644 --- a/posthog/temporal/batch_exports/utils.py +++ b/posthog/temporal/batch_exports/utils.py @@ -9,7 +9,7 @@ T = typing.TypeVar("T") def peek_first_and_rewind( - gen: collections.abc.Generator[T, None, None] + gen: collections.abc.Generator[T, None, None], ) -> tuple[T, collections.abc.Generator[T, None, None]]: """Peek into the first element in a generator and rewind the advance. 
diff --git a/posthog/temporal/data_imports/pipelines/zendesk/credentials.py b/posthog/temporal/data_imports/pipelines/zendesk/credentials.py index e4dfda20135..88a0659b7ce 100644 --- a/posthog/temporal/data_imports/pipelines/zendesk/credentials.py +++ b/posthog/temporal/data_imports/pipelines/zendesk/credentials.py @@ -1,6 +1,7 @@ """ This module handles how credentials are read in dlt sources """ + from typing import ClassVar, List, Union from dlt.common.configuration import configspec from dlt.common.configuration.specs import CredentialsConfiguration diff --git a/posthog/temporal/tests/batch_exports/test_logger.py b/posthog/temporal/tests/batch_exports/test_logger.py index 5c12cef1d03..4ee3ca9a014 100644 --- a/posthog/temporal/tests/batch_exports/test_logger.py +++ b/posthog/temporal/tests/batch_exports/test_logger.py @@ -82,7 +82,7 @@ class CaptureKafkaProducer: def producer(self) -> aiokafka.AIOKafkaProducer: if self._producer is None: self._producer = aiokafka.AIOKafkaProducer( - bootstrap_servers=settings.KAFKA_HOSTS + ["localhost:9092"], + bootstrap_servers=[*settings.KAFKA_HOSTS, "localhost:9092"], security_protocol=settings.KAFKA_SECURITY_PROTOCOL or "PLAINTEXT", acks="all", request_timeout_ms=1000000, diff --git a/posthog/temporal/tests/utils/datetimes.py b/posthog/temporal/tests/utils/datetimes.py index ec0c10980bb..c168e885a3e 100644 --- a/posthog/temporal/tests/utils/datetimes.py +++ b/posthog/temporal/tests/utils/datetimes.py @@ -1,4 +1,5 @@ """Test utilities that operate with datetime.datetimes.""" + import datetime as dt diff --git a/posthog/temporal/tests/utils/events.py b/posthog/temporal/tests/utils/events.py index 884901ca9aa..71ce7f7f616 100644 --- a/posthog/temporal/tests/utils/events.py +++ b/posthog/temporal/tests/utils/events.py @@ -1,4 +1,5 @@ """Test utilities that deal with test event generation.""" + import datetime as dt import json import random diff --git a/posthog/temporal/tests/utils/models.py 
b/posthog/temporal/tests/utils/models.py index 04da6fe21b0..4ed75ad50aa 100644 --- a/posthog/temporal/tests/utils/models.py +++ b/posthog/temporal/tests/utils/models.py @@ -1,4 +1,5 @@ """Test utilities to manipulate BatchExport* models.""" + import uuid import temporalio.client diff --git a/posthog/test/base.py b/posthog/test/base.py index 6d4735679a0..c96738aafa1 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -409,9 +409,9 @@ def cleanup_materialized_columns(): def also_test_with_materialized_columns( - event_properties=[], - person_properties=[], - group_properties=[], + event_properties=None, + person_properties=None, + group_properties=None, verify_no_jsonextract=True, # :TODO: Remove this when groups-on-events is released materialize_only_with_person_on_events=False, @@ -422,6 +422,12 @@ def also_test_with_materialized_columns( Requires a unittest class with ClickhouseTestMixin mixed in """ + if group_properties is None: + group_properties = [] + if person_properties is None: + person_properties = [] + if event_properties is None: + event_properties = [] try: from ee.clickhouse.materialized_columns.analyze import materialize except: diff --git a/posthog/test/test_latest_migrations.py b/posthog/test/test_latest_migrations.py index 36a047af8a6..1d60179576f 100644 --- a/posthog/test/test_latest_migrations.py +++ b/posthog/test/test_latest_migrations.py @@ -33,6 +33,6 @@ class TestLatestMigrations(TestCase): def _get_latest_migration_from_manifest(django_app: str) -> str: root = pathlib.Path().resolve() manifest = pathlib.Path(f"{root}/latest_migrations.manifest").read_text() - posthog_latest_migration = [line for line in manifest.splitlines() if line.startswith(f"{django_app}: ")][0] + posthog_latest_migration = next(line for line in manifest.splitlines() if line.startswith(f"{django_app}: ")) return posthog_latest_migration.replace(f"{django_app}: ", "") diff --git a/posthog/utils.py b/posthog/utils.py index f186fdadb4a..19e110507ab 100644 --- 
a/posthog/utils.py +++ b/posthog/utils.py @@ -275,7 +275,7 @@ def get_js_url(request: HttpRequest) -> str: def render_template( template_name: str, request: HttpRequest, - context: Dict = {}, + context: Optional[Dict] = None, *, team_for_public_context: Optional["Team"] = None, ) -> HttpResponse: @@ -284,6 +284,8 @@ def render_template( If team_for_public_context is provided, this means this is a public page such as a shared dashboard. """ + if context is None: + context = {} template = get_template(template_name) context["opt_out_capture"] = settings.OPT_OUT_CAPTURE @@ -471,7 +473,7 @@ def get_frontend_apps(team_id: int) -> Dict[int, Dict[str, Any]]: for p in plugin_configs: config = p["pluginconfig__config"] or {} config_schema = p["config_schema"] or {} - secret_fields = {field["key"] for field in config_schema if "secret" in field and field["secret"]} + secret_fields = {field["key"] for field in config_schema if field.get("secret")} for key in secret_fields: if key in config: config[key] = "** SECRET FIELD **" diff --git a/pyproject.toml b/pyproject.toml index 769cc94f5bf..2701b5a74d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,8 +14,9 @@ exclude = [ "./env", "./posthog/hogql/grammar", ] + +[tool.ruff.lint] ignore = [ - "B006", "B017", "B019", "B904", @@ -34,14 +35,18 @@ select = [ "C9", "E", "F", + "RUF005", + "RUF013", + "RUF015", + "RUF019", "T2", "W", ] -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] max-complexity = 10 -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "./posthog/queries/column_optimizer/column_optimizer.py" = ["F401"] "./posthog/migrations/0027_move_elements_to_group.py" = ["T201"] "./posthog/queries/cohort_query.py" = ["F401"] diff --git a/requirements-dev.in b/requirements-dev.in index e4b002de8df..44bd424b1bc 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -11,7 +11,7 @@ -c requirements.txt -ruff>=0.1.2 +ruff~=0.3.7 pip-tools==7.3.0 mypy~=1.8.0 mypy-baseline~=0.6.1 diff --git a/requirements-dev.txt 
b/requirements-dev.txt index ff1fb56f11a..5b8893ea256 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -59,7 +59,9 @@ coreapi==2.3.3 coreschema==0.0.4 # via coreapi coverage[toml]==5.5 - # via pytest-cov + # via + # coverage + # pytest-cov datamodel-code-generator==0.25.2 # via -r requirements-dev.in django==4.2.11 @@ -90,7 +92,9 @@ exceptiongroup==1.2.0 faker==17.5.0 # via -r requirements-dev.in fakeredis[lua]==2.11.0 - # via -r requirements-dev.in + # via + # -r requirements-dev.in + # fakeredis flaky==3.7.0 # via -r requirements-dev.in freezegun==1.2.2 @@ -168,6 +172,7 @@ pydantic[email]==2.5.3 # via # -c requirements.txt # datamodel-code-generator + # pydantic pydantic-core==2.14.6 # via # -c requirements.txt @@ -225,7 +230,7 @@ requests==2.31.0 # responses responses==0.23.1 # via -r requirements-dev.in -ruff==0.1.2 +ruff==0.3.7 # via -r requirements-dev.in six==1.16.0 # via diff --git a/requirements.txt b/requirements.txt index e1d4f8fe7d4..78255c273ae 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,6 +9,7 @@ aioboto3==12.0.0 aiobotocore[boto3]==2.7.0 # via # aioboto3 + # aiobotocore # s3fs aiohttp==3.9.3 # via @@ -252,6 +253,7 @@ giturlparse==0.12.0 # via dlt google-api-core[grpc]==2.11.1 # via + # google-api-core # google-cloud-bigquery # google-cloud-core google-auth==2.22.0 @@ -447,7 +449,9 @@ protobuf==4.22.1 # proto-plus # temporalio psycopg[binary]==3.1.13 - # via -r requirements.in + # via + # -r requirements.in + # psycopg psycopg-binary==3.1.13 # via psycopg psycopg2-binary==2.9.7 @@ -707,6 +711,7 @@ urllib3[secure,socks]==1.26.18 # requests # selenium # sentry-sdk + # urllib3 urllib3-secure-extra==0.1.0 # via urllib3 vine==5.0.0