chore(deps): Upgrade ruff (#21648)
parent 61ad509c08
commit a0fc086175
.github/workflows/ci-backend.yml (2 changed lines)
@@ -130,7 +130,7 @@ jobs:

      - name: Check formatting
        run: |
-          ruff format --exclude posthog/hogql/grammar --check --diff .
+          ruff format --check --diff .

      - name: Add Problem Matcher
        run: echo "::add-matcher::.github/mypy-problem-matcher.json"
@@ -33,13 +33,19 @@ class EnterpriseEventQuery(EventQuery):
        should_join_distinct_ids=False,
        should_join_persons=False,
        # Extra events/person table columns to fetch since parent query needs them
-       extra_fields: List[ColumnName] = [],
-       extra_event_properties: List[PropertyName] = [],
-       extra_person_fields: List[ColumnName] = [],
+       extra_fields: Optional[List[ColumnName]] = None,
+       extra_event_properties: Optional[List[PropertyName]] = None,
+       extra_person_fields: Optional[List[ColumnName]] = None,
        override_aggregate_users_by_distinct_id: Optional[bool] = None,
        person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled,
        **kwargs,
    ) -> None:
+       if extra_person_fields is None:
+           extra_person_fields = []
+       if extra_event_properties is None:
+           extra_event_properties = []
+       if extra_fields is None:
+           extra_fields = []
        super().__init__(
            filter=filter,
            team=team,
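The signature change above is the recurring pattern in this commit: ruff's B006 rule (mutable default arguments) replaces literal []/{} defaults with a None sentinel plus an in-function default. A minimal, self-contained sketch of the idea; the names are illustrative, not from the PostHog codebase:

from typing import List, Optional


def collect(item: str, seen: Optional[List[str]] = None) -> List[str]:
    # A literal `seen: List[str] = []` default would be a single list shared
    # across every call; the None sentinel gives each call its own fresh list.
    if seen is None:
        seen = []
    seen.append(item)
    return seen


assert collect("a") == ["a"]
assert collect("b") == ["b"]  # not ["a", "b"], no state leaks between calls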
@@ -868,9 +868,9 @@ class FunnelCorrelation:

        # Get the total success/failure counts from the results
        results = [result for result in results_with_total if result[0] != self.TOTAL_IDENTIFIER]
-       _, success_total, failure_total = [
+       _, success_total, failure_total = next(
            result for result in results_with_total if result[0] == self.TOTAL_IDENTIFIER
-       ][0]
+       )

        # Add a little structure, and keep it close to the query definition so it's
        # obvious what's going on with result indices.
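Here the autofix is ruff's RUF015 (unnecessary iterable allocation for first element): taking `[...][0]` becomes `next(...)` over a generator. A small sketch with made-up data:

rows = [("total", 10, 5), ("step_1", 3, 2)]

# Before: builds the whole filtered list just to read its first element.
first_old = [row for row in rows if row[0] == "total"][0]

# After: stops at the first match; note it raises StopIteration instead of
# IndexError when nothing matches.
first_new = next(row for row in rows if row[0] == "total")

assert first_old == first_new == ("total", 10, 5)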
@@ -27,8 +27,10 @@ def _make_event_sequence(
    interval_days,
    period_event_counts,
    event="$pageview",
-   properties={},
+   properties=None,
):
+   if properties is None:
+       properties = {}
    for period_index, event_count in enumerate(period_event_counts):
        for i in range(event_count):
            _create_event(
@@ -592,7 +592,7 @@ class BreakdownTests(APIBaseTest, ClickhouseTestMixin):
            ),
        )

-       chrome_cohort = [cohort for cohort in retention["result"] if cohort["label"] == "Chrome"][0]
+       chrome_cohort = next(cohort for cohort in retention["result"] if cohort["label"] == "Chrome")
        people_url = chrome_cohort["values"][0]["people_url"]
        people_response = self.client.get(people_url)
        assert people_response.status_code == 200
@@ -72,7 +72,8 @@ class License(models.Model):
    ]

    ENTERPRISE_PLAN = "enterprise"
-   ENTERPRISE_FEATURES = SCALE_FEATURES + [
+   ENTERPRISE_FEATURES = [
+       *SCALE_FEATURES,
        AvailableFeature.ADVANCED_PERMISSIONS,
        AvailableFeature.PROJECT_BASED_PERMISSIONING,
        AvailableFeature.SAML,
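This is ruff's RUF005 (collection literal concatenation): building a new list or tuple by unpacking instead of `+`. An illustrative sketch with invented names:

SCALE = ["feature_a", "feature_b"]

enterprise_old = SCALE + ["feature_c"]   # concatenation
enterprise_new = [*SCALE, "feature_c"]   # unpacking, same result

assert enterprise_old == enterprise_new

# The same rewrite applies to tuples, as in the renderer_classes changes later
# in this diff:
renderers = (*tuple(["json", "csv"]), "paginated_csv")
assert renderers == ("json", "csv", "paginated_csv")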
@@ -1,6 +1,7 @@
"""
Django settings for PostHog Enterprise Edition.
"""
+
import os
from typing import Dict, List

@@ -15,7 +16,8 @@ HOOK_EVENTS: Dict[str, str] = {
}

# SSO
-AUTHENTICATION_BACKENDS = AUTHENTICATION_BACKENDS + [
+AUTHENTICATION_BACKENDS = [
+    *AUTHENTICATION_BACKENDS,
    "ee.api.authentication.MultitenantSAMLAuth",
    "social_core.backends.google.GoogleOAuth2",
]
@@ -1,5 +1,6 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
+
import os
import sys

@ -2,10 +2,6 @@ posthog/temporal/common/utils.py:0: error: Argument 1 to "abstractclassmethod" h
|
||||
posthog/temporal/common/utils.py:0: note: This is likely because "from_activity" has named arguments: "cls". Consider marking them positional-only
|
||||
posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type]
|
||||
posthog/temporal/data_imports/pipelines/zendesk/talk_api.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "str") [assignment]
|
||||
posthog/hogql/database/argmax.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/database/argmax.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/argmax.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/database/argmax.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator]
|
||||
posthog/hogql/database/schema/numbers.py:0: error: Incompatible types in assignment (expression has type "dict[str, IntegerDatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment]
|
||||
posthog/hogql/database/schema/numbers.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/numbers.py:0: note: Consider using "Mapping" instead, which is covariant in the value type
|
||||
@ -51,14 +47,6 @@ posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incomp
|
||||
posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incompatible type "Expr | None"; expected "AST" [arg-type]
|
||||
posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incompatible type "WindowFrameExpr | None"; expected "AST" [arg-type]
|
||||
posthog/hogql/visitor.py:0: error: Argument 1 to "visit" of "Visitor" has incompatible type "WindowFrameExpr | None"; expected "AST" [arg-type]
|
||||
posthog/hogql/database/schema/log_entries.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/database/schema/log_entries.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/log_entries.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/database/schema/log_entries.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator]
|
||||
posthog/hogql/database/schema/log_entries.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/database/schema/log_entries.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/log_entries.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/database/schema/log_entries.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator]
|
||||
posthog/hogql/database/schema/groups.py:0: error: Incompatible types in assignment (expression has type "dict[str, DatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment]
|
||||
posthog/hogql/database/schema/groups.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instead, which is covariant in the value type
|
||||
@ -76,18 +64,6 @@ posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined]
|
||||
posthog/hogql/parser.py:0: error: Statement is unreachable [unreachable]
|
||||
posthog/hogql/database/schema/person_distinct_ids.py:0: error: Argument 1 to "select_from_person_distinct_ids_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, list[str | int]]" [arg-type]
|
||||
posthog/hogql/database/schema/person_distinct_id_overrides.py:0: error: Argument 1 to "select_from_person_distinct_id_overrides_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, list[str | int]]" [arg-type]
|
||||
posthog/hogql/database/schema/cohort_people.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/database/schema/cohort_people.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/cohort_people.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/database/schema/cohort_people.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator]
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator]
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/database/schema/session_replay_events.py:0: error: Unsupported operand types for + ("list[str]" and "list[str | int]") [operator]
|
||||
posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable]
|
||||
posthog/plugins/utils.py:0: error: Statement is unreachable [unreachable]
|
||||
posthog/models/filters/base_filter.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined]
|
||||
@ -292,9 +268,6 @@ posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has inc
|
||||
posthog/hogql/property.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/property.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/property.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/property.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
|
||||
posthog/hogql/property.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
|
||||
posthog/hogql/property.py:0: note: Consider using "Sequence" instead, which is covariant
|
||||
posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc]
|
||||
posthog/hogql/filters.py:0: error: Incompatible default for argument "team" (default has type "None", argument has type "Team") [assignment]
|
||||
posthog/hogql/filters.py:0: note: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True
|
||||
@ -329,9 +302,11 @@ posthog/queries/funnels/base.py:0: error: "HogQLContext" has no attribute "perso
|
||||
posthog/queries/funnels/base.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type]
|
||||
ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unreachable [unreachable]
|
||||
posthog/caching/calculate_results.py:0: error: Argument 3 to "process_query" has incompatible type "bool"; expected "LimitContext | None" [arg-type]
|
||||
posthog/api/person.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
|
||||
posthog/api/person.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type]
|
||||
posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type]
|
||||
posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type]
|
||||
posthog/api/person.py:0: error: Cannot determine type of "group_properties_filter_group" [has-type]
|
||||
posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Argument 1 to "append" of "list" has incompatible type "EventPropertyFilter"; expected "Expr" [arg-type]
|
||||
posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Signature of "to_actors_query" incompatible with supertype "QueryRunner" [override]
|
||||
posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: Superclass:
|
||||
@ -373,6 +348,7 @@ posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompat
|
||||
posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompatible types in assignment (expression has type "Filter", variable has type "RetentionFilter") [assignment]
|
||||
posthog/api/insight.py:0: error: Argument 1 to "is_insight_with_hogql_support" has incompatible type "Insight | DashboardTile"; expected "Insight" [arg-type]
|
||||
posthog/api/insight.py:0: error: Argument 1 to "process_insight" has incompatible type "Insight | DashboardTile"; expected "Insight" [arg-type]
|
||||
posthog/api/insight.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
|
||||
posthog/api/dashboards/dashboard.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
posthog/api/feature_flag.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
posthog/api/feature_flag.py:0: error: Item "Sequence[Any]" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr]
|
||||
@ -504,9 +480,6 @@ posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "f
|
||||
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr]
|
||||
posthog/hogql/test/test_resolver.py:0: error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | Any | None"; expected "Expr" [arg-type]
|
||||
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr]
|
||||
posthog/hogql/test/test_property.py:0: error: Incompatible default for argument "placeholders" (default has type "None", argument has type "dict[str, Any]") [assignment]
|
||||
posthog/hogql/test/test_property.py:0: note: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True
|
||||
posthog/hogql/test/test_property.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase
|
||||
posthog/hogql/test/test_property.py:0: error: Argument 1 to "_property_to_expr" of "TestProperty" has incompatible type "HogQLPropertyFilter"; expected "PropertyGroup | Property | dict[Any, Any] | list[Any]" [arg-type]
|
||||
posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type]
|
||||
posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type]
|
||||
@ -526,12 +499,6 @@ posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type fo
|
||||
posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type for in ("str | None") [operator]
|
||||
posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type for in ("str | None") [operator]
|
||||
posthog/hogql/test/test_modifiers.py:0: error: Unsupported right operand type for in ("str | None") [operator]
|
||||
posthog/hogql/test/test_filters.py:0: error: Incompatible default for argument "placeholders" (default has type "None", argument has type "dict[str, Any]") [assignment]
|
||||
posthog/hogql/test/test_filters.py:0: note: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True
|
||||
posthog/hogql/test/test_filters.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase
|
||||
posthog/hogql/test/test_filters.py:0: error: Incompatible default for argument "placeholders" (default has type "None", argument has type "dict[str, Any]") [assignment]
|
||||
posthog/hogql/test/test_filters.py:0: note: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True
|
||||
posthog/hogql/test/test_filters.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase
|
||||
posthog/hogql/test/_test_parser.py:0: error: Invalid base class [misc]
|
||||
posthog/hogql/test/_test_parser.py:0: error: Argument "table" to "JoinExpr" has incompatible type "Placeholder"; expected "SelectQuery | SelectUnionQuery | Field | None" [arg-type]
|
||||
posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr]
|
||||
@ -551,6 +518,7 @@ posthog/hogql/database/schema/test/test_channel_type.py:0: error: Value of type
|
||||
posthog/hogql/database/schema/test/test_channel_type.py:0: error: Value of type "list[Any] | None" is not indexable [index]
|
||||
posthog/hogql/database/schema/event_sessions.py:0: error: Statement is unreachable [unreachable]
|
||||
posthog/api/organization_member.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
posthog/api/action.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
|
||||
ee/api/role.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
ee/clickhouse/views/insights.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 6 has incompatible type "ExternalDataSchema"; expected "str" [arg-type]
|
||||
@ -663,6 +631,7 @@ posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any
|
||||
posthog/api/property_definition.py:0: error: Incompatible types in assignment (expression has type "type[EnterprisePropertyDefinitionSerializer]", variable has type "type[PropertyDefinitionSerializer]") [assignment]
|
||||
posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr]
|
||||
posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr]
|
||||
posthog/api/event.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
|
||||
posthog/api/dashboards/dashboard_templates.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
ee/api/feature_flag_role_access.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
|
||||
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
|
||||
@ -722,6 +691,7 @@ posthog/management/commands/test/test_create_batch_export_from_app.py:0: error:
|
||||
posthog/management/commands/test/test_create_batch_export_from_app.py:0: note: Possible overload variants:
|
||||
posthog/management/commands/test/test_create_batch_export_from_app.py:0: note: def __getitem__(self, SupportsIndex, /) -> str
|
||||
posthog/management/commands/test/test_create_batch_export_from_app.py:0: note: def __getitem__(self, slice, /) -> list[str]
|
||||
posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable]
|
||||
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item]
|
||||
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item]
|
||||
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item]
|
||||
|
@@ -34,7 +34,7 @@
        "build:esbuild": "node frontend/build.mjs",
        "schema:build": "pnpm run schema:build:json && pnpm run schema:build:python",
        "schema:build:json": "ts-node bin/build-schema.mjs && prettier --write frontend/src/queries/schema.json",
-       "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py",
+       "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py && ruff check --fix posthog/schema.py",
        "grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp",
        "grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4",
        "grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o ../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4",
@@ -47,7 +47,7 @@
        "typescript:check": "tsc --noEmit && echo \"No errors reported by tsc.\"",
        "lint:js": "eslint frontend/src",
        "lint:css": "stylelint \"frontend/**/*.{css,scss}\"",
-       "format:backend": "ruff --exclude posthog/hogql/grammar .",
+       "format:backend": "ruff .",
        "format:frontend": "pnpm lint:js --fix && pnpm lint:css --fix && pnpm prettier",
        "format": "pnpm format:backend && pnpm format:frontend",
        "typegen:write": "kea-typegen write --delete --show-ts-errors",
@@ -337,8 +337,8 @@
            "pnpm --dir plugin-server exec prettier --write"
        ],
        "!(posthog/hogql/grammar/*)*.{py,pyi}": [
-           "ruff format",
-           "ruff check --fix"
+           "ruff check --fix",
+           "ruff format"
        ]
    },
    "browserslist": {
@@ -165,7 +165,7 @@ class ActionViewSet(
    viewsets.ModelViewSet,
):
    scope_object = "action"
-   renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,)
+   renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer)
    queryset = Action.objects.all()
    serializer_class = ActionSerializer
    authentication_classes = [TemporaryTokenAuthentication]
@@ -59,10 +59,7 @@ LOG_RATE_LIMITER = Limiter(
# events that are ingested via a separate path than analytics events. They have
# fewer restrictions on e.g. the order they need to be processed in.
SESSION_RECORDING_DEDICATED_KAFKA_EVENTS = ("$snapshot_items",)
-SESSION_RECORDING_EVENT_NAMES = (
-    "$snapshot",
-    "$performance_event",
-) + SESSION_RECORDING_DEDICATED_KAFKA_EVENTS
+SESSION_RECORDING_EVENT_NAMES = ("$snapshot", "$performance_event", *SESSION_RECORDING_DEDICATED_KAFKA_EVENTS)

EVENTS_RECEIVED_COUNTER = Counter(
    "capture_events_received_total",
@@ -604,9 +601,7 @@ def capture_internal(

    if event["event"] in SESSION_RECORDING_EVENT_NAMES:
        session_id = event["properties"]["$session_id"]
-       headers = [
-           ("token", token),
-       ] + extra_headers
+       headers = [("token", token), *extra_headers]

        overflowing = False
        if token in settings.REPLAY_OVERFLOW_FORCED_TOKENS:
@ -85,7 +85,7 @@ class EventViewSet(
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
scope_object = "query"
|
||||
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,)
|
||||
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer)
|
||||
serializer_class = ClickhouseEventSerializer
|
||||
throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle]
|
||||
pagination_class = UncountedLimitOffsetPagination
|
||||
|
@ -572,7 +572,7 @@ class InsightViewSet(
|
||||
ClickHouseBurstRateThrottle,
|
||||
ClickHouseSustainedRateThrottle,
|
||||
]
|
||||
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.CSVRenderer,)
|
||||
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.CSVRenderer)
|
||||
filter_backends = [DjangoFilterBackend]
|
||||
filterset_fields = ["short_id", "created_by"]
|
||||
sharing_enabled_actions = ["retrieve", "list"]
|
||||
@ -838,12 +838,12 @@ Using the correct cache and enriching the response with dashboard specific confi
|
||||
export = "{}/insights/{}/\n".format(SITE_URL, request.GET["export_insight_id"]).encode() + export
|
||||
|
||||
response = HttpResponse(export)
|
||||
response[
|
||||
"Content-Disposition"
|
||||
] = 'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format(
|
||||
name=slugify(request.GET.get("export_name", "export")),
|
||||
date_from=filter.date_from.strftime("%Y-%m-%d -") if filter.date_from else "up until",
|
||||
date_to=filter.date_to.strftime("%Y-%m-%d"),
|
||||
response["Content-Disposition"] = (
|
||||
'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format(
|
||||
name=slugify(request.GET.get("export_name", "export")),
|
||||
date_from=filter.date_from.strftime("%Y-%m-%d -") if filter.date_from else "up until",
|
||||
date_to=filter.date_to.strftime("%Y-%m-%d"),
|
||||
)
|
||||
)
|
||||
return response
|
||||
|
||||
|
@ -224,7 +224,7 @@ class PersonViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet):
|
||||
"""
|
||||
|
||||
scope_object = "person"
|
||||
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,)
|
||||
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer)
|
||||
queryset = Person.objects.all()
|
||||
serializer_class = PersonSerializer
|
||||
pagination_class = PersonLimitOffsetPagination
|
||||
@ -932,21 +932,11 @@ def prepare_actor_query_filter(filter: T) -> T:
|
||||
new_group = {
|
||||
"type": "OR",
|
||||
"values": [
|
||||
{
|
||||
"key": "email",
|
||||
"type": "person",
|
||||
"value": search,
|
||||
"operator": "icontains",
|
||||
},
|
||||
{"key": "email", "type": "person", "value": search, "operator": "icontains"},
|
||||
{"key": "name", "type": "person", "value": search, "operator": "icontains"},
|
||||
{
|
||||
"key": "distinct_id",
|
||||
"type": "event",
|
||||
"value": search,
|
||||
"operator": "icontains",
|
||||
},
|
||||
]
|
||||
+ group_properties_filter_group,
|
||||
{"key": "distinct_id", "type": "event", "value": search, "operator": "icontains"},
|
||||
*group_properties_filter_group,
|
||||
],
|
||||
}
|
||||
prop_group = (
|
||||
{"type": "AND", "values": [new_group, filter.property_groups.to_dict()]}
|
||||
|
@ -63,7 +63,11 @@ def _update_plugin_attachments(request: request.Request, plugin_config: PluginCo
|
||||
_update_plugin_attachment(request, plugin_config, match.group(1), None, user)
|
||||
|
||||
|
||||
def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=[]) -> List[Change]:
|
||||
def get_plugin_config_changes(
|
||||
old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=None
|
||||
) -> List[Change]:
|
||||
if secret_fields is None:
|
||||
secret_fields = []
|
||||
config_changes = dict_changes_between("Plugin", old_config, new_config)
|
||||
|
||||
for i, change in enumerate(config_changes):
|
||||
@ -79,8 +83,10 @@ def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str,
|
||||
|
||||
|
||||
def log_enabled_change_activity(
|
||||
new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=[]
|
||||
new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=None
|
||||
):
|
||||
if changes is None:
|
||||
changes = []
|
||||
if old_enabled != new_plugin_config.enabled:
|
||||
log_activity(
|
||||
organization_id=new_plugin_config.team.organization.id,
|
||||
@ -864,7 +870,7 @@ class PluginConfigViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet):
|
||||
|
||||
def _get_secret_fields_for_plugin(plugin: Plugin) -> Set[str]:
|
||||
# A set of keys for config fields that have secret = true
|
||||
secret_fields = {field["key"] for field in plugin.config_schema if "secret" in field and field["secret"]}
|
||||
secret_fields = {field["key"] for field in plugin.config_schema if isinstance(field, dict) and field.get("secret")}
|
||||
return secret_fields
|
||||
|
||||
|
||||
|
@ -503,9 +503,7 @@ def social_create_user(
|
||||
user=user.id if user else None,
|
||||
)
|
||||
if user:
|
||||
backend_processor = (
|
||||
"domain_whitelist"
|
||||
) # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
|
||||
backend_processor = "domain_whitelist" # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
|
||||
from_invite = True # jit_provisioning means they're definitely not organization_first_user
|
||||
|
||||
if not user:
|
||||
|
@ -421,7 +421,8 @@ class TeamViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet):
|
||||
IsAuthenticated,
|
||||
APIScopePermission,
|
||||
PremiumMultiProjectPermissions,
|
||||
] + self.permission_classes
|
||||
*self.permission_classes,
|
||||
]
|
||||
|
||||
base_permissions = [permission() for permission in common_permissions]
|
||||
|
||||
|
@ -63,7 +63,7 @@ parser = ResolvingParser(
|
||||
openapi_spec = cast(Dict[str, Any], parser.specification)
|
||||
|
||||
large_data_array = [
|
||||
{"key": random.choice(string.ascii_letters) for _ in range(512 * 1024)}
|
||||
{"key": "".join(random.choice(string.ascii_letters) for _ in range(512 * 1024))}
|
||||
] # 512 * 1024 is the max size of a single message and random letters shouldn't be compressible, so this should be at least 2 messages
|
||||
|
||||
android_json = {
|
||||
@ -188,7 +188,7 @@ class TestCapture(BaseTest):
|
||||
def _send_original_version_session_recording_event(
|
||||
self,
|
||||
number_of_events: int = 1,
|
||||
event_data: Dict | None = {},
|
||||
event_data: Dict | None = None,
|
||||
snapshot_source=3,
|
||||
snapshot_type=1,
|
||||
session_id="abc123",
|
||||
@ -198,6 +198,8 @@ class TestCapture(BaseTest):
|
||||
) -> dict:
|
||||
if event_data is None:
|
||||
event_data = {}
|
||||
if event_data is None:
|
||||
event_data = {}
|
||||
|
||||
event = {
|
||||
"event": "$snapshot",
|
||||
@ -1525,8 +1527,8 @@ class TestCapture(BaseTest):
|
||||
]
|
||||
)
|
||||
def test_cors_allows_tracing_headers(self, _: str, path: str, headers: List[str]) -> None:
|
||||
expected_headers = ",".join(["X-Requested-With", "Content-Type"] + headers)
|
||||
presented_headers = ",".join(headers + ["someotherrandomheader"])
|
||||
expected_headers = ",".join(["X-Requested-With", "Content-Type", *headers])
|
||||
presented_headers = ",".join([*headers, "someotherrandomheader"])
|
||||
response = self.client.options(
|
||||
path,
|
||||
HTTP_ORIGIN="https://localhost",
|
||||
|
@ -7,7 +7,9 @@ from posthog.test.base import APIBaseTest, QueryMatchingTest
|
||||
|
||||
|
||||
class TestComments(APIBaseTest, QueryMatchingTest):
|
||||
def _create_comment(self, data={}) -> Any:
|
||||
def _create_comment(self, data=None) -> Any:
|
||||
if data is None:
|
||||
data = {}
|
||||
payload = {
|
||||
"content": "my content",
|
||||
"scope": "Notebook",
|
||||
|
@ -73,12 +73,14 @@ class TestDecide(BaseTest, QueryMatchingTest):
|
||||
origin="http://127.0.0.1:8000",
|
||||
api_version=1,
|
||||
distinct_id="example_id",
|
||||
groups={},
|
||||
groups=None,
|
||||
geoip_disable=False,
|
||||
ip="127.0.0.1",
|
||||
disable_flags=False,
|
||||
user_agent: Optional[str] = None,
|
||||
):
|
||||
if groups is None:
|
||||
groups = {}
|
||||
return self.client.post(
|
||||
f"/decide/?v={api_version}",
|
||||
{
|
||||
@ -3336,10 +3338,12 @@ class TestDatabaseCheckForDecide(BaseTest, QueryMatchingTest):
|
||||
origin="http://127.0.0.1:8000",
|
||||
api_version=1,
|
||||
distinct_id="example_id",
|
||||
groups={},
|
||||
groups=None,
|
||||
geoip_disable=False,
|
||||
ip="127.0.0.1",
|
||||
):
|
||||
if groups is None:
|
||||
groups = {}
|
||||
return self.client.post(
|
||||
f"/decide/?v={api_version}",
|
||||
{
|
||||
@ -3571,11 +3575,15 @@ class TestDecideUsesReadReplica(TransactionTestCase):
|
||||
origin="http://127.0.0.1:8000",
|
||||
api_version=3,
|
||||
distinct_id="example_id",
|
||||
groups={},
|
||||
person_props={},
|
||||
groups=None,
|
||||
person_props=None,
|
||||
geoip_disable=False,
|
||||
ip="127.0.0.1",
|
||||
):
|
||||
if person_props is None:
|
||||
person_props = {}
|
||||
if groups is None:
|
||||
groups = {}
|
||||
return self.client.post(
|
||||
f"/decide/?v={api_version}",
|
||||
{
|
||||
|
@ -19,7 +19,9 @@ class TestPreflight(APIBaseTest, QueryMatchingTest):
|
||||
def instance_preferences(self, **kwargs):
|
||||
return {"debug_queries": False, "disable_paid_fs": False, **kwargs}
|
||||
|
||||
def preflight_dict(self, options={}):
|
||||
def preflight_dict(self, options=None):
|
||||
if options is None:
|
||||
options = {}
|
||||
return {
|
||||
"django": True,
|
||||
"redis": True,
|
||||
@ -47,7 +49,9 @@ class TestPreflight(APIBaseTest, QueryMatchingTest):
|
||||
**options,
|
||||
}
|
||||
|
||||
def preflight_authenticated_dict(self, options={}):
|
||||
def preflight_authenticated_dict(self, options=None):
|
||||
if options is None:
|
||||
options = {}
|
||||
preflight = {
|
||||
"opt_out_capture": False,
|
||||
"licensed_users_available": None,
|
||||
|
@ -251,8 +251,10 @@ def create_event_definitions_sql(
|
||||
event_type: EventDefinitionType,
|
||||
is_enterprise: bool = False,
|
||||
conditions: str = "",
|
||||
order_expressions: List[Tuple[str, Literal["ASC", "DESC"]]] = [],
|
||||
order_expressions: Optional[List[Tuple[str, Literal["ASC", "DESC"]]]] = None,
|
||||
) -> str:
|
||||
if order_expressions is None:
|
||||
order_expressions = []
|
||||
if is_enterprise:
|
||||
from ee.models import EnterpriseEventDefinition
|
||||
|
||||
|
@ -230,9 +230,11 @@ def fetch_batch_export_log_entries(
|
||||
before: dt.datetime | None = None,
|
||||
search: str | None = None,
|
||||
limit: int | None = None,
|
||||
level_filter: list[BatchExportLogEntryLevel] = [],
|
||||
level_filter: typing.Optional[list[BatchExportLogEntryLevel]] = None,
|
||||
) -> list[BatchExportLogEntry]:
|
||||
"""Fetch a list of batch export log entries from ClickHouse."""
|
||||
if level_filter is None:
|
||||
level_filter = []
|
||||
clickhouse_where_parts: list[str] = []
|
||||
clickhouse_kwargs: dict[str, typing.Any] = {}
|
||||
|
||||
|
@ -5,11 +5,14 @@ from infi.clickhouse_orm import migrations
|
||||
from posthog.clickhouse.client.execute import sync_execute
|
||||
|
||||
|
||||
def run_sql_with_exceptions(sql: Union[str, Callable[[], str]], settings={}):
|
||||
def run_sql_with_exceptions(sql: Union[str, Callable[[], str]], settings=None):
|
||||
"""
|
||||
migrations.RunSQL does not raise exceptions, so we need to wrap it in a function that does.
|
||||
"""
|
||||
|
||||
if settings is None:
|
||||
settings = {}
|
||||
|
||||
def run_sql(database):
|
||||
nonlocal sql
|
||||
if callable(sql):
|
||||
|
@ -135,10 +135,12 @@ class EmailMessage:
|
||||
campaign_key: str,
|
||||
subject: str,
|
||||
template_name: str,
|
||||
template_context: Dict = {},
|
||||
template_context: Optional[Dict] = None,
|
||||
headers: Optional[Dict] = None,
|
||||
reply_to: Optional[str] = None,
|
||||
):
|
||||
if template_context is None:
|
||||
template_context = {}
|
||||
if not is_email_available():
|
||||
raise exceptions.ImproperlyConfigured("Email is not enabled in this instance.")
|
||||
|
||||
|
@ -217,7 +217,9 @@ def report_user_organization_membership_level_changed(
|
||||
)
|
||||
|
||||
|
||||
def report_user_action(user: User, event: str, properties: Dict = {}, team: Optional[Team] = None):
|
||||
def report_user_action(user: User, event: str, properties: Optional[Dict] = None, team: Optional[Team] = None):
|
||||
if properties is None:
|
||||
properties = {}
|
||||
posthoganalytics.capture(
|
||||
user.distinct_id,
|
||||
event,
|
||||
@ -252,12 +254,14 @@ def groups(organization: Optional[Organization] = None, team: Optional[Team] = N
|
||||
def report_team_action(
|
||||
team: Team,
|
||||
event: str,
|
||||
properties: Dict = {},
|
||||
properties: Optional[Dict] = None,
|
||||
group_properties: Optional[Dict] = None,
|
||||
):
|
||||
"""
|
||||
For capturing events where it is unclear which user was the core actor we can use the team instead
|
||||
"""
|
||||
if properties is None:
|
||||
properties = {}
|
||||
posthoganalytics.capture(str(team.uuid), event, properties=properties, groups=groups(team=team))
|
||||
|
||||
if group_properties:
|
||||
@ -267,12 +271,14 @@ def report_team_action(
|
||||
def report_organization_action(
|
||||
organization: Organization,
|
||||
event: str,
|
||||
properties: Dict = {},
|
||||
properties: Optional[Dict] = None,
|
||||
group_properties: Optional[Dict] = None,
|
||||
):
|
||||
"""
|
||||
For capturing events where it is unclear which user was the core actor we can use the organization instead
|
||||
"""
|
||||
if properties is None:
|
||||
properties = {}
|
||||
posthoganalytics.capture(
|
||||
str(organization.id),
|
||||
event,
|
||||
|
@@ -408,7 +408,7 @@ class PropertyType(Type):
    joined_subquery_field_name: Optional[str] = field(default=None, init=False)

    def get_child(self, name: str | int, context: HogQLContext) -> "Type":
-       return PropertyType(chain=self.chain + [name], field_type=self.field_type)
+       return PropertyType(chain=[*self.chain, name], field_type=self.field_type)

    def has_child(self, name: str | int, context: HogQLContext) -> bool:
        return True
@@ -25,7 +25,7 @@ ConstantSupportedData: TypeAlias = (
KEYWORDS = ["true", "false", "null"]

# Keywords you can't alias to
-RESERVED_KEYWORDS = KEYWORDS + ["team_id"]
+RESERVED_KEYWORDS = [*KEYWORDS, "team_id"]

# Limit applied to SELECT statements without LIMIT clause when queried via the API
DEFAULT_RETURNED_ROWS = 100
@ -21,7 +21,7 @@ def argmax_select(
|
||||
fields_to_select.append(
|
||||
ast.Alias(
|
||||
alias=name,
|
||||
expr=argmax_version(ast.Field(chain=[table_name] + chain)),
|
||||
expr=argmax_version(ast.Field(chain=[table_name, *chain])),
|
||||
)
|
||||
)
|
||||
for key in group_fields:
|
||||
|
@ -91,7 +91,7 @@ class Table(FieldOrTable):
|
||||
return []
|
||||
|
||||
def get_asterisk(self):
|
||||
fields_to_avoid = self.avoid_asterisk_fields() + ["team_id"]
|
||||
fields_to_avoid = [*self.avoid_asterisk_fields(), "team_id"]
|
||||
asterisk: Dict[str, FieldOrTable] = {}
|
||||
for key, field in self.fields.items():
|
||||
if key in fields_to_avoid:
|
||||
|
@ -40,7 +40,7 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str | int]]
|
||||
requested_fields = {**requested_fields, "cohort_id": ["cohort_id"]}
|
||||
|
||||
fields: List[ast.Expr] = [
|
||||
ast.Alias(alias=name, expr=ast.Field(chain=[table_name] + chain)) for name, chain in requested_fields.items()
|
||||
ast.Alias(alias=name, expr=ast.Field(chain=[table_name, *chain])) for name, chain in requested_fields.items()
|
||||
]
|
||||
|
||||
return ast.SelectQuery(
|
||||
|
@ -35,7 +35,7 @@ class ReplayConsoleLogsLogEntriesTable(LazyTable):
|
||||
fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS
|
||||
|
||||
def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node):
|
||||
fields: List[ast.Expr] = [ast.Field(chain=["log_entries"] + chain) for name, chain in requested_fields.items()]
|
||||
fields: List[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()]
|
||||
|
||||
return ast.SelectQuery(
|
||||
select=fields,
|
||||
@ -58,7 +58,7 @@ class BatchExportLogEntriesTable(LazyTable):
|
||||
fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS
|
||||
|
||||
def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node):
|
||||
fields: List[ast.Expr] = [ast.Field(chain=["log_entries"] + chain) for name, chain in requested_fields.items()]
|
||||
fields: List[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()]
|
||||
|
||||
return ast.SelectQuery(
|
||||
select=fields,
|
||||
|
@ -96,8 +96,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str
|
||||
if name in aggregate_fields:
|
||||
select_fields.append(ast.Alias(alias=name, expr=aggregate_fields[name]))
|
||||
else:
|
||||
select_fields.append(ast.Alias(alias=name, expr=ast.Field(chain=[table_name] + chain)))
|
||||
group_by_fields.append(ast.Field(chain=[table_name] + chain))
|
||||
select_fields.append(ast.Alias(alias=name, expr=ast.Field(chain=[table_name, *chain])))
|
||||
group_by_fields.append(ast.Field(chain=[table_name, *chain]))
|
||||
|
||||
return ast.SelectQuery(
|
||||
select=select_fields,
|
||||
|
@ -752,7 +752,7 @@ class HogQLParseTreeConverter(ParseTreeVisitor):
|
||||
def visitColumnExprAsterisk(self, ctx: HogQLParser.ColumnExprAsteriskContext):
|
||||
if ctx.tableIdentifier():
|
||||
table = self.visit(ctx.tableIdentifier())
|
||||
return ast.Field(chain=table + ["*"])
|
||||
return ast.Field(chain=[*table, "*"])
|
||||
return ast.Field(chain=["*"])
|
||||
|
||||
def visitColumnExprTagElement(self, ctx: HogQLParser.ColumnExprTagElementContext):
|
||||
|
@ -235,7 +235,7 @@ class _Printer(Visitor):
|
||||
if where is None:
|
||||
where = extra_where
|
||||
elif isinstance(where, ast.And):
|
||||
where = ast.And(exprs=[extra_where] + where.exprs)
|
||||
where = ast.And(exprs=[extra_where, *where.exprs])
|
||||
else:
|
||||
where = ast.And(exprs=[extra_where, where])
|
||||
else:
|
||||
@ -1169,7 +1169,7 @@ class _Printer(Visitor):
|
||||
return escape_hogql_string(name, timezone=self._get_timezone())
|
||||
|
||||
def _unsafe_json_extract_trim_quotes(self, unsafe_field: str, unsafe_args: List[str]) -> str:
|
||||
return f"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw({', '.join([unsafe_field] + unsafe_args)}), ''), 'null'), '^\"|\"$', '')"
|
||||
return f"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw({', '.join([unsafe_field, *unsafe_args])}), ''), 'null'), '^\"|\"$', '')"
|
||||
|
||||
def _get_materialized_column(
|
||||
self, table_name: str, property_name: PropertyName, field_name: TableColumn
|
||||
|
@ -163,7 +163,7 @@ def property_to_expr(
|
||||
chain = ["properties"]
|
||||
|
||||
properties_field = ast.Field(chain=chain)
|
||||
field = ast.Field(chain=chain + [property.key])
|
||||
field = ast.Field(chain=[*chain, property.key])
|
||||
|
||||
if isinstance(value, list):
|
||||
if len(value) == 0:
|
||||
|
@ -464,7 +464,7 @@ class Resolver(CloningVisitor):
|
||||
if table_count > 1:
|
||||
raise QueryError("Cannot use '*' without table name when there are multiple tables in the query")
|
||||
table_type = (
|
||||
scope.anonymous_tables[0] if len(scope.anonymous_tables) > 0 else list(scope.tables.values())[0]
|
||||
scope.anonymous_tables[0] if len(scope.anonymous_tables) > 0 else next(iter(scope.tables.values()))
|
||||
)
|
||||
type = ast.AsteriskType(table_type=table_type)
|
||||
|
||||
|
@@ -1,4 +1,4 @@
-from typing import Dict, Any
+from typing import Dict, Any, Optional

from posthog.hogql import ast
from posthog.hogql.context import HogQLContext
@@ -18,10 +18,10 @@ from posthog.test.base import BaseTest
class TestFilters(BaseTest):
    maxDiff = None

-   def _parse_expr(self, expr: str, placeholders: Dict[str, Any] = None):
+   def _parse_expr(self, expr: str, placeholders: Optional[Dict[str, Any]] = None):
        return clear_locations(parse_expr(expr, placeholders=placeholders))

-   def _parse_select(self, select: str, placeholders: Dict[str, Any] = None):
+   def _parse_select(self, select: str, placeholders: Optional[Dict[str, Any]] = None):
        return clear_locations(parse_select(select, placeholders=placeholders))

    def _print_ast(self, node: ast.Expr):
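The typing changes in the test helpers above come from ruff's RUF013 (implicit Optional): a parameter that defaults to None must be annotated Optional[...] explicitly, matching mypy's no_implicit_optional default. A minimal sketch with invented names:

from typing import Any, Dict, Optional


def parse(expr: str, placeholders: Optional[Dict[str, Any]] = None) -> str:
    # `placeholders: Dict[str, Any] = None` is rejected under no_implicit_optional;
    # the explicit Optional keeps both mypy and ruff happy.
    placeholders = placeholders or {}
    return expr.format(**placeholders)


assert parse("team {team_id}", {"team_id": 1}) == "team 1"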
@ -46,7 +46,7 @@ class TestProperty(BaseTest):
|
||||
def _selector_to_expr(self, selector: str):
|
||||
return clear_locations(selector_to_expr(selector))
|
||||
|
||||
def _parse_expr(self, expr: str, placeholders: Dict[str, Any] = None):
|
||||
def _parse_expr(self, expr: str, placeholders: Optional[Dict[str, Any]] = None):
|
||||
return clear_locations(parse_expr(expr, placeholders=placeholders))
|
||||
|
||||
def test_has_aggregation(self):
|
||||
|
@ -729,7 +729,7 @@ class FunnelBase(ABC):
|
||||
):
|
||||
events = []
|
||||
for i in range(0, max_steps):
|
||||
event_fields = ["latest"] + self.extra_event_fields_and_properties
|
||||
event_fields = ["latest", *self.extra_event_fields_and_properties]
|
||||
event_fields_with_step = ", ".join([f"{field}_{i}" for field in event_fields])
|
||||
event_clause = f"({event_fields_with_step}) as step_{i}_matching_event"
|
||||
events.append(parse_expr(event_clause))
|
||||
|
@ -245,9 +245,9 @@ class FunnelCorrelationQueryRunner(QueryRunner):
|
||||
|
||||
# Get the total success/failure counts from the results
|
||||
results = [result for result in response.results if result[0] != self.TOTAL_IDENTIFIER]
|
||||
_, success_total, failure_total = [result for result in response.results if result[0] == self.TOTAL_IDENTIFIER][
|
||||
0
|
||||
]
|
||||
_, success_total, failure_total = next(
|
||||
result for result in response.results if result[0] == self.TOTAL_IDENTIFIER
|
||||
)
|
||||
|
||||
# Add a little structure, and keep it close to the query definition so it's
|
||||
# obvious what's going on with result indices.
|
||||
|
@ -1,4 +1,4 @@
|
||||
from typing import List, Set, Union
|
||||
from typing import List, Set, Union, Optional
|
||||
from posthog.clickhouse.materialized_columns.column import ColumnName
|
||||
from posthog.hogql import ast
|
||||
from posthog.hogql.parser import parse_expr
|
||||
@ -21,9 +21,13 @@ class FunnelEventQuery:
|
||||
def __init__(
|
||||
self,
|
||||
context: FunnelQueryContext,
|
||||
extra_fields: List[ColumnName] = [],
|
||||
extra_event_properties: List[PropertyName] = [],
|
||||
extra_fields: Optional[List[ColumnName]] = None,
|
||||
extra_event_properties: Optional[List[PropertyName]] = None,
|
||||
):
|
||||
if extra_event_properties is None:
|
||||
extra_event_properties = []
|
||||
if extra_fields is None:
|
||||
extra_fields = []
|
||||
self.context = context
|
||||
|
||||
self._extra_fields = extra_fields
|
||||
|
@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from typing import Dict, cast
|
||||
from typing import Dict, cast, Optional
|
||||
from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner
|
||||
from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query
|
||||
|
||||
@ -116,7 +116,11 @@ class TestFunnelBreakdownsByCurrentURL(ClickhouseTestMixin, APIBaseTest):
|
||||
|
||||
journeys_for(journey, team=self.team, create_people=True)
|
||||
|
||||
def _run(self, extra: Dict = {}, events_extra: Dict = {}):
|
||||
def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None):
|
||||
if events_extra is None:
|
||||
events_extra = {}
|
||||
if extra is None:
|
||||
extra = {}
|
||||
filters = {
|
||||
"events": [
|
||||
{
|
||||
|
@ -74,7 +74,7 @@ class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest):
|
||||
self.assertEqual(results[0][0], persons["user_one"].uuid)
|
||||
self.assertEqual(
|
||||
# [person["matched_recordings"][0]["session_id"] for person in results],
|
||||
[list(results[0][2])[0]["session_id"]],
|
||||
[next(iter(results[0][2]))["session_id"]],
|
||||
["s1b"],
|
||||
)
|
||||
|
||||
@ -124,7 +124,7 @@ class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest):
|
||||
self.assertEqual(results[0][0], persons["user_one"].uuid)
|
||||
self.assertEqual(
|
||||
# [person["matched_recordings"][0]["session_id"] for person in results],
|
||||
[list(results[0][2])[0]["session_id"]],
|
||||
[next(iter(results[0][2]))["session_id"]],
|
||||
["s1c"],
|
||||
)
|
||||
|
||||
@ -163,6 +163,6 @@ class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest):
|
||||
self.assertEqual(results[0][0], persons["user_one"].uuid)
|
||||
self.assertEqual(
|
||||
# [person["matched_recordings"][0].get("session_id") for person in results],
|
||||
[list(results[0][2])[0]["session_id"]],
|
||||
[next(iter(results[0][2]))["session_id"]],
|
||||
["s1a"],
|
||||
)
|
||||
|
@ -1,4 +1,4 @@
|
||||
from typing import Dict, Any
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from freezegun import freeze_time
|
||||
|
||||
@ -69,7 +69,9 @@ class TestInsightActorsQueryRunner(ClickhouseTestMixin, APIBaseTest):
|
||||
]
|
||||
)
|
||||
|
||||
def select(self, query: str, placeholders: Dict[str, Any] = {}):
|
||||
def select(self, query: str, placeholders: Optional[Dict[str, Any]] = None):
|
||||
if placeholders is None:
|
||||
placeholders = {}
|
||||
return execute_hogql_query(
|
||||
query=query,
|
||||
team=self.team,
|
||||
|
@ -228,7 +228,7 @@ class BreakdownValues:
|
||||
if self.hide_other_aggregation is not True and self.histogram_bin_count is None:
|
||||
values = [BREAKDOWN_NULL_STRING_LABEL if value in (None, "") else value for value in values]
|
||||
if needs_other:
|
||||
values = [BREAKDOWN_OTHER_STRING_LABEL] + values
|
||||
values = [BREAKDOWN_OTHER_STRING_LABEL, *values]
|
||||
|
||||
if len(values) == 0:
|
||||
values.insert(0, None)
|
||||
|
@@ -121,8 +121,8 @@ def is_old_style_properties(properties):


def transform_old_style_properties(properties):
-   key = list(properties.keys())[0]
-   value = list(properties.values())[0]
+   key = next(iter(properties.keys()))
+   value = next(iter(properties.values()))
    key_split = key.split("__")
    return [
        {
@ -381,7 +381,7 @@ def _insight_filter(filter: Dict):
|
||||
else:
|
||||
raise Exception(f"Invalid insight type {filter.get('insight')}.")
|
||||
|
||||
if len(list(insight_filter.values())[0].model_dump(exclude_defaults=True)) == 0:
|
||||
if len(next(iter(insight_filter.values())).model_dump(exclude_defaults=True)) == 0:
|
||||
return {}
|
||||
|
||||
return insight_filter
|
||||
|
@ -55,21 +55,19 @@ class WebAnalyticsQueryRunner(QueryRunner, ABC):
|
||||
return [p for p in self.query.properties if p.key != "$pathname"]
|
||||
|
||||
def session_where(self, include_previous_period: Optional[bool] = None):
|
||||
properties = (
|
||||
[
|
||||
parse_expr(
|
||||
"events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))",
|
||||
placeholders={
|
||||
"date_from": self.query_date_range.previous_period_date_from_as_hogql()
|
||||
if include_previous_period
|
||||
else self.query_date_range.date_from_as_hogql(),
|
||||
"date_to": self.query_date_range.date_to_as_hogql(),
|
||||
},
|
||||
)
|
||||
]
|
||||
+ self.property_filters_without_pathname
|
||||
+ self._test_account_filters
|
||||
)
|
||||
properties = [
|
||||
parse_expr(
|
||||
"events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))",
|
||||
placeholders={
|
||||
"date_from": self.query_date_range.previous_period_date_from_as_hogql()
|
||||
if include_previous_period
|
||||
else self.query_date_range.date_from_as_hogql(),
|
||||
"date_to": self.query_date_range.date_to_as_hogql(),
|
||||
},
|
||||
),
|
||||
*self.property_filters_without_pathname,
|
||||
*self._test_account_filters,
|
||||
]
|
||||
return property_to_expr(
|
||||
properties,
|
||||
self.team,
|
||||
|
@ -120,7 +120,9 @@ LIMIT 1
|
||||
query_number = 0
|
||||
|
||||
|
||||
def print_and_execute_query(sql: str, name: str, dry_run: bool, timeout=180, query_args={}) -> Any:
|
||||
def print_and_execute_query(sql: str, name: str, dry_run: bool, timeout=180, query_args=None) -> Any:
|
||||
if query_args is None:
|
||||
query_args = {}
|
||||
global query_number
|
||||
|
||||
if not settings.TEST:
|
||||
|
@ -62,8 +62,8 @@ class Command(BaseCommand):
|
||||
entries: OrderedDict[Tuple[str, str], SourceEntry] = OrderedDict(map(handle_entry, split_items))
|
||||
|
||||
# add google domains to this, from https://www.google.com/supported_domains
|
||||
for google_domain in (
|
||||
".google.com .google.ad .google.ae .google.com.af .google.com.ag .google.al .google.am .google.co.ao "
|
||||
for google_domain in [
|
||||
*".google.com .google.ad .google.ae .google.com.af .google.com.ag .google.al .google.am .google.co.ao "
|
||||
".google.com.ar .google.as .google.at .google.com.au .google.az .google.ba .google.com.bd .google.be "
|
||||
".google.bf .google.bg .google.com.bh .google.bi .google.bj .google.com.bn .google.com.bo "
|
||||
".google.com.br .google.bs .google.bt .google.co.bw .google.by .google.com.bz .google.ca .google.cd "
|
||||
@ -87,8 +87,9 @@ class Command(BaseCommand):
|
||||
".google.co.th .google.com.tj .google.tl .google.tm .google.tn .google.to .google.com.tr .google.tt "
|
||||
".google.com.tw .google.co.tz .google.com.ua .google.co.ug .google.co.uk .google.com.uy .google.co.uz "
|
||||
".google.com.vc .google.co.ve .google.co.vi .google.com.vn .google.vu .google.ws .google.rs "
|
||||
".google.co.za .google.co.zm .google.co.zw .google.cat"
|
||||
).split(" ") + ["google"]:
|
||||
".google.co.za .google.co.zm .google.co.zw .google.cat".split(" "),
|
||||
"google",
|
||||
]:
|
||||
google_domain = google_domain.strip()
|
||||
if google_domain[0] == ".":
|
||||
google_domain = google_domain[1:]
|
||||
|
@@ -99,7 +99,7 @@ class Command(BaseCommand):
        retry_policy = RetryPolicy(maximum_attempts=int(options["max_attempts"]))

        try:
-           workflow = [workflow for workflow in WORKFLOWS if workflow.is_named(workflow_name)][0]
+           workflow = next(workflow for workflow in WORKFLOWS if workflow.is_named(workflow_name))
        except IndexError:
            raise ValueError(f"No workflow with name '{workflow_name}'")
        except AttributeError:
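One behavioural caveat of the RUF015 rewrite above: next(...) raises StopIteration rather than IndexError when nothing matches, so an `except IndexError:` kept from the old `[...][0]` form no longer catches the miss. A tiny sketch with invented names:

WORKFLOWS = ["backfill", "export"]

try:
    match = next(w for w in WORKFLOWS if w == "missing")
except StopIteration:  # an `except IndexError:` would not fire here
    match = None

assert match is None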
@ -94,7 +94,7 @@ class AllowIPMiddleware:
client_ip = forwarded_for.pop(0)
if settings.TRUST_ALL_PROXIES:
return client_ip
proxies = [closest_proxy] + forwarded_for
proxies = [closest_proxy, *forwarded_for]
for proxy in proxies:
if proxy not in self.trusted_proxies:
return None

@ -486,7 +486,7 @@ class CaptureMiddleware:


def per_request_logging_context_middleware(
get_response: Callable[[HttpRequest], HttpResponse]
get_response: Callable[[HttpRequest], HttpResponse],
) -> Callable[[HttpRequest], HttpResponse]:
"""
We get some default logging context from the django-structlog middleware,

@ -517,7 +517,7 @@ def per_request_logging_context_middleware(


def user_logging_context_middleware(
get_response: Callable[[HttpRequest], HttpResponse]
get_response: Callable[[HttpRequest], HttpResponse],
) -> Callable[[HttpRequest], HttpResponse]:
"""
This middleware adds the team_id to the logging context if it exists. Note
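The two middleware hunks above are formatting-only: the upgraded formatter appears to add a trailing comma when a single parameter is split onto its own line, which is all that changes in these signatures (the same edit shows up again in peek_first_and_rewind near the end of the diff).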
@ -31,7 +31,7 @@ def create_event(
team: Team,
distinct_id: str,
timestamp: Optional[Union[timezone.datetime, str]] = None,
properties: Optional[Dict] = {},
properties: Optional[Dict] = None,
elements: Optional[List[Element]] = None,
person_id: Optional[uuid.UUID] = None,
person_properties: Optional[Dict] = None,

@ -48,6 +48,8 @@ def create_event(
group4_created_at: Optional[Union[timezone.datetime, str]] = None,
person_mode: Literal["full", "propertyless"] = "full",
) -> str:
if properties is None:
properties = {}
if not timestamp:
timestamp = timezone.now()
assert timestamp is not None

@ -285,9 +287,11 @@ class ElementSerializer(serializers.ModelSerializer):
]


def parse_properties(properties: str, allow_list: Set[str] = set()) -> Dict:
def parse_properties(properties: str, allow_list: Optional[Set[str]] = None) -> Dict:
# parse_constants gets called for any NaN, Infinity etc values
# we just want those to be returned as None
if allow_list is None:
    allow_list = set()
props = json.loads(properties or "{}", parse_constant=lambda x: None)
return {
key: value.strip('"') if isinstance(value, str) else value
@ -135,14 +135,22 @@ class FeatureFlagMatcher:
self,
feature_flags: List[FeatureFlag],
distinct_id: str,
groups: Dict[GroupTypeName, str] = {},
groups: Optional[Dict[GroupTypeName, str]] = None,
cache: Optional[FlagsMatcherCache] = None,
hash_key_overrides: Dict[str, str] = {},
property_value_overrides: Dict[str, Union[str, int]] = {},
group_property_value_overrides: Dict[str, Dict[str, Union[str, int]]] = {},
hash_key_overrides: Optional[Dict[str, str]] = None,
property_value_overrides: Optional[Dict[str, Union[str, int]]] = None,
group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None,
skip_database_flags: bool = False,
cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None,
):
if group_property_value_overrides is None:
group_property_value_overrides = {}
if property_value_overrides is None:
property_value_overrides = {}
if hash_key_overrides is None:
hash_key_overrides = {}
if groups is None:
groups = {}
self.feature_flags = feature_flags
self.distinct_id = distinct_id
self.groups = groups

@ -712,11 +720,17 @@ def _get_all_feature_flags(
team_id: int,
distinct_id: str,
person_overrides: Optional[Dict[str, str]] = None,
groups: Dict[GroupTypeName, str] = {},
property_value_overrides: Dict[str, Union[str, int]] = {},
group_property_value_overrides: Dict[str, Dict[str, Union[str, int]]] = {},
groups: Optional[Dict[GroupTypeName, str]] = None,
property_value_overrides: Optional[Dict[str, Union[str, int]]] = None,
group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None,
skip_database_flags: bool = False,
) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dict[str, object], bool]:
if group_property_value_overrides is None:
group_property_value_overrides = {}
if property_value_overrides is None:
property_value_overrides = {}
if groups is None:
groups = {}
cache = FlagsMatcherCache(team_id)

if feature_flags:

@ -738,11 +752,17 @@ def _get_all_feature_flags(
def get_all_feature_flags(
team_id: int,
distinct_id: str,
groups: Dict[GroupTypeName, str] = {},
groups: Optional[Dict[GroupTypeName, str]] = None,
hash_key_override: Optional[str] = None,
property_value_overrides: Dict[str, Union[str, int]] = {},
group_property_value_overrides: Dict[str, Dict[str, Union[str, int]]] = {},
property_value_overrides: Optional[Dict[str, Union[str, int]]] = None,
group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None,
) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dict[str, object], bool]:
if group_property_value_overrides is None:
group_property_value_overrides = {}
if property_value_overrides is None:
property_value_overrides = {}
if groups is None:
groups = {}
property_value_overrides, group_property_value_overrides = add_local_person_and_group_properties(
distinct_id, groups, property_value_overrides, group_property_value_overrides
)
@ -48,7 +48,9 @@ class RetentionFilter(
SampleMixin,
BaseFilter,
):
def __init__(self, data: Dict[str, Any] = {}, request: Optional[Request] = None, **kwargs) -> None:
def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None:
if data is None:
data = {}
if data:
data["insight"] = INSIGHT_RETENTION
else:

@ -993,8 +993,10 @@ class TestDjangoPropertiesToQ(property_to_Q_test_factory(_filter_persons, _creat


def filter_persons_with_property_group(
filter: Filter, team: Team, property_overrides: Dict[str, Any] = {}
filter: Filter, team: Team, property_overrides: Optional[Dict[str, Any]] = None
) -> List[str]:
if property_overrides is None:
property_overrides = {}
flush_persons_and_events()
persons = Person.objects.filter(property_group_to_Q(team.pk, filter.property_groups, property_overrides))
persons = persons.filter(team_id=team.pk)

@ -21,12 +21,14 @@ def earliest_timestamp_func(team_id: int):
return get_earliest_timestamp(team_id)


def get_filter(team, data: dict = {}, request: Optional[Request] = None):
def get_filter(team, data: Optional[dict] = None, request: Optional[Request] = None):
from .filter import Filter
from .path_filter import PathFilter
from .retention_filter import RetentionFilter
from .stickiness_filter import StickinessFilter

if data is None:
data = {}
insight = data.get("insight")
if not insight and request:
insight = request.GET.get("insight") or request.data.get("insight")

@ -1,4 +1,5 @@
"""https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry"""

from posthog import settings
from posthog.clickhouse.kafka_engine import (
KAFKA_COLUMNS_WITH_PARTITION,

@ -127,13 +127,15 @@ def create_person(
team_id: int,
version: int,
uuid: Optional[str] = None,
properties: Optional[Dict] = {},
properties: Optional[Dict] = None,
sync: bool = False,
is_identified: bool = False,
is_deleted: bool = False,
timestamp: Optional[Union[datetime.datetime, str]] = None,
created_at: Optional[datetime.datetime] = None,
) -> str:
if properties is None:
properties = {}
if uuid:
uuid = str(uuid)
else:
@ -421,8 +421,10 @@ def fetch_plugin_log_entries(
before: Optional[timezone.datetime] = None,
search: Optional[str] = None,
limit: Optional[int] = None,
type_filter: List[PluginLogEntryType] = [],
type_filter: Optional[List[PluginLogEntryType]] = None,
) -> List[PluginLogEntry]:
if type_filter is None:
type_filter = []
clickhouse_where_parts: List[str] = []
clickhouse_kwargs: Dict[str, Any] = {}
if team_id is not None:

@ -80,12 +80,11 @@ class PropertyDefinition(UUIDModel):
# creates an index pganalyze identified as missing
# https://app.pganalyze.com/servers/i35ydkosi5cy5n7tly45vkjcqa/checks/index_advisor/missing_index/15282978
models.Index(fields=["team_id", "type", "is_numerical"]),
] + [
GinIndex(
name="index_property_definition_name",
fields=["name"],
opclasses=["gin_trgm_ops"],
) # To speed up DB-based fuzzy searching
), # To speed up DB-based fuzzy searching
]
constraints = [
models.CheckConstraint(

@ -102,7 +102,7 @@ class TaggedItem(UUIDModel):
)

class Meta:
unique_together = ("tag",) + RELATED_OBJECTS
unique_together = ("tag", *RELATED_OBJECTS)
# Make sure to add new key to uniqueness constraint when extending tag functionality to new model
constraints = [
*[build_partial_uniqueness_constraint(field=field) for field in RELATED_OBJECTS],

@ -81,13 +81,9 @@ class TeamManager(models.Manager):
example_email = re.search(r"@[\w.]+", example_emails[0])
if example_email:
return [
{
"key": "email",
"operator": "not_icontains",
"value": example_email.group(),
"type": "person",
}
] + filters
{"key": "email", "operator": "not_icontains", "value": example_email.group(), "type": "person"},
*filters,
]
return filters

def create_with_data(self, user: Any = None, default_dashboards: bool = True, **kwargs) -> "Team":

@ -122,7 +122,7 @@ class UUIDClassicModel(models.Model):

def sane_repr(*attrs: str, include_id=True) -> Callable[[object], str]:
if "id" not in attrs and "pk" not in attrs and include_id:
attrs = ("id",) + attrs
attrs = ("id", *attrs)

def _repr(self):
pairs = (f"{attr}={repr(getattr(self, attr))}" for attr in attrs)

@ -206,7 +206,7 @@ def create_with_slug(create_func: Callable[..., T], default_slug: str = "", *arg

def get_deferred_field_set_for_model(
model: Type[models.Model],
fields_not_deferred: Set[str] = set(),
fields_not_deferred: Optional[Set[str]] = None,
field_prefix: str = "",
) -> Set[str]:
"""Return a set of field names to be deferred for a given model. Used with `.defer()` after `select_related`

@ -225,6 +225,8 @@ def get_deferred_field_set_for_model(
fields_not_deferred: the models fields to exclude from the deferred field set
field_prefix: a prefix to add to the field names e.g. ("team__organization__") to work in the query set
"""
if fields_not_deferred is None:
fields_not_deferred = set()
return {f"{field_prefix}{x.name}" for x in model._meta.fields if x.name not in fields_not_deferred}
@ -276,10 +276,12 @@ def lookup_q(key: str, value: Any) -> Q:
def property_to_Q(
team_id: int,
property: Property,
override_property_values: Dict[str, Any] = {},
override_property_values: Optional[Dict[str, Any]] = None,
cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None,
using_database: str = "default",
) -> Q:
if override_property_values is None:
override_property_values = {}
if property.type not in ["person", "group", "cohort", "event"]:
# We need to support event type for backwards compatibility, even though it's treated as a person property type
raise ValueError(f"property_to_Q: type is not supported: {repr(property.type)}")

@ -380,10 +382,12 @@ def property_to_Q(
def property_group_to_Q(
team_id: int,
property_group: PropertyGroup,
override_property_values: Dict[str, Any] = {},
override_property_values: Optional[Dict[str, Any]] = None,
cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None,
using_database: str = "default",
) -> Q:
if override_property_values is None:
override_property_values = {}
filters = Q()

if not property_group or len(property_group.values) == 0:

@ -423,7 +427,7 @@ def property_group_to_Q(
def properties_to_Q(
team_id: int,
properties: List[Property],
override_property_values: Dict[str, Any] = {},
override_property_values: Optional[Dict[str, Any]] = None,
cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None,
using_database: str = "default",
) -> Q:

@ -431,6 +435,8 @@ def properties_to_Q(
Converts a filter to Q, for use in Django ORM .filter()
If you're filtering a Person/Group QuerySet, use is_direct_query to avoid doing an unnecessary nested loop
"""
if override_property_values is None:
override_property_values = {}
filters = Q()

if len(properties) == 0:

@ -46,7 +46,7 @@ def get_breakdown_prop_values(
entity: Entity,
aggregate_operation: str,
team: Team,
extra_params={},
extra_params=None,
column_optimizer: Optional[ColumnOptimizer] = None,
person_properties_mode: PersonPropertiesMode = PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN,
use_all_funnel_entities: bool = False,

@ -58,6 +58,8 @@ def get_breakdown_prop_values(

When dealing with a histogram though, buckets are returned instead of values.
"""
if extra_params is None:
extra_params = {}
column_optimizer = column_optimizer or ColumnOptimizer(filter, team.id)

date_params = {}
@ -60,13 +60,19 @@ class EventQuery(metaclass=ABCMeta):
should_join_persons=False,
should_join_sessions=False,
# Extra events/person table columns to fetch since parent query needs them
extra_fields: List[ColumnName] = [],
extra_event_properties: List[PropertyName] = [],
extra_person_fields: List[ColumnName] = [],
extra_fields: Optional[List[ColumnName]] = None,
extra_event_properties: Optional[List[PropertyName]] = None,
extra_person_fields: Optional[List[ColumnName]] = None,
override_aggregate_users_by_distinct_id: Optional[bool] = None,
person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled,
**kwargs,
) -> None:
if extra_person_fields is None:
extra_person_fields = []
if extra_event_properties is None:
extra_event_properties = []
if extra_fields is None:
extra_fields = []
self._filter = filter
self._team_id = team.pk
self._team = team

@ -139,12 +139,18 @@ class FOSSCohortQuery(EventQuery):
should_join_distinct_ids=False,
should_join_persons=False,
# Extra events/person table columns to fetch since parent query needs them
extra_fields: List[ColumnName] = [],
extra_event_properties: List[PropertyName] = [],
extra_person_fields: List[ColumnName] = [],
extra_fields: Optional[List[ColumnName]] = None,
extra_event_properties: Optional[List[PropertyName]] = None,
extra_person_fields: Optional[List[ColumnName]] = None,
override_aggregate_users_by_distinct_id: Optional[bool] = None,
**kwargs,
) -> None:
if extra_person_fields is None:
extra_person_fields = []
if extra_event_properties is None:
extra_event_properties = []
if extra_fields is None:
extra_fields = []
self._fields = []
self._events = []
self._earliest_time_for_event_query = None

@ -667,7 +667,7 @@ class ClickhouseFunnelBase(ABC):
if self._filter.include_recordings:
events = []
for i in range(0, max_steps):
event_fields = ["latest"] + self.extra_event_fields_and_properties
event_fields = ["latest", *self.extra_event_fields_and_properties]
event_fields_with_step = ", ".join([f'"{field}_{i}"' for field in event_fields])
event_clause = f"({event_fields_with_step}) as step_{i}_matching_event"
events.append(event_clause)
@ -1,5 +1,5 @@
from datetime import datetime
from typing import Dict
from typing import Dict, Optional

from posthog.models import Filter
from posthog.queries.funnels import ClickhouseFunnel

@ -115,7 +115,11 @@ class TestBreakdownsByCurrentURL(ClickhouseTestMixin, APIBaseTest):

journeys_for(journey, team=self.team, create_people=True)

def _run(self, extra: Dict = {}, events_extra: Dict = {}):
def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None):
if events_extra is None:
events_extra = {}
if extra is None:
extra = {}
response = ClickhouseFunnel(
Filter(
data={

@ -1,5 +1,5 @@
from datetime import datetime
from typing import Dict
from typing import Dict, Optional

from posthog.constants import TRENDS_TABLE
from posthog.models import Filter

@ -104,7 +104,11 @@ class TestBreakdowns(ClickhouseTestMixin, APIBaseTest):

journeys_for(journey, team=self.team, create_people=True)

def _run(self, extra: Dict = {}, events_extra: Dict = {}):
def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None):
if events_extra is None:
events_extra = {}
if extra is None:
extra = {}
response = Trends().run(
Filter(
data={

@ -1,5 +1,5 @@
from datetime import datetime
from typing import Dict
from typing import Dict, Optional

from posthog.models import Filter
from posthog.queries.trends.trends import Trends

@ -99,7 +99,11 @@ class TestBreakdownsByCurrentURL(ClickhouseTestMixin, APIBaseTest):

journeys_for(journey, team=self.team, create_people=True)

def _run(self, extra: Dict = {}, events_extra: Dict = {}):
def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None):
if events_extra is None:
events_extra = {}
if extra is None:
extra = {}
response = Trends().run(
Filter(
data={

@ -129,7 +129,9 @@ class TestFormula(ClickhouseTestMixin, APIBaseTest):
},
)

def _run(self, extra: Dict = {}, run_at: Optional[str] = None):
def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None):
if extra is None:
extra = {}
with freeze_time(run_at or "2020-01-04T13:01:01Z"):
action_response = Trends().run(
Filter(

@ -38,7 +38,9 @@ class TestPagingBreakdowns(APIBaseTest):
create_people=True,
)

def _run(self, extra: Dict = {}, run_at: Optional[str] = None):
def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None):
if extra is None:
extra = {}
with freeze_time(run_at or "2020-01-04T13:01:01Z"):
action_response = Trends().run(
Filter(
@ -102,9 +102,11 @@ def process_math(
def parse_response(
stats: Dict,
filter: Filter,
additional_values: Dict = {},
additional_values: Optional[Dict] = None,
entity: Optional[Entity] = None,
) -> Dict[str, Any]:
if additional_values is None:
additional_values = {}
counts = stats[1]
labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if filter.interval == "hour" else "")) for item in stats[0]]
days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if filter.interval == "hour" else "")) for item in stats[0]]

@ -25,8 +25,10 @@ class TestSessionRecordingProperties(BaseTest, ClickhouseTestMixin):
timestamp,
team=None,
event_name="$pageview",
properties={"$os": "Windows 95", "$current_url": "aloha.com/2"},
properties=None,
):
if properties is None:
properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"}
if team is None:
team = self.team
_create_event(

@ -280,7 +280,6 @@ def test_new_ingestion_large_full_snapshot_is_separated(raw_snapshot_events, moc
"distinct_id": "abc123",
},
},
] + [
{
"event": "$snapshot",
"properties": {

@ -780,7 +780,7 @@ class TestSessionRecordings(APIBaseTest, ClickhouseTestMixin, QueryMatchingTest)
# by default a session recording is deleted, so we have to explicitly mark the mock as not deleted
mock_get_session_recording.return_value = SessionRecording(session_id=session_id, team=self.team, deleted=False)

annoying_data_from_javascript = "\uD801\uDC37 probably from console logs"
annoying_data_from_javascript = "\ud801\udc37 probably from console logs"

mock_realtime_snapshots.return_value = [
{"some": annoying_data_from_javascript},
@ -4,7 +4,8 @@ from posthog.settings.utils import get_list

# These flags will be force-enabled on the frontend
# The features here are released, but the flags are just not yet removed from the code
PERSISTED_FEATURE_FLAGS = get_list(os.getenv("PERSISTED_FEATURE_FLAGS", "")) + [
PERSISTED_FEATURE_FLAGS = [
*get_list(os.getenv("PERSISTED_FEATURE_FLAGS", "")),
"simplify-actions",
"historical-exports-v2",
"ingestion-warnings-enabled",

@ -341,7 +341,7 @@ KAFKA_PRODUCE_ACK_TIMEOUT_SECONDS = int(os.getenv("KAFKA_PRODUCE_ACK_TIMEOUT_SEC
# https://github.com/korfuri/django-prometheus for more details

# We keep the number of buckets low to reduce resource usage on the Prometheus
PROMETHEUS_LATENCY_BUCKETS = [0.1, 0.3, 0.9, 2.7, 8.1] + [float("inf")]
PROMETHEUS_LATENCY_BUCKETS = [0.1, 0.3, 0.9, 2.7, 8.1, float("inf")]

SALT_KEY = os.getenv("SALT_KEY", "0123456789abcdefghijklmnopqrstuvwxyz")

@ -174,9 +174,9 @@ async def send_batch_export_run_failure(
# NOTE: We are taking only the date component to cap the number of emails at one per day per batch export.
last_updated_at_date = batch_export_run.last_updated_at.strftime("%Y-%m-%d")

campaign_key: (
str
) = f"batch_export_run_email_batch_export_{batch_export_run.batch_export.id}_last_updated_at_{last_updated_at_date}"
campaign_key: str = (
f"batch_export_run_email_batch_export_{batch_export_run.batch_export.id}_last_updated_at_{last_updated_at_date}"
)

message = await sync_to_async(EmailMessage)(
campaign_key=campaign_key,

@ -325,7 +325,7 @@ class UsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin
flush_persons_and_events()

def _select_report_by_org_id(self, org_id: str, reports: List[Dict]) -> Dict:
return [report for report in reports if report["organization_id"] == org_id][0]
return next(report for report in reports if report["organization_id"] == org_id)

def _create_plugin(self, name: str, enabled: bool) -> None:
plugin = Plugin.objects.create(organization_id=self.team.organization.pk, name=name)
@ -9,7 +9,7 @@ T = typing.TypeVar("T")


def peek_first_and_rewind(
gen: collections.abc.Generator[T, None, None]
gen: collections.abc.Generator[T, None, None],
) -> tuple[T, collections.abc.Generator[T, None, None]]:
"""Peek into the first element in a generator and rewind the advance.

@ -1,6 +1,7 @@
"""
This module handles how credentials are read in dlt sources
"""

from typing import ClassVar, List, Union
from dlt.common.configuration import configspec
from dlt.common.configuration.specs import CredentialsConfiguration

@ -82,7 +82,7 @@ class CaptureKafkaProducer:
def producer(self) -> aiokafka.AIOKafkaProducer:
if self._producer is None:
self._producer = aiokafka.AIOKafkaProducer(
bootstrap_servers=settings.KAFKA_HOSTS + ["localhost:9092"],
bootstrap_servers=[*settings.KAFKA_HOSTS, "localhost:9092"],
security_protocol=settings.KAFKA_SECURITY_PROTOCOL or "PLAINTEXT",
acks="all",
request_timeout_ms=1000000,

@ -1,4 +1,5 @@
"""Test utilities that operate with datetime.datetimes."""

import datetime as dt


@ -1,4 +1,5 @@
"""Test utilities that deal with test event generation."""

import datetime as dt
import json
import random

@ -1,4 +1,5 @@
"""Test utilities to manipulate BatchExport* models."""

import uuid

import temporalio.client
@ -409,9 +409,9 @@ def cleanup_materialized_columns():


def also_test_with_materialized_columns(
event_properties=[],
person_properties=[],
group_properties=[],
event_properties=None,
person_properties=None,
group_properties=None,
verify_no_jsonextract=True,
# :TODO: Remove this when groups-on-events is released
materialize_only_with_person_on_events=False,

@ -422,6 +422,12 @@ def also_test_with_materialized_columns(
Requires a unittest class with ClickhouseTestMixin mixed in
"""

if group_properties is None:
group_properties = []
if person_properties is None:
person_properties = []
if event_properties is None:
event_properties = []
try:
from ee.clickhouse.materialized_columns.analyze import materialize
except:

@ -33,6 +33,6 @@ class TestLatestMigrations(TestCase):
def _get_latest_migration_from_manifest(django_app: str) -> str:
root = pathlib.Path().resolve()
manifest = pathlib.Path(f"{root}/latest_migrations.manifest").read_text()
posthog_latest_migration = [line for line in manifest.splitlines() if line.startswith(f"{django_app}: ")][0]
posthog_latest_migration = next(line for line in manifest.splitlines() if line.startswith(f"{django_app}: "))

return posthog_latest_migration.replace(f"{django_app}: ", "")

@ -275,7 +275,7 @@ def get_js_url(request: HttpRequest) -> str:
def render_template(
template_name: str,
request: HttpRequest,
context: Dict = {},
context: Optional[Dict] = None,
*,
team_for_public_context: Optional["Team"] = None,
) -> HttpResponse:

@ -284,6 +284,8 @@ def render_template(
If team_for_public_context is provided, this means this is a public page such as a shared dashboard.
"""

if context is None:
context = {}
template = get_template(template_name)

context["opt_out_capture"] = settings.OPT_OUT_CAPTURE

@ -471,7 +473,7 @@ def get_frontend_apps(team_id: int) -> Dict[int, Dict[str, Any]]:
for p in plugin_configs:
config = p["pluginconfig__config"] or {}
config_schema = p["config_schema"] or {}
secret_fields = {field["key"] for field in config_schema if "secret" in field and field["secret"]}
secret_fields = {field["key"] for field in config_schema if field.get("secret")}
for key in secret_fields:
if key in config:
config[key] = "** SECRET FIELD **"
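The secret_fields rewrite just above lines up with RUF019 (unnecessary key check before a dict access), another rule enabled in the pyproject.toml hunk that follows. The shorter form relies on dict.get returning None, which is falsy, when the key is missing:

field = {"key": "api_token"}  # illustrative dict without a "secret" entry

old_check = "secret" in field and field["secret"]  # membership test plus a second lookup
new_check = field.get("secret")                    # one lookup; a missing key yields None (falsy)

assert not old_check and not new_check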
@ -14,8 +14,9 @@ exclude = [
"./env",
"./posthog/hogql/grammar",
]

[tool.ruff.lint]
ignore = [
"B006",
"B017",
"B019",
"B904",

@ -34,14 +35,18 @@ select = [
"C9",
"E",
"F",
"RUF005",
"RUF013",
"RUF015",
"RUF019",
"T2",
"W",
]

[tool.ruff.mccabe]
[tool.ruff.lint.mccabe]
max-complexity = 10

[tool.ruff.per-file-ignores]
[tool.ruff.lint.per-file-ignores]
"./posthog/queries/column_optimizer/column_optimizer.py" = ["F401"]
"./posthog/migrations/0027_move_elements_to_group.py" = ["T201"]
"./posthog/queries/cohort_query.py" = ["F401"]
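The pyproject.toml hunks above are the heart of the upgrade: newer ruff releases expect lint settings to live under [tool.ruff.lint] (hence the added section header and the renamed [tool.ruff.lint.mccabe] and [tool.ruff.lint.per-file-ignores] tables), and the select list gains RUF005, RUF013, RUF015 and RUF019, which lines up with the unpacking, next() and dict.get rewrites seen throughout the rest of the diff.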
@ -11,7 +11,7 @@

-c requirements.txt

ruff>=0.1.2
ruff~=0.3.7
pip-tools==7.3.0
mypy~=1.8.0
mypy-baseline~=0.6.1

@ -59,7 +59,9 @@ coreapi==2.3.3
coreschema==0.0.4
# via coreapi
coverage[toml]==5.5
# via pytest-cov
# via
# coverage
# pytest-cov
datamodel-code-generator==0.25.2
# via -r requirements-dev.in
django==4.2.11

@ -90,7 +92,9 @@ exceptiongroup==1.2.0
faker==17.5.0
# via -r requirements-dev.in
fakeredis[lua]==2.11.0
# via -r requirements-dev.in
# via
# -r requirements-dev.in
# fakeredis
flaky==3.7.0
# via -r requirements-dev.in
freezegun==1.2.2

@ -168,6 +172,7 @@ pydantic[email]==2.5.3
# via
# -c requirements.txt
# datamodel-code-generator
# pydantic
pydantic-core==2.14.6
# via
# -c requirements.txt

@ -225,7 +230,7 @@ requests==2.31.0
# responses
responses==0.23.1
# via -r requirements-dev.in
ruff==0.1.2
ruff==0.3.7
# via -r requirements-dev.in
six==1.16.0
# via

@ -9,6 +9,7 @@ aioboto3==12.0.0
aiobotocore[boto3]==2.7.0
# via
# aioboto3
# aiobotocore
# s3fs
aiohttp==3.9.3
# via

@ -252,6 +253,7 @@ giturlparse==0.12.0
# via dlt
google-api-core[grpc]==2.11.1
# via
# google-api-core
# google-cloud-bigquery
# google-cloud-core
google-auth==2.22.0

@ -447,7 +449,9 @@ protobuf==4.22.1
# proto-plus
# temporalio
psycopg[binary]==3.1.13
# via -r requirements.in
# via
# -r requirements.in
# psycopg
psycopg-binary==3.1.13
# via psycopg
psycopg2-binary==2.9.7

@ -707,6 +711,7 @@ urllib3[secure,socks]==1.26.18
# requests
# selenium
# sentry-sdk
# urllib3
urllib3-secure-extra==0.1.0
# via urllib3
vine==5.0.0