From eac199d24af4e25d4c98be929ee3bfc700fee5c0 Mon Sep 17 00:00:00 2001 From: Sandy Spicer Date: Thu, 27 Jun 2024 14:16:27 -0700 Subject: [PATCH] chore: upgrade python to 3.11 (#23206) --- .github/actions/run-backend-tests/action.yml | 2 +- .github/workflows/benchmark.yml | 2 +- .github/workflows/build-hogql-parser.yml | 2 +- .../ci-backend-update-test-timing.yml | 2 +- .github/workflows/ci-backend.yml | 10 +- .github/workflows/ci-hog.yml | 2 +- .github/workflows/ci-plugin-server.yml | 4 +- bin/build-schema-python.sh | 20 ++-- .../__snapshots__/test_time_to_see_data.ambr | 2 +- .../views/test/test_clickhouse_experiments.py | 6 +- .../test/test_summarize_session.py | 4 +- .../test/test_session_recording_extensions.py | 4 +- .../test/test_session_recording_playlist.py | 6 +- ee/tasks/subscriptions/subscription_utils.py | 2 +- mypy.ini | 2 +- posthog/api/app_metrics.py | 4 +- posthog/api/authentication.py | 2 +- posthog/api/comments.py | 2 + posthog/api/feature_flag.py | 4 + posthog/api/plugin.py | 3 + posthog/api/routing.py | 26 +++++ .../api/test/__snapshots__/test_api_docs.ambr | 2 +- .../api/test/batch_exports/test_log_entry.py | 10 +- posthog/api/test/batch_exports/test_pause.py | 4 +- posthog/api/test/batch_exports/test_update.py | 4 +- posthog/api/test/test_app_metrics.py | 4 +- posthog/api/test/test_capture.py | 4 +- posthog/api/user.py | 2 + posthog/api/utils.py | 11 +- posthog/async_migrations/test/test_utils.py | 6 +- posthog/batch_exports/http.py | 6 +- posthog/batch_exports/models.py | 4 +- posthog/batch_exports/service.py | 4 +- posthog/clickhouse/client/execute_async.py | 6 +- posthog/clickhouse/table_engines.py | 4 +- .../clickhouse/test/test_person_overrides.py | 4 +- posthog/constants.py | 26 ++--- posthog/decorators.py | 4 +- posthog/demo/matrix/models.py | 4 +- posthog/demo/matrix/randomization.py | 4 +- posthog/demo/products/hedgebox/models.py | 4 +- posthog/hogql/ast.py | 6 +- posthog/hogql/constants.py | 4 +- posthog/hogql/database/schema/persons.py | 3 +- posthog/hogql/test/test_resolver.py | 4 +- .../test/test_trends_actors_query_builder.py | 4 +- .../legacy_compatibility/filter_to_query.py | 4 +- posthog/hogql_queries/query_runner.py | 8 +- posthog/jwt.py | 4 +- posthog/kafka_client/client.py | 4 +- .../commands/create_batch_export_from_app.py | 2 +- .../create_channel_definitions_file.py | 4 +- .../management/commands/generate_demo_data.py | 2 +- posthog/management/commands/migrate_team.py | 4 +- .../commands/plugin_server_load_test.py | 2 +- .../test/test_sync_persons_to_clickhouse.py | 14 +-- posthog/models/feature_flag/flag_matching.py | 4 +- posthog/models/filters/stickiness_filter.py | 2 +- posthog/models/plugin.py | 6 +- posthog/models/property/property.py | 4 +- .../models/test/test_async_deletion_model.py | 8 +- .../models/test/test_person_override_model.py | 2 +- posthog/schema.py | 96 ++++++++-------- .../session_recording_api.py | 6 +- .../session_recording_helpers.py | 4 +- .../test/test_session_recordings.py | 4 +- .../test/test_process_scheduled_changes.py | 18 +-- posthog/tasks/test/test_warehouse.py | 10 +- posthog/tasks/warehouse.py | 4 +- .../batch_exports/backfill_batch_export.py | 2 +- .../batch_exports/squash_person_overrides.py | 6 +- .../temporal/tests/batch_exports/conftest.py | 2 +- .../test_backfill_batch_export.py | 76 ++++++------- .../tests/batch_exports/test_batch_exports.py | 14 +-- .../test_bigquery_batch_export_workflow.py | 16 +-- .../test_http_batch_export_workflow.py | 10 +- .../tests/batch_exports/test_logger.py | 12 +- 
.../test_postgres_batch_export_workflow.py | 6 +- .../test_redshift_batch_export_workflow.py | 6 +- .../tests/batch_exports/test_run_updates.py | 16 +-- .../test_snowflake_batch_export_workflow.py | 4 +- .../test_squash_person_overrides_workflow.py | 6 +- posthog/temporal/tests/test_clickhouse.py | 4 +- posthog/temporal/tests/utils/datetimes.py | 2 +- posthog/test/test_datetime.py | 4 +- posthog/utils.py | 4 +- .../external_data_source/workspace.py | 2 +- production.Dockerfile | 106 +----------------- pyproject.toml | 4 +- requirements-dev.in | 2 +- requirements-dev.txt | 3 +- requirements.in | 2 +- requirements.txt | 3 +- unit.json.tpl | 4 +- 94 files changed, 362 insertions(+), 419 deletions(-) diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml index 3c5a4d19860..d7c9689f559 100644 --- a/.github/actions/run-backend-tests/action.yml +++ b/.github/actions/run-backend-tests/action.yml @@ -6,7 +6,7 @@ name: Run Django tests inputs: python-version: required: true - description: Python version, e.g. 3.10.10 + description: Python version, e.g. 3.11.9 clickhouse-server-image: required: true description: ClickHouse server image tag, e.g. clickhouse/clickhouse-server:latest diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index bd50811fae6..9478b7d2f8c 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -54,7 +54,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/build-hogql-parser.yml b/.github/workflows/build-hogql-parser.yml index 50653eaa02e..4b950b281f0 100644 --- a/.github/workflows/build-hogql-parser.yml +++ b/.github/workflows/build-hogql-parser.yml @@ -73,7 +73,7 @@ jobs: - if: ${{ !endsWith(matrix.os, '-arm') }} uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: '3.11' # Compiling Python 3.11 from source on ARM. We tried using the "deadsnakes" ARM repo, but it was flakey. 
- if: ${{ endsWith(matrix.os, '-arm') }} diff --git a/.github/workflows/ci-backend-update-test-timing.yml b/.github/workflows/ci-backend-update-test-timing.yml index a2082f6b989..01ad7d33ce3 100644 --- a/.github/workflows/ci-backend-update-test-timing.yml +++ b/.github/workflows/ci-backend-update-test-timing.yml @@ -28,7 +28,7 @@ jobs: concurrency: 1 group: 1 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} - python-version: '3.10.10' + python-version: '3.11.9' clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine' segment: 'FOSS' person-on-events: false diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index 14d2c1045c2..b757f69c8f8 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -108,7 +108,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} @@ -163,7 +163,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} @@ -232,7 +232,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.10.10'] + python-version: ['3.11.9'] clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.5.81-alpine'] segment: ['Core'] person-on-events: [false, true] @@ -243,7 +243,7 @@ jobs: - segment: 'Temporal' person-on-events: false clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine' - python-version: '3.10.10' + python-version: '3.11.9' concurrency: 1 group: 1 @@ -331,7 +331,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/ci-hog.yml b/.github/workflows/ci-hog.yml index 860f0b6e47b..2a2ee8ecb86 100644 --- a/.github/workflows/ci-hog.yml +++ b/.github/workflows/ci-hog.yml @@ -70,7 +70,7 @@ jobs: if: needs.changes.outputs.hog == 'true' uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/ci-plugin-server.yml b/.github/workflows/ci-plugin-server.yml index dac67b705b6..b4d6cb0a17f 100644 --- a/.github/workflows/ci-plugin-server.yml +++ b/.github/workflows/ci-plugin-server.yml @@ -115,7 +115,7 @@ jobs: if: needs.changes.outputs.plugin-server == 'true' uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} @@ -207,7 +207,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10.10 + python-version: 3.11.9 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/bin/build-schema-python.sh b/bin/build-schema-python.sh index d033c5f4f1e..efd65bb091b 100755 --- a/bin/build-schema-python.sh +++ b/bin/build-schema-python.sh @@ -4,25 +4,27 @@ set -e # Generate schema.py from schema.json datamodel-codegen \ - --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp \ + --class-name='SchemaRoot' --collapse-root-models 
--target-python-version 3.11 --disable-timestamp \ --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum \ --input frontend/src/queries/schema.json --input-file-type jsonschema \ --output posthog/schema.py --output-model-type pydantic_v2.BaseModel \ --custom-file-header "# mypy: disable-error-code=\"assignment\"" \ --set-default-enum-member --capitalise-enum-members \ --wrap-string-literal + # Format schema.py ruff format posthog/schema.py + # Check schema.py and autofix ruff check --fix posthog/schema.py -# HACK: Datamodel-codegen output for enum-type fields with a default is invalid – the default value is a plain string, -# and not the expected enum member. We fix this using sed, which is pretty hacky, but does the job. -# Specifically, we need to replace `Optional[PropertyOperator] = "exact"` -# with `Optional[PropertyOperator] = PropertyOperator("exact")` to make the default value valid. -# Remove this when https://github.com/koxudaxi/datamodel-code-generator/issues/1929 is resolved. + +# Replace class Foo(str, Enum) with class Foo(StrEnum) for proper handling in format strings in python 3.11 +# Remove this when https://github.com/koxudaxi/datamodel-code-generator/issues/1313 is resolved if [[ "$OSTYPE" == "darwin"* ]]; then # sed needs `-i` to be followed by `''` on macOS - sed -i '' -e 's/Optional\[PropertyOperator\] = \("[A-Za-z_]*"\)/Optional[PropertyOperator] = PropertyOperator(\1)/g' posthog/schema.py + sed -i '' -e 's/str, Enum/StrEnum/g' posthog/schema.py + sed -i '' 's/from enum import Enum/from enum import Enum, StrEnum/g' posthog/schema.py else - sed -i -e 's/Optional\[PropertyOperator\] = \("[A-Za-z_]*"\)/Optional[PropertyOperator] = PropertyOperator(\1)/g' posthog/schema.py -fi \ No newline at end of file + sed -i -e 's/str, Enum/StrEnum/g' posthog/schema.py + sed -i 's/from enum import Enum/from enum import Enum, StrEnum/g' posthog/schema.py +fi diff --git a/ee/api/test/__snapshots__/test_time_to_see_data.ambr b/ee/api/test/__snapshots__/test_time_to_see_data.ambr index 2d93af68cee..beda2bc14bd 100644 --- a/ee/api/test/__snapshots__/test_time_to_see_data.ambr +++ b/ee/api/test/__snapshots__/test_time_to_see_data.ambr @@ -20,7 +20,7 @@ "first_name": "", "last_name": "", "email": "", - "is_email_verified": false + "is_email_verified": null } }, "children": [ diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 1a5735473ff..a67d6523858 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from django.core.cache import cache from flaky import flaky from rest_framework import status @@ -1601,8 +1601,8 @@ class TestExperimentAuxiliaryEndpoints(ClickhouseTestMixin, APILicensedTest): explicit_datetime = parser.isoparse(target_filter["explicit_datetime"]) self.assertTrue( - explicit_datetime <= datetime.now(timezone.utc) - timedelta(days=5) - and explicit_datetime >= datetime.now(timezone.utc) - timedelta(days=5, hours=1) + explicit_datetime <= datetime.now(UTC) - timedelta(days=5) + and explicit_datetime >= datetime.now(UTC) - timedelta(days=5, hours=1) ) cohort_id = cohort["id"] diff --git a/ee/session_recordings/session_summary/test/test_summarize_session.py b/ee/session_recordings/session_summary/test/test_summarize_session.py index 69412608dd3..3cc69df02b1 100644 --- 
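The str, Enum to StrEnum sed rewrite in bin/build-schema-python.sh exists because Python 3.11 changed Enum.__format__ for mixed-in enums: f-strings and format() on a `class Foo(str, Enum)` member now render as "Foo.MEMBER" instead of the member's value, while enum.StrEnum (new in 3.11) keeps rendering the plain value. A minimal sketch of the difference, using a hypothetical Color enum rather than anything from schema.py:

from enum import Enum, StrEnum

class ColorMixin(str, Enum):
    RED = "red"

class ColorStr(StrEnum):
    RED = "red"

# On Python 3.11 the str/Enum mixin formats as the member name...
assert f"{ColorMixin.RED}" == "ColorMixin.RED"
# ...while StrEnum keeps formatting as the plain value, matching the
# pre-3.11 behaviour that the generated schema code relies on.
assert f"{ColorStr.RED}" == "red"
# Both remain str subclasses and compare equal to the raw string.
assert ColorMixin.RED == "red" and ColorStr.RED == "red"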
a/ee/session_recordings/session_summary/test/test_summarize_session.py +++ b/ee/session_recordings/session_summary/test/test_summarize_session.py @@ -1,4 +1,4 @@ -from datetime import timezone, datetime +from datetime import datetime, UTC from dateutil.parser import isoparse @@ -23,7 +23,7 @@ class TestSummarizeSessions(BaseTest): ["$pageview", isoparse("2021-01-01T00:00:02Z")], ], ), - datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc), + datetime(2021, 1, 1, 0, 0, 0, tzinfo=UTC), ) assert processed.columns == ["event", "milliseconds_since_start"] assert processed.results == [["$pageview", 0], ["$pageview", 1000], ["$pageview", 2000]] diff --git a/ee/session_recordings/test/test_session_recording_extensions.py b/ee/session_recordings/test/test_session_recording_extensions.py index ad545e5cec3..e425213f747 100644 --- a/ee/session_recordings/test/test_session_recording_extensions.py +++ b/ee/session_recordings/test/test_session_recording_extensions.py @@ -1,5 +1,5 @@ import gzip -from datetime import timedelta, datetime, timezone +from datetime import timedelta, datetime, UTC from secrets import token_urlsafe from unittest.mock import patch, MagicMock from uuid import uuid4 @@ -84,7 +84,7 @@ class TestSessionRecordingExtensions(ClickhouseTestMixin, APIBaseTest): def test_persists_recording_from_blob_ingested_storage(self): with self.settings(OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER=TEST_BUCKET): - two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=timezone.utc) + two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=UTC) with freeze_time(two_minutes_ago): session_id = f"test_persists_recording_from_blob_ingested_storage-s1-{uuid4()}" diff --git a/ee/session_recordings/test/test_session_recording_playlist.py b/ee/session_recordings/test/test_session_recording_playlist.py index 6fb6a730a7a..0ec14e0decb 100644 --- a/ee/session_recordings/test/test_session_recording_playlist.py +++ b/ee/session_recordings/test/test_session_recording_playlist.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from unittest import mock from unittest.mock import MagicMock, patch from uuid import uuid4 @@ -187,7 +187,7 @@ class TestSessionRecordingPlaylist(APILicensedTest): session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}" session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}" - three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc) + three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC) produce_replay_summary( team_id=self.team.id, @@ -242,7 +242,7 @@ class TestSessionRecordingPlaylist(APILicensedTest): session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}" session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}" - three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc) + three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC) for session_id in [session_one, session_two]: produce_replay_summary( diff --git a/ee/tasks/subscriptions/subscription_utils.py b/ee/tasks/subscriptions/subscription_utils.py index 6fa4b63960f..eb8afed13cb 100644 --- a/ee/tasks/subscriptions/subscription_utils.py +++ b/ee/tasks/subscriptions/subscription_utils.py @@ -56,7 +56,7 @@ def generate_assets( # Wait for all assets to be exported tasks = [exporter.export_asset.si(asset.id) for asset in assets] # run them one after the other, so we don't exhaust celery workers - 
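The recurring timezone.utc to UTC substitutions throughout this patch lean on datetime.UTC, which Python 3.11 adds as an alias for the existing timezone.utc singleton, so the imports get shorter without changing behaviour. A quick sketch of the equivalence:

import datetime as dt
from datetime import UTC, datetime

# datetime.UTC is just another name for the timezone.utc singleton.
assert dt.UTC is dt.timezone.utc
assert UTC is dt.timezone.utc

# Aware datetimes built either way carry the same tzinfo object.
now_old_style = dt.datetime.now(dt.timezone.utc)
now_new_style = datetime.now(UTC)
assert now_old_style.tzinfo is now_new_style.tzinfo is UTC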
exports_expire = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( + exports_expire = datetime.datetime.now(tz=datetime.UTC) + datetime.timedelta( minutes=settings.PARALLEL_ASSET_GENERATION_MAX_TIMEOUT_MINUTES ) parallel_job = chain(*tasks).apply_async(expires=exports_expire, retry=False) diff --git a/mypy.ini b/mypy.ini index 414b1d25217..438b5f47ef6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,5 @@ [mypy] -python_version = 3.10 +python_version = 3.11 plugins = mypy_django_plugin.main, mypy_drf_plugin.main, diff --git a/posthog/api/app_metrics.py b/posthog/api/app_metrics.py index 6fe56947b42..12d54838737 100644 --- a/posthog/api/app_metrics.py +++ b/posthog/api/app_metrics.py @@ -90,9 +90,7 @@ class AppMetricsViewSet(TeamAndOrgViewSetMixin, mixins.RetrieveModelMixin, views after = self.request.GET.get("date_from", "-30d") before = self.request.GET.get("date_to", None) after_datetime = relative_date_parse(after, self.team.timezone_info) - before_datetime = ( - relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.timezone.utc) - ) + before_datetime = relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.UTC) date_range = (after_datetime, before_datetime) runs = ( BatchExportRun.objects.select_related("batch_export__destination") diff --git a/posthog/api/authentication.py b/posthog/api/authentication.py index d82d958f274..b04707ca559 100644 --- a/posthog/api/authentication.py +++ b/posthog/api/authentication.py @@ -290,7 +290,7 @@ class PasswordResetSerializer(serializers.Serializer): user = None if user: - user.requested_password_reset_at = datetime.datetime.now(datetime.timezone.utc) + user.requested_password_reset_at = datetime.datetime.now(datetime.UTC) user.save() token = password_reset_token_generator.make_token(user) send_password_reset(user.id, token) diff --git a/posthog/api/comments.py b/posthog/api/comments.py index 20961be0e3c..06443f92b2f 100644 --- a/posthog/api/comments.py +++ b/posthog/api/comments.py @@ -11,11 +11,13 @@ from posthog.api.forbid_destroy_model import ForbidDestroyModel from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer +from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer from posthog.models.comment import Comment class CommentSerializer(serializers.ModelSerializer): created_by = UserBasicSerializer(read_only=True) + deleted = ClassicBehaviorBooleanFieldSerializer() class Meta: model = Comment diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index 6887b85dcf5..029a3186d43 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -23,6 +23,7 @@ from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin from posthog.api.dashboards.dashboard import Dashboard +from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer from posthog.auth import PersonalAPIKeyAuthentication, TemporaryTokenAuthentication from posthog.constants import FlagRequestType from posthog.event_usage import report_user_action @@ -89,6 +90,9 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo is_simple_flag = serializers.SerializerMethodField() rollout_percentage = serializers.SerializerMethodField() + ensure_experience_continuity = ClassicBehaviorBooleanFieldSerializer() + has_enriched_analytics = 
ClassicBehaviorBooleanFieldSerializer() + experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True) surveys: serializers.SerializerMethodField = serializers.SerializerMethodField() features: serializers.SerializerMethodField = serializers.SerializerMethodField() diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py index 47a5ab5b3bb..481b63476f1 100644 --- a/posthog/api/plugin.py +++ b/posthog/api/plugin.py @@ -22,6 +22,7 @@ from rest_framework.response import Response from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import FiltersSerializer +from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer from posthog.models import Plugin, PluginAttachment, PluginConfig, User from posthog.models.activity_logging.activity_log import ( ActivityPage, @@ -586,6 +587,8 @@ class PluginConfigSerializer(serializers.ModelSerializer): delivery_rate_24h = serializers.SerializerMethodField() error = serializers.SerializerMethodField() + deleted = ClassicBehaviorBooleanFieldSerializer() + class Meta: model = PluginConfig fields = [ diff --git a/posthog/api/routing.py b/posthog/api/routing.py index c4e67d18262..f2816f9a2b1 100644 --- a/posthog/api/routing.py +++ b/posthog/api/routing.py @@ -36,6 +36,32 @@ else: class DefaultRouterPlusPlus(ExtendedDefaultRouter): """DefaultRouter with optional trailing slash and drf-extensions nesting.""" + # This is an override because of changes in djangorestframework 3.15, which is required for python 3.11 + # changes taken from and explained here: https://github.com/nautobot/nautobot/pull/5546/files#diff-81850a2ccad5814aab4f477d447f85cc0a82e9c10fd88fd72327cda51a750471R30 + def _register(self, prefix, viewset, basename=None): + """ + Override DRF's BaseRouter.register() to bypass an unnecessary restriction added in version 3.15.0. + (Reference: https://github.com/encode/django-rest-framework/pull/8438) + """ + if basename is None: + basename = self.get_default_basename(viewset) + + # DRF: + # if self.is_already_registered(basename): + # msg = (f'Router with basename "{basename}" is already registered. ' + # f'Please provide a unique basename for viewset "{viewset}"') + # raise ImproperlyConfigured(msg) + # + # We bypass this because we have at least one use case (/api/extras/jobs/) where we are *intentionally* + # registering two viewsets with the same basename, but have carefully defined them so as not to conflict. + + # resuming standard DRF code... + self.registry.append((prefix, viewset, basename)) + + # invalidate the urls cache + if hasattr(self, "_urls"): + del self._urls + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.trailing_slash = r"/?" diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index 8793984c350..2ded9229008 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -77,7 +77,7 @@ "/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. 
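The _register override added to DefaultRouterPlusPlus copies DRF's BaseRouter registration logic minus the duplicate-basename check introduced in DRF 3.15; the drf-extensions router this class builds on dispatches registration through _register, so the check is skipped for viewsets registered on this router. A rough sketch of what the bypass allows, assuming a configured Django/DRF project and hypothetical viewsets:

from rest_framework import viewsets
from posthog.api.routing import DefaultRouterPlusPlus

class WidgetViewSet(viewsets.ViewSet):        # hypothetical
    pass

class LegacyWidgetViewSet(viewsets.ViewSet):  # hypothetical
    pass

router = DefaultRouterPlusPlus()
router.register(r"widgets", WidgetViewSet, basename="widget")
# Stock DRF >= 3.15 would raise ImproperlyConfigured here because the
# basename "widget" is already registered; the override accepts it.
router.register(r"legacy_widgets", LegacyWidgetViewSet, basename="widget")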
(Exception: 'AnonymousUser' object has no attribute 'organization')", '/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/opt/hostedtoolcache/Python/3.10.10/x64/lib/python3.10/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', + '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.', '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', diff --git a/posthog/api/test/batch_exports/test_log_entry.py b/posthog/api/test/batch_exports/test_log_entry.py index b166583ee0b..06dcb6ce4a8 100644 --- a/posthog/api/test/batch_exports/test_log_entry.py +++ b/posthog/api/test/batch_exports/test_log_entry.py @@ -38,7 +38,7 @@ def create_batch_export_log_entry( "log_source": "batch_exports", "log_source_id": batch_export_id, "instance_id": run_id, - "timestamp": dt.datetime.now(dt.timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f"), + "timestamp": dt.datetime.now(dt.UTC).strftime("%Y-%m-%d %H:%M:%S.%f"), "level": level, "message": message, }, @@ -147,7 +147,7 @@ def test_log_level_filter(batch_export, team, level): results = [] timeout = 10 - start = dt.datetime.now(dt.timezone.utc) + start = dt.datetime.now(dt.UTC) while not results: results = fetch_batch_export_log_entries( @@ -157,7 +157,7 @@ def test_log_level_filter(batch_export, team, level): after=dt.datetime(2023, 9, 22, 0, 59, 59), before=dt.datetime(2023, 9, 22, 1, 0, 1), ) - if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout): + if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout): break results.sort(key=lambda record: record.message) @@ -195,7 +195,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level): results = [] timeout = 10 - start = dt.datetime.now(dt.timezone.utc) + start = dt.datetime.now(dt.UTC) while not results: results = fetch_batch_export_log_entries( @@ -205,7 +205,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level): after=dt.datetime(2023, 9, 22, 0, 59, 59), before=dt.datetime(2023, 9, 22, 1, 0, 1), ) - if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout): + if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout): break results.sort(key=lambda record: record.message) diff --git a/posthog/api/test/batch_exports/test_pause.py b/posthog/api/test/batch_exports/test_pause.py index 7db786347e0..33c32f1a200 100644 --- a/posthog/api/test/batch_exports/test_pause.py +++ b/posthog/api/test/batch_exports/test_pause.py @@ -397,8 +397,8 @@ def test_unpause_can_trigger_a_backfill(client: HttpClient): data = get_batch_export_ok(client, team.pk, batch_export_id) assert batch_export["last_updated_at"] < data["last_updated_at"] - start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc) - end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc) + start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC) + end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC) mock_backfill.assert_called_once_with( ANY, batch_export["id"], diff --git a/posthog/api/test/batch_exports/test_update.py b/posthog/api/test/batch_exports/test_update.py index 7b749c62dc2..2a7f3241fd0 100644 --- a/posthog/api/test/batch_exports/test_update.py +++ b/posthog/api/test/batch_exports/test_update.py @@ -94,8 +94,8 @@ def test_can_put_config(client: HttpClient): new_schedule = describe_schedule(temporal, batch_export["id"]) assert old_schedule.schedule.spec.intervals[0].every != new_schedule.schedule.spec.intervals[0].every assert new_schedule.schedule.spec.intervals[0].every == dt.timedelta(days=1) - assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.timezone.utc) - assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 
0, 0, tzinfo=dt.timezone.utc) + assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.UTC) + assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.UTC) decoded_payload = async_to_sync(codec.decode)(new_schedule.schedule.action.args) args = json.loads(decoded_payload[0].data) diff --git a/posthog/api/test/test_app_metrics.py b/posthog/api/test/test_app_metrics.py index c639b37aee6..67b9a0a42ea 100644 --- a/posthog/api/test/test_app_metrics.py +++ b/posthog/api/test/test_app_metrics.py @@ -100,7 +100,7 @@ class TestAppMetricsAPI(ClickhouseTestMixin, APIBaseTest): temporal = sync_connect() - now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc) + now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC) with start_test_worker(temporal): response = create_batch_export_ok( self.client, @@ -191,7 +191,7 @@ class TestAppMetricsAPI(ClickhouseTestMixin, APIBaseTest): } temporal = sync_connect() - now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc) + now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC) with start_test_worker(temporal): response = create_batch_export_ok( diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py index a7d605d9a3a..756d1638a3f 100644 --- a/posthog/api/test/test_capture.py +++ b/posthog/api/test/test_capture.py @@ -13,7 +13,7 @@ import string import structlog import zlib from datetime import datetime, timedelta -from datetime import timezone as tz +from datetime import UTC from django.http import HttpResponse from django.test.client import MULTIPART_CONTENT, Client from django.utils import timezone @@ -1415,7 +1415,7 @@ class TestCapture(BaseTest): # right time sent as sent_at to process_event sent_at = datetime.fromisoformat(arguments["sent_at"]) - self.assertEqual(sent_at.tzinfo, tz.utc) + self.assertEqual(sent_at.tzinfo, UTC) timediff = sent_at.timestamp() - tomorrow_sent_at.timestamp() self.assertLess(abs(timediff), 1) diff --git a/posthog/api/user.py b/posthog/api/user.py index ad5ef32de0e..ee2b66c47eb 100644 --- a/posthog/api/user.py +++ b/posthog/api/user.py @@ -38,6 +38,7 @@ from posthog.api.shared import OrganizationBasicSerializer, TeamBasicSerializer from posthog.api.utils import ( PublicIPOnlyHttpAdapter, raise_if_user_provided_url_unsafe, + ClassicBehaviorBooleanFieldSerializer, ) from posthog.auth import ( PersonalAPIKeyAuthentication, @@ -87,6 +88,7 @@ class UserSerializer(serializers.ModelSerializer): current_password = serializers.CharField(write_only=True, required=False) notification_settings = serializers.DictField(required=False) scene_personalisation = ScenePersonalisationBasicSerializer(many=True, read_only=True) + anonymize_data = ClassicBehaviorBooleanFieldSerializer() class Meta: model = User diff --git a/posthog/api/utils.py b/posthog/api/utils.py index 65cff4897eb..2f1bd5c087b 100644 --- a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -7,6 +7,7 @@ from ipaddress import ip_address from requests.adapters import HTTPAdapter from typing import Literal, Optional, Union +from rest_framework.fields import Field from urllib3 import HTTPSConnectionPool, HTTPConnectionPool, PoolManager from uuid import UUID @@ -14,7 +15,7 @@ import structlog from django.core.exceptions import RequestDataTooBig from django.db.models import QuerySet from prometheus_client import Counter -from rest_framework import request, status +from rest_framework import request, status, serializers from rest_framework.exceptions import ValidationError from 
statshog.defaults.django import statsd @@ -35,6 +36,14 @@ class PaginationMode(Enum): previous = auto() +# This overrides a change in DRF 3.15 that alters our behavior. If the user passes an empty argument, +# the new version keeps it as null vs coalescing it to the default. +# Don't add this to new classes +class ClassicBehaviorBooleanFieldSerializer(serializers.BooleanField): + def __init__(self, **kwargs): + Field.__init__(self, allow_null=True, required=False, **kwargs) + + def get_target_entity(filter: Union[Filter, StickinessFilter]) -> Entity: # Except for "events", we require an entity id and type to be provided if not filter.target_entity_id and filter.target_entity_type != "events": diff --git a/posthog/async_migrations/test/test_utils.py b/posthog/async_migrations/test/test_utils.py index da01ec9dda5..15d88019e78 100644 --- a/posthog/async_migrations/test/test_utils.py +++ b/posthog/async_migrations/test/test_utils.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from unittest.mock import patch import pytest @@ -49,7 +49,7 @@ class TestUtils(AsyncMigrationBaseTest): sm.refresh_from_db() self.assertEqual(sm.status, MigrationStatus.Errored) - self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1)) + self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1)) errors = AsyncMigrationError.objects.filter(async_migration=sm).order_by("created_at") self.assertEqual(errors.count(), 2) self.assertEqual(errors[0].description, "some error") @@ -81,7 +81,7 @@ class TestUtils(AsyncMigrationBaseTest): sm.refresh_from_db() self.assertEqual(sm.status, MigrationStatus.CompletedSuccessfully) - self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1)) + self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1)) self.assertEqual(sm.progress, 100) errors = AsyncMigrationError.objects.filter(async_migration=sm) diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index 98a97a74b3f..ec812db0b47 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -76,11 +76,11 @@ def validate_date_input(date_input: Any, team: Team | None = None) -> dt.datetim if parsed.tzinfo is None: if team: - parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.timezone.utc) + parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.UTC) else: - parsed = parsed.replace(tzinfo=dt.timezone.utc) + parsed = parsed.replace(tzinfo=dt.UTC) else: - parsed = parsed.astimezone(dt.timezone.utc) + parsed = parsed.astimezone(dt.UTC) return parsed diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py index f891089e7a3..7c1b3b7b0a4 100644 --- a/posthog/batch_exports/models.py +++ b/posthog/batch_exports/models.py @@ -1,7 +1,7 @@ import collections.abc import dataclasses import datetime as dt -from enum import Enum +import enum import typing from datetime import timedelta @@ -254,7 +254,7 @@ class BatchExport(UUIDModel): raise ValueError(f"Invalid interval: '{self.interval}'") -class BatchExportLogEntryLevel(str, Enum): +class BatchExportLogEntryLevel(enum.StrEnum): """Enumeration of batch export log levels.""" DEBUG = "DEBUG" diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index 2483738cefb..9ac836e261c 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -269,7 +269,7 @@ def pause_batch_export(temporal: Client, batch_export_id: str, 
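ClassicBehaviorBooleanFieldSerializer in posthog/api/utils.py is just a BooleanField pinned to allow_null=True and required=False, preserving the pre-DRF-3.15 handling of boolean fields that this patch re-attaches to serializers such as CommentSerializer and FeatureFlagSerializer. A minimal sketch of what those defaults mean in practice, assuming a configured DRF project (ExampleSerializer is hypothetical):

from rest_framework import serializers
from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer

class ExampleSerializer(serializers.Serializer):
    deleted = ClassicBehaviorBooleanFieldSerializer()

# required=False: the field may be omitted entirely without failing validation.
empty = ExampleSerializer(data={})
assert empty.is_valid()
assert "deleted" not in empty.validated_data

# allow_null=True: an explicit null is accepted and passed through as None.
explicit_null = ExampleSerializer(data={"deleted": None})
assert explicit_null.is_valid()
assert explicit_null.validated_data == {"deleted": None}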
note: str | None raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc batch_export.paused = True - batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc) + batch_export.last_paused_at = dt.datetime.now(dt.UTC) batch_export.save() return True @@ -297,7 +297,7 @@ async def apause_batch_export(temporal: Client, batch_export_id: str, note: str raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc batch_export.paused = True - batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc) + batch_export.last_paused_at = dt.datetime.now(dt.UTC) await batch_export.asave() return True diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 91e33d79c2d..42b82d38867 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -156,7 +156,7 @@ def execute_process_query( query_status.error = True # Assume error in case nothing below ends up working - pickup_time = datetime.datetime.now(datetime.timezone.utc) + pickup_time = datetime.datetime.now(datetime.UTC) if query_status.start_time: wait_duration = (pickup_time - query_status.start_time) / datetime.timedelta(seconds=1) QUERY_WAIT_TIME.labels( @@ -177,7 +177,7 @@ def execute_process_query( query_status.complete = True query_status.error = False query_status.results = results - query_status.end_time = datetime.datetime.now(datetime.timezone.utc) + query_status.end_time = datetime.datetime.now(datetime.UTC) query_status.expiration_time = query_status.end_time + datetime.timedelta(seconds=manager.STATUS_TTL_SECONDS) process_duration = (query_status.end_time - pickup_time) / datetime.timedelta(seconds=1) QUERY_PROCESS_TIME.labels(team=team_id).observe(process_duration) @@ -218,7 +218,7 @@ def enqueue_process_query_task( return manager.get_query_status() # Immediately set status, so we don't have race with celery - query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.timezone.utc)) + query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.UTC)) manager.store_query_status(query_status) task_signature = process_query_task.si( diff --git a/posthog/clickhouse/table_engines.py b/posthog/clickhouse/table_engines.py index e2b83d3f290..b67ef9be5bc 100644 --- a/posthog/clickhouse/table_engines.py +++ b/posthog/clickhouse/table_engines.py @@ -1,11 +1,11 @@ import uuid -from enum import Enum +from enum import StrEnum from typing import Optional from django.conf import settings -class ReplicationScheme(str, Enum): +class ReplicationScheme(StrEnum): NOT_SHARDED = "NOT_SHARDED" SHARDED = "SHARDED" REPLICATED = "REPLICATED" diff --git a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py index ec632eebe77..4dbf0900b53 100644 --- a/posthog/clickhouse/test/test_person_overrides.py +++ b/posthog/clickhouse/test/test_person_overrides.py @@ -1,5 +1,5 @@ import json -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from time import sleep from typing import TypedDict from uuid import UUID, uuid4 @@ -124,7 +124,7 @@ def test_person_overrides_dict(): "override_person_id": uuid4(), "merged_at": datetime.fromisoformat("2020-01-02T00:00:00+00:00"), "oldest_event": datetime.fromisoformat("2020-01-01T00:00:00+00:00"), - "created_at": datetime.now(timezone.utc), + "created_at": datetime.now(UTC), "version": 1, } 
diff --git a/posthog/constants.py b/posthog/constants.py index fc8f7a91421..af1e627bc71 100644 --- a/posthog/constants.py +++ b/posthog/constants.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import StrEnum from typing import Literal from semantic_version import Version @@ -9,7 +9,7 @@ INTERNAL_BOT_EMAIL_SUFFIX = "@posthogbot.user" # N.B. Keep this in sync with frontend enum (types.ts) # AND ensure it is added to the Billing Service -class AvailableFeature(str, Enum): +class AvailableFeature(StrEnum): ZAPIER = "zapier" ORGANIZATIONS_PROJECTS = "organizations_projects" PROJECT_BASED_PERMISSIONING = "project_based_permissioning" @@ -215,19 +215,19 @@ SAMPLING_FACTOR = "sampling_factor" BREAKDOWN_TYPES = Literal["event", "person", "cohort", "group", "session", "hogql"] -class FunnelOrderType(str, Enum): +class FunnelOrderType(StrEnum): STRICT = "strict" UNORDERED = "unordered" ORDERED = "ordered" -class FunnelVizType(str, Enum): +class FunnelVizType(StrEnum): TRENDS = "trends" TIME_TO_CONVERT = "time_to_convert" STEPS = "steps" -class FunnelCorrelationType(str, Enum): +class FunnelCorrelationType(StrEnum): EVENTS = "events" PROPERTIES = "properties" EVENT_WITH_PROPERTIES = "event_with_properties" @@ -240,7 +240,7 @@ DISTINCT_ID_FILTER = "distinct_id" PERSON_UUID_FILTER = "person_uuid" -class AnalyticsDBMS(str, Enum): +class AnalyticsDBMS(StrEnum): POSTGRES = "postgres" CLICKHOUSE = "clickhouse" @@ -251,13 +251,13 @@ WEEKLY_ACTIVE = "weekly_active" MONTHLY_ACTIVE = "monthly_active" -class RetentionQueryType(str, Enum): +class RetentionQueryType(StrEnum): RETURNING = "returning" TARGET = "target" TARGET_FIRST_TIME = "target_first_time" -class ExperimentSignificanceCode(str, Enum): +class ExperimentSignificanceCode(StrEnum): SIGNIFICANT = "significant" NOT_ENOUGH_EXPOSURE = "not_enough_exposure" LOW_WIN_PROBABILITY = "low_win_probability" @@ -265,7 +265,7 @@ class ExperimentSignificanceCode(str, Enum): HIGH_P_VALUE = "high_p_value" -class ExperimentNoResultsErrorKeys(str, Enum): +class ExperimentNoResultsErrorKeys(StrEnum): NO_EVENTS = "no-events" NO_FLAG_INFO = "no-flag-info" NO_CONTROL_VARIANT = "no-control-variant" @@ -273,12 +273,12 @@ class ExperimentNoResultsErrorKeys(str, Enum): NO_RESULTS = "no-results" -class PropertyOperatorType(str, Enum): +class PropertyOperatorType(StrEnum): AND = "AND" OR = "OR" -class BreakdownAttributionType(str, Enum): +class BreakdownAttributionType(StrEnum): FIRST_TOUCH = "first_touch" # FIRST_TOUCH attribution means the breakdown value is the first property value found within all funnel steps LAST_TOUCH = "last_touch" @@ -294,7 +294,7 @@ MAX_SLUG_LENGTH = 48 GROUP_TYPES_LIMIT = 5 -class EventDefinitionType(str, Enum): +class EventDefinitionType(StrEnum): # Mimics EventDefinitionType in frontend/src/types.ts ALL = "all" ACTION_EVENT = "action_event" @@ -303,7 +303,7 @@ class EventDefinitionType(str, Enum): EVENT_CUSTOM = "event_custom" -class FlagRequestType(str, Enum): +class FlagRequestType(StrEnum): DECIDE = "decide" LOCAL_EVALUATION = "local-evaluation" diff --git a/posthog/decorators.py b/posthog/decorators.py index eb66afcf422..c4aba39e3d2 100644 --- a/posthog/decorators.py +++ b/posthog/decorators.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import StrEnum from functools import wraps from typing import Any, TypeVar, Union, cast from collections.abc import Callable @@ -17,7 +17,7 @@ from posthog.utils import refresh_requested_by_client from .utils import generate_cache_key, get_safe_cache -class CacheType(str, Enum): +class 
CacheType(StrEnum): TRENDS = "Trends" FUNNEL = "Funnel" RETENTION = "Retention" diff --git a/posthog/demo/matrix/models.py b/posthog/demo/matrix/models.py index 50747fb65ca..511da24c1e0 100644 --- a/posthog/demo/matrix/models.py +++ b/posthog/demo/matrix/models.py @@ -106,9 +106,7 @@ class SimEvent: group4_created_at: Optional[dt.datetime] = None def __str__(self) -> str: - separator = ( - "-" if self.timestamp < dt.datetime.now(dt.timezone.utc) else "+" - ) # Future events are denoted by a '+' + separator = "-" if self.timestamp < dt.datetime.now(dt.UTC) else "+" # Future events are denoted by a '+' display = f"{self.timestamp} {separator} {self.event} # {self.distinct_id}" if current_url := self.properties.get("$current_url"): display += f" @ {current_url}" diff --git a/posthog/demo/matrix/randomization.py b/posthog/demo/matrix/randomization.py index 9500f72778a..71701d2c6ce 100644 --- a/posthog/demo/matrix/randomization.py +++ b/posthog/demo/matrix/randomization.py @@ -1,11 +1,11 @@ -from enum import Enum +from enum import StrEnum import mimesis.random WeightedPool = tuple[list[str], list[int]] -class Industry(str, Enum): +class Industry(StrEnum): TECHNOLOGY = "technology" FINANCE = "finance" MEDIA = "media" diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py index af7b3d6862f..9b0c72afc69 100644 --- a/posthog/demo/products/hedgebox/models.py +++ b/posthog/demo/products/hedgebox/models.py @@ -1,7 +1,7 @@ import datetime as dt import math from dataclasses import dataclass, field -from enum import auto, Enum +from enum import auto, StrEnum from typing import ( TYPE_CHECKING, Any, @@ -66,7 +66,7 @@ class HedgeboxSessionIntent(SimSessionIntent): DOWNGRADE_PLAN = auto() -class HedgeboxPlan(str, Enum): +class HedgeboxPlan(StrEnum): PERSONAL_FREE = "personal/free" PERSONAL_PRO = "personal/pro" BUSINESS_STANDARD = "business/standard" diff --git a/posthog/hogql/ast.py b/posthog/hogql/ast.py index a21a74f4a91..72b2c32f7b7 100644 --- a/posthog/hogql/ast.py +++ b/posthog/hogql/ast.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import StrEnum from typing import Any, Literal, Optional, Union from dataclasses import dataclass, field @@ -554,7 +554,7 @@ class Alias(Expr): hidden: bool = False -class ArithmeticOperationOp(str, Enum): +class ArithmeticOperationOp(StrEnum): Add = "+" Sub = "-" Mult = "*" @@ -581,7 +581,7 @@ class Or(Expr): type: Optional[ConstantType] = None -class CompareOperationOp(str, Enum): +class CompareOperationOp(StrEnum): Eq = "==" NotEq = "!=" Gt = ">" diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 769d4a250e6..f484a6d0fad 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -1,5 +1,5 @@ from datetime import date, datetime -from enum import Enum +from enum import StrEnum from typing import Optional, Literal, TypeAlias from uuid import UUID from pydantic import ConfigDict, BaseModel @@ -47,7 +47,7 @@ BREAKDOWN_VALUES_LIMIT = 25 BREAKDOWN_VALUES_LIMIT_FOR_COUNTRIES = 300 -class LimitContext(str, Enum): +class LimitContext(StrEnum): QUERY = "query" QUERY_ASYNC = "query_async" EXPORT = "export" diff --git a/posthog/hogql/database/schema/persons.py b/posthog/hogql/database/schema/persons.py index 12f4d364001..0b0747593b7 100644 --- a/posthog/hogql/database/schema/persons.py +++ b/posthog/hogql/database/schema/persons.py @@ -1,5 +1,4 @@ -from typing import cast, Optional -from typing_extensions import Self +from typing import cast, Optional, Self import posthoganalytics from 
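The posthog/hogql/database/schema/persons.py import swap works because Self (PEP 673) landed in the standard typing module in Python 3.11, so the typing_extensions backport is no longer needed. A small, self-contained illustration with a hypothetical builder class:

from typing import Self

class QueryBuilder:
    def __init__(self) -> None:
        self.parts: list[str] = []

    def add(self, part: str) -> Self:
        # Annotating the return type as Self keeps chained calls typed as the
        # concrete subclass, which typing_extensions previously provided on 3.10.
        self.parts.append(part)
        return self

print(QueryBuilder().add("SELECT 1").add("LIMIT 10").parts)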
posthog.hogql.ast import SelectQuery, And, CompareOperation, CompareOperationOp, Field, JoinExpr diff --git a/posthog/hogql/test/test_resolver.py b/posthog/hogql/test/test_resolver.py index 6b9e0d166d5..cc4cde4554a 100644 --- a/posthog/hogql/test/test_resolver.py +++ b/posthog/hogql/test/test_resolver.py @@ -1,4 +1,4 @@ -from datetime import timezone, datetime, date +from datetime import datetime, date, UTC from typing import Optional, cast import pytest from django.test import override_settings @@ -97,7 +97,7 @@ class TestResolver(BaseTest): "SELECT 1, 'boo', true, 1.1232, null, {date}, {datetime}, {uuid}, {array}, {array12}, {tuple}", placeholders={ "date": ast.Constant(value=date(2020, 1, 10)), - "datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc)), + "datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=UTC)), "uuid": ast.Constant(value=UUID("00000000-0000-4000-8000-000000000000")), "array": ast.Constant(value=[]), "array12": ast.Constant(value=[1, 2]), diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py b/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py index fd95febddf6..b12f15ac641 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import datetime, UTC from typing import Optional, cast from freezegun import freeze_time @@ -70,7 +70,7 @@ class TestTrendsActorsQueryBuilder(BaseTest): def _get_utc_string(self, dt: datetime | None) -> str | None: if dt is None: return None - return dt.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%SZ") + return dt.astimezone(UTC).strftime("%Y-%m-%d %H:%M:%SZ") def test_time_frame(self): self.team.timezone = "Europe/Berlin" diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index 0fc639b08fc..0d1dd48e9bf 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -1,5 +1,5 @@ import copy -from enum import Enum +from enum import StrEnum import json import re from typing import Any, Literal @@ -35,7 +35,7 @@ from posthog.types import InsightQueryNode from posthog.utils import str_to_bool -class MathAvailability(str, Enum): +class MathAvailability(StrEnum): Unavailable = ("Unavailable",) All = ("All",) ActorsOnly = "ActorsOnly" diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index d38cd03626f..38e1ccc255a 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from enum import IntEnum from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard from zoneinfo import ZoneInfo @@ -445,7 +445,7 @@ class QueryRunner(ABC, Generic[Q, R, CR]): elif execution_mode == ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE: # We're allowed to calculate if the cache is older than 24 hours, but we'll do it asynchronously assert isinstance(cached_response, CachedResponse) - if datetime.now(timezone.utc) - cached_response.last_refresh > EXTENDED_CACHE_AGE: + if datetime.now(UTC) - cached_response.last_refresh > EXTENDED_CACHE_AGE: 
query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user) cached_response.query_status = query_status_response.query_status return cached_response @@ -490,8 +490,8 @@ class QueryRunner(ABC, Generic[Q, R, CR]): fresh_response_dict = { **self.calculate().model_dump(), "is_cached": False, - "last_refresh": datetime.now(timezone.utc), - "next_allowed_client_refresh": datetime.now(timezone.utc) + self._refresh_frequency(), + "last_refresh": datetime.now(UTC), + "next_allowed_client_refresh": datetime.now(UTC) + self._refresh_frequency(), "cache_key": cache_key, "timezone": self.team.timezone, } diff --git a/posthog/jwt.py b/posthog/jwt.py index ead4196aa47..897abf98ee9 100644 --- a/posthog/jwt.py +++ b/posthog/jwt.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from enum import Enum from typing import Any @@ -23,7 +23,7 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie encoded_jwt = jwt.encode( { **payload, - "exp": datetime.now(tz=timezone.utc) + expiry_delta, + "exp": datetime.now(tz=UTC) + expiry_delta, "aud": audience.value, }, settings.SECRET_KEY, diff --git a/posthog/kafka_client/client.py b/posthog/kafka_client/client.py index 3f58e572417..f0008c4ba72 100644 --- a/posthog/kafka_client/client.py +++ b/posthog/kafka_client/client.py @@ -1,5 +1,5 @@ import json -from enum import Enum +from enum import StrEnum from typing import Any, Optional from collections.abc import Callable @@ -83,7 +83,7 @@ class KafkaConsumerForTests: return -class _KafkaSecurityProtocol(str, Enum): +class _KafkaSecurityProtocol(StrEnum): PLAINTEXT = "PLAINTEXT" SSL = "SSL" SASL_PLAINTEXT = "SASL_PLAINTEXT" diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py index 90806ad900f..80907dccea7 100644 --- a/posthog/management/commands/create_batch_export_from_app.py +++ b/posthog/management/commands/create_batch_export_from_app.py @@ -116,7 +116,7 @@ class Command(BaseCommand): if options.get("backfill_batch_export", False) and dry_run is False: client = sync_connect() - end_at = dt.datetime.now(dt.timezone.utc) + end_at = dt.datetime.now(dt.UTC) start_at = end_at - (dt.timedelta(hours=1) if interval == "hour" else dt.timedelta(days=1)) backfill_export( client, diff --git a/posthog/management/commands/create_channel_definitions_file.py b/posthog/management/commands/create_channel_definitions_file.py index cab70bf31d3..bea98c02b52 100644 --- a/posthog/management/commands/create_channel_definitions_file.py +++ b/posthog/management/commands/create_channel_definitions_file.py @@ -3,7 +3,7 @@ import re import subprocess from collections import OrderedDict from dataclasses import dataclass -from enum import Enum +from enum import StrEnum from typing import Optional from django.core.management.base import BaseCommand @@ -12,7 +12,7 @@ from django.core.management.base import BaseCommand OUTPUT_FILE = "posthog/models/channel_type/channel_definitions.json" -class EntryKind(str, Enum): +class EntryKind(StrEnum): source = "source" medium = "medium" diff --git a/posthog/management/commands/generate_demo_data.py b/posthog/management/commands/generate_demo_data.py index f75f1512595..ce094620453 100644 --- a/posthog/management/commands/generate_demo_data.py +++ b/posthog/management/commands/generate_demo_data.py @@ -65,7 +65,7 @@ class Command(BaseCommand): def handle(self, *args, **options): timer = monotonic() seed = 
options.get("seed") or secrets.token_hex(16) - now = options.get("now") or dt.datetime.now(dt.timezone.utc) + now = options.get("now") or dt.datetime.now(dt.UTC) existing_team_id = options.get("team_id") if ( existing_team_id is not None diff --git a/posthog/management/commands/migrate_team.py b/posthog/management/commands/migrate_team.py index d964a7db0c0..e2395a46e2a 100644 --- a/posthog/management/commands/migrate_team.py +++ b/posthog/management/commands/migrate_team.py @@ -254,7 +254,7 @@ def create_migration( raise CommandError("Didn't receive 'y', exiting") print() # noqa: T201 - now = dt.datetime.now(dt.timezone.utc) + now = dt.datetime.now(dt.UTC) # This is a precaution so we don't accidentally leave the export running indefinitely. end_at = now + dt.timedelta(days=end_days_from_now) @@ -299,5 +299,5 @@ def parse_to_utc(date_str: str) -> dt.datetime: except ValueError: raise ValueError("Invalid date format. Expected 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.") - utc_datetime = parsed_datetime.replace(tzinfo=dt.timezone.utc) + utc_datetime = parsed_datetime.replace(tzinfo=dt.UTC) return utc_datetime diff --git a/posthog/management/commands/plugin_server_load_test.py b/posthog/management/commands/plugin_server_load_test.py index 4adfe8941e6..a97a5f69738 100644 --- a/posthog/management/commands/plugin_server_load_test.py +++ b/posthog/management/commands/plugin_server_load_test.py @@ -63,7 +63,7 @@ class Command(BaseCommand): def handle(self, *args, **options): seed = options.get("seed") or secrets.token_hex(16) - now = options.get("now") or dt.datetime.now(dt.timezone.utc) + now = options.get("now") or dt.datetime.now(dt.UTC) admin = KafkaAdminClient(bootstrap_servers=settings.KAFKA_HOSTS) consumer = KafkaConsumer(KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, bootstrap_servers=settings.KAFKA_HOSTS) diff --git a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py index 3609a358054..b38d0fbe138 100644 --- a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py +++ b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from unittest import mock from uuid import UUID, uuid4 @@ -143,7 +143,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin): wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch, ) def test_group_sync(self, mocked_ch_call): - ts = datetime.now(timezone.utc) + ts = datetime.now(UTC) Group.objects.create( team_id=self.team.pk, group_type_index=2, @@ -183,12 +183,12 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin): 2, "group-key", {"a": 5}, - timestamp=datetime.now(timezone.utc) - timedelta(hours=3), + timestamp=datetime.now(UTC) - timedelta(hours=3), ) group.group_properties = {"a": 5, "b": 3} group.save() - ts_before = datetime.now(timezone.utc) + ts_before = datetime.now(UTC) run_group_sync(self.team.pk, live_run=True, sync=True) mocked_ch_call.assert_called_once() @@ -213,7 +213,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin): ) self.assertLessEqual( ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), - datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"), + datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S"), ) # second time it's a no-op @@ -225,7 +225,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin): 
wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch, ) def test_group_sync_multiple_entries(self, mocked_ch_call): - ts = datetime.now(timezone.utc) + ts = datetime.now(UTC) Group.objects.create( team_id=self.team.pk, group_type_index=2, @@ -430,7 +430,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin): group_type_index=2, group_key="group-key", group_properties={"a": 1234}, - created_at=datetime.now(timezone.utc) - timedelta(hours=3), + created_at=datetime.now(UTC) - timedelta(hours=3), version=5, ) diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py index 7c56a547252..90783f68604 100644 --- a/posthog/models/feature_flag/flag_matching.py +++ b/posthog/models/feature_flag/flag_matching.py @@ -1,6 +1,6 @@ import hashlib from dataclasses import dataclass -from enum import Enum +from enum import StrEnum import time import structlog from typing import Literal, Optional, Union, cast @@ -67,7 +67,7 @@ ENTITY_EXISTS_PREFIX = "flag_entity_exists_" PERSON_KEY = "person" -class FeatureFlagMatchReason(str, Enum): +class FeatureFlagMatchReason(StrEnum): SUPER_CONDITION_VALUE = "super_condition_value" CONDITION_MATCH = "condition_match" NO_CONDITION_MATCH = "no_condition_match" diff --git a/posthog/models/filters/stickiness_filter.py b/posthog/models/filters/stickiness_filter.py index cde6d802092..d19b2418e06 100644 --- a/posthog/models/filters/stickiness_filter.py +++ b/posthog/models/filters/stickiness_filter.py @@ -72,7 +72,7 @@ class StickinessFilter( else: data = {"insight": INSIGHT_STICKINESS} super().__init__(data, request, **kwargs) - team: Optional["Team"] = kwargs.get("team", None) + team: Optional[Team] = kwargs.get("team", None) if not team: raise ValidationError("Team must be provided to stickiness filter") self.team = team diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py index 46ddfb9177f..87ab0497c81 100644 --- a/posthog/models/plugin.py +++ b/posthog/models/plugin.py @@ -1,7 +1,7 @@ import datetime import os from dataclasses import dataclass -from enum import Enum +from enum import StrEnum from typing import Any, Optional, cast from uuid import UUID @@ -288,13 +288,13 @@ class PluginStorage(models.Model): value: models.TextField = models.TextField(blank=True, null=True) -class PluginLogEntrySource(str, Enum): +class PluginLogEntrySource(StrEnum): SYSTEM = "SYSTEM" PLUGIN = "PLUGIN" CONSOLE = "CONSOLE" -class PluginLogEntryType(str, Enum): +class PluginLogEntryType(StrEnum): DEBUG = "DEBUG" LOG = "LOG" INFO = "INFO" diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py index 7185306b8cc..bb378b7616d 100644 --- a/posthog/models/property/property.py +++ b/posthog/models/property/property.py @@ -1,5 +1,5 @@ import json -from enum import Enum +from enum import StrEnum from typing import ( Any, Literal, @@ -14,7 +14,7 @@ from posthog.models.filters.utils import GroupTypeIndex, validate_group_type_ind from posthog.utils import str_to_bool -class BehavioralPropertyType(str, Enum): +class BehavioralPropertyType(StrEnum): PERFORMED_EVENT = "performed_event" PERFORMED_EVENT_MULTIPLE = "performed_event_multiple" PERFORMED_EVENT_FIRST_TIME = "performed_event_first_time" diff --git a/posthog/models/test/test_async_deletion_model.py b/posthog/models/test/test_async_deletion_model.py index 060c9381866..8f4125be67a 100644 --- a/posthog/models/test/test_async_deletion_model.py +++ b/posthog/models/test/test_async_deletion_model.py @@ -65,7 +65,7 @@ class 
TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT @snapshot_clickhouse_queries def test_mark_deletions_done_person(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) _create_event( event_uuid=uuid4(), @@ -101,7 +101,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT @snapshot_clickhouse_queries def test_mark_deletions_done_person_when_not_done(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) _create_event( event_uuid=uuid4(), @@ -226,7 +226,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT @snapshot_clickhouse_alter_queries def test_delete_person(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) # Event for person, created before AsyncDeletion, so it should be deleted _create_event( @@ -264,7 +264,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT @snapshot_clickhouse_alter_queries def test_delete_person_unrelated(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) _create_event( event_uuid=uuid4(), diff --git a/posthog/models/test/test_person_override_model.py b/posthog/models/test/test_person_override_model.py index d0809703296..ea64d7a9c97 100644 --- a/posthog/models/test/test_person_override_model.py +++ b/posthog/models/test/test_person_override_model.py @@ -48,7 +48,7 @@ def people(team): @pytest.fixture def oldest_event(): - return dt.datetime.now(dt.timezone.utc) + return dt.datetime.now(dt.UTC) @pytest.mark.django_db(transaction=True) diff --git a/posthog/schema.py b/posthog/schema.py index c44c3362371..c0819617713 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -2,7 +2,7 @@ from __future__ import annotations -from enum import Enum +from enum import Enum, StrEnum from typing import Any, Literal, Optional, Union from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, RootModel @@ -20,7 +20,7 @@ class MathGroupTypeIndex(float, Enum): NUMBER_4 = 4 -class AggregationAxisFormat(str, Enum): +class AggregationAxisFormat(StrEnum): NUMERIC = "numeric" DURATION = "duration" DURATION_MS = "duration_ms" @@ -28,7 +28,7 @@ class AggregationAxisFormat(str, Enum): PERCENTAGE_SCALED = "percentage_scaled" -class Kind(str, Enum): +class Kind(StrEnum): METHOD = "Method" FUNCTION = "Function" CONSTRUCTOR = "Constructor" @@ -87,7 +87,7 @@ class AutocompleteCompletionItem(BaseModel): ) -class BaseMathType(str, Enum): +class BaseMathType(StrEnum): TOTAL = "total" DAU = "dau" WEEKLY_ACTIVE = "weekly_active" @@ -95,14 +95,14 @@ class BaseMathType(str, Enum): UNIQUE_SESSION = "unique_session" -class BreakdownAttributionType(str, Enum): +class BreakdownAttributionType(StrEnum): FIRST_TOUCH = "first_touch" LAST_TOUCH = "last_touch" ALL_EVENTS = "all_events" STEP = "step" -class BreakdownType(str, Enum): +class BreakdownType(StrEnum): COHORT = "cohort" PERSON = "person" EVENT = "event" @@ -164,7 +164,7 @@ class ChartAxis(BaseModel): column: str -class ChartDisplayType(str, Enum): +class ChartDisplayType(StrEnum): ACTIONS_LINE_GRAPH = "ActionsLineGraph" ACTIONS_BAR = "ActionsBar" ACTIONS_AREA_GRAPH = "ActionsAreaGraph" @@ -205,7 +205,7 @@ class CompareFilter(BaseModel): 
compare_to: Optional[str] = None -class CountPerActorMathType(str, Enum): +class CountPerActorMathType(StrEnum): AVG_COUNT_PER_ACTOR = "avg_count_per_actor" MIN_COUNT_PER_ACTOR = "min_count_per_actor" MAX_COUNT_PER_ACTOR = "max_count_per_actor" @@ -255,14 +255,14 @@ class DatabaseSchemaSource(BaseModel): status: str -class Type(str, Enum): +class Type(StrEnum): POSTHOG = "posthog" DATA_WAREHOUSE = "data_warehouse" VIEW = "view" BATCH_EXPORT = "batch_export" -class DatabaseSerializedFieldType(str, Enum): +class DatabaseSerializedFieldType(StrEnum): INTEGER = "integer" FLOAT = "float" STRING = "string" @@ -301,13 +301,13 @@ class Day(RootModel[int]): root: int -class DurationType(str, Enum): +class DurationType(StrEnum): DURATION = "duration" ACTIVE_SECONDS = "active_seconds" INACTIVE_SECONDS = "inactive_seconds" -class Key(str, Enum): +class Key(StrEnum): TAG_NAME = "tag_name" TEXT = "text" HREF = "href" @@ -336,14 +336,14 @@ class EmptyPropertyFilter(BaseModel): ) -class EntityType(str, Enum): +class EntityType(StrEnum): ACTIONS = "actions" EVENTS = "events" DATA_WAREHOUSE = "data_warehouse" NEW_ENTITY = "new_entity" -class ErrorTrackingOrder(str, Enum): +class ErrorTrackingOrder(StrEnum): LAST_SEEN = "last_seen" FIRST_SEEN = "first_seen" UNIQUE_OCCURRENCES = "unique_occurrences" @@ -360,7 +360,7 @@ class EventDefinition(BaseModel): properties: dict[str, Any] -class CorrelationType(str, Enum): +class CorrelationType(StrEnum): SUCCESS = "success" FAILURE = "failure" @@ -418,12 +418,12 @@ class EventsQueryPersonColumn(BaseModel): uuid: str -class FilterLogicalOperator(str, Enum): +class FilterLogicalOperator(StrEnum): AND_ = "AND" OR_ = "OR" -class FunnelConversionWindowTimeUnit(str, Enum): +class FunnelConversionWindowTimeUnit(StrEnum): SECOND = "second" MINUTE = "minute" HOUR = "hour" @@ -440,7 +440,7 @@ class FunnelCorrelationResult(BaseModel): skewed: bool -class FunnelCorrelationResultsType(str, Enum): +class FunnelCorrelationResultsType(StrEnum): EVENTS = "events" PROPERTIES = "properties" EVENT_WITH_PROPERTIES = "event_with_properties" @@ -468,18 +468,18 @@ class FunnelExclusionSteps(BaseModel): funnelToStep: int -class FunnelLayout(str, Enum): +class FunnelLayout(StrEnum): HORIZONTAL = "horizontal" VERTICAL = "vertical" -class FunnelPathType(str, Enum): +class FunnelPathType(StrEnum): FUNNEL_PATH_BEFORE_STEP = "funnel_path_before_step" FUNNEL_PATH_BETWEEN_STEPS = "funnel_path_between_steps" FUNNEL_PATH_AFTER_STEP = "funnel_path_after_step" -class FunnelStepReference(str, Enum): +class FunnelStepReference(StrEnum): TOTAL = "total" PREVIOUS = "previous" @@ -492,7 +492,7 @@ class FunnelTimeToConvertResults(BaseModel): bins: list[list[int]] -class FunnelVizType(str, Enum): +class FunnelVizType(StrEnum): STEPS = "steps" TIME_TO_CONVERT = "time_to_convert" TRENDS = "trends" @@ -516,44 +516,44 @@ class HogQLNotice(BaseModel): start: Optional[int] = None -class BounceRatePageViewMode(str, Enum): +class BounceRatePageViewMode(StrEnum): COUNT_PAGEVIEWS = "count_pageviews" UNIQ_URLS = "uniq_urls" -class InCohortVia(str, Enum): +class InCohortVia(StrEnum): AUTO = "auto" LEFTJOIN = "leftjoin" SUBQUERY = "subquery" LEFTJOIN_CONJOINED = "leftjoin_conjoined" -class MaterializationMode(str, Enum): +class MaterializationMode(StrEnum): AUTO = "auto" LEGACY_NULL_AS_STRING = "legacy_null_as_string" LEGACY_NULL_AS_NULL = "legacy_null_as_null" DISABLED = "disabled" -class PersonsArgMaxVersion(str, Enum): +class PersonsArgMaxVersion(StrEnum): AUTO = "auto" V1 = "v1" V2 = "v2" -class PersonsJoinMode(str, 
Enum): +class PersonsJoinMode(StrEnum): INNER = "inner" LEFT = "left" -class PersonsOnEventsMode(str, Enum): +class PersonsOnEventsMode(StrEnum): DISABLED = "disabled" PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_no_override_properties_on_events" PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_override_properties_on_events" PERSON_ID_OVERRIDE_PROPERTIES_JOINED = "person_id_override_properties_joined" -class SessionTableVersion(str, Enum): +class SessionTableVersion(StrEnum): AUTO = "auto" V1 = "v1" V2 = "v2" @@ -586,7 +586,7 @@ class HogQueryResponse(BaseModel): stdout: Optional[str] = None -class Compare(str, Enum): +class Compare(StrEnum): CURRENT = "current" PREVIOUS = "previous" @@ -626,7 +626,7 @@ class InsightDateRange(BaseModel): ) -class InsightFilterProperty(str, Enum): +class InsightFilterProperty(StrEnum): TRENDS_FILTER = "trendsFilter" FUNNELS_FILTER = "funnelsFilter" RETENTION_FILTER = "retentionFilter" @@ -635,7 +635,7 @@ class InsightFilterProperty(str, Enum): LIFECYCLE_FILTER = "lifecycleFilter" -class InsightNodeKind(str, Enum): +class InsightNodeKind(StrEnum): TRENDS_QUERY = "TrendsQuery" FUNNELS_QUERY = "FunnelsQuery" RETENTION_QUERY = "RetentionQuery" @@ -644,7 +644,7 @@ class InsightNodeKind(str, Enum): LIFECYCLE_QUERY = "LifecycleQuery" -class InsightType(str, Enum): +class InsightType(StrEnum): TRENDS = "TRENDS" STICKINESS = "STICKINESS" LIFECYCLE = "LIFECYCLE" @@ -656,7 +656,7 @@ class InsightType(str, Enum): HOG = "HOG" -class IntervalType(str, Enum): +class IntervalType(StrEnum): MINUTE = "minute" HOUR = "hour" DAY = "day" @@ -664,14 +664,14 @@ class IntervalType(str, Enum): MONTH = "month" -class LifecycleToggle(str, Enum): +class LifecycleToggle(StrEnum): NEW = "new" RESURRECTING = "resurrecting" RETURNING = "returning" DORMANT = "dormant" -class NodeKind(str, Enum): +class NodeKind(StrEnum): EVENTS_NODE = "EventsNode" ACTIONS_NODE = "ActionsNode" DATA_WAREHOUSE_NODE = "DataWarehouseNode" @@ -716,7 +716,7 @@ class PathCleaningFilter(BaseModel): regex: Optional[str] = None -class PathType(str, Enum): +class PathType(StrEnum): FIELD_PAGEVIEW = "$pageview" FIELD_SCREEN = "$screen" CUSTOM_EVENT = "custom_event" @@ -765,7 +765,7 @@ class PathsFilterLegacy(BaseModel): step_limit: Optional[int] = None -class PropertyFilterType(str, Enum): +class PropertyFilterType(StrEnum): META = "meta" EVENT = "event" PERSON = "person" @@ -780,7 +780,7 @@ class PropertyFilterType(str, Enum): DATA_WAREHOUSE_PERSON_PROPERTY = "data_warehouse_person_property" -class PropertyMathType(str, Enum): +class PropertyMathType(StrEnum): AVG = "avg" SUM = "sum" MIN = "min" @@ -791,7 +791,7 @@ class PropertyMathType(str, Enum): P99 = "p99" -class PropertyOperator(str, Enum): +class PropertyOperator(StrEnum): EXACT = "exact" IS_NOT = "is_not" ICONTAINS = "icontains" @@ -909,7 +909,7 @@ class RecordingPropertyFilter(BaseModel): value: Optional[Union[str, float, list[Union[str, float]]]] = None -class Kind1(str, Enum): +class Kind1(StrEnum): ACTIONS_NODE = "ActionsNode" EVENTS_NODE = "EventsNode" @@ -927,19 +927,19 @@ class RetentionEntity(BaseModel): uuid: Optional[str] = None -class RetentionReference(str, Enum): +class RetentionReference(StrEnum): TOTAL = "total" PREVIOUS = "previous" -class RetentionPeriod(str, Enum): +class RetentionPeriod(StrEnum): HOUR = "Hour" DAY = "Day" WEEK = "Week" MONTH = "Month" -class RetentionType(str, Enum): +class RetentionType(StrEnum): RETENTION_RECURRING = "retention_recurring" RETENTION_FIRST_TIME = "retention_first_time" @@ -970,7 +970,7 @@ 
class SessionPropertyFilter(BaseModel): value: Optional[Union[str, float, list[Union[str, float]]]] = None -class StepOrderValue(str, Enum): +class StepOrderValue(StrEnum): STRICT = "strict" UNORDERED = "unordered" ORDERED = "ordered" @@ -1101,7 +1101,7 @@ class TimelineEntry(BaseModel): sessionId: Optional[str] = Field(default=None, description="Session ID. None means out-of-session events") -class YAxisScaleType(str, Enum): +class YAxisScaleType(StrEnum): LOG10 = "log10" LINEAR = "linear" @@ -1191,7 +1191,7 @@ class VizSpecificOptions(BaseModel): RETENTION: Optional[RETENTION] = None -class Kind2(str, Enum): +class Kind2(StrEnum): UNIT = "unit" DURATION_S = "duration_s" PERCENTAGE = "percentage" @@ -1238,7 +1238,7 @@ class WebOverviewQueryResponse(BaseModel): ) -class WebStatsBreakdown(str, Enum): +class WebStatsBreakdown(StrEnum): PAGE = "Page" INITIAL_PAGE = "InitialPage" EXIT_PAGE = "ExitPage" diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index e0ac5d701a3..e4ecc1ccfe3 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -1,7 +1,7 @@ import os import time from contextlib import contextmanager -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from prometheus_client import Histogram import json from typing import Any, cast @@ -430,7 +430,7 @@ class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet): # Keys are like 1619712000-1619712060 blob_key = full_key.replace(blob_prefix.rstrip("/") + "/", "") blob_key_base = blob_key.split(".")[0] # Remove the extension if it exists - time_range = [datetime.fromtimestamp(int(x) / 1000, tz=timezone.utc) for x in blob_key_base.split("-")] + time_range = [datetime.fromtimestamp(int(x) / 1000, tz=UTC) for x in blob_key_base.split("-")] sources.append( { @@ -446,7 +446,7 @@ class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet): newest_timestamp = min(sources, key=lambda k: k["end_timestamp"])["end_timestamp"] if might_have_realtime: - might_have_realtime = oldest_timestamp + timedelta(hours=24) > datetime.now(timezone.utc) + might_have_realtime = oldest_timestamp + timedelta(hours=24) > datetime.now(UTC) if might_have_realtime: sources.append( { diff --git a/posthog/session_recordings/session_recording_helpers.py b/posthog/session_recordings/session_recording_helpers.py index 8dfb1c0ad23..c54117603c9 100644 --- a/posthog/session_recordings/session_recording_helpers.py +++ b/posthog/session_recordings/session_recording_helpers.py @@ -2,7 +2,7 @@ import base64 import gzip import json from collections import defaultdict -from datetime import datetime, timezone +from datetime import datetime, UTC from typing import Any from collections.abc import Callable, Generator @@ -268,7 +268,7 @@ def is_active_event(event: SessionRecordingEventSummary) -> bool: def parse_snapshot_timestamp(timestamp: int): - return datetime.fromtimestamp(timestamp / 1000, timezone.utc) + return datetime.fromtimestamp(timestamp / 1000, UTC) def convert_to_timestamp(source: str) -> int: diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index eee3c288c3b..bed84f6be51 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -1,7 +1,7 @@ import json import time import uuid -from datetime 
import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from unittest.mock import ANY, patch, MagicMock, call from urllib.parse import urlencode @@ -395,7 +395,7 @@ class TestSessionRecordings(APIBaseTest, ClickhouseTestMixin, QueryMatchingTest) "distinct_id": "d1", "viewed": False, "recording_duration": 30, - "start_time": base_time.replace(tzinfo=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), + "start_time": base_time.replace(tzinfo=UTC).strftime("%Y-%m-%dT%H:%M:%SZ"), "end_time": (base_time + relativedelta(seconds=30)).strftime("%Y-%m-%dT%H:%M:%SZ"), "click_count": 0, "keypress_count": 0, diff --git a/posthog/tasks/test/test_process_scheduled_changes.py b/posthog/tasks/test/test_process_scheduled_changes.py index 0e1fb9b9db3..452c97a6e24 100644 --- a/posthog/tasks/test/test_process_scheduled_changes.py +++ b/posthog/tasks/test/test_process_scheduled_changes.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, UTC from posthog.models import ScheduledChange, FeatureFlag from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries from posthog.tasks.process_scheduled_changes import process_scheduled_changes @@ -21,7 +21,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): record_id=feature_flag.id, model_name="FeatureFlag", payload={"operation": "update_status", "value": True}, - scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)).isoformat(), + scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)).isoformat(), ) process_scheduled_changes() @@ -55,7 +55,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): record_id=feature_flag.id, model_name="FeatureFlag", payload=payload, - scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), + scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)), ) process_scheduled_changes() @@ -105,7 +105,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): record_id=feature_flag.id, model_name="FeatureFlag", payload=payload, - scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), + scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)), ) process_scheduled_changes() @@ -131,7 +131,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): record_id=feature_flag.id, model_name="FeatureFlag", payload=payload, - scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), + scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)), ) process_scheduled_changes() @@ -169,11 +169,11 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): "operation": "add_release_condition", "value": {"groups": [change_past_condition], "multivariate": None, "payloads": {}}, }, - scheduled_at=(datetime.now(timezone.utc) - timedelta(hours=1)), + scheduled_at=(datetime.now(UTC) - timedelta(hours=1)), ) # 2. Due in the past and already executed - change_past_executed_at = datetime.now(timezone.utc) - timedelta(hours=5) + change_past_executed_at = datetime.now(UTC) - timedelta(hours=5) change_past_executed = ScheduledChange.objects.create( team=self.team, record_id=feature_flag.id, @@ -197,7 +197,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): "operation": "add_release_condition", "value": {"groups": [change_due_now_condition], "multivariate": None, "payloads": {}}, }, - scheduled_at=datetime.now(timezone.utc), + scheduled_at=datetime.now(UTC), ) # 4. 
Due in the future @@ -206,7 +206,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): record_id=feature_flag.id, model_name="FeatureFlag", payload={"operation": "update_status", "value": False}, - scheduled_at=(datetime.now(timezone.utc) + timedelta(hours=1)), + scheduled_at=(datetime.now(UTC) + timedelta(hours=1)), ) process_scheduled_changes() diff --git a/posthog/tasks/test/test_warehouse.py b/posthog/tasks/test/test_warehouse.py index dec9da654c5..b03c04146a5 100644 --- a/posthog/tasks/test/test_warehouse.py +++ b/posthog/tasks/test/test_warehouse.py @@ -46,7 +46,7 @@ class TestWarehouse(APIBaseTest): @patch("posthog.tasks.warehouse.get_ph_client") @patch( "posthog.tasks.warehouse.DEFAULT_DATE_TIME", - datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.UTC), ) @freeze_time("2023-11-07") def test_capture_workspace_rows_synced_by_team_month_cutoff(self, mock_get_ph_client: MagicMock) -> None: @@ -87,13 +87,13 @@ class TestWarehouse(APIBaseTest): self.team.refresh_from_db() self.assertEqual( self.team.external_data_workspace_last_synced_at, - datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.UTC), ) @patch("posthog.tasks.warehouse.get_ph_client") @patch( "posthog.tasks.warehouse.DEFAULT_DATE_TIME", - datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.UTC), ) @freeze_time("2023-11-07") def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set(self, mock_get_ph_client: MagicMock) -> None: @@ -101,7 +101,7 @@ class TestWarehouse(APIBaseTest): mock_get_ph_client.return_value = mock_ph_client self.team.external_data_workspace_last_synced_at = datetime.datetime( - 2023, 10, 30, 19, 32, 41, tzinfo=datetime.timezone.utc + 2023, 10, 30, 19, 32, 41, tzinfo=datetime.UTC ) self.team.save() @@ -142,5 +142,5 @@ class TestWarehouse(APIBaseTest): self.team.refresh_from_db() self.assertEqual( self.team.external_data_workspace_last_synced_at, - datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.UTC), ) diff --git a/posthog/tasks/warehouse.py b/posthog/tasks/warehouse.py index ff76f40e344..0bff919c739 100644 --- a/posthog/tasks/warehouse.py +++ b/posthog/tasks/warehouse.py @@ -18,7 +18,7 @@ logger = structlog.get_logger(__name__) MONTHLY_LIMIT = 500_000_000 # TODO: adjust to whenever billing officially starts -DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.timezone.utc) +DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.UTC) def capture_external_data_rows_synced() -> None: @@ -91,7 +91,7 @@ def check_synced_row_limits_of_team(team_id: int) -> None: def capture_workspace_rows_synced_by_team(team_id: int) -> None: ph_client = get_ph_client() team = Team.objects.get(pk=team_id) - now = datetime.datetime.now(datetime.timezone.utc) + now = datetime.datetime.now(datetime.UTC) begin = team.external_data_workspace_last_synced_at or DEFAULT_DATE_TIME team.external_data_workspace_last_synced_at = now diff --git a/posthog/temporal/batch_exports/backfill_batch_export.py b/posthog/temporal/batch_exports/backfill_batch_export.py index 75df851caef..c7e6d53c47a 100644 --- a/posthog/temporal/batch_exports/backfill_batch_export.py +++ b/posthog/temporal/batch_exports/backfill_batch_export.py @@ -114,7 +114,7 @@ class BackfillScheduleInputs: 
def get_utcnow(): """Return the current time in UTC. This function is only required for mocking during tests, because mocking the global datetime breaks Temporal.""" - return dt.datetime.now(dt.timezone.utc) + return dt.datetime.now(dt.UTC) @temporalio.activity.defn diff --git a/posthog/temporal/batch_exports/squash_person_overrides.py b/posthog/temporal/batch_exports/squash_person_overrides.py index eac97a38a6f..69ecd87c0da 100644 --- a/posthog/temporal/batch_exports/squash_person_overrides.py +++ b/posthog/temporal/batch_exports/squash_person_overrides.py @@ -5,7 +5,7 @@ import contextlib import json import typing from dataclasses import dataclass, field -from datetime import date, datetime, timedelta, timezone +from datetime import date, datetime, timedelta, timezone, UTC from temporalio import activity, workflow from temporalio.common import RetryPolicy @@ -14,7 +14,7 @@ from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.heartbeat import Heartbeater -EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc) +EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=UTC) CREATE_TABLE_PERSON_DISTINCT_ID_OVERRIDES_JOIN = """ @@ -174,7 +174,7 @@ MUTATIONS = { } -def parse_clickhouse_timestamp(s: str, tzinfo: timezone = timezone.utc) -> datetime: +def parse_clickhouse_timestamp(s: str, tzinfo: timezone = UTC) -> datetime: """Parse a timestamp from ClickHouse.""" return datetime.strptime(s.strip(), "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=tzinfo) diff --git a/posthog/temporal/tests/batch_exports/conftest.py b/posthog/temporal/tests/batch_exports/conftest.py index 98bd2e80422..617cfe1559b 100644 --- a/posthog/temporal/tests/batch_exports/conftest.py +++ b/posthog/temporal/tests/batch_exports/conftest.py @@ -203,7 +203,7 @@ def data_interval_start(data_interval_end, interval): @pytest.fixture def data_interval_end(interval): """Set a test data interval end.""" - return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) @pytest_asyncio.fixture diff --git a/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py b/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py index f8823710c27..e0eb79ab10c 100644 --- a/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py +++ b/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py @@ -60,66 +60,66 @@ async def temporal_schedule(temporal_client, team): "start_at,end_at,step,expected", [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), dt.timedelta(days=1), [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), ) ], ), ( - dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 1, 12, 20, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 12, 20, 0, tzinfo=dt.UTC), dt.timedelta(hours=1), [ ( - dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.UTC), ), ( - dt.datetime(2023, 1, 1, 
11, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC), ), ], ), ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), dt.timedelta(hours=12), [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC), ), ( - dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), ), ], ), ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.UTC), dt.timedelta(days=1), [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), ), ( - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.UTC), ), ( - dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.UTC), ), ( - dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.UTC), ), ], ), @@ -145,8 +145,8 @@ async def test_get_schedule_frequency(activity_environment, temporal_worker, tem @pytest.mark.django_db(transaction=True) async def test_backfill_schedule_activity(activity_environment, temporal_worker, temporal_client, temporal_schedule): """Test backfill_schedule activity schedules all backfill runs.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) desc = await temporal_schedule.describe() inputs = BackfillScheduleInputs( @@ -199,8 +199,8 @@ async def test_backfill_schedule_activity(activity_environment, temporal_worker, @pytest.mark.django_db(transaction=True) async def test_backfill_batch_export_workflow(temporal_worker, temporal_schedule, temporal_client, team): """Test BackfillBatchExportWorkflow executes all backfill runs and updates model.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) desc = await temporal_schedule.describe() @@ -275,9 +275,9 @@ async def test_backfill_batch_export_workflow_no_end_at( """Test BackfillBatchExportWorkflow executes all backfill runs and updates model.""" # Note the 
mocked time here, we should stop backfilling at 8 minutes and unpause the job. - mock_utcnow.return_value = dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.timezone.utc) + mock_utcnow.return_value = dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.UTC) - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) end_at = None desc = await temporal_schedule.describe() @@ -356,8 +356,8 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted( temporal_worker, temporal_schedule, temporal_client, team ): """Test BackfillBatchExportWorkflow fails when its underlying Temporal Schedule is deleted.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) desc = await temporal_schedule.describe() @@ -398,8 +398,8 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted_after_ In this test, in contrats to the previous one, we wait until we have started running some backfill runs before cancelling. """ - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) desc = await temporal_schedule.describe() @@ -471,8 +471,8 @@ async def test_backfill_batch_export_workflow_is_cancelled_on_repeated_failures( temporal_worker, failing_s3_batch_export, temporal_client, ateam, clickhouse_client ): """Test BackfillBatchExportWorkflow will be cancelled on repeated failures.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) - end_at = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + end_at = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.UTC) # We need some data otherwise the S3 batch export will not fail as it short-circuits. for d in date_range(start_at, end_at, dt.timedelta(minutes=5)): diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py b/posthog/temporal/tests/batch_exports/test_batch_exports.py index 2634da9c1df..dda307dda00 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_exports.py +++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py @@ -41,9 +41,7 @@ def assert_records_match_events(records, events): key in ("timestamp", "_inserted_at", "created_at") and expected.get(key.removeprefix("_"), None) is not None ): - assert value == dt.datetime.fromisoformat(expected[key.removeprefix("_")]).replace( - tzinfo=dt.timezone.utc - ), msg + assert value == dt.datetime.fromisoformat(expected[key.removeprefix("_")]).replace(tzinfo=dt.UTC), msg elif isinstance(expected[key], dict): assert value == json.dumps(expected[key]), msg else: @@ -289,7 +287,7 @@ async def test_iter_records_with_single_field_and_alias(clickhouse_client, field if isinstance(result, dt.datetime): # Event generation function returns datetimes as strings. 
- expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.timezone.utc) + expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.UTC) assert result == expected_value @@ -388,16 +386,16 @@ async def test_iter_records_uses_extra_query_parameters(clickhouse_client): "hour", "2023-08-01T00:00:00+00:00", ( - dt.datetime(2023, 7, 31, 23, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 7, 31, 23, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.UTC), ), ), ( "day", "2023-08-01T00:00:00+00:00", ( - dt.datetime(2023, 7, 31, 0, 0, 0, tzinfo=dt.timezone.utc), - dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 7, 31, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.UTC), ), ), ], diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py index 99802232b9a..fc3ee12b981 100644 --- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py @@ -50,7 +50,7 @@ SKIP_IF_MISSING_GOOGLE_APPLICATION_CREDENTIALS = pytest.mark.skipif( pytestmark = [SKIP_IF_MISSING_GOOGLE_APPLICATION_CREDENTIALS, pytest.mark.asyncio, pytest.mark.django_db] -TEST_TIME = dt.datetime.now(dt.timezone.utc) +TEST_TIME = dt.datetime.now(dt.UTC) async def assert_clickhouse_records_in_bigquery( @@ -144,7 +144,7 @@ async def assert_clickhouse_records_in_bigquery( if k in json_columns and v is not None: expected_record[k] = json.loads(v) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.timezone.utc) + expected_record[k] = v.replace(tzinfo=dt.UTC) else: expected_record[k] = v @@ -298,7 +298,7 @@ async def test_insert_into_bigquery_activity_inserts_data_into_bigquery_table( with freeze_time(TEST_TIME) as frozen_time: await activity_environment.run(insert_into_bigquery_activity, insert_inputs) - ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) + ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, @@ -352,7 +352,7 @@ async def test_insert_into_bigquery_activity_merges_data_in_follow_up_runs( with freeze_time(TEST_TIME) as frozen_time: await activity_environment.run(insert_into_bigquery_activity, insert_inputs) - ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) + ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, @@ -393,7 +393,7 @@ async def test_insert_into_bigquery_activity_merges_data_in_follow_up_runs( with freeze_time(TEST_TIME) as frozen_time: await activity_environment.run(insert_into_bigquery_activity, insert_inputs) - ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) + ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, @@ -523,7 +523,7 @@ async def test_bigquery_export_workflow( persons_to_export_created ) - ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) + ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, clickhouse_client=clickhouse_client, @@ -773,7 +773,7 @@ async def test_bigquery_export_workflow_handles_cancellation(ateam, 
bigquery_bat ([{"test": 6.0}], [bigquery.SchemaField("test", "FLOAT64")]), ([{"test": True}], [bigquery.SchemaField("test", "BOOL")]), ([{"test": dt.datetime.now()}], [bigquery.SchemaField("test", "TIMESTAMP")]), - ([{"test": dt.datetime.now(tz=dt.timezone.utc)}], [bigquery.SchemaField("test", "TIMESTAMP")]), + ([{"test": dt.datetime.now(tz=dt.UTC)}], [bigquery.SchemaField("test", "TIMESTAMP")]), ( [ { @@ -783,7 +783,7 @@ async def test_bigquery_export_workflow_handles_cancellation(ateam, bigquery_bat "test_float": 6.0, "test_bool": False, "test_timestamp": dt.datetime.now(), - "test_timestamptz": dt.datetime.now(tz=dt.timezone.utc), + "test_timestamptz": dt.datetime.now(tz=dt.UTC), } ], [ diff --git a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py index 4dfb8563ff9..5821a8a98a9 100644 --- a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py @@ -99,7 +99,7 @@ async def assert_clickhouse_records_in_mock_server( if k == "properties": expected_record[k] = json.loads(v) if v else {} elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.timezone.utc).isoformat() + expected_record[k] = v.replace(tzinfo=dt.UTC).isoformat() else: expected_record[k] = v @@ -134,8 +134,8 @@ async def test_insert_into_http_activity_inserts_data_into_http_endpoint( * Are not duplicates of other events that are in the same batch. * Do not have an event name contained in the batch export's exclude_events. """ - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. @@ -211,8 +211,8 @@ async def test_insert_into_http_activity_throws_on_bad_http_status( clickhouse_client, activity_environment, http_config, exclude_events ): """Test that the insert_into_http_activity function throws on status >= 400""" - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. 
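Note on the timezone.utc -> UTC substitutions above: datetime.UTC, added in Python 3.11, is an alias for the same timezone.utc singleton, so the replacements are behavior-preserving. A minimal sketch of the equivalence (the `dt` alias mirrors the import style used in these test modules):

    import datetime as dt

    # datetime.UTC (new in Python 3.11) is the very same object as datetime.timezone.utc,
    # so aware datetimes built with either spelling are interchangeable.
    assert dt.UTC is dt.timezone.utc
    now_new = dt.datetime.now(dt.UTC)           # spelling adopted by this patch
    now_old = dt.datetime.now(dt.timezone.utc)  # pre-3.11 spelling being replaced
    assert now_new.tzinfo == now_old.tzinfo
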
diff --git a/posthog/temporal/tests/batch_exports/test_logger.py b/posthog/temporal/tests/batch_exports/test_logger.py index 4ee3ca9a014..3ee605882e3 100644 --- a/posthog/temporal/tests/batch_exports/test_logger.py +++ b/posthog/temporal/tests/batch_exports/test_logger.py @@ -211,13 +211,13 @@ BATCH_EXPORT_ID = str(uuid.uuid4()) "activity_environment", [ ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}", + workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}", workflow_type="s3-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), ), ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}", + workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}", workflow_type="backfill-batch-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), @@ -262,13 +262,13 @@ async def test_batch_exports_logger_binds_activity_context( "activity_environment", [ ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}", + workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}", workflow_type="s3-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), ), ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}", + workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}", workflow_type="backfill-batch-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), @@ -324,13 +324,13 @@ def log_entries_table(): "activity_environment", [ ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}", + workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}", workflow_type="s3-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), ), ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}", + workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}", workflow_type="backfill-batch-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), diff --git a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py index 3d4722fe6db..54f638a68d6 100644 --- a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py @@ -117,7 +117,7 @@ async def assert_clickhouse_records_in_postgres( if k in {"properties", "set", "set_once", "person_properties"} and v is not None: expected_record[k] = json.loads(v) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.timezone.utc) + expected_record[k] = v.replace(tzinfo=dt.UTC) else: expected_record[k] = v @@ -201,8 +201,8 @@ async def test_insert_into_postgres_activity_inserts_data_into_postgres_table( development postgres instance for testing. But we setup and manage our own database to avoid conflicting with PostHog itself. """ - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. 
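Note on the `class Foo(str, Enum)` -> `class Foo(StrEnum)` swaps elsewhere in this patch: enum.StrEnum is new in Python 3.11; its members are still str instances and still compare equal to their values, but str(member) returns the bare value rather than "ClassName.MEMBER". A minimal sketch of the difference, assuming Python 3.11 (OldKind/NewKind are illustrative names, not classes from this repository):

    from enum import Enum, StrEnum

    class OldKind(str, Enum):   # pre-3.11 spelling being replaced
        PLAINTEXT = "PLAINTEXT"

    class NewKind(StrEnum):     # 3.11+ spelling adopted by this patch
        PLAINTEXT = "PLAINTEXT"

    # Both behave as strings for comparisons and serialization...
    assert OldKind.PLAINTEXT == "PLAINTEXT" and NewKind.PLAINTEXT == "PLAINTEXT"
    # ...but only StrEnum stringifies to the bare value.
    assert str(OldKind.PLAINTEXT) == "OldKind.PLAINTEXT"
    assert str(NewKind.PLAINTEXT) == "PLAINTEXT"
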
diff --git a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py index db8257a7ee5..40071bd153b 100644 --- a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py @@ -130,7 +130,7 @@ async def assert_clickhouse_records_in_redshfit( remove_escaped_whitespace_recursive(json.loads(v)), ensure_ascii=False ) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.timezone.utc) # type: ignore + expected_record[k] = v.replace(tzinfo=dt.UTC) # type: ignore else: expected_record[k] = v @@ -242,8 +242,8 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( Once we have these events, we pass them to the assert_events_in_redshift function to check that they appear in the expected Redshift table. """ - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. diff --git a/posthog/temporal/tests/batch_exports/test_run_updates.py b/posthog/temporal/tests/batch_exports/test_run_updates.py index 1e50e13325b..649585f5283 100644 --- a/posthog/temporal/tests/batch_exports/test_run_updates.py +++ b/posthog/temporal/tests/batch_exports/test_run_updates.py @@ -85,8 +85,8 @@ async def test_start_batch_export_run(activity_environment, team, batch_export): We check if a 'BatchExportRun' is created after the activity runs. 
""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) - end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) + start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) + end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) inputs = StartBatchExportRunInputs( team_id=team.id, @@ -110,8 +110,8 @@ async def test_start_batch_export_run(activity_environment, team, batch_export): @pytest.mark.asyncio async def test_finish_batch_export_run(activity_environment, team, batch_export): """Test the export_run_status activity.""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) - end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) + start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) + end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) inputs = StartBatchExportRunInputs( team_id=team.id, @@ -145,8 +145,8 @@ async def test_finish_batch_export_run(activity_environment, team, batch_export) @pytest.mark.asyncio async def test_finish_batch_export_run_pauses_if_reaching_failure_threshold(activity_environment, team, batch_export): """Test if 'finish_batch_export_run' will pause a batch export upon reaching failure_threshold.""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) - end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) + start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) + end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) inputs = StartBatchExportRunInputs( team_id=team.id, @@ -183,8 +183,8 @@ async def test_finish_batch_export_run_pauses_if_reaching_failure_threshold(acti @pytest.mark.asyncio async def test_finish_batch_export_run_never_pauses_with_small_check_window(activity_environment, team, batch_export): """Test if 'finish_batch_export_run' will never pause a batch export with a small check window.""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) - end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) + start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) + end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) inputs = StartBatchExportRunInputs( team_id=team.id, diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index 41863344d84..1462fd03b0b 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -981,8 +981,8 @@ async def test_insert_into_snowflake_activity_inserts_data_into_snowflake_table( that they appear in the expected Snowflake table. This function runs against a real Snowflake instance, so the environment should be populated with the necessary credentials. 
""" - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) team_id = random.randint(1, 1000000) await generate_test_events_in_clickhouse( diff --git a/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py b/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py index 19bf42bad8f..71a5bff99c5 100644 --- a/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py +++ b/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py @@ -1,7 +1,7 @@ import operator import random from collections import defaultdict -from datetime import datetime, timezone +from datetime import datetime, UTC from typing import NamedTuple, TypedDict from uuid import UUID, uuid4 @@ -862,7 +862,7 @@ async def test_delete_person_overrides_mutation_within_grace_period( activity_environment, events_to_override, person_overrides_data, clickhouse_client ): """Test we do not delete person overrides if they are within the grace period.""" - now = datetime.now(tz=timezone.utc) + now = datetime.now(tz=UTC) override_timestamp = int(now.timestamp()) team_id, person_override = next(iter(person_overrides_data.items())) distinct_id, _ = next(iter(person_override)) @@ -914,7 +914,7 @@ async def test_delete_person_overrides_mutation_within_grace_period( assert int(row[0]) == not_deleted_person["team_id"] assert row[1] == not_deleted_person["distinct_id"] assert UUID(row[2]) == UUID(not_deleted_person["person_id"]) - _timestamp = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc) + _timestamp = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S").replace(tzinfo=UTC) # _timestamp is up to second precision assert _timestamp == now.replace(microsecond=0) diff --git a/posthog/temporal/tests/test_clickhouse.py b/posthog/temporal/tests/test_clickhouse.py index 0d02dcd5bf7..79003f718e6 100644 --- a/posthog/temporal/tests/test_clickhouse.py +++ b/posthog/temporal/tests/test_clickhouse.py @@ -23,12 +23,12 @@ from posthog.temporal.common.clickhouse import encode_clickhouse_data (("; DROP TABLE events --",), b"('; DROP TABLE events --')"), (("'a'); DROP TABLE events --",), b"('\\'a\\'); DROP TABLE events --')"), ( - dt.datetime(2023, 7, 14, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 7, 14, 0, 0, 0, tzinfo=dt.UTC), b"toDateTime('2023-07-14 00:00:00', 'UTC')", ), (dt.datetime(2023, 7, 14, 0, 0, 0), b"toDateTime('2023-07-14 00:00:00')"), ( - dt.datetime(2023, 7, 14, 0, 0, 0, 5555, tzinfo=dt.timezone.utc), + dt.datetime(2023, 7, 14, 0, 0, 0, 5555, tzinfo=dt.UTC), b"toDateTime64('2023-07-14 00:00:00.005555', 6, 'UTC')", ), ], diff --git a/posthog/temporal/tests/utils/datetimes.py b/posthog/temporal/tests/utils/datetimes.py index c168e885a3e..d5a3f747bf8 100644 --- a/posthog/temporal/tests/utils/datetimes.py +++ b/posthog/temporal/tests/utils/datetimes.py @@ -16,4 +16,4 @@ def to_isoformat(d: str | None) -> str | None: """Parse a string and return it as default isoformatted.""" if d is None: return None - return dt.datetime.fromisoformat(d).replace(tzinfo=dt.timezone.utc).isoformat() + return dt.datetime.fromisoformat(d).replace(tzinfo=dt.UTC).isoformat() diff --git a/posthog/test/test_datetime.py b/posthog/test/test_datetime.py index 
2b8e6b087e5..9365cffb085 100644 --- a/posthog/test/test_datetime.py +++ b/posthog/test/test_datetime.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import datetime, UTC from posthog.datetime import ( start_of_hour, @@ -23,7 +23,7 @@ def test_start_of_day(): def test_end_of_day(): assert end_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime( - 2023, 2, 8, 23, 59, 59, 999999, tzinfo=timezone.utc + 2023, 2, 8, 23, 59, 59, 999999, tzinfo=UTC ) diff --git a/posthog/utils.py b/posthog/utils.py index 18f271c4a5d..a8faf24b19e 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -1289,12 +1289,12 @@ async def wait_for_parallel_celery_group(task: Any, expires: Optional[datetime.d default_expires = datetime.timedelta(minutes=5) if not expires: - expires = datetime.datetime.now(tz=datetime.timezone.utc) + default_expires + expires = datetime.datetime.now(tz=datetime.UTC) + default_expires sleep_generator = sleep_time_generator() while not task.ready(): - if datetime.datetime.now(tz=datetime.timezone.utc) > expires: + if datetime.datetime.now(tz=datetime.UTC) > expires: child_states = [] child: AsyncResult children = task.children or [] diff --git a/posthog/warehouse/external_data_source/workspace.py b/posthog/warehouse/external_data_source/workspace.py index 0a9f9436bab..f7e80761eb1 100644 --- a/posthog/warehouse/external_data_source/workspace.py +++ b/posthog/warehouse/external_data_source/workspace.py @@ -27,7 +27,7 @@ def get_or_create_workspace(team_id: int): workspace_id = create_workspace(team_id) team.external_data_workspace_id = workspace_id # start tracking from now - team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.timezone.utc) + team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.UTC) team.save() return team.external_data_workspace_id diff --git a/production.Dockerfile b/production.Dockerfile index 1e3eb2d1155..b64293dcb69 100644 --- a/production.Dockerfile +++ b/production.Dockerfile @@ -83,7 +83,7 @@ RUN corepack enable && \ # # --------------------------------------------------------- # -FROM python:3.10.10-slim-bullseye AS posthog-build +FROM python:3.11.9-slim-bullseye AS posthog-build WORKDIR /code SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] @@ -99,10 +99,11 @@ RUN apt-get update && \ "libxmlsec1" \ "libxmlsec1-dev" \ "libffi-dev" \ + "zlib1g-dev" \ "pkg-config" \ && \ rm -rf /var/lib/apt/lists/* && \ - pip install -r requirements.txt --compile --no-cache-dir --target=/python-runtime + PIP_NO_BINARY=lxml,xmlsec pip install -r requirements.txt --compile --no-cache-dir --target=/python-runtime ENV PATH=/python-runtime/bin:$PATH \ PYTHONPATH=/python-runtime @@ -139,104 +140,7 @@ RUN apt-get update && \ # # --------------------------------------------------------- # -# Build a version of the unit docker image for python3.10 -# We can remove this step once we are on python3.11 -FROM unit:python3.11 as unit -FROM python:3.10-bullseye as unit-131-python-310 - -# copied from https://github.com/nginx/unit/blob/master/pkg/docker/Dockerfile.python3.11 -LABEL org.opencontainers.image.title="Unit (python3.10)" -LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
-LABEL org.opencontainers.image.url="https://unit.nginx.org" -LABEL org.opencontainers.image.source="https://github.com/nginx/unit" -LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" -LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers " -LABEL org.opencontainers.image.version="1.31.1" - -RUN set -ex \ - && savedAptMark="$(apt-mark showmanual)" \ - && apt-get update \ - && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \ - && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ - && mkdir -p /usr/src/unit \ - && cd /usr/src/unit \ - && hg clone -u 1.31.1-1 https://hg.nginx.org/unit \ - && cd unit \ - && NCPU="$(getconf _NPROCESSORS_ONLN)" \ - && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ - && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \ - && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \ - && CONFIGURE_ARGS_MODULES="--prefix=/usr \ - --statedir=/var/lib/unit \ - --control=unix:/var/run/control.unit.sock \ - --runstatedir=/var/run \ - --pid=/var/run/unit.pid \ - --logdir=/var/log \ - --log=/var/log/unit.log \ - --tmpdir=/var/tmp \ - --user=unit \ - --group=unit \ - --openssl \ - --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \ - && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \ - --njs" \ - && make -j $NCPU -C pkg/contrib .njs \ - && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \ - && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ - && make -j $NCPU unitd \ - && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \ - && make clean \ - && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \ - && make -j $NCPU unitd \ - && install -pm755 build/sbin/unitd /usr/sbin/unitd \ - && make clean \ - && /bin/true \ - && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ - && ./configure python --config=/usr/local/bin/python3-config \ - && make -j $NCPU python3-install \ - && make clean \ - && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \ - && ./configure python --config=/usr/local/bin/python3-config \ - && make -j $NCPU python3-install \ - && cd \ - && rm -rf /usr/src/unit \ - && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \ - ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \ - done \ - && apt-mark showmanual | xargs apt-mark auto > /dev/null \ - && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ - && /bin/true \ - && mkdir -p /var/lib/unit/ \ - && mkdir -p /docker-entrypoint.d/ \ - && groupadd --gid 998 unit \ - && useradd \ - --uid 998 \ - --gid unit \ - --no-create-home \ - --home /nonexistent \ - --comment "unit user" \ - --shell /bin/false \ - unit \ - && apt-get update \ - && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \ - && apt-get purge -y --auto-remove build-essential \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /requirements.apt \ - && ln -sf /dev/stdout /var/log/unit.log - -COPY --from=unit 
/usr/local/bin/docker-entrypoint.sh /usr/local/bin/ -COPY --from=unit /usr/share/unit/welcome/welcome.* /usr/share/unit/welcome/ - -STOPSIGNAL SIGTERM - -ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"] -EXPOSE 80 -CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"] - -# -# --------------------------------------------------------- -# -FROM unit-131-python-310 +FROM unit:python3.11 WORKDIR /code SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] ENV PYTHONUNBUFFERED 1 @@ -265,7 +169,7 @@ RUN apt-get install -y --no-install-recommends \ # Install and use a non-root user. RUN groupadd -g 1000 posthog && \ - useradd -u 999 -r -g posthog posthog && \ + useradd -r -g posthog posthog && \ chown posthog:posthog /code USER posthog diff --git a/pyproject.toml b/pyproject.toml index 58de4e0f9f6..e4861307d72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] -requires-python = ">=3.10" +requires-python = ">=3.11" [tool.black] line-length = 120 -target-version = ['py310'] +target-version = ['py311'] [tool.isort] profile = "black" diff --git a/requirements-dev.in b/requirements-dev.in index 03858feaa89..a2413e07cf1 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -11,7 +11,7 @@ -c requirements.txt -ruff~=0.4.3 +ruff~=0.4.10 mypy~=1.10.0 mypy-baseline~=0.7.0 mypy-extensions==1.0.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index dbf468cd45b..c534a931f0c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -288,8 +288,7 @@ ruamel-yaml==0.18.6 # via prance ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.4.3 - # via -r requirements-dev.in +ruff==0.4.10 six==1.16.0 # via # -c requirements.txt diff --git a/requirements.in b/requirements.in index 9a4dcaa36ee..af1e1c39a26 100644 --- a/requirements.in +++ b/requirements.in @@ -29,7 +29,7 @@ django-redis==5.2.0 django-statsd==2.5.2 django-structlog==2.1.3 django-revproxy==0.12.0 -djangorestframework==3.14.0 +djangorestframework==3.15.1 djangorestframework-csv==2.1.1 djangorestframework-dataclasses==1.2.0 django-fernet-encrypted-fields==0.1.3 diff --git a/requirements.txt b/requirements.txt index dcb21290076..e95a2b28ad5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -198,7 +198,7 @@ django-structlog==2.1.3 # via -r requirements.in django-two-factor-auth==1.14.0 # via -r requirements.in -djangorestframework==3.14.0 +djangorestframework==3.15.1 # via # -r requirements.in # djangorestframework-csv @@ -475,7 +475,6 @@ pytz==2023.3 # via # -r requirements.in # clickhouse-driver - # djangorestframework # dlt # infi-clickhouse-orm # pandas diff --git a/unit.json.tpl b/unit.json.tpl index ef1ba4b3ffe..42f23a75a03 100644 --- a/unit.json.tpl +++ b/unit.json.tpl @@ -39,7 +39,7 @@ }, "applications": { "posthog": { - "type": "python 3.10", + "type": "python 3.11", "processes": $NGINX_UNIT_APP_PROCESSES, "working_directory": "/code", "path": ".", @@ -51,7 +51,7 @@ } }, "metrics": { - "type": "python 3.10", + "type": "python 3.11", "processes": 1, "working_directory": "/code/bin", "path": ".",