Mirror of https://github.com/PostHog/posthog.git
chore: upgrade python to 3.11 (#23206)
Commit: eac199d24a
Parent: 56e7a4c469
2  .github/actions/run-backend-tests/action.yml (vendored)
@@ -6,7 +6,7 @@ name: Run Django tests
 inputs:
 python-version:
 required: true
-description: Python version, e.g. 3.10.10
+description: Python version, e.g. 3.11.9
 clickhouse-server-image:
 required: true
 description: ClickHouse server image tag, e.g. clickhouse/clickhouse-server:latest
2  .github/workflows/benchmark.yml (vendored)
@@ -54,7 +54,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
2  .github/workflows/build-hogql-parser.yml (vendored)
@@ -73,7 +73,7 @@ jobs:
 - if: ${{ !endsWith(matrix.os, '-arm') }}
 uses: actions/setup-python@v4
 with:
-python-version: '3.10'
+python-version: '3.11'

 # Compiling Python 3.11 from source on ARM. We tried using the "deadsnakes" ARM repo, but it was flakey.
 - if: ${{ endsWith(matrix.os, '-arm') }}
@@ -28,7 +28,7 @@ jobs:
 concurrency: 1
 group: 1
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
-python-version: '3.10.10'
+python-version: '3.11.9'
 clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine'
 segment: 'FOSS'
 person-on-events: false
10  .github/workflows/ci-backend.yml (vendored)
@@ -108,7 +108,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
@@ -163,7 +163,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
@@ -232,7 +232,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-python-version: ['3.10.10']
+python-version: ['3.11.9']
 clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.5.81-alpine']
 segment: ['Core']
 person-on-events: [false, true]
@@ -243,7 +243,7 @@ jobs:
 - segment: 'Temporal'
 person-on-events: false
 clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine'
-python-version: '3.10.10'
+python-version: '3.11.9'
 concurrency: 1
 group: 1

@@ -331,7 +331,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
2  .github/workflows/ci-hog.yml (vendored)
@@ -70,7 +70,7 @@ jobs:
 if: needs.changes.outputs.hog == 'true'
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
4  .github/workflows/ci-plugin-server.yml (vendored)
@@ -115,7 +115,7 @@ jobs:
 if: needs.changes.outputs.plugin-server == 'true'
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
@@ -207,7 +207,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: 3.10.10
+python-version: 3.11.9
 cache: 'pip'
 cache-dependency-path: '**/requirements*.txt'
 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
@@ -4,25 +4,27 @@ set -e

 # Generate schema.py from schema.json
 datamodel-codegen \
---class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp \
+--class-name='SchemaRoot' --collapse-root-models --target-python-version 3.11 --disable-timestamp \
 --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum \
 --input frontend/src/queries/schema.json --input-file-type jsonschema \
 --output posthog/schema.py --output-model-type pydantic_v2.BaseModel \
 --custom-file-header "# mypy: disable-error-code=\"assignment\"" \
 --set-default-enum-member --capitalise-enum-members \
 --wrap-string-literal

 # Format schema.py
 ruff format posthog/schema.py

 # Check schema.py and autofix
 ruff check --fix posthog/schema.py
 # HACK: Datamodel-codegen output for enum-type fields with a default is invalid – the default value is a plain string,
 # and not the expected enum member. We fix this using sed, which is pretty hacky, but does the job.
 # Specifically, we need to replace `Optional[PropertyOperator] = "exact"`
 # with `Optional[PropertyOperator] = PropertyOperator("exact")` to make the default value valid.
 # Remove this when https://github.com/koxudaxi/datamodel-code-generator/issues/1929 is resolved.

+# Replace class Foo(str, Enum) with class Foo(StrEnum) for proper handling in format strings in python 3.11
+# Remove this when https://github.com/koxudaxi/datamodel-code-generator/issues/1313 is resolved
 if [[ "$OSTYPE" == "darwin"* ]]; then
 # sed needs `-i` to be followed by `''` on macOS
 sed -i '' -e 's/Optional\[PropertyOperator\] = \("[A-Za-z_]*"\)/Optional[PropertyOperator] = PropertyOperator(\1)/g' posthog/schema.py
+sed -i '' -e 's/str, Enum/StrEnum/g' posthog/schema.py
+sed -i '' 's/from enum import Enum/from enum import Enum, StrEnum/g' posthog/schema.py
 else
 sed -i -e 's/Optional\[PropertyOperator\] = \("[A-Za-z_]*"\)/Optional[PropertyOperator] = PropertyOperator(\1)/g' posthog/schema.py
+sed -i -e 's/str, Enum/StrEnum/g' posthog/schema.py
+sed -i 's/from enum import Enum/from enum import Enum, StrEnum/g' posthog/schema.py
 fi
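For context on the sed hack above (editorial note, not part of the diff): Python 3.11 changed how enums with a mixed-in `str` base are rendered by `format()` and f-strings, which is why the generated `class Foo(str, Enum)` definitions are rewritten to `enum.StrEnum`. A minimal sketch of the behaviour difference, runnable on Python 3.11+:

from enum import Enum, StrEnum  # StrEnum is new in Python 3.11


class OldStyle(str, Enum):
    EXACT = "exact"


class NewStyle(StrEnum):
    EXACT = "exact"


# On Python 3.11 the (str, Enum) mixin formats as the qualified member name,
# while StrEnum keeps formatting as the plain value (what 3.10 used to do):
print(f"{OldStyle.EXACT}")  # "OldStyle.EXACT" on 3.11 (was "exact" on 3.10)
print(f"{NewStyle.EXACT}")  # "exact"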
@@ -20,7 +20,7 @@
 "first_name": "",
 "last_name": "",
 "email": "",
-"is_email_verified": false
+"is_email_verified": null
 }
 },
 "children": [
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from django.core.cache import cache
 from flaky import flaky
 from rest_framework import status
@@ -1601,8 +1601,8 @@ class TestExperimentAuxiliaryEndpoints(ClickhouseTestMixin, APILicensedTest):
 explicit_datetime = parser.isoparse(target_filter["explicit_datetime"])

 self.assertTrue(
-explicit_datetime <= datetime.now(timezone.utc) - timedelta(days=5)
-and explicit_datetime >= datetime.now(timezone.utc) - timedelta(days=5, hours=1)
+explicit_datetime <= datetime.now(UTC) - timedelta(days=5)
+and explicit_datetime >= datetime.now(UTC) - timedelta(days=5, hours=1)
 )

 cohort_id = cohort["id"]
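A side note on the `timezone.utc` → `UTC` rewrites in this and the following files (editorial, not part of the diff): `datetime.UTC` was added in Python 3.11 as an alias of `datetime.timezone.utc`, so the change is purely a spelling simplification. A minimal sketch:

import datetime as dt
from datetime import UTC, datetime, timezone  # UTC needs Python 3.11+

# UTC is the very same singleton as timezone.utc, just shorter to import and spell.
assert UTC is timezone.utc
assert dt.UTC is dt.timezone.utc

now = datetime.now(UTC)  # equivalent to datetime.now(timezone.utc)
print(now.tzinfo)        # UTC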
@@ -1,4 +1,4 @@
-from datetime import timezone, datetime
+from datetime import datetime, UTC

 from dateutil.parser import isoparse

@@ -23,7 +23,7 @@ class TestSummarizeSessions(BaseTest):
 ["$pageview", isoparse("2021-01-01T00:00:02Z")],
 ],
 ),
-datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
+datetime(2021, 1, 1, 0, 0, 0, tzinfo=UTC),
 )
 assert processed.columns == ["event", "milliseconds_since_start"]
 assert processed.results == [["$pageview", 0], ["$pageview", 1000], ["$pageview", 2000]]
@@ -1,5 +1,5 @@
 import gzip
-from datetime import timedelta, datetime, timezone
+from datetime import timedelta, datetime, UTC
 from secrets import token_urlsafe
 from unittest.mock import patch, MagicMock
 from uuid import uuid4
@@ -84,7 +84,7 @@ class TestSessionRecordingExtensions(ClickhouseTestMixin, APIBaseTest):

 def test_persists_recording_from_blob_ingested_storage(self):
 with self.settings(OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER=TEST_BUCKET):
-two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=timezone.utc)
+two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=UTC)

 with freeze_time(two_minutes_ago):
 session_id = f"test_persists_recording_from_blob_ingested_storage-s1-{uuid4()}"
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from unittest import mock
 from unittest.mock import MagicMock, patch
 from uuid import uuid4
@@ -187,7 +187,7 @@ class TestSessionRecordingPlaylist(APILicensedTest):

 session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}"
 session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}"
-three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc)
+three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC)

 produce_replay_summary(
 team_id=self.team.id,
@@ -242,7 +242,7 @@ class TestSessionRecordingPlaylist(APILicensedTest):

 session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}"
 session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}"
-three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc)
+three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC)

 for session_id in [session_one, session_two]:
 produce_replay_summary(
@@ -56,7 +56,7 @@ def generate_assets(
 # Wait for all assets to be exported
 tasks = [exporter.export_asset.si(asset.id) for asset in assets]
 # run them one after the other, so we don't exhaust celery workers
-exports_expire = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(
+exports_expire = datetime.datetime.now(tz=datetime.UTC) + datetime.timedelta(
 minutes=settings.PARALLEL_ASSET_GENERATION_MAX_TIMEOUT_MINUTES
 )
 parallel_job = chain(*tasks).apply_async(expires=exports_expire, retry=False)
2  mypy.ini
@@ -1,5 +1,5 @@
 [mypy]
-python_version = 3.10
+python_version = 3.11
 plugins =
 mypy_django_plugin.main,
 mypy_drf_plugin.main,
@@ -90,9 +90,7 @@ class AppMetricsViewSet(TeamAndOrgViewSetMixin, mixins.RetrieveModelMixin, views
 after = self.request.GET.get("date_from", "-30d")
 before = self.request.GET.get("date_to", None)
 after_datetime = relative_date_parse(after, self.team.timezone_info)
-before_datetime = (
-relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.timezone.utc)
-)
+before_datetime = relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.UTC)
 date_range = (after_datetime, before_datetime)
 runs = (
 BatchExportRun.objects.select_related("batch_export__destination")
@@ -290,7 +290,7 @@ class PasswordResetSerializer(serializers.Serializer):
 user = None

 if user:
-user.requested_password_reset_at = datetime.datetime.now(datetime.timezone.utc)
+user.requested_password_reset_at = datetime.datetime.now(datetime.UTC)
 user.save()
 token = password_reset_token_generator.make_token(user)
 send_password_reset(user.id, token)
@@ -11,11 +11,13 @@ from posthog.api.forbid_destroy_model import ForbidDestroyModel

 from posthog.api.routing import TeamAndOrgViewSetMixin
 from posthog.api.shared import UserBasicSerializer
+from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer
 from posthog.models.comment import Comment


 class CommentSerializer(serializers.ModelSerializer):
 created_by = UserBasicSerializer(read_only=True)
+deleted = ClassicBehaviorBooleanFieldSerializer()

 class Meta:
 model = Comment
@@ -23,6 +23,7 @@ from posthog.api.routing import TeamAndOrgViewSetMixin
 from posthog.api.shared import UserBasicSerializer
 from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin
 from posthog.api.dashboards.dashboard import Dashboard
+from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer
 from posthog.auth import PersonalAPIKeyAuthentication, TemporaryTokenAuthentication
 from posthog.constants import FlagRequestType
 from posthog.event_usage import report_user_action
@@ -89,6 +90,9 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo
 is_simple_flag = serializers.SerializerMethodField()
 rollout_percentage = serializers.SerializerMethodField()
+
+ensure_experience_continuity = ClassicBehaviorBooleanFieldSerializer()
+has_enriched_analytics = ClassicBehaviorBooleanFieldSerializer()

 experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
 surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
 features: serializers.SerializerMethodField = serializers.SerializerMethodField()
@@ -22,6 +22,7 @@ from rest_framework.response import Response

 from posthog.api.routing import TeamAndOrgViewSetMixin
 from posthog.api.shared import FiltersSerializer
+from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer
 from posthog.models import Plugin, PluginAttachment, PluginConfig, User
 from posthog.models.activity_logging.activity_log import (
 ActivityPage,
@@ -586,6 +587,8 @@ class PluginConfigSerializer(serializers.ModelSerializer):
 delivery_rate_24h = serializers.SerializerMethodField()
 error = serializers.SerializerMethodField()
+
+deleted = ClassicBehaviorBooleanFieldSerializer()

 class Meta:
 model = PluginConfig
 fields = [
@@ -36,6 +36,32 @@ else:
 class DefaultRouterPlusPlus(ExtendedDefaultRouter):
 """DefaultRouter with optional trailing slash and drf-extensions nesting."""
+
+# This is an override because of changes in djangorestframework 3.15, which is required for python 3.11
+# changes taken from and explained here: https://github.com/nautobot/nautobot/pull/5546/files#diff-81850a2ccad5814aab4f477d447f85cc0a82e9c10fd88fd72327cda51a750471R30
+def _register(self, prefix, viewset, basename=None):
+"""
+Override DRF's BaseRouter.register() to bypass an unnecessary restriction added in version 3.15.0.
+(Reference: https://github.com/encode/django-rest-framework/pull/8438)
+"""
+if basename is None:
+basename = self.get_default_basename(viewset)
+
+# DRF:
+# if self.is_already_registered(basename):
+# msg = (f'Router with basename "{basename}" is already registered. '
+# f'Please provide a unique basename for viewset "{viewset}"')
+# raise ImproperlyConfigured(msg)
+#
+# We bypass this because we have at least one use case (/api/extras/jobs/) where we are *intentionally*
+# registering two viewsets with the same basename, but have carefully defined them so as not to conflict.
+
+# resuming standard DRF code...
+self.registry.append((prefix, viewset, basename))
+
+# invalidate the urls cache
+if hasattr(self, "_urls"):
+del self._urls

 def __init__(self, *args, **kwargs):
 super().__init__(*args, **kwargs)
 self.trailing_slash = r"/?"
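For illustration only (the viewset below is hypothetical, not taken from this commit): with djangorestframework 3.15 a reused basename makes `BaseRouter.register()` raise `ImproperlyConfigured`, and the `_register` override above restores the pre-3.15 behaviour of simply appending to the registry, so registrations like these keep working:

from rest_framework import viewsets


class PingViewSet(viewsets.ViewSet):  # hypothetical viewset, for illustration only
    def list(self, request):
        ...


router = DefaultRouterPlusPlus()  # the class defined in the hunk above
# Two registrations that intentionally share a basename. Stock DRF >= 3.15 raises
# ImproperlyConfigured on the second call; the _register override above restores
# the DRF <= 3.14 behaviour of appending both entries to the registry.
router.register(r"ping", PingViewSet, "ping")
router.register(r"projects/(?P<project_id>[^/.]+)/ping", PingViewSet, "ping")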
@@ -77,7 +77,7 @@
 "/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')",
 '/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
 '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. <int:project_id>) or annotating the parameter type with @extend_schema. Defaulting to "string".',
-'/opt/hostedtoolcache/Python/3.10.10/x64/lib/python3.10/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes <class \'str\'> and <class \'posthog.api.person.PersonSerializer\'>. This will very likely result in an incorrect schema. Try renaming one.',
+'/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes <class \'str\'> and <class \'posthog.api.person.PersonSerializer\'>. This will very likely result in an incorrect schema. Try renaming one.',
 '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. <int:id>) or annotating the parameter type with @extend_schema. Defaulting to "string".',
 '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.',
 '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
@@ -38,7 +38,7 @@ def create_batch_export_log_entry(
 "log_source": "batch_exports",
 "log_source_id": batch_export_id,
 "instance_id": run_id,
-"timestamp": dt.datetime.now(dt.timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f"),
+"timestamp": dt.datetime.now(dt.UTC).strftime("%Y-%m-%d %H:%M:%S.%f"),
 "level": level,
 "message": message,
 },
@@ -147,7 +147,7 @@ def test_log_level_filter(batch_export, team, level):

 results = []
 timeout = 10
-start = dt.datetime.now(dt.timezone.utc)
+start = dt.datetime.now(dt.UTC)

 while not results:
 results = fetch_batch_export_log_entries(
@@ -157,7 +157,7 @@ def test_log_level_filter(batch_export, team, level):
 after=dt.datetime(2023, 9, 22, 0, 59, 59),
 before=dt.datetime(2023, 9, 22, 1, 0, 1),
 )
-if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout):
+if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout):
 break

 results.sort(key=lambda record: record.message)
@@ -195,7 +195,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level):

 results = []
 timeout = 10
-start = dt.datetime.now(dt.timezone.utc)
+start = dt.datetime.now(dt.UTC)

 while not results:
 results = fetch_batch_export_log_entries(
@@ -205,7 +205,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level):
 after=dt.datetime(2023, 9, 22, 0, 59, 59),
 before=dt.datetime(2023, 9, 22, 1, 0, 1),
 )
-if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout):
+if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout):
 break

 results.sort(key=lambda record: record.message)
@@ -397,8 +397,8 @@ def test_unpause_can_trigger_a_backfill(client: HttpClient):

 data = get_batch_export_ok(client, team.pk, batch_export_id)
 assert batch_export["last_updated_at"] < data["last_updated_at"]
-start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc)
-end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc)
+start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC)
+end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC)
 mock_backfill.assert_called_once_with(
 ANY,
 batch_export["id"],
@@ -94,8 +94,8 @@ def test_can_put_config(client: HttpClient):
 new_schedule = describe_schedule(temporal, batch_export["id"])
 assert old_schedule.schedule.spec.intervals[0].every != new_schedule.schedule.spec.intervals[0].every
 assert new_schedule.schedule.spec.intervals[0].every == dt.timedelta(days=1)
-assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.timezone.utc)
-assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.timezone.utc)
+assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.UTC)
+assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.UTC)

 decoded_payload = async_to_sync(codec.decode)(new_schedule.schedule.action.args)
 args = json.loads(decoded_payload[0].data)
@@ -100,7 +100,7 @@ class TestAppMetricsAPI(ClickhouseTestMixin, APIBaseTest):

 temporal = sync_connect()

-now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc)
+now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC)
 with start_test_worker(temporal):
 response = create_batch_export_ok(
 self.client,
@@ -191,7 +191,7 @@ class TestAppMetricsAPI(ClickhouseTestMixin, APIBaseTest):
 }

 temporal = sync_connect()
-now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc)
+now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC)

 with start_test_worker(temporal):
 response = create_batch_export_ok(
@@ -13,7 +13,7 @@ import string
 import structlog
 import zlib
 from datetime import datetime, timedelta
-from datetime import timezone as tz
+from datetime import UTC
 from django.http import HttpResponse
 from django.test.client import MULTIPART_CONTENT, Client
 from django.utils import timezone
@@ -1415,7 +1415,7 @@ class TestCapture(BaseTest):
 # right time sent as sent_at to process_event

 sent_at = datetime.fromisoformat(arguments["sent_at"])
-self.assertEqual(sent_at.tzinfo, tz.utc)
+self.assertEqual(sent_at.tzinfo, UTC)

 timediff = sent_at.timestamp() - tomorrow_sent_at.timestamp()
 self.assertLess(abs(timediff), 1)
@@ -38,6 +38,7 @@ from posthog.api.shared import OrganizationBasicSerializer, TeamBasicSerializer
 from posthog.api.utils import (
 PublicIPOnlyHttpAdapter,
 raise_if_user_provided_url_unsafe,
+ClassicBehaviorBooleanFieldSerializer,
 )
 from posthog.auth import (
 PersonalAPIKeyAuthentication,
@@ -87,6 +88,7 @@ class UserSerializer(serializers.ModelSerializer):
 current_password = serializers.CharField(write_only=True, required=False)
 notification_settings = serializers.DictField(required=False)
 scene_personalisation = ScenePersonalisationBasicSerializer(many=True, read_only=True)
+anonymize_data = ClassicBehaviorBooleanFieldSerializer()

 class Meta:
 model = User
@@ -7,6 +7,7 @@ from ipaddress import ip_address
 from requests.adapters import HTTPAdapter
 from typing import Literal, Optional, Union

+from rest_framework.fields import Field
 from urllib3 import HTTPSConnectionPool, HTTPConnectionPool, PoolManager
 from uuid import UUID

@@ -14,7 +15,7 @@ import structlog
 from django.core.exceptions import RequestDataTooBig
 from django.db.models import QuerySet
 from prometheus_client import Counter
-from rest_framework import request, status
+from rest_framework import request, status, serializers
 from rest_framework.exceptions import ValidationError
 from statshog.defaults.django import statsd

@@ -35,6 +36,14 @@ class PaginationMode(Enum):
 previous = auto()


+# This overrides a change in DRF 3.15 that alters our behavior. If the user passes an empty argument,
+# the new version keeps it as null vs coalescing it to the default.
+# Don't add this to new classes
+class ClassicBehaviorBooleanFieldSerializer(serializers.BooleanField):
+def __init__(self, **kwargs):
+Field.__init__(self, allow_null=True, required=False, **kwargs)
+
+
 def get_target_entity(filter: Union[Filter, StickinessFilter]) -> Entity:
 # Except for "events", we require an entity id and type to be provided
 if not filter.target_entity_id and filter.target_entity_type != "events":
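A usage sketch for the new field (editorial; `ExampleSerializer` is hypothetical, but the pattern mirrors how the diff wires the field into `CommentSerializer`, `FeatureFlagSerializer`, `PluginConfigSerializer` and `UserSerializer`): it is a drop-in replacement for a plain `BooleanField` wherever the pre-3.15 coalescing behaviour for empty input must be preserved.

from rest_framework import serializers

from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer


class ExampleSerializer(serializers.Serializer):  # hypothetical, for illustration only
    # Optional, nullable, and an empty/omitted value is coalesced to the default
    # as DRF <= 3.14 did, per the comment on the class above.
    deleted = ClassicBehaviorBooleanFieldSerializer()
    # A plain BooleanField keeps the new DRF 3.15 semantics (null stays null).
    archived = serializers.BooleanField(required=False, allow_null=True)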
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from unittest.mock import patch

 import pytest
@@ -49,7 +49,7 @@ class TestUtils(AsyncMigrationBaseTest):

 sm.refresh_from_db()
 self.assertEqual(sm.status, MigrationStatus.Errored)
-self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1))
+self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1))
 errors = AsyncMigrationError.objects.filter(async_migration=sm).order_by("created_at")
 self.assertEqual(errors.count(), 2)
 self.assertEqual(errors[0].description, "some error")
@@ -81,7 +81,7 @@ class TestUtils(AsyncMigrationBaseTest):
 sm.refresh_from_db()

 self.assertEqual(sm.status, MigrationStatus.CompletedSuccessfully)
-self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1))
+self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1))

 self.assertEqual(sm.progress, 100)
 errors = AsyncMigrationError.objects.filter(async_migration=sm)
@@ -76,11 +76,11 @@ def validate_date_input(date_input: Any, team: Team | None = None) -> dt.datetim

 if parsed.tzinfo is None:
 if team:
-parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.timezone.utc)
+parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.UTC)
 else:
-parsed = parsed.replace(tzinfo=dt.timezone.utc)
+parsed = parsed.replace(tzinfo=dt.UTC)
 else:
-parsed = parsed.astimezone(dt.timezone.utc)
+parsed = parsed.astimezone(dt.UTC)

 return parsed

@@ -1,7 +1,7 @@
 import collections.abc
 import dataclasses
 import datetime as dt
-from enum import Enum
+import enum
 import typing
 from datetime import timedelta

@@ -254,7 +254,7 @@ class BatchExport(UUIDModel):
 raise ValueError(f"Invalid interval: '{self.interval}'")


-class BatchExportLogEntryLevel(str, Enum):
+class BatchExportLogEntryLevel(enum.StrEnum):
 """Enumeration of batch export log levels."""

 DEBUG = "DEBUG"
@@ -269,7 +269,7 @@ def pause_batch_export(temporal: Client, batch_export_id: str, note: str | None
 raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc

 batch_export.paused = True
-batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc)
+batch_export.last_paused_at = dt.datetime.now(dt.UTC)
 batch_export.save()

 return True
@@ -297,7 +297,7 @@ async def apause_batch_export(temporal: Client, batch_export_id: str, note: str
 raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc

 batch_export.paused = True
-batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc)
+batch_export.last_paused_at = dt.datetime.now(dt.UTC)
 await batch_export.asave()

 return True
|
||||
|
||||
query_status.error = True # Assume error in case nothing below ends up working
|
||||
|
||||
pickup_time = datetime.datetime.now(datetime.timezone.utc)
|
||||
pickup_time = datetime.datetime.now(datetime.UTC)
|
||||
if query_status.start_time:
|
||||
wait_duration = (pickup_time - query_status.start_time) / datetime.timedelta(seconds=1)
|
||||
QUERY_WAIT_TIME.labels(
|
||||
@ -177,7 +177,7 @@ def execute_process_query(
|
||||
query_status.complete = True
|
||||
query_status.error = False
|
||||
query_status.results = results
|
||||
query_status.end_time = datetime.datetime.now(datetime.timezone.utc)
|
||||
query_status.end_time = datetime.datetime.now(datetime.UTC)
|
||||
query_status.expiration_time = query_status.end_time + datetime.timedelta(seconds=manager.STATUS_TTL_SECONDS)
|
||||
process_duration = (query_status.end_time - pickup_time) / datetime.timedelta(seconds=1)
|
||||
QUERY_PROCESS_TIME.labels(team=team_id).observe(process_duration)
|
||||
@ -218,7 +218,7 @@ def enqueue_process_query_task(
|
||||
return manager.get_query_status()
|
||||
|
||||
# Immediately set status, so we don't have race with celery
|
||||
query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.timezone.utc))
|
||||
query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.UTC))
|
||||
manager.store_query_status(query_status)
|
||||
|
||||
task_signature = process_query_task.si(
|
||||
|
@@ -1,11 +1,11 @@
 import uuid
-from enum import Enum
+from enum import StrEnum
 from typing import Optional

 from django.conf import settings


-class ReplicationScheme(str, Enum):
+class ReplicationScheme(StrEnum):
 NOT_SHARDED = "NOT_SHARDED"
 SHARDED = "SHARDED"
 REPLICATED = "REPLICATED"
@@ -1,5 +1,5 @@
 import json
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from time import sleep
 from typing import TypedDict
 from uuid import UUID, uuid4
@@ -124,7 +124,7 @@ def test_person_overrides_dict():
 "override_person_id": uuid4(),
 "merged_at": datetime.fromisoformat("2020-01-02T00:00:00+00:00"),
 "oldest_event": datetime.fromisoformat("2020-01-01T00:00:00+00:00"),
-"created_at": datetime.now(timezone.utc),
+"created_at": datetime.now(UTC),
 "version": 1,
 }

@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from typing import Literal

 from semantic_version import Version
@@ -9,7 +9,7 @@ INTERNAL_BOT_EMAIL_SUFFIX = "@posthogbot.user"

 # N.B. Keep this in sync with frontend enum (types.ts)
 # AND ensure it is added to the Billing Service
-class AvailableFeature(str, Enum):
+class AvailableFeature(StrEnum):
 ZAPIER = "zapier"
 ORGANIZATIONS_PROJECTS = "organizations_projects"
 PROJECT_BASED_PERMISSIONING = "project_based_permissioning"
@@ -215,19 +215,19 @@ SAMPLING_FACTOR = "sampling_factor"
 BREAKDOWN_TYPES = Literal["event", "person", "cohort", "group", "session", "hogql"]


-class FunnelOrderType(str, Enum):
+class FunnelOrderType(StrEnum):
 STRICT = "strict"
 UNORDERED = "unordered"
 ORDERED = "ordered"


-class FunnelVizType(str, Enum):
+class FunnelVizType(StrEnum):
 TRENDS = "trends"
 TIME_TO_CONVERT = "time_to_convert"
 STEPS = "steps"


-class FunnelCorrelationType(str, Enum):
+class FunnelCorrelationType(StrEnum):
 EVENTS = "events"
 PROPERTIES = "properties"
 EVENT_WITH_PROPERTIES = "event_with_properties"
@@ -240,7 +240,7 @@ DISTINCT_ID_FILTER = "distinct_id"
 PERSON_UUID_FILTER = "person_uuid"


-class AnalyticsDBMS(str, Enum):
+class AnalyticsDBMS(StrEnum):
 POSTGRES = "postgres"
 CLICKHOUSE = "clickhouse"

@@ -251,13 +251,13 @@ WEEKLY_ACTIVE = "weekly_active"
 MONTHLY_ACTIVE = "monthly_active"


-class RetentionQueryType(str, Enum):
+class RetentionQueryType(StrEnum):
 RETURNING = "returning"
 TARGET = "target"
 TARGET_FIRST_TIME = "target_first_time"


-class ExperimentSignificanceCode(str, Enum):
+class ExperimentSignificanceCode(StrEnum):
 SIGNIFICANT = "significant"
 NOT_ENOUGH_EXPOSURE = "not_enough_exposure"
 LOW_WIN_PROBABILITY = "low_win_probability"
@@ -265,7 +265,7 @@ class ExperimentSignificanceCode(str, Enum):
 HIGH_P_VALUE = "high_p_value"


-class ExperimentNoResultsErrorKeys(str, Enum):
+class ExperimentNoResultsErrorKeys(StrEnum):
 NO_EVENTS = "no-events"
 NO_FLAG_INFO = "no-flag-info"
 NO_CONTROL_VARIANT = "no-control-variant"
@@ -273,12 +273,12 @@ class ExperimentNoResultsErrorKeys(str, Enum):
 NO_RESULTS = "no-results"


-class PropertyOperatorType(str, Enum):
+class PropertyOperatorType(StrEnum):
 AND = "AND"
 OR = "OR"


-class BreakdownAttributionType(str, Enum):
+class BreakdownAttributionType(StrEnum):
 FIRST_TOUCH = "first_touch"
 # FIRST_TOUCH attribution means the breakdown value is the first property value found within all funnel steps
 LAST_TOUCH = "last_touch"
@@ -294,7 +294,7 @@ MAX_SLUG_LENGTH = 48
 GROUP_TYPES_LIMIT = 5


-class EventDefinitionType(str, Enum):
+class EventDefinitionType(StrEnum):
 # Mimics EventDefinitionType in frontend/src/types.ts
 ALL = "all"
 ACTION_EVENT = "action_event"
@@ -303,7 +303,7 @@ class EventDefinitionType(str, Enum):
 EVENT_CUSTOM = "event_custom"


-class FlagRequestType(str, Enum):
+class FlagRequestType(StrEnum):
 DECIDE = "decide"
 LOCAL_EVALUATION = "local-evaluation"

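Worth noting for reviewers (editorial, not part of the diff): the `(str, Enum)` → `StrEnum` rewrite of these constants is behaviour-preserving for comparisons and serialisation, because `StrEnum` members are still real `str` instances; only the f-string/`format()` output changes on 3.11. A small sketch using a trimmed copy of one of the enums above:

from enum import StrEnum  # Python 3.11+


class AvailableFeature(StrEnum):  # trimmed copy of the enum above, for illustration
    ZAPIER = "zapier"


# Members remain str instances, so existing call sites keep working:
assert AvailableFeature.ZAPIER == "zapier"
assert isinstance(AvailableFeature.ZAPIER, str)
assert AvailableFeature("zapier") is AvailableFeature.ZAPIER
# And unlike (str, Enum) on 3.11, formatting yields the plain value again:
assert f"{AvailableFeature.ZAPIER}" == "zapier"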
@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from functools import wraps
 from typing import Any, TypeVar, Union, cast
 from collections.abc import Callable
@@ -17,7 +17,7 @@ from posthog.utils import refresh_requested_by_client
 from .utils import generate_cache_key, get_safe_cache


-class CacheType(str, Enum):
+class CacheType(StrEnum):
 TRENDS = "Trends"
 FUNNEL = "Funnel"
 RETENTION = "Retention"
@@ -106,9 +106,7 @@ class SimEvent:
 group4_created_at: Optional[dt.datetime] = None

 def __str__(self) -> str:
-separator = (
-"-" if self.timestamp < dt.datetime.now(dt.timezone.utc) else "+"
-) # Future events are denoted by a '+'
+separator = "-" if self.timestamp < dt.datetime.now(dt.UTC) else "+" # Future events are denoted by a '+'
 display = f"{self.timestamp} {separator} {self.event} # {self.distinct_id}"
 if current_url := self.properties.get("$current_url"):
 display += f" @ {current_url}"
@@ -1,11 +1,11 @@
-from enum import Enum
+from enum import StrEnum

 import mimesis.random

 WeightedPool = tuple[list[str], list[int]]


-class Industry(str, Enum):
+class Industry(StrEnum):
 TECHNOLOGY = "technology"
 FINANCE = "finance"
 MEDIA = "media"
@@ -1,7 +1,7 @@
 import datetime as dt
 import math
 from dataclasses import dataclass, field
-from enum import auto, Enum
+from enum import auto, StrEnum
 from typing import (
 TYPE_CHECKING,
 Any,
@@ -66,7 +66,7 @@ class HedgeboxSessionIntent(SimSessionIntent):
 DOWNGRADE_PLAN = auto()


-class HedgeboxPlan(str, Enum):
+class HedgeboxPlan(StrEnum):
 PERSONAL_FREE = "personal/free"
 PERSONAL_PRO = "personal/pro"
 BUSINESS_STANDARD = "business/standard"
@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Literal, Optional, Union
 from dataclasses import dataclass, field

@@ -554,7 +554,7 @@ class Alias(Expr):
 hidden: bool = False


-class ArithmeticOperationOp(str, Enum):
+class ArithmeticOperationOp(StrEnum):
 Add = "+"
 Sub = "-"
 Mult = "*"
@@ -581,7 +581,7 @@ class Or(Expr):
 type: Optional[ConstantType] = None


-class CompareOperationOp(str, Enum):
+class CompareOperationOp(StrEnum):
 Eq = "=="
 NotEq = "!="
 Gt = ">"
@@ -1,5 +1,5 @@
 from datetime import date, datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Optional, Literal, TypeAlias
 from uuid import UUID
 from pydantic import ConfigDict, BaseModel
@@ -47,7 +47,7 @@ BREAKDOWN_VALUES_LIMIT = 25
 BREAKDOWN_VALUES_LIMIT_FOR_COUNTRIES = 300


-class LimitContext(str, Enum):
+class LimitContext(StrEnum):
 QUERY = "query"
 QUERY_ASYNC = "query_async"
 EXPORT = "export"
@@ -1,5 +1,4 @@
-from typing import cast, Optional
-from typing_extensions import Self
+from typing import cast, Optional, Self
 import posthoganalytics

 from posthog.hogql.ast import SelectQuery, And, CompareOperation, CompareOperationOp, Field, JoinExpr
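Another small 3.11 cleanup visible here (editorial note): `typing.Self` is available in the standard library from Python 3.11, so the `typing_extensions` import can be dropped. A minimal sketch of the typical use, with a hypothetical class:

from typing import Optional, Self  # Self moved into typing in Python 3.11


class QueryBuilder:  # hypothetical class, for illustration only
    def __init__(self) -> None:
        self.where: Optional[str] = None

    def with_where(self, clause: str) -> Self:
        # Returning Self keeps the precise subclass type across chained calls.
        self.where = clause
        return self


builder = QueryBuilder().with_where("event = '$pageview'")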
@@ -1,4 +1,4 @@
-from datetime import timezone, datetime, date
+from datetime import datetime, date, UTC
 from typing import Optional, cast
 import pytest
 from django.test import override_settings
@@ -97,7 +97,7 @@ class TestResolver(BaseTest):
 "SELECT 1, 'boo', true, 1.1232, null, {date}, {datetime}, {uuid}, {array}, {array12}, {tuple}",
 placeholders={
 "date": ast.Constant(value=date(2020, 1, 10)),
-"datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc)),
+"datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=UTC)),
 "uuid": ast.Constant(value=UUID("00000000-0000-4000-8000-000000000000")),
 "array": ast.Constant(value=[]),
 "array12": ast.Constant(value=[1, 2]),
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import datetime, UTC
 from typing import Optional, cast

 from freezegun import freeze_time
@@ -70,7 +70,7 @@ class TestTrendsActorsQueryBuilder(BaseTest):
 def _get_utc_string(self, dt: datetime | None) -> str | None:
 if dt is None:
 return None
-return dt.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%SZ")
+return dt.astimezone(UTC).strftime("%Y-%m-%d %H:%M:%SZ")

 def test_time_frame(self):
 self.team.timezone = "Europe/Berlin"
@@ -1,5 +1,5 @@
 import copy
-from enum import Enum
+from enum import StrEnum
 import json
 import re
 from typing import Any, Literal
@@ -35,7 +35,7 @@ from posthog.types import InsightQueryNode
 from posthog.utils import str_to_bool


-class MathAvailability(str, Enum):
+class MathAvailability(StrEnum):
 Unavailable = ("Unavailable",)
 All = ("All",)
 ActorsOnly = "ActorsOnly"
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from enum import IntEnum
 from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard
 from zoneinfo import ZoneInfo
@@ -445,7 +445,7 @@ class QueryRunner(ABC, Generic[Q, R, CR]):
 elif execution_mode == ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE:
 # We're allowed to calculate if the cache is older than 24 hours, but we'll do it asynchronously
 assert isinstance(cached_response, CachedResponse)
-if datetime.now(timezone.utc) - cached_response.last_refresh > EXTENDED_CACHE_AGE:
+if datetime.now(UTC) - cached_response.last_refresh > EXTENDED_CACHE_AGE:
 query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user)
 cached_response.query_status = query_status_response.query_status
 return cached_response
@@ -490,8 +490,8 @@ class QueryRunner(ABC, Generic[Q, R, CR]):
 fresh_response_dict = {
 **self.calculate().model_dump(),
 "is_cached": False,
-"last_refresh": datetime.now(timezone.utc),
-"next_allowed_client_refresh": datetime.now(timezone.utc) + self._refresh_frequency(),
+"last_refresh": datetime.now(UTC),
+"next_allowed_client_refresh": datetime.now(UTC) + self._refresh_frequency(),
 "cache_key": cache_key,
 "timezone": self.team.timezone,
 }
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from enum import Enum
 from typing import Any

@@ -23,7 +23,7 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie
 encoded_jwt = jwt.encode(
 {
 **payload,
-"exp": datetime.now(tz=timezone.utc) + expiry_delta,
+"exp": datetime.now(tz=UTC) + expiry_delta,
 "aud": audience.value,
 },
 settings.SECRET_KEY,
@@ -1,5 +1,5 @@
 import json
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional
 from collections.abc import Callable

@@ -83,7 +83,7 @@ class KafkaConsumerForTests:
 return


-class _KafkaSecurityProtocol(str, Enum):
+class _KafkaSecurityProtocol(StrEnum):
 PLAINTEXT = "PLAINTEXT"
 SSL = "SSL"
 SASL_PLAINTEXT = "SASL_PLAINTEXT"
@@ -116,7 +116,7 @@ class Command(BaseCommand):

 if options.get("backfill_batch_export", False) and dry_run is False:
 client = sync_connect()
-end_at = dt.datetime.now(dt.timezone.utc)
+end_at = dt.datetime.now(dt.UTC)
 start_at = end_at - (dt.timedelta(hours=1) if interval == "hour" else dt.timedelta(days=1))
 backfill_export(
 client,
@@ -3,7 +3,7 @@ import re
 import subprocess
 from collections import OrderedDict
 from dataclasses import dataclass
-from enum import Enum
+from enum import StrEnum
 from typing import Optional

 from django.core.management.base import BaseCommand
@@ -12,7 +12,7 @@ from django.core.management.base import BaseCommand
 OUTPUT_FILE = "posthog/models/channel_type/channel_definitions.json"


-class EntryKind(str, Enum):
+class EntryKind(StrEnum):
 source = "source"
 medium = "medium"

@@ -65,7 +65,7 @@ class Command(BaseCommand):
 def handle(self, *args, **options):
 timer = monotonic()
 seed = options.get("seed") or secrets.token_hex(16)
-now = options.get("now") or dt.datetime.now(dt.timezone.utc)
+now = options.get("now") or dt.datetime.now(dt.UTC)
 existing_team_id = options.get("team_id")
 if (
 existing_team_id is not None
@@ -254,7 +254,7 @@ def create_migration(
 raise CommandError("Didn't receive 'y', exiting")
 print() # noqa: T201

-now = dt.datetime.now(dt.timezone.utc)
+now = dt.datetime.now(dt.UTC)
 # This is a precaution so we don't accidentally leave the export running indefinitely.
 end_at = now + dt.timedelta(days=end_days_from_now)

@@ -299,5 +299,5 @@ def parse_to_utc(date_str: str) -> dt.datetime:
 except ValueError:
 raise ValueError("Invalid date format. Expected 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")

-utc_datetime = parsed_datetime.replace(tzinfo=dt.timezone.utc)
+utc_datetime = parsed_datetime.replace(tzinfo=dt.UTC)
 return utc_datetime
@@ -63,7 +63,7 @@ class Command(BaseCommand):

 def handle(self, *args, **options):
 seed = options.get("seed") or secrets.token_hex(16)
-now = options.get("now") or dt.datetime.now(dt.timezone.utc)
+now = options.get("now") or dt.datetime.now(dt.UTC)

 admin = KafkaAdminClient(bootstrap_servers=settings.KAFKA_HOSTS)
 consumer = KafkaConsumer(KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, bootstrap_servers=settings.KAFKA_HOSTS)
@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from unittest import mock
 from uuid import UUID, uuid4

@@ -143,7 +143,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin):
 wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch,
 )
 def test_group_sync(self, mocked_ch_call):
-ts = datetime.now(timezone.utc)
+ts = datetime.now(UTC)
 Group.objects.create(
 team_id=self.team.pk,
 group_type_index=2,
@@ -183,12 +183,12 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin):
 2,
 "group-key",
 {"a": 5},
-timestamp=datetime.now(timezone.utc) - timedelta(hours=3),
+timestamp=datetime.now(UTC) - timedelta(hours=3),
 )
 group.group_properties = {"a": 5, "b": 3}
 group.save()

-ts_before = datetime.now(timezone.utc)
+ts_before = datetime.now(UTC)
 run_group_sync(self.team.pk, live_run=True, sync=True)
 mocked_ch_call.assert_called_once()

@@ -213,7 +213,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin):
 )
 self.assertLessEqual(
 ch_group[4].strftime("%Y-%m-%d %H:%M:%S"),
-datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"),
+datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S"),
 )

 # second time it's a no-op
@@ -225,7 +225,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin):
 wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch,
 )
 def test_group_sync_multiple_entries(self, mocked_ch_call):
-ts = datetime.now(timezone.utc)
+ts = datetime.now(UTC)
 Group.objects.create(
 team_id=self.team.pk,
 group_type_index=2,
@@ -430,7 +430,7 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin):
 group_type_index=2,
 group_key="group-key",
 group_properties={"a": 1234},
-created_at=datetime.now(timezone.utc) - timedelta(hours=3),
+created_at=datetime.now(UTC) - timedelta(hours=3),
 version=5,
 )

@@ -1,6 +1,6 @@
 import hashlib
 from dataclasses import dataclass
-from enum import Enum
+from enum import StrEnum
 import time
 import structlog
 from typing import Literal, Optional, Union, cast
@@ -67,7 +67,7 @@ ENTITY_EXISTS_PREFIX = "flag_entity_exists_"
 PERSON_KEY = "person"


-class FeatureFlagMatchReason(str, Enum):
+class FeatureFlagMatchReason(StrEnum):
 SUPER_CONDITION_VALUE = "super_condition_value"
 CONDITION_MATCH = "condition_match"
 NO_CONDITION_MATCH = "no_condition_match"
@@ -72,7 +72,7 @@ class StickinessFilter(
 else:
 data = {"insight": INSIGHT_STICKINESS}
 super().__init__(data, request, **kwargs)
-team: Optional["Team"] = kwargs.get("team", None)
+team: Optional[Team] = kwargs.get("team", None)
 if not team:
 raise ValidationError("Team must be provided to stickiness filter")
 self.team = team
@@ -1,7 +1,7 @@
 import datetime
 import os
 from dataclasses import dataclass
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, cast
 from uuid import UUID

@@ -288,13 +288,13 @@ class PluginStorage(models.Model):
 value: models.TextField = models.TextField(blank=True, null=True)


-class PluginLogEntrySource(str, Enum):
+class PluginLogEntrySource(StrEnum):
 SYSTEM = "SYSTEM"
 PLUGIN = "PLUGIN"
 CONSOLE = "CONSOLE"


-class PluginLogEntryType(str, Enum):
+class PluginLogEntryType(StrEnum):
 DEBUG = "DEBUG"
 LOG = "LOG"
 INFO = "INFO"
@@ -1,5 +1,5 @@
 import json
-from enum import Enum
+from enum import StrEnum
 from typing import (
 Any,
 Literal,
@@ -14,7 +14,7 @@ from posthog.models.filters.utils import GroupTypeIndex, validate_group_type_ind
 from posthog.utils import str_to_bool


-class BehavioralPropertyType(str, Enum):
+class BehavioralPropertyType(StrEnum):
 PERFORMED_EVENT = "performed_event"
 PERFORMED_EVENT_MULTIPLE = "performed_event_multiple"
 PERFORMED_EVENT_FIRST_TIME = "performed_event_first_time"
@@ -65,7 +65,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT

 @snapshot_clickhouse_queries
 def test_mark_deletions_done_person(self):
-base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
+base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC)

 _create_event(
 event_uuid=uuid4(),
@@ -101,7 +101,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT

 @snapshot_clickhouse_queries
 def test_mark_deletions_done_person_when_not_done(self):
-base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
+base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC)

 _create_event(
 event_uuid=uuid4(),
@@ -226,7 +226,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT

 @snapshot_clickhouse_alter_queries
 def test_delete_person(self):
-base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
+base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC)

 # Event for person, created before AsyncDeletion, so it should be deleted
 _create_event(
@@ -264,7 +264,7 @@ class TestAsyncDeletion(ClickhouseTestMixin, ClickhouseDestroyTablesMixin, BaseT

 @snapshot_clickhouse_alter_queries
 def test_delete_person_unrelated(self):
-base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
+base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC)

 _create_event(
 event_uuid=uuid4(),
@@ -48,7 +48,7 @@ def people(team):

 @pytest.fixture
 def oldest_event():
-return dt.datetime.now(dt.timezone.utc)
+return dt.datetime.now(dt.UTC)


 @pytest.mark.django_db(transaction=True)
@@ -2,7 +2,7 @@

 from __future__ import annotations

-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Literal, Optional, Union

 from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, RootModel
@@ -20,7 +20,7 @@ class MathGroupTypeIndex(float, Enum):
 NUMBER_4 = 4


-class AggregationAxisFormat(str, Enum):
+class AggregationAxisFormat(StrEnum):
 NUMERIC = "numeric"
 DURATION = "duration"
 DURATION_MS = "duration_ms"
@@ -28,7 +28,7 @@ class AggregationAxisFormat(str, Enum):
 PERCENTAGE_SCALED = "percentage_scaled"


-class Kind(str, Enum):
+class Kind(StrEnum):
 METHOD = "Method"
 FUNCTION = "Function"
 CONSTRUCTOR = "Constructor"
@@ -87,7 +87,7 @@ class AutocompleteCompletionItem(BaseModel):
 )


-class BaseMathType(str, Enum):
+class BaseMathType(StrEnum):
 TOTAL = "total"
 DAU = "dau"
 WEEKLY_ACTIVE = "weekly_active"
@@ -95,14 +95,14 @@ class BaseMathType(str, Enum):
 UNIQUE_SESSION = "unique_session"


-class BreakdownAttributionType(str, Enum):
+class BreakdownAttributionType(StrEnum):
 FIRST_TOUCH = "first_touch"
 LAST_TOUCH = "last_touch"
 ALL_EVENTS = "all_events"
 STEP = "step"


-class BreakdownType(str, Enum):
+class BreakdownType(StrEnum):
 COHORT = "cohort"
 PERSON = "person"
 EVENT = "event"
@@ -164,7 +164,7 @@ class ChartAxis(BaseModel):
 column: str


-class ChartDisplayType(str, Enum):
+class ChartDisplayType(StrEnum):
 ACTIONS_LINE_GRAPH = "ActionsLineGraph"
 ACTIONS_BAR = "ActionsBar"
 ACTIONS_AREA_GRAPH = "ActionsAreaGraph"
@@ -205,7 +205,7 @@ class CompareFilter(BaseModel):
 compare_to: Optional[str] = None


-class CountPerActorMathType(str, Enum):
+class CountPerActorMathType(StrEnum):
 AVG_COUNT_PER_ACTOR = "avg_count_per_actor"
 MIN_COUNT_PER_ACTOR = "min_count_per_actor"
 MAX_COUNT_PER_ACTOR = "max_count_per_actor"
@@ -255,14 +255,14 @@ class DatabaseSchemaSource(BaseModel):
 status: str


-class Type(str, Enum):
+class Type(StrEnum):
 POSTHOG = "posthog"
 DATA_WAREHOUSE = "data_warehouse"
 VIEW = "view"
 BATCH_EXPORT = "batch_export"


-class DatabaseSerializedFieldType(str, Enum):
+class DatabaseSerializedFieldType(StrEnum):
 INTEGER = "integer"
 FLOAT = "float"
 STRING = "string"
@@ -301,13 +301,13 @@ class Day(RootModel[int]):
 root: int


-class DurationType(str, Enum):
+class DurationType(StrEnum):
 DURATION = "duration"
 ACTIVE_SECONDS = "active_seconds"
 INACTIVE_SECONDS = "inactive_seconds"


-class Key(str, Enum):
+class Key(StrEnum):
 TAG_NAME = "tag_name"
 TEXT = "text"
 HREF = "href"
@@ -336,14 +336,14 @@ class EmptyPropertyFilter(BaseModel):
 )


-class EntityType(str, Enum):
+class EntityType(StrEnum):
 ACTIONS = "actions"
 EVENTS = "events"
 DATA_WAREHOUSE = "data_warehouse"
 NEW_ENTITY = "new_entity"


-class ErrorTrackingOrder(str, Enum):
+class ErrorTrackingOrder(StrEnum):
 LAST_SEEN = "last_seen"
 FIRST_SEEN = "first_seen"
 UNIQUE_OCCURRENCES = "unique_occurrences"
@@ -360,7 +360,7 @@ class EventDefinition(BaseModel):
 properties: dict[str, Any]


-class CorrelationType(str, Enum):
+class CorrelationType(StrEnum):
 SUCCESS = "success"
 FAILURE = "failure"

@@ -418,12 +418,12 @@ class EventsQueryPersonColumn(BaseModel):
 uuid: str


-class FilterLogicalOperator(str, Enum):
+class FilterLogicalOperator(StrEnum):
 AND_ = "AND"
 OR_ = "OR"


-class FunnelConversionWindowTimeUnit(str, Enum):
+class FunnelConversionWindowTimeUnit(StrEnum):
 SECOND = "second"
 MINUTE = "minute"
 HOUR = "hour"
@@ -440,7 +440,7 @@ class FunnelCorrelationResult(BaseModel):
 skewed: bool


-class FunnelCorrelationResultsType(str, Enum):
+class FunnelCorrelationResultsType(StrEnum):
 EVENTS = "events"
 PROPERTIES = "properties"
 EVENT_WITH_PROPERTIES = "event_with_properties"
@@ -468,18 +468,18 @@ class FunnelExclusionSteps(BaseModel):
 funnelToStep: int


-class FunnelLayout(str, Enum):
+class FunnelLayout(StrEnum):
 HORIZONTAL = "horizontal"
 VERTICAL = "vertical"


-class FunnelPathType(str, Enum):
+class FunnelPathType(StrEnum):
 FUNNEL_PATH_BEFORE_STEP = "funnel_path_before_step"
 FUNNEL_PATH_BETWEEN_STEPS = "funnel_path_between_steps"
 FUNNEL_PATH_AFTER_STEP = "funnel_path_after_step"


-class FunnelStepReference(str, Enum):
+class FunnelStepReference(StrEnum):
 TOTAL = "total"
 PREVIOUS = "previous"

@@ -492,7 +492,7 @@ class FunnelTimeToConvertResults(BaseModel):
 bins: list[list[int]]


-class FunnelVizType(str, Enum):
+class FunnelVizType(StrEnum):
 STEPS = "steps"
 TIME_TO_CONVERT = "time_to_convert"
 TRENDS = "trends"
@@ -516,44 +516,44 @@ class HogQLNotice(BaseModel):
 start: Optional[int] = None


-class BounceRatePageViewMode(str, Enum):
+class BounceRatePageViewMode(StrEnum):
 COUNT_PAGEVIEWS = "count_pageviews"
 UNIQ_URLS = "uniq_urls"


-class InCohortVia(str, Enum):
+class InCohortVia(StrEnum):
 AUTO = "auto"
 LEFTJOIN = "leftjoin"
 SUBQUERY = "subquery"
 LEFTJOIN_CONJOINED = "leftjoin_conjoined"


-class MaterializationMode(str, Enum):
+class MaterializationMode(StrEnum):
 AUTO = "auto"
 LEGACY_NULL_AS_STRING = "legacy_null_as_string"
 LEGACY_NULL_AS_NULL = "legacy_null_as_null"
 DISABLED = "disabled"


-class PersonsArgMaxVersion(str, Enum):
+class PersonsArgMaxVersion(StrEnum):
 AUTO = "auto"
 V1 = "v1"
 V2 = "v2"


-class PersonsJoinMode(str, Enum):
+class PersonsJoinMode(StrEnum):
 INNER = "inner"
 LEFT = "left"


-class PersonsOnEventsMode(str, Enum):
+class PersonsOnEventsMode(StrEnum):
 DISABLED = "disabled"
 PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_no_override_properties_on_events"
 PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_override_properties_on_events"
 PERSON_ID_OVERRIDE_PROPERTIES_JOINED = "person_id_override_properties_joined"


-class SessionTableVersion(str, Enum):
+class SessionTableVersion(StrEnum):
AUTO = "auto"
|
||||
V1 = "v1"
|
||||
V2 = "v2"
|
||||
@ -586,7 +586,7 @@ class HogQueryResponse(BaseModel):
|
||||
stdout: Optional[str] = None
|
||||
|
||||
|
||||
class Compare(str, Enum):
|
||||
class Compare(StrEnum):
|
||||
CURRENT = "current"
|
||||
PREVIOUS = "previous"
|
||||
|
||||
@ -626,7 +626,7 @@ class InsightDateRange(BaseModel):
|
||||
)
|
||||
|
||||
|
||||
class InsightFilterProperty(str, Enum):
|
||||
class InsightFilterProperty(StrEnum):
|
||||
TRENDS_FILTER = "trendsFilter"
|
||||
FUNNELS_FILTER = "funnelsFilter"
|
||||
RETENTION_FILTER = "retentionFilter"
|
||||
@ -635,7 +635,7 @@ class InsightFilterProperty(str, Enum):
|
||||
LIFECYCLE_FILTER = "lifecycleFilter"
|
||||
|
||||
|
||||
class InsightNodeKind(str, Enum):
|
||||
class InsightNodeKind(StrEnum):
|
||||
TRENDS_QUERY = "TrendsQuery"
|
||||
FUNNELS_QUERY = "FunnelsQuery"
|
||||
RETENTION_QUERY = "RetentionQuery"
|
||||
@ -644,7 +644,7 @@ class InsightNodeKind(str, Enum):
|
||||
LIFECYCLE_QUERY = "LifecycleQuery"
|
||||
|
||||
|
||||
class InsightType(str, Enum):
|
||||
class InsightType(StrEnum):
|
||||
TRENDS = "TRENDS"
|
||||
STICKINESS = "STICKINESS"
|
||||
LIFECYCLE = "LIFECYCLE"
|
||||
@ -656,7 +656,7 @@ class InsightType(str, Enum):
|
||||
HOG = "HOG"
|
||||
|
||||
|
||||
class IntervalType(str, Enum):
|
||||
class IntervalType(StrEnum):
|
||||
MINUTE = "minute"
|
||||
HOUR = "hour"
|
||||
DAY = "day"
|
||||
@ -664,14 +664,14 @@ class IntervalType(str, Enum):
|
||||
MONTH = "month"
|
||||
|
||||
|
||||
class LifecycleToggle(str, Enum):
|
||||
class LifecycleToggle(StrEnum):
|
||||
NEW = "new"
|
||||
RESURRECTING = "resurrecting"
|
||||
RETURNING = "returning"
|
||||
DORMANT = "dormant"
|
||||
|
||||
|
||||
class NodeKind(str, Enum):
|
||||
class NodeKind(StrEnum):
|
||||
EVENTS_NODE = "EventsNode"
|
||||
ACTIONS_NODE = "ActionsNode"
|
||||
DATA_WAREHOUSE_NODE = "DataWarehouseNode"
|
||||
@ -716,7 +716,7 @@ class PathCleaningFilter(BaseModel):
|
||||
regex: Optional[str] = None
|
||||
|
||||
|
||||
class PathType(str, Enum):
|
||||
class PathType(StrEnum):
|
||||
FIELD_PAGEVIEW = "$pageview"
|
||||
FIELD_SCREEN = "$screen"
|
||||
CUSTOM_EVENT = "custom_event"
|
||||
@ -765,7 +765,7 @@ class PathsFilterLegacy(BaseModel):
|
||||
step_limit: Optional[int] = None
|
||||
|
||||
|
||||
class PropertyFilterType(str, Enum):
|
||||
class PropertyFilterType(StrEnum):
|
||||
META = "meta"
|
||||
EVENT = "event"
|
||||
PERSON = "person"
|
||||
@ -780,7 +780,7 @@ class PropertyFilterType(str, Enum):
|
||||
DATA_WAREHOUSE_PERSON_PROPERTY = "data_warehouse_person_property"
|
||||
|
||||
|
||||
class PropertyMathType(str, Enum):
|
||||
class PropertyMathType(StrEnum):
|
||||
AVG = "avg"
|
||||
SUM = "sum"
|
||||
MIN = "min"
|
||||
@ -791,7 +791,7 @@ class PropertyMathType(str, Enum):
|
||||
P99 = "p99"
|
||||
|
||||
|
||||
class PropertyOperator(str, Enum):
|
||||
class PropertyOperator(StrEnum):
|
||||
EXACT = "exact"
|
||||
IS_NOT = "is_not"
|
||||
ICONTAINS = "icontains"
|
||||
@ -909,7 +909,7 @@ class RecordingPropertyFilter(BaseModel):
|
||||
value: Optional[Union[str, float, list[Union[str, float]]]] = None
|
||||
|
||||
|
||||
class Kind1(str, Enum):
|
||||
class Kind1(StrEnum):
|
||||
ACTIONS_NODE = "ActionsNode"
|
||||
EVENTS_NODE = "EventsNode"
|
||||
|
||||
@ -927,19 +927,19 @@ class RetentionEntity(BaseModel):
|
||||
uuid: Optional[str] = None
|
||||
|
||||
|
||||
class RetentionReference(str, Enum):
|
||||
class RetentionReference(StrEnum):
|
||||
TOTAL = "total"
|
||||
PREVIOUS = "previous"
|
||||
|
||||
|
||||
class RetentionPeriod(str, Enum):
|
||||
class RetentionPeriod(StrEnum):
|
||||
HOUR = "Hour"
|
||||
DAY = "Day"
|
||||
WEEK = "Week"
|
||||
MONTH = "Month"
|
||||
|
||||
|
||||
class RetentionType(str, Enum):
|
||||
class RetentionType(StrEnum):
|
||||
RETENTION_RECURRING = "retention_recurring"
|
||||
RETENTION_FIRST_TIME = "retention_first_time"
|
||||
|
||||
@ -970,7 +970,7 @@ class SessionPropertyFilter(BaseModel):
|
||||
value: Optional[Union[str, float, list[Union[str, float]]]] = None
|
||||
|
||||
|
||||
class StepOrderValue(str, Enum):
|
||||
class StepOrderValue(StrEnum):
|
||||
STRICT = "strict"
|
||||
UNORDERED = "unordered"
|
||||
ORDERED = "ordered"
|
||||
@ -1101,7 +1101,7 @@ class TimelineEntry(BaseModel):
|
||||
sessionId: Optional[str] = Field(default=None, description="Session ID. None means out-of-session events")
|
||||
|
||||
|
||||
class YAxisScaleType(str, Enum):
|
||||
class YAxisScaleType(StrEnum):
|
||||
LOG10 = "log10"
|
||||
LINEAR = "linear"
|
||||
|
||||
@ -1191,7 +1191,7 @@ class VizSpecificOptions(BaseModel):
|
||||
RETENTION: Optional[RETENTION] = None
|
||||
|
||||
|
||||
class Kind2(str, Enum):
|
||||
class Kind2(StrEnum):
|
||||
UNIT = "unit"
|
||||
DURATION_S = "duration_s"
|
||||
PERCENTAGE = "percentage"
|
||||
@ -1238,7 +1238,7 @@ class WebOverviewQueryResponse(BaseModel):
|
||||
)
|
||||
|
||||
|
||||
class WebStatsBreakdown(str, Enum):
|
||||
class WebStatsBreakdown(StrEnum):
|
||||
PAGE = "Page"
|
||||
INITIAL_PAGE = "InitialPage"
|
||||
EXIT_PAGE = "ExitPage"
|
||||
|
@ -1,7 +1,7 @@
import os
import time
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from datetime import datetime, timedelta, UTC
from prometheus_client import Histogram
import json
from typing import Any, cast
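The other substitution repeated throughout this commit is datetime.timezone.utc -> datetime.UTC, as in the import pair above. datetime.UTC was added in Python 3.11 as a module-level alias for the same timezone.utc singleton, so the change is purely cosmetic; a standalone sketch (not taken from the diff) to confirm the equivalence:

    import datetime as dt
    from datetime import datetime, timezone, UTC

    # UTC is the very same object as timezone.utc, just exposed at module level in 3.11+.
    assert UTC is timezone.utc
    assert dt.UTC is dt.timezone.utc

    # Both spellings therefore produce identical aware datetimes.
    now_old = datetime.now(timezone.utc)
    now_new = datetime.now(UTC)
    assert now_old.tzinfo is now_new.tzinfo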
|
||||
@ -430,7 +430,7 @@ class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet):
|
||||
# Keys are like 1619712000-1619712060
|
||||
blob_key = full_key.replace(blob_prefix.rstrip("/") + "/", "")
|
||||
blob_key_base = blob_key.split(".")[0] # Remove the extension if it exists
|
||||
time_range = [datetime.fromtimestamp(int(x) / 1000, tz=timezone.utc) for x in blob_key_base.split("-")]
|
||||
time_range = [datetime.fromtimestamp(int(x) / 1000, tz=UTC) for x in blob_key_base.split("-")]
|
||||
|
||||
sources.append(
|
||||
{
|
||||
@ -446,7 +446,7 @@ class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet):
|
||||
newest_timestamp = min(sources, key=lambda k: k["end_timestamp"])["end_timestamp"]
|
||||
|
||||
if might_have_realtime:
|
||||
might_have_realtime = oldest_timestamp + timedelta(hours=24) > datetime.now(timezone.utc)
|
||||
might_have_realtime = oldest_timestamp + timedelta(hours=24) > datetime.now(UTC)
|
||||
if might_have_realtime:
|
||||
sources.append(
|
||||
{
|
||||
|
@ -2,7 +2,7 @@ import base64
|
||||
import gzip
|
||||
import json
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from datetime import datetime, UTC
|
||||
from typing import Any
|
||||
from collections.abc import Callable, Generator
|
||||
|
||||
@ -268,7 +268,7 @@ def is_active_event(event: SessionRecordingEventSummary) -> bool:
|
||||
|
||||
|
||||
def parse_snapshot_timestamp(timestamp: int):
|
||||
return datetime.fromtimestamp(timestamp / 1000, timezone.utc)
|
||||
return datetime.fromtimestamp(timestamp / 1000, UTC)
|
||||
|
||||
|
||||
def convert_to_timestamp(source: str) -> int:
|
||||
|
@ -1,7 +1,7 @@
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import datetime, timedelta, UTC
|
||||
from unittest.mock import ANY, patch, MagicMock, call
|
||||
from urllib.parse import urlencode
|
||||
|
||||
@ -395,7 +395,7 @@ class TestSessionRecordings(APIBaseTest, ClickhouseTestMixin, QueryMatchingTest)
|
||||
"distinct_id": "d1",
|
||||
"viewed": False,
|
||||
"recording_duration": 30,
|
||||
"start_time": base_time.replace(tzinfo=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"start_time": base_time.replace(tzinfo=UTC).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"end_time": (base_time + relativedelta(seconds=30)).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"click_count": 0,
|
||||
"keypress_count": 0,
|
||||
|
@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import datetime, timedelta, UTC
|
||||
from posthog.models import ScheduledChange, FeatureFlag
|
||||
from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries
|
||||
from posthog.tasks.process_scheduled_changes import process_scheduled_changes
|
||||
@ -21,7 +21,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
record_id=feature_flag.id,
|
||||
model_name="FeatureFlag",
|
||||
payload={"operation": "update_status", "value": True},
|
||||
scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)).isoformat(),
|
||||
scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)).isoformat(),
|
||||
)
|
||||
|
||||
process_scheduled_changes()
|
||||
@ -55,7 +55,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
record_id=feature_flag.id,
|
||||
model_name="FeatureFlag",
|
||||
payload=payload,
|
||||
scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)),
|
||||
scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)),
|
||||
)
|
||||
|
||||
process_scheduled_changes()
|
||||
@ -105,7 +105,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
record_id=feature_flag.id,
|
||||
model_name="FeatureFlag",
|
||||
payload=payload,
|
||||
scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)),
|
||||
scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)),
|
||||
)
|
||||
|
||||
process_scheduled_changes()
|
||||
@ -131,7 +131,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
record_id=feature_flag.id,
|
||||
model_name="FeatureFlag",
|
||||
payload=payload,
|
||||
scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)),
|
||||
scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)),
|
||||
)
|
||||
|
||||
process_scheduled_changes()
|
||||
@ -169,11 +169,11 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
"operation": "add_release_condition",
|
||||
"value": {"groups": [change_past_condition], "multivariate": None, "payloads": {}},
|
||||
},
|
||||
scheduled_at=(datetime.now(timezone.utc) - timedelta(hours=1)),
|
||||
scheduled_at=(datetime.now(UTC) - timedelta(hours=1)),
|
||||
)
|
||||
|
||||
# 2. Due in the past and already executed
|
||||
change_past_executed_at = datetime.now(timezone.utc) - timedelta(hours=5)
|
||||
change_past_executed_at = datetime.now(UTC) - timedelta(hours=5)
|
||||
change_past_executed = ScheduledChange.objects.create(
|
||||
team=self.team,
|
||||
record_id=feature_flag.id,
|
||||
@ -197,7 +197,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
"operation": "add_release_condition",
|
||||
"value": {"groups": [change_due_now_condition], "multivariate": None, "payloads": {}},
|
||||
},
|
||||
scheduled_at=datetime.now(timezone.utc),
|
||||
scheduled_at=datetime.now(UTC),
|
||||
)
|
||||
|
||||
# 4. Due in the future
|
||||
@ -206,7 +206,7 @@ class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest):
|
||||
record_id=feature_flag.id,
|
||||
model_name="FeatureFlag",
|
||||
payload={"operation": "update_status", "value": False},
|
||||
scheduled_at=(datetime.now(timezone.utc) + timedelta(hours=1)),
|
||||
scheduled_at=(datetime.now(UTC) + timedelta(hours=1)),
|
||||
)
|
||||
|
||||
process_scheduled_changes()
|
||||
|
@ -46,7 +46,7 @@ class TestWarehouse(APIBaseTest):
|
||||
@patch("posthog.tasks.warehouse.get_ph_client")
|
||||
@patch(
|
||||
"posthog.tasks.warehouse.DEFAULT_DATE_TIME",
|
||||
datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.UTC),
|
||||
)
|
||||
@freeze_time("2023-11-07")
|
||||
def test_capture_workspace_rows_synced_by_team_month_cutoff(self, mock_get_ph_client: MagicMock) -> None:
|
||||
@ -87,13 +87,13 @@ class TestWarehouse(APIBaseTest):
|
||||
self.team.refresh_from_db()
|
||||
self.assertEqual(
|
||||
self.team.external_data_workspace_last_synced_at,
|
||||
datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.UTC),
|
||||
)
|
||||
|
||||
@patch("posthog.tasks.warehouse.get_ph_client")
|
||||
@patch(
|
||||
"posthog.tasks.warehouse.DEFAULT_DATE_TIME",
|
||||
datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.UTC),
|
||||
)
|
||||
@freeze_time("2023-11-07")
|
||||
def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set(self, mock_get_ph_client: MagicMock) -> None:
|
||||
@ -101,7 +101,7 @@ class TestWarehouse(APIBaseTest):
|
||||
mock_get_ph_client.return_value = mock_ph_client
|
||||
|
||||
self.team.external_data_workspace_last_synced_at = datetime.datetime(
|
||||
2023, 10, 30, 19, 32, 41, tzinfo=datetime.timezone.utc
|
||||
2023, 10, 30, 19, 32, 41, tzinfo=datetime.UTC
|
||||
)
|
||||
self.team.save()
|
||||
|
||||
@ -142,5 +142,5 @@ class TestWarehouse(APIBaseTest):
|
||||
self.team.refresh_from_db()
|
||||
self.assertEqual(
|
||||
self.team.external_data_workspace_last_synced_at,
|
||||
datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.UTC),
|
||||
)
|
||||
|
@ -18,7 +18,7 @@ logger = structlog.get_logger(__name__)
|
||||
MONTHLY_LIMIT = 500_000_000
|
||||
|
||||
# TODO: adjust to whenever billing officially starts
|
||||
DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.timezone.utc)
|
||||
DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.UTC)
|
||||
|
||||
|
||||
def capture_external_data_rows_synced() -> None:
|
||||
@ -91,7 +91,7 @@ def check_synced_row_limits_of_team(team_id: int) -> None:
|
||||
def capture_workspace_rows_synced_by_team(team_id: int) -> None:
|
||||
ph_client = get_ph_client()
|
||||
team = Team.objects.get(pk=team_id)
|
||||
now = datetime.datetime.now(datetime.timezone.utc)
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
begin = team.external_data_workspace_last_synced_at or DEFAULT_DATE_TIME
|
||||
|
||||
team.external_data_workspace_last_synced_at = now
|
||||
|
@ -114,7 +114,7 @@ class BackfillScheduleInputs:
|
||||
def get_utcnow():
|
||||
"""Return the current time in UTC. This function is only required for mocking during tests,
|
||||
because mocking the global datetime breaks Temporal."""
|
||||
return dt.datetime.now(dt.timezone.utc)
|
||||
return dt.datetime.now(dt.UTC)
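Given that the docstring above says get_utcnow exists only so tests can replace it (mocking the datetime module globally breaks Temporal), here is a rough sketch of that pattern with a locally defined stand-in; the helper names and numbers are invented for illustration and are not part of this commit:

    import datetime as dt
    from unittest import mock

    def get_utcnow() -> dt.datetime:
        # Local stand-in for the helper above so the sketch runs on its own.
        return dt.datetime.now(dt.UTC)

    def seconds_until_cutoff(cutoff: dt.datetime) -> float:
        # Toy code under test: it asks get_utcnow() for "now" instead of calling datetime directly.
        return (cutoff - get_utcnow()).total_seconds()

    cutoff = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.UTC)
    with mock.patch(f"{__name__}.get_utcnow", return_value=dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.UTC)):
        # Only this one helper is patched; the datetime module itself is untouched.
        assert seconds_until_cutoff(cutoff) == 3108.0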
|
||||
|
||||
|
||||
@temporalio.activity.defn
|
||||
|
@ -5,7 +5,7 @@ import contextlib
|
||||
import json
|
||||
import typing
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from datetime import date, datetime, timedelta, timezone, UTC
|
||||
|
||||
from temporalio import activity, workflow
|
||||
from temporalio.common import RetryPolicy
|
||||
@ -14,7 +14,7 @@ from posthog.temporal.batch_exports.base import PostHogWorkflow
|
||||
from posthog.temporal.common.clickhouse import get_client
|
||||
from posthog.temporal.common.heartbeat import Heartbeater
|
||||
|
||||
EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc)
|
||||
EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=UTC)
|
||||
|
||||
|
||||
CREATE_TABLE_PERSON_DISTINCT_ID_OVERRIDES_JOIN = """
|
||||
@ -174,7 +174,7 @@ MUTATIONS = {
|
||||
}
|
||||
|
||||
|
||||
def parse_clickhouse_timestamp(s: str, tzinfo: timezone = timezone.utc) -> datetime:
|
||||
def parse_clickhouse_timestamp(s: str, tzinfo: timezone = UTC) -> datetime:
|
||||
"""Parse a timestamp from ClickHouse."""
|
||||
return datetime.strptime(s.strip(), "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=tzinfo)
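The signature change above only swaps which name the default timezone object goes by; timezone.utc and UTC are the same instance, so parsing behaviour is unchanged. A standalone usage sketch (timestamp value invented):

    from datetime import datetime, timezone, UTC

    def parse_clickhouse_timestamp(s: str, tzinfo: timezone = UTC) -> datetime:
        # Copy of the helper above so the example runs on its own.
        return datetime.strptime(s.strip(), "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=tzinfo)

    parsed = parse_clickhouse_timestamp(" 2023-07-14 00:00:00.005555 ")
    assert parsed.tzinfo is UTC and parsed.microsecond == 5555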
|
||||
|
||||
|
@ -203,7 +203,7 @@ def data_interval_start(data_interval_end, interval):
|
||||
@pytest.fixture
|
||||
def data_interval_end(interval):
|
||||
"""Set a test data interval end."""
|
||||
return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc)
|
||||
return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
|
@ -60,66 +60,66 @@ async def temporal_schedule(temporal_client, team):
|
||||
"start_at,end_at,step,expected",
|
||||
[
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.timedelta(days=1),
|
||||
[
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC),
|
||||
)
|
||||
],
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 12, 20, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 1, 12, 20, 0, tzinfo=dt.UTC),
|
||||
dt.timedelta(hours=1),
|
||||
[
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
],
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.timedelta(hours=12),
|
||||
[
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
],
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.timedelta(days=1),
|
||||
[
|
||||
(
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
(
|
||||
dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
],
|
||||
),
|
||||
@ -145,8 +145,8 @@ async def test_get_schedule_frequency(activity_environment, temporal_worker, tem
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
async def test_backfill_schedule_activity(activity_environment, temporal_worker, temporal_client, temporal_schedule):
|
||||
"""Test backfill_schedule activity schedules all backfill runs."""
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc)
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC)
|
||||
|
||||
desc = await temporal_schedule.describe()
|
||||
inputs = BackfillScheduleInputs(
|
||||
@ -199,8 +199,8 @@ async def test_backfill_schedule_activity(activity_environment, temporal_worker,
|
||||
@pytest.mark.django_db(transaction=True)
|
||||
async def test_backfill_batch_export_workflow(temporal_worker, temporal_schedule, temporal_client, team):
|
||||
"""Test BackfillBatchExportWorkflow executes all backfill runs and updates model."""
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc)
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC)
|
||||
|
||||
desc = await temporal_schedule.describe()
|
||||
|
||||
@ -275,9 +275,9 @@ async def test_backfill_batch_export_workflow_no_end_at(
|
||||
"""Test BackfillBatchExportWorkflow executes all backfill runs and updates model."""
|
||||
|
||||
# Note the mocked time here, we should stop backfilling at 8 minutes and unpause the job.
|
||||
mock_utcnow.return_value = dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.timezone.utc)
|
||||
mock_utcnow.return_value = dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.UTC)
|
||||
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC)
|
||||
end_at = None
|
||||
|
||||
desc = await temporal_schedule.describe()
|
||||
@ -356,8 +356,8 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted(
|
||||
temporal_worker, temporal_schedule, temporal_client, team
|
||||
):
|
||||
"""Test BackfillBatchExportWorkflow fails when its underlying Temporal Schedule is deleted."""
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc)
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC)
|
||||
|
||||
desc = await temporal_schedule.describe()
|
||||
|
||||
@ -398,8 +398,8 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted_after_
|
||||
In this test, in contrats to the previous one, we wait until we have started running some
|
||||
backfill runs before cancelling.
|
||||
"""
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc)
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC)
|
||||
end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC)
|
||||
|
||||
desc = await temporal_schedule.describe()
|
||||
|
||||
@ -471,8 +471,8 @@ async def test_backfill_batch_export_workflow_is_cancelled_on_repeated_failures(
|
||||
temporal_worker, failing_s3_batch_export, temporal_client, ateam, clickhouse_client
|
||||
):
|
||||
"""Test BackfillBatchExportWorkflow will be cancelled on repeated failures."""
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc)
|
||||
end_at = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc)
|
||||
start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC)
|
||||
end_at = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
# We need some data otherwise the S3 batch export will not fail as it short-circuits.
|
||||
for d in date_range(start_at, end_at, dt.timedelta(minutes=5)):
|
||||
|
@ -41,9 +41,7 @@ def assert_records_match_events(records, events):
|
||||
key in ("timestamp", "_inserted_at", "created_at")
|
||||
and expected.get(key.removeprefix("_"), None) is not None
|
||||
):
|
||||
assert value == dt.datetime.fromisoformat(expected[key.removeprefix("_")]).replace(
|
||||
tzinfo=dt.timezone.utc
|
||||
), msg
|
||||
assert value == dt.datetime.fromisoformat(expected[key.removeprefix("_")]).replace(tzinfo=dt.UTC), msg
|
||||
elif isinstance(expected[key], dict):
|
||||
assert value == json.dumps(expected[key]), msg
|
||||
else:
|
||||
@ -289,7 +287,7 @@ async def test_iter_records_with_single_field_and_alias(clickhouse_client, field
|
||||
|
||||
if isinstance(result, dt.datetime):
|
||||
# Event generation function returns datetimes as strings.
|
||||
expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.timezone.utc)
|
||||
expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.UTC)
|
||||
|
||||
assert result == expected_value
|
||||
|
||||
@ -388,16 +386,16 @@ async def test_iter_records_uses_extra_query_parameters(clickhouse_client):
|
||||
"hour",
|
||||
"2023-08-01T00:00:00+00:00",
|
||||
(
|
||||
dt.datetime(2023, 7, 31, 23, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 7, 31, 23, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
),
|
||||
(
|
||||
"day",
|
||||
"2023-08-01T00:00:00+00:00",
|
||||
(
|
||||
dt.datetime(2023, 7, 31, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 7, 31, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.UTC),
|
||||
),
|
||||
),
|
||||
],
|
||||
|
@ -50,7 +50,7 @@ SKIP_IF_MISSING_GOOGLE_APPLICATION_CREDENTIALS = pytest.mark.skipif(
|
||||
|
||||
pytestmark = [SKIP_IF_MISSING_GOOGLE_APPLICATION_CREDENTIALS, pytest.mark.asyncio, pytest.mark.django_db]
|
||||
|
||||
TEST_TIME = dt.datetime.now(dt.timezone.utc)
|
||||
TEST_TIME = dt.datetime.now(dt.UTC)
|
||||
|
||||
|
||||
async def assert_clickhouse_records_in_bigquery(
|
||||
@ -144,7 +144,7 @@ async def assert_clickhouse_records_in_bigquery(
|
||||
if k in json_columns and v is not None:
|
||||
expected_record[k] = json.loads(v)
|
||||
elif isinstance(v, dt.datetime):
|
||||
expected_record[k] = v.replace(tzinfo=dt.timezone.utc)
|
||||
expected_record[k] = v.replace(tzinfo=dt.UTC)
|
||||
else:
|
||||
expected_record[k] = v
|
||||
|
||||
@ -298,7 +298,7 @@ async def test_insert_into_bigquery_activity_inserts_data_into_bigquery_table(
|
||||
with freeze_time(TEST_TIME) as frozen_time:
|
||||
await activity_environment.run(insert_into_bigquery_activity, insert_inputs)
|
||||
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc)
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC)
|
||||
|
||||
await assert_clickhouse_records_in_bigquery(
|
||||
bigquery_client=bigquery_client,
|
||||
@ -352,7 +352,7 @@ async def test_insert_into_bigquery_activity_merges_data_in_follow_up_runs(
|
||||
with freeze_time(TEST_TIME) as frozen_time:
|
||||
await activity_environment.run(insert_into_bigquery_activity, insert_inputs)
|
||||
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc)
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC)
|
||||
|
||||
await assert_clickhouse_records_in_bigquery(
|
||||
bigquery_client=bigquery_client,
|
||||
@ -393,7 +393,7 @@ async def test_insert_into_bigquery_activity_merges_data_in_follow_up_runs(
|
||||
with freeze_time(TEST_TIME) as frozen_time:
|
||||
await activity_environment.run(insert_into_bigquery_activity, insert_inputs)
|
||||
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc)
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC)
|
||||
|
||||
await assert_clickhouse_records_in_bigquery(
|
||||
bigquery_client=bigquery_client,
|
||||
@ -523,7 +523,7 @@ async def test_bigquery_export_workflow(
|
||||
persons_to_export_created
|
||||
)
|
||||
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc)
|
||||
ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC)
|
||||
await assert_clickhouse_records_in_bigquery(
|
||||
bigquery_client=bigquery_client,
|
||||
clickhouse_client=clickhouse_client,
|
||||
@ -773,7 +773,7 @@ async def test_bigquery_export_workflow_handles_cancellation(ateam, bigquery_bat
|
||||
([{"test": 6.0}], [bigquery.SchemaField("test", "FLOAT64")]),
|
||||
([{"test": True}], [bigquery.SchemaField("test", "BOOL")]),
|
||||
([{"test": dt.datetime.now()}], [bigquery.SchemaField("test", "TIMESTAMP")]),
|
||||
([{"test": dt.datetime.now(tz=dt.timezone.utc)}], [bigquery.SchemaField("test", "TIMESTAMP")]),
|
||||
([{"test": dt.datetime.now(tz=dt.UTC)}], [bigquery.SchemaField("test", "TIMESTAMP")]),
|
||||
(
|
||||
[
|
||||
{
|
||||
@ -783,7 +783,7 @@ async def test_bigquery_export_workflow_handles_cancellation(ateam, bigquery_bat
|
||||
"test_float": 6.0,
|
||||
"test_bool": False,
|
||||
"test_timestamp": dt.datetime.now(),
|
||||
"test_timestamptz": dt.datetime.now(tz=dt.timezone.utc),
|
||||
"test_timestamptz": dt.datetime.now(tz=dt.UTC),
|
||||
}
|
||||
],
|
||||
[
|
||||
|
@ -99,7 +99,7 @@ async def assert_clickhouse_records_in_mock_server(
|
||||
if k == "properties":
|
||||
expected_record[k] = json.loads(v) if v else {}
|
||||
elif isinstance(v, dt.datetime):
|
||||
expected_record[k] = v.replace(tzinfo=dt.timezone.utc).isoformat()
|
||||
expected_record[k] = v.replace(tzinfo=dt.UTC).isoformat()
|
||||
else:
|
||||
expected_record[k] = v
|
||||
|
||||
@ -134,8 +134,8 @@ async def test_insert_into_http_activity_inserts_data_into_http_endpoint(
|
||||
* Are not duplicates of other events that are in the same batch.
|
||||
* Do not have an event name contained in the batch export's exclude_events.
|
||||
"""
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Generate a random team id integer. There's still a chance of a collision,
|
||||
# but it's very small.
|
||||
@ -211,8 +211,8 @@ async def test_insert_into_http_activity_throws_on_bad_http_status(
|
||||
clickhouse_client, activity_environment, http_config, exclude_events
|
||||
):
|
||||
"""Test that the insert_into_http_activity function throws on status >= 400"""
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Generate a random team id integer. There's still a chance of a collision,
|
||||
# but it's very small.
|
||||
|
@ -211,13 +211,13 @@ BATCH_EXPORT_ID = str(uuid.uuid4())
|
||||
"activity_environment",
|
||||
[
|
||||
ActivityInfo(
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}",
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}",
|
||||
workflow_type="s3-export",
|
||||
workflow_run_id=str(uuid.uuid4()),
|
||||
attempt=random.randint(1, 10000),
|
||||
),
|
||||
ActivityInfo(
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}",
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}",
|
||||
workflow_type="backfill-batch-export",
|
||||
workflow_run_id=str(uuid.uuid4()),
|
||||
attempt=random.randint(1, 10000),
|
||||
@ -262,13 +262,13 @@ async def test_batch_exports_logger_binds_activity_context(
|
||||
"activity_environment",
|
||||
[
|
||||
ActivityInfo(
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}",
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}",
|
||||
workflow_type="s3-export",
|
||||
workflow_run_id=str(uuid.uuid4()),
|
||||
attempt=random.randint(1, 10000),
|
||||
),
|
||||
ActivityInfo(
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}",
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}",
|
||||
workflow_type="backfill-batch-export",
|
||||
workflow_run_id=str(uuid.uuid4()),
|
||||
attempt=random.randint(1, 10000),
|
||||
@ -324,13 +324,13 @@ def log_entries_table():
|
||||
"activity_environment",
|
||||
[
|
||||
ActivityInfo(
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}",
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}",
|
||||
workflow_type="s3-export",
|
||||
workflow_run_id=str(uuid.uuid4()),
|
||||
attempt=random.randint(1, 10000),
|
||||
),
|
||||
ActivityInfo(
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}",
|
||||
workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}",
|
||||
workflow_type="backfill-batch-export",
|
||||
workflow_run_id=str(uuid.uuid4()),
|
||||
attempt=random.randint(1, 10000),
|
||||
|
@ -117,7 +117,7 @@ async def assert_clickhouse_records_in_postgres(
|
||||
if k in {"properties", "set", "set_once", "person_properties"} and v is not None:
|
||||
expected_record[k] = json.loads(v)
|
||||
elif isinstance(v, dt.datetime):
|
||||
expected_record[k] = v.replace(tzinfo=dt.timezone.utc)
|
||||
expected_record[k] = v.replace(tzinfo=dt.UTC)
|
||||
else:
|
||||
expected_record[k] = v
|
||||
|
||||
@ -201,8 +201,8 @@ async def test_insert_into_postgres_activity_inserts_data_into_postgres_table(
|
||||
development postgres instance for testing. But we setup and manage our own database
|
||||
to avoid conflicting with PostHog itself.
|
||||
"""
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Generate a random team id integer. There's still a chance of a collision,
|
||||
# but it's very small.
|
||||
|
@ -130,7 +130,7 @@ async def assert_clickhouse_records_in_redshfit(
|
||||
remove_escaped_whitespace_recursive(json.loads(v)), ensure_ascii=False
|
||||
)
|
||||
elif isinstance(v, dt.datetime):
|
||||
expected_record[k] = v.replace(tzinfo=dt.timezone.utc) # type: ignore
|
||||
expected_record[k] = v.replace(tzinfo=dt.UTC) # type: ignore
|
||||
else:
|
||||
expected_record[k] = v
|
||||
|
||||
@ -242,8 +242,8 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table(
|
||||
Once we have these events, we pass them to the assert_events_in_redshift function to check
|
||||
that they appear in the expected Redshift table.
|
||||
"""
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Generate a random team id integer. There's still a chance of a collision,
|
||||
# but it's very small.
|
||||
|
@ -85,8 +85,8 @@ async def test_start_batch_export_run(activity_environment, team, batch_export):
|
||||
|
||||
We check if a 'BatchExportRun' is created after the activity runs.
|
||||
"""
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc)
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC)
|
||||
|
||||
inputs = StartBatchExportRunInputs(
|
||||
team_id=team.id,
|
||||
@ -110,8 +110,8 @@ async def test_start_batch_export_run(activity_environment, team, batch_export):
|
||||
@pytest.mark.asyncio
|
||||
async def test_finish_batch_export_run(activity_environment, team, batch_export):
|
||||
"""Test the export_run_status activity."""
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc)
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC)
|
||||
|
||||
inputs = StartBatchExportRunInputs(
|
||||
team_id=team.id,
|
||||
@ -145,8 +145,8 @@ async def test_finish_batch_export_run(activity_environment, team, batch_export)
|
||||
@pytest.mark.asyncio
|
||||
async def test_finish_batch_export_run_pauses_if_reaching_failure_threshold(activity_environment, team, batch_export):
|
||||
"""Test if 'finish_batch_export_run' will pause a batch export upon reaching failure_threshold."""
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc)
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC)
|
||||
|
||||
inputs = StartBatchExportRunInputs(
|
||||
team_id=team.id,
|
||||
@ -183,8 +183,8 @@ async def test_finish_batch_export_run_pauses_if_reaching_failure_threshold(acti
|
||||
@pytest.mark.asyncio
|
||||
async def test_finish_batch_export_run_never_pauses_with_small_check_window(activity_environment, team, batch_export):
|
||||
"""Test if 'finish_batch_export_run' will never pause a batch export with a small check window."""
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc)
|
||||
start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC)
|
||||
end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC)
|
||||
|
||||
inputs = StartBatchExportRunInputs(
|
||||
team_id=team.id,
|
||||
|
@ -981,8 +981,8 @@ async def test_insert_into_snowflake_activity_inserts_data_into_snowflake_table(
|
||||
that they appear in the expected Snowflake table. This function runs against a real Snowflake
|
||||
instance, so the environment should be populated with the necessary credentials.
|
||||
"""
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc)
|
||||
data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC)
|
||||
data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC)
|
||||
|
||||
team_id = random.randint(1, 1000000)
|
||||
await generate_test_events_in_clickhouse(
|
||||
|
@ -1,7 +1,7 @@
|
||||
import operator
|
||||
import random
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from datetime import datetime, UTC
|
||||
from typing import NamedTuple, TypedDict
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
@ -862,7 +862,7 @@ async def test_delete_person_overrides_mutation_within_grace_period(
|
||||
activity_environment, events_to_override, person_overrides_data, clickhouse_client
|
||||
):
|
||||
"""Test we do not delete person overrides if they are within the grace period."""
|
||||
now = datetime.now(tz=timezone.utc)
|
||||
now = datetime.now(tz=UTC)
|
||||
override_timestamp = int(now.timestamp())
|
||||
team_id, person_override = next(iter(person_overrides_data.items()))
|
||||
distinct_id, _ = next(iter(person_override))
|
||||
@ -914,7 +914,7 @@ async def test_delete_person_overrides_mutation_within_grace_period(
|
||||
assert int(row[0]) == not_deleted_person["team_id"]
|
||||
assert row[1] == not_deleted_person["distinct_id"]
|
||||
assert UUID(row[2]) == UUID(not_deleted_person["person_id"])
|
||||
_timestamp = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
|
||||
_timestamp = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S").replace(tzinfo=UTC)
|
||||
# _timestamp is up to second precision
|
||||
assert _timestamp == now.replace(microsecond=0)
|
||||
|
||||
|
@ -23,12 +23,12 @@ from posthog.temporal.common.clickhouse import encode_clickhouse_data
|
||||
(("; DROP TABLE events --",), b"('; DROP TABLE events --')"),
|
||||
(("'a'); DROP TABLE events --",), b"('\\'a\\'); DROP TABLE events --')"),
|
||||
(
|
||||
dt.datetime(2023, 7, 14, 0, 0, 0, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 7, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
b"toDateTime('2023-07-14 00:00:00', 'UTC')",
|
||||
),
|
||||
(dt.datetime(2023, 7, 14, 0, 0, 0), b"toDateTime('2023-07-14 00:00:00')"),
|
||||
(
|
||||
dt.datetime(2023, 7, 14, 0, 0, 0, 5555, tzinfo=dt.timezone.utc),
|
||||
dt.datetime(2023, 7, 14, 0, 0, 0, 5555, tzinfo=dt.UTC),
|
||||
b"toDateTime64('2023-07-14 00:00:00.005555', 6, 'UTC')",
|
||||
),
|
||||
],
|
||||
|
@ -16,4 +16,4 @@ def to_isoformat(d: str | None) -> str | None:
|
||||
"""Parse a string and return it as default isoformatted."""
|
||||
if d is None:
|
||||
return None
|
||||
return dt.datetime.fromisoformat(d).replace(tzinfo=dt.timezone.utc).isoformat()
|
||||
return dt.datetime.fromisoformat(d).replace(tzinfo=dt.UTC).isoformat()
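A quick check of what this small helper returns, as a standalone sketch (input strings invented):

    import datetime as dt

    def to_isoformat(d: str | None) -> str | None:
        # Copy of the helper above so the example runs on its own.
        if d is None:
            return None
        return dt.datetime.fromisoformat(d).replace(tzinfo=dt.UTC).isoformat()

    assert to_isoformat("2023-04-25T15:00:00") == "2023-04-25T15:00:00+00:00"
    assert to_isoformat(None) is None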
|
||||
|
@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timezone
|
||||
from datetime import datetime, UTC
|
||||
|
||||
from posthog.datetime import (
|
||||
start_of_hour,
|
||||
@ -23,7 +23,7 @@ def test_start_of_day():
|
||||
|
||||
def test_end_of_day():
|
||||
assert end_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime(
|
||||
2023, 2, 8, 23, 59, 59, 999999, tzinfo=timezone.utc
|
||||
2023, 2, 8, 23, 59, 59, 999999, tzinfo=UTC
|
||||
)
|
||||
|
||||
|
||||
|
@ -1289,12 +1289,12 @@ async def wait_for_parallel_celery_group(task: Any, expires: Optional[datetime.d
|
||||
default_expires = datetime.timedelta(minutes=5)
|
||||
|
||||
if not expires:
|
||||
expires = datetime.datetime.now(tz=datetime.timezone.utc) + default_expires
|
||||
expires = datetime.datetime.now(tz=datetime.UTC) + default_expires
|
||||
|
||||
sleep_generator = sleep_time_generator()
|
||||
|
||||
while not task.ready():
|
||||
if datetime.datetime.now(tz=datetime.timezone.utc) > expires:
|
||||
if datetime.datetime.now(tz=datetime.UTC) > expires:
|
||||
child_states = []
|
||||
child: AsyncResult
|
||||
children = task.children or []
|
||||
|
@ -27,7 +27,7 @@ def get_or_create_workspace(team_id: int):
|
||||
workspace_id = create_workspace(team_id)
|
||||
team.external_data_workspace_id = workspace_id
|
||||
# start tracking from now
|
||||
team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.timezone.utc)
|
||||
team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.UTC)
|
||||
team.save()
|
||||
|
||||
return team.external_data_workspace_id
|
||||
|
@ -83,7 +83,7 @@ RUN corepack enable && \
|
||||
#
|
||||
# ---------------------------------------------------------
|
||||
#
|
||||
FROM python:3.10.10-slim-bullseye AS posthog-build
|
||||
FROM python:3.11.9-slim-bullseye AS posthog-build
|
||||
WORKDIR /code
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
@ -99,10 +99,11 @@ RUN apt-get update && \
|
||||
"libxmlsec1" \
|
||||
"libxmlsec1-dev" \
|
||||
"libffi-dev" \
|
||||
"zlib1g-dev" \
|
||||
"pkg-config" \
|
||||
&& \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
pip install -r requirements.txt --compile --no-cache-dir --target=/python-runtime
|
||||
PIP_NO_BINARY=lxml,xmlsec pip install -r requirements.txt --compile --no-cache-dir --target=/python-runtime
|
||||
|
||||
ENV PATH=/python-runtime/bin:$PATH \
|
||||
PYTHONPATH=/python-runtime
|
||||
@ -139,104 +140,7 @@ RUN apt-get update && \
|
||||
#
|
||||
# ---------------------------------------------------------
|
||||
#
|
||||
# Build a version of the unit docker image for python3.10
|
||||
# We can remove this step once we are on python3.11
|
||||
FROM unit:python3.11 as unit
|
||||
FROM python:3.10-bullseye as unit-131-python-310
|
||||
|
||||
# copied from https://github.com/nginx/unit/blob/master/pkg/docker/Dockerfile.python3.11
|
||||
LABEL org.opencontainers.image.title="Unit (python3.10)"
|
||||
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
|
||||
LABEL org.opencontainers.image.url="https://unit.nginx.org"
|
||||
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
|
||||
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
|
||||
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
|
||||
LABEL org.opencontainers.image.version="1.31.1"
|
||||
|
||||
RUN set -ex \
|
||||
&& savedAptMark="$(apt-mark showmanual)" \
|
||||
&& apt-get update \
|
||||
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
|
||||
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
|
||||
&& mkdir -p /usr/src/unit \
|
||||
&& cd /usr/src/unit \
|
||||
&& hg clone -u 1.31.1-1 https://hg.nginx.org/unit \
|
||||
&& cd unit \
|
||||
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
|
||||
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
|
||||
&& CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \
|
||||
&& LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \
|
||||
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
|
||||
--statedir=/var/lib/unit \
|
||||
--control=unix:/var/run/control.unit.sock \
|
||||
--runstatedir=/var/run \
|
||||
--pid=/var/run/unit.pid \
|
||||
--logdir=/var/log \
|
||||
--log=/var/log/unit.log \
|
||||
--tmpdir=/var/tmp \
|
||||
--user=unit \
|
||||
--group=unit \
|
||||
--openssl \
|
||||
--libdir=/usr/lib/$DEB_HOST_MULTIARCH" \
|
||||
&& CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \
|
||||
--njs" \
|
||||
&& make -j $NCPU -C pkg/contrib .njs \
|
||||
&& export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \
|
||||
&& ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
|
||||
&& make -j $NCPU unitd \
|
||||
&& install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \
|
||||
&& make clean \
|
||||
&& ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \
|
||||
&& make -j $NCPU unitd \
|
||||
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
|
||||
&& make clean \
|
||||
&& /bin/true \
|
||||
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
|
||||
&& ./configure python --config=/usr/local/bin/python3-config \
|
||||
&& make -j $NCPU python3-install \
|
||||
&& make clean \
|
||||
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \
|
||||
&& ./configure python --config=/usr/local/bin/python3-config \
|
||||
&& make -j $NCPU python3-install \
|
||||
&& cd \
|
||||
&& rm -rf /usr/src/unit \
|
||||
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
|
||||
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
|
||||
done \
|
||||
&& apt-mark showmanual | xargs apt-mark auto > /dev/null \
|
||||
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
|
||||
&& /bin/true \
|
||||
&& mkdir -p /var/lib/unit/ \
|
||||
&& mkdir -p /docker-entrypoint.d/ \
|
||||
&& groupadd --gid 998 unit \
|
||||
&& useradd \
|
||||
--uid 998 \
|
||||
--gid unit \
|
||||
--no-create-home \
|
||||
--home /nonexistent \
|
||||
--comment "unit user" \
|
||||
--shell /bin/false \
|
||||
unit \
|
||||
&& apt-get update \
|
||||
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
|
||||
&& apt-get purge -y --auto-remove build-essential \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& rm -f /requirements.apt \
|
||||
&& ln -sf /dev/stdout /var/log/unit.log
|
||||
|
||||
COPY --from=unit /usr/local/bin/docker-entrypoint.sh /usr/local/bin/
|
||||
COPY --from=unit /usr/share/unit/welcome/welcome.* /usr/share/unit/welcome/
|
||||
|
||||
STOPSIGNAL SIGTERM
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||
EXPOSE 80
|
||||
CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"]
|
||||
|
||||
#
|
||||
# ---------------------------------------------------------
|
||||
#
|
||||
FROM unit-131-python-310
|
||||
FROM unit:python3.11
|
||||
WORKDIR /code
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
@ -265,7 +169,7 @@ RUN apt-get install -y --no-install-recommends \
|
||||
|
||||
# Install and use a non-root user.
|
||||
RUN groupadd -g 1000 posthog && \
|
||||
useradd -u 999 -r -g posthog posthog && \
|
||||
useradd -r -g posthog posthog && \
|
||||
chown posthog:posthog /code
|
||||
USER posthog
|
||||
|
||||
|
@ -1,9 +1,9 @@
[project]
requires-python = ">=3.10"
requires-python = ">=3.11"

[tool.black]
line-length = 120
target-version = ['py310']
target-version = ['py311']

[tool.isort]
profile = "black"
|
||||
|
@ -11,7 +11,7 @@
|
||||
|
||||
-c requirements.txt
|
||||
|
||||
ruff~=0.4.3
|
||||
ruff~=0.4.10
|
||||
mypy~=1.10.0
|
||||
mypy-baseline~=0.7.0
|
||||
mypy-extensions==1.0.0
|
||||
|
@ -288,8 +288,7 @@ ruamel-yaml==0.18.6
|
||||
# via prance
|
||||
ruamel-yaml-clib==0.2.8
|
||||
# via ruamel-yaml
|
||||
ruff==0.4.3
|
||||
# via -r requirements-dev.in
|
||||
ruff==0.4.10
|
||||
six==1.16.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
|
@ -29,7 +29,7 @@ django-redis==5.2.0
|
||||
django-statsd==2.5.2
|
||||
django-structlog==2.1.3
|
||||
django-revproxy==0.12.0
|
||||
djangorestframework==3.14.0
|
||||
djangorestframework==3.15.1
|
||||
djangorestframework-csv==2.1.1
|
||||
djangorestframework-dataclasses==1.2.0
|
||||
django-fernet-encrypted-fields==0.1.3
|
||||
|
@ -198,7 +198,7 @@ django-structlog==2.1.3
|
||||
# via -r requirements.in
|
||||
django-two-factor-auth==1.14.0
|
||||
# via -r requirements.in
|
||||
djangorestframework==3.14.0
|
||||
djangorestframework==3.15.1
|
||||
# via
|
||||
# -r requirements.in
|
||||
# djangorestframework-csv
|
||||
@ -475,7 +475,6 @@ pytz==2023.3
|
||||
# via
|
||||
# -r requirements.in
|
||||
# clickhouse-driver
|
||||
# djangorestframework
|
||||
# dlt
|
||||
# infi-clickhouse-orm
|
||||
# pandas
|
||||
|
@ -39,7 +39,7 @@
|
||||
},
|
||||
"applications": {
|
||||
"posthog": {
|
||||
"type": "python 3.10",
|
||||
"type": "python 3.11",
|
||||
"processes": $NGINX_UNIT_APP_PROCESSES,
|
||||
"working_directory": "/code",
|
||||
"path": ".",
|
||||
@ -51,7 +51,7 @@
|
||||
}
|
||||
},
|
||||
"metrics": {
|
||||
"type": "python 3.10",
|
||||
"type": "python 3.11",
|
||||
"processes": 1,
|
||||
"working_directory": "/code/bin",
|
||||
"path": ".",
|
||||
|