mirror of https://github.com/PostHog/posthog.git synced 2024-12-01 12:21:02 +01:00
posthog/ee/clickhouse/util.py
James Greenhill 751a35cd35
Make DDLs more friendly towards running on a cluster and cleanups (#5091)
* Make DDLs more friendly towards running on a cluster

* Use primary CLICKHOUSE host for migrations and DDL

* loose ends on person kafka create

* posthog -> cluster typo

* add cluster to KAFKA create for plugin logs

* Feed the type monster

* clusterfy local clickhouse

* test docker-compose backed github action

* run just clickhouse and postgres from docker-compose

* move option to between up and <services>

* posthog all the things

* suggest tests run on cluster

* posthog cluster for ci

* use deploy path for docker-compose

* fix for a clickhouse bug 🐛

* complete CH bug fixes

* 5439 the github actions pg configs

* remove CLICKHOUSE_DATABASE (handled automatically)

* update DATABASE_URL for code quality checks

* Missed a few DDLs on Person

* 5439 -> 5432 to please the people

* cleanup persons and use f strings <3 f strings

* remove auto parens

* Update requirements to use our fork of infi.clickhouse_orm

* fix person.py formatting

* Include boilerplate macros for a cluster
2021-07-15 17:20:37 -07:00
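
The commit above is about making the ClickHouse DDLs cluster-aware. As a rough illustration of that pattern (not the repository's actual definitions), a table template can be written as an f-string that appends an ON CLUSTER clause, so the same CREATE/DROP statements work on a single local node and on a multi-node deployment; the setting name CLICKHOUSE_CLUSTER and the simplified schema below are assumptions made for the sketch.

from django.conf import settings

# Hypothetical, simplified sketch of a cluster-aware DDL template.
# CLICKHOUSE_CLUSTER and the column list are illustrative assumptions,
# not the definitions used in ee/clickhouse/sql at this commit.
EVENTS_TABLE = "events"

EVENTS_TABLE_SQL = f"""
CREATE TABLE IF NOT EXISTS {EVENTS_TABLE} ON CLUSTER {settings.CLICKHOUSE_CLUSTER}
(
    uuid UUID,
    event VARCHAR,
    timestamp DateTime64(6, 'UTC')
) ENGINE = ReplacingMergeTree()
ORDER BY (event, uuid)
"""

DROP_EVENTS_TABLE_SQL = (
    f"DROP TABLE IF EXISTS {EVENTS_TABLE} ON CLUSTER {settings.CLICKHOUSE_CLUSTER}"
)

ON CLUSTER tells ClickHouse to run the statement on every node of the named cluster, which is why the commit also mentions adding boilerplate cluster macros and "clusterfying" the local docker-compose ClickHouse used in CI.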

78 lines · 2.5 KiB · Python

from contextlib import contextmanager

from clickhouse_driver.errors import ServerException
from django.db import DEFAULT_DB_ALIAS

from ee.clickhouse.client import sync_execute
from ee.clickhouse.sql.cohort import CREATE_COHORTPEOPLE_TABLE_SQL, DROP_COHORTPEOPLE_TABLE_SQL
from ee.clickhouse.sql.events import DROP_EVENTS_TABLE_SQL, EVENTS_TABLE_SQL
from ee.clickhouse.sql.person import (
    DROP_PERSON_DISTINCT_ID_TABLE_SQL,
    DROP_PERSON_STATIC_COHORT_TABLE_SQL,
    DROP_PERSON_TABLE_SQL,
    PERSON_STATIC_COHORT_TABLE_SQL,
    PERSONS_DISTINCT_ID_TABLE_SQL,
    PERSONS_TABLE_SQL,
)
from ee.clickhouse.sql.session_recording_events import (
    DROP_SESSION_RECORDING_EVENTS_TABLE_SQL,
    SESSION_RECORDING_EVENTS_TABLE_SQL,
)


class ClickhouseTestMixin:
    def tearDown(self):
        # Drop and recreate every ClickHouse table so each test starts from a clean slate.
        try:
            self._destroy_event_tables()
            self._destroy_person_tables()
            self._destroy_session_recording_tables()
            self._destroy_cohortpeople_table()
        except ServerException as e:
            print(e)
            pass

        try:
            self._create_event_tables()
            self._create_person_tables()
            self._create_session_recording_tables()
            self._create_cohortpeople_table()
        except ServerException as e:
            print(e)
            pass

    def _destroy_person_tables(self):
        sync_execute(DROP_PERSON_TABLE_SQL)
        sync_execute(DROP_PERSON_DISTINCT_ID_TABLE_SQL)
        sync_execute(DROP_PERSON_STATIC_COHORT_TABLE_SQL)

    def _create_person_tables(self):
        sync_execute(PERSONS_TABLE_SQL)
        sync_execute(PERSONS_DISTINCT_ID_TABLE_SQL)
        sync_execute(PERSON_STATIC_COHORT_TABLE_SQL)

    def _destroy_session_recording_tables(self):
        sync_execute(DROP_SESSION_RECORDING_EVENTS_TABLE_SQL)

    def _create_session_recording_tables(self):
        sync_execute(SESSION_RECORDING_EVENTS_TABLE_SQL)

    def _destroy_event_tables(self):
        sync_execute(DROP_EVENTS_TABLE_SQL)

    def _create_event_tables(self):
        sync_execute(EVENTS_TABLE_SQL)

    def _destroy_cohortpeople_table(self):
        sync_execute(DROP_COHORTPEOPLE_TABLE_SQL)

    def _create_cohortpeople_table(self):
        sync_execute(CREATE_COHORTPEOPLE_TABLE_SQL)

    @contextmanager
    def _assertNumQueries(self, func):
        yield

    # Ignore assertNumQueries in clickhouse tests
    def assertNumQueries(self, num, func=None, *args, using=DEFAULT_DB_ALIAS, **kwargs):
        return self._assertNumQueries(func)
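
For context on how this mixin is consumed, here is a hypothetical usage sketch (the test class, table name, and queries are illustrative, not taken from the repository): mixing ClickhouseTestMixin into a Django test case means every test method runs against freshly recreated ClickHouse tables, and Django's assertNumQueries becomes a no-op for these tests.

# Hypothetical usage sketch; TestEventQueries and its assertions are not from the repo.
from django.test import TestCase

from ee.clickhouse.client import sync_execute
from ee.clickhouse.util import ClickhouseTestMixin


class TestEventQueries(ClickhouseTestMixin, TestCase):
    def test_starts_with_empty_events_table(self):
        # The mixin's tearDown() drops and recreates the tables after each test,
        # so this count is not affected by events inserted in other tests.
        rows = sync_execute("SELECT count() FROM events")
        self.assertEqual(rows[0][0], 0)

    def test_assert_num_queries_is_a_no_op(self):
        # assertNumQueries is overridden with a no-op context manager, since
        # ClickHouse queries do not go through Django's connection tracking.
        with self.assertNumQueries(0):
            sync_execute("SELECT 1")

Listing the mixin before TestCase keeps its tearDown() first in the method resolution order, which is what triggers the drop-and-recreate cycle after each test.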