Mirror of https://github.com/PostHog/posthog.git, synced 2024-11-21 13:39:22 +01:00
Yeetcode (#7830)
* remove django query tests
* remove funnel and caching check
* remove ee available var
* remove is_clickhouse_enabled
* remove abstract tests
* change primary db
* missing func
* unnecessary test
* try new e2e ci
* func arg
* remove param
* ci
* remove plugins in docker
* background
* change url
* add kafka url
* add step
* update docker
* primary docker file
* mount volumes correctly
* one more
* remove postgres tests
* remove foss
* remove all is_clickhouse_enabled
* remove irrelevant test
* remove extra arg
* remove var
* arg
* add foss comment
* add foss comment
* plugin server config
* Update posthog/utils.py

Co-authored-by: Karl-Aksel Puulmann <macobo@users.noreply.github.com>

* migrate commands
* comment
* add clickhouse to pg tests
* change script
* change ordering
* deepsource
* restore foss tests
* test remove KAFKA_ENABLED from CI
* always wait
* up proper resources
* use one conftest
* restore
* remove unnecessary tests
* remove more pg
* log event tests
* fix more tests
* more tests
* type
* fix more tests
* last test
* typing
* account for shared class setup
* temp test cloud
* restore cloud master checkout
* adjust contexts
* backwards

Co-authored-by: Karl-Aksel Puulmann <macobo@users.noreply.github.com>
Co-authored-by: yakkomajuri <yakko.majuri@gmail.com>
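Taken together, the changes below retire ee/docker-compose.ch.yml and the is_clickhouse_enabled gate: ClickHouse becomes the primary database everywhere. A minimal sketch of the resulting local flow, assembled from commands that appear verbatim in the diff below (the ordering is illustrative, not an official runbook):

```bash
#!/bin/bash
# Boot the consolidated dev stack from the single top-level compose file:
docker-compose -f docker-compose.dev.yml up -d db clickhouse zookeeper kafka redis

# Block until Kafka and ClickHouse accept connections (script shipped in this repo):
bin/check_kafka_clickhouse_up

# Postgres and ClickHouse migrations are now both unconditional steps:
python manage.py migrate
python manage.py migrate_clickhouse
```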
This commit is contained in:
parent
cbb9b22d0b
commit
a71e899605
@@ -59,9 +59,9 @@ services:
       - '9009:9009'
     volumes:
       - ../ee/idl:/idl
-      - ../docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
-      - ../docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
-      - ../docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
+      - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
+      - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
+      - ./docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
       - clickhouse-data:/var/lib/clickhouse/data

   # Needed for 1. clickhouse distributed queries 2. kafka replication
3  .github/workflows/benchmark.yml  vendored
@@ -22,7 +22,6 @@ jobs:
       SAML_DISABLED: '1'
       DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog_test'
       REDIS_URL: 'redis://localhost'
-      PRIMARY_DB: 'clickhouse'
       DEBUG: '1'
       CLICKHOUSE_DATABASE: posthog
       CLICKHOUSE_HOST: ${{ secrets.BENCHMARKS_CLICKHOUSE_HOST }}
@@ -91,7 +90,7 @@ jobs:

       - name: Set up PostHog
         run: |
-          ./bin/docker-migrate & wait
+          python manage.py migrate & wait
           python manage.py setup_dev --no-data

       - name: Configure benchmarks
|
18
.github/workflows/ci-backend.yml
vendored
18
.github/workflows/ci-backend.yml
vendored
@@ -156,8 +156,8 @@ jobs:

       - name: Start stack with Docker Compose
         run: |
-          docker-compose -f ee/docker-compose.ch.yml down
-          docker-compose -f ee/docker-compose.ch.yml up -d ${{ matrix.foss && 'db' || 'db clickhouse zookeeper kafka' }} &
+          docker-compose -f docker-compose.dev.yml down
+          docker-compose -f docker-compose.dev.yml up -d db clickhouse zookeeper kafka &

       - name: Set up Python
         uses: actions/setup-python@v2
@@ -209,26 +209,20 @@ jobs:
           touch frontend/dist/shared_dashboard.html

       - name: Wait for Clickhouse & Kafka
         if: ${{ !matrix.foss }}
         run: bin/check_kafka_clickhouse_up

       - name: Run FOSS tests
         if: ${{ matrix.foss }}
         run: |
           rm -rf ee/
           pytest -m "not ee" posthog/ --cov --cov-report=xml:coverage-postgres.xml

       - name: Run SAML tests
         if: ${{ matrix.saml }}
-        env:
-          PRIMARY_DB: 'clickhouse'
         run: |
           pytest ee -m "saml_only"

       - name: Run ee/ tests
         if: ${{ matrix.ee }}
-        env:
-          PRIMARY_DB: 'clickhouse'
         run: |
           pytest ee -m "not saml_only" \
             --splits ${{ matrix.concurrency }} \
@@ -239,8 +233,6 @@ jobs:

       - name: Run pytest.mark.ee tests
         if: ${{ matrix.ee && matrix.group == '1' }}
-        env:
-          PRIMARY_DB: 'clickhouse'
         run: |
           pytest posthog -m "ee"

@@ -282,8 +274,8 @@ jobs:
           cat requirements.txt >> deploy/requirements.txt
       - name: Start stack with Docker Compose
         run: |
-          docker-compose -f deploy/ee/docker-compose.ch.yml down
-          docker-compose -f deploy/ee/docker-compose.ch.yml up -d db clickhouse zookeeper kafka &
+          docker-compose -f deploy/docker-compose.dev.yml down
+          docker-compose -f deploy/docker-compose.dev.yml up -d db clickhouse zookeeper kafka &
       - name: Set up Python 3.8
         uses: actions/setup-python@v2
         with:
@@ -351,8 +343,6 @@ jobs:
           touch frontend/dist/shared_dashboard.html

       - name: Run cloud tests (posthog-cloud)
-        env:
-          PRIMARY_DB: 'clickhouse'
         run: |
           source .env.template
           cd deploy
|
28
.github/workflows/ci-plugin-server.yml
vendored
28
.github/workflows/ci-plugin-server.yml
vendored
@@ -60,11 +60,22 @@ jobs:
           --health-retries 5
     env:
       REDIS_URL: 'redis://localhost'
+      CLICKHOUSE_HOST: 'localhost'
+      CLICKHOUSE_DATABASE: 'posthog_test'
+      KAFKA_HOSTS: 'kafka:9092'

     steps:
       - name: Check out Django server for database setup
         uses: actions/checkout@v2

+      - name: Fix Kafka Hostname
+        run: |
+          sudo bash -c 'echo "127.0.0.1 kafka zookeeper" >> /etc/hosts'
+          ping -c 1 kafka
+          ping -c 1 zookeeper
+      - name: Start Kafka, ClickHouse, Zookeeper
+        run: docker-compose -f docker-compose.dev.yml up -d zookeeper kafka clickhouse

       - name: Set up Python
         uses: actions/setup-python@v2
         with:
@@ -130,11 +141,22 @@ jobs:
           --health-retries 5
     env:
       REDIS_URL: 'redis://localhost'
+      CLICKHOUSE_HOST: 'localhost'
+      CLICKHOUSE_DATABASE: 'posthog_test'
+      KAFKA_HOSTS: 'kafka:9092'

     steps:
       - name: Check out Django server for database setup
         uses: actions/checkout@v2

+      - name: Fix Kafka Hostname
+        run: |
+          sudo bash -c 'echo "127.0.0.1 kafka zookeeper" >> /etc/hosts'
+          ping -c 1 kafka
+          ping -c 1 zookeeper
+      - name: Start Kafka, ClickHouse, Zookeeper
+        run: docker-compose -f docker-compose.dev.yml up -d zookeeper kafka clickhouse

       - name: Set up Python
         uses: actions/setup-python@v2
         with:
@@ -215,7 +237,7 @@ jobs:
           ping -c 1 kafka
           ping -c 1 zookeeper
       - name: Start Kafka, ClickHouse, Zookeeper
-        run: docker-compose -f ee/docker-compose.ch.yml up -d zookeeper kafka clickhouse
+        run: docker-compose -f docker-compose.dev.yml up -d zookeeper kafka clickhouse

       - name: Set up Python
         uses: actions/setup-python@v2
@@ -245,7 +267,6 @@ jobs:
         env:
           SECRET_KEY: 'abcdef' # unsafe - for testing only
           DATABASE_URL: 'postgres://postgres:postgres@localhost:5432/posthog'
-          PRIMARY_DB: 'clickhouse'
           TEST: 'true'
         run: python manage.py setup_test_environment

@@ -298,7 +319,7 @@ jobs:
           ping -c 1 kafka
           ping -c 1 zookeeper
       - name: Start Kafka, ClickHouse, Zookeeper
-        run: docker-compose -f ee/docker-compose.ch.yml up -d zookeeper kafka clickhouse
+        run: docker-compose -f docker-compose.dev.yml up -d zookeeper kafka clickhouse

       - name: Set up Python
         uses: actions/setup-python@v2
@@ -328,7 +349,6 @@ jobs:
         env:
           SECRET_KEY: 'abcdef' # unsafe - for testing only
           DATABASE_URL: 'postgres://postgres:postgres@localhost:5432/posthog'
-          PRIMARY_DB: 'clickhouse'
           TEST: 'true'
         run: python manage.py setup_test_environment
|
19
.github/workflows/e2e.yml
vendored
19
.github/workflows/e2e.yml
vendored
@@ -17,7 +17,6 @@ env:
   EMAIL_HOST: 'email.test.posthog.net' # used to test password resets
   SITE_URL: 'test.posthog.net' # used to test password resets
   NO_RESTART_LOOP: 1
-  PRIMARY_DB: clickhouse
   CLICKHOUSE_SECURE: 0

 jobs:
@@ -50,10 +49,9 @@ jobs:
           yarn add cypress@6.7.0 cypress-terminal-report@2.1.0 @cypress/react@4.16.4 @cypress/webpack-preprocessor@5.7.0
           cd plugin-server
           yarn install --frozen-lockfile

   cypress:
     name: Cypress E2E tests (${{ strategy.job-index }})
     if: ${{ github.ref != 'refs/heads/master' }} # Don't run on master, we only care about node_modules cache
     runs-on: ubuntu-18.04
     needs: [cypress_prep]

@@ -70,9 +68,8 @@ jobs:

       - name: Start stack with Docker Compose
         run: |
-          docker-compose -f ee/docker-compose.ch.yml down
-          docker-compose -f ee/docker-compose.ch.yml up -d db clickhouse zookeeper kafka redis &
+          docker-compose -f docker-compose.dev.yml down
+          docker-compose -f docker-compose.dev.yml up -d db clickhouse zookeeper kafka redis &
       - name: Add kafka host to /etc/hosts for kafka connectivity
         run: sudo echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts

@@ -107,8 +104,8 @@ jobs:
             ${{ runner.os }}-cypress-node-modules-3-
       - name: Yarn install deps
         # NOTE: we always try to run yarn install, as we're using a cache
         # from the prep phase that hasn't been proven to be correct. We
         # should still get some cache benefits.
         run: |
           yarn install --frozen-lockfile
           yarn add cypress@6.7.0 cypress-terminal-report@2.1.0 @cypress/react@4.16.4 @cypress/webpack-preprocessor@5.7.0
@@ -123,11 +120,9 @@ jobs:
           yarn build
       - name: Boot PostHog
         run: |
-          python manage.py collectstatic --noinput &
-          ./bin/docker-migrate &
-          wait
+          python manage.py collectstatic --noinput
+          bin/check_kafka_clickhouse_up
-          python manage.py migrate_clickhouse
+          ./bin/migrate
           python manage.py setup_dev
           mkdir -p /tmp/logs
           ./bin/docker-worker &> /tmp/logs/worker.txt &
@@ -3,10 +3,11 @@

 name: Docker FOSS release image

-on:
-    push:
-        tags:
-            - '*.**'
+# TODO: Don't sync until ee code relocation is done
+# on:
+#     push:
+#         tags:
+#             - '*.**'

 jobs:
     build-push:
11  .github/workflows/foss-sync.yml  vendored
@@ -1,10 +1,11 @@
 name: Sync PostHog FOSS

-on:
-    push:
-        branches:
-            - master
-            - main
+# TODO: Don't sync until the ee code relocation is done
+# on:
+#     push:
+#         branches:
+#             - master
+#             - main

 jobs:
     repo-sync:
@@ -1,6 +1,6 @@
 #!/bin/bash
 set -e

-./bin/docker-migrate
+./bin/migrate
 ./bin/docker-worker &
 ./bin/docker-server
2  bin/docker-migrate  (Executable file → Normal file)
@@ -1,4 +1,4 @@
#!/bin/bash
set -e

python manage.py migrate
@@ -25,8 +25,8 @@ export PGPASSWORD="${PGPASSWORD:=posthog}"
 export PGPORT="${PGPORT:=5432}"
 export DATABASE_URL="postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:${PGPORT}/${DATABASE}"

-nc -z localhost 9092 || ( echo -e "\033[0;31mKafka isn't running. Please run\n\tdocker compose -f ee/docker-compose.ch.arm64.yml up zookeeper kafka clickhouse db redis\nI'll wait while you do that.\033[0m" ; bin/check_kafka_clickhouse_up )
-wget -nv -t1 --spider 'http://localhost:8123/' || ( echo -e "\033[0;31mClickhouse isn't running. Please run\n\tdocker compose -f ee/docker-compose.ch.arm64.yml up zookeeper kafka clickhouse db redis.\nI'll wait while you do that.\033[0m" ; bin/check_kafka_clickhouse_up )
+nc -z localhost 9092 || ( echo -e "\033[0;31mKafka isn't running. Please run\n\tdocker compose -f docker-compose.arm64.yml up zookeeper kafka clickhouse db redis\nI'll wait while you do that.\033[0m" ; bin/check_kafka_clickhouse_up )
+wget -nv -t1 --spider 'http://localhost:8123/' || ( echo -e "\033[0;31mClickhouse isn't running. Please run\n\tdocker compose -f docker-compose.arm64.yml up zookeeper kafka clickhouse db redis.\nI'll wait while you do that.\033[0m" ; bin/check_kafka_clickhouse_up )


trap "trap - SIGTERM && yarn remove cypress cypress-terminal-report @cypress/react @cypress/webpack-preprocessor && kill -- -$$" SIGINT SIGTERM EXIT
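The hint printed by this check follows the compose-file rename; if the probes on ports 9092 (Kafka) and 8123 (ClickHouse) fail, the suggested recovery command, taken verbatim from the updated script, is:

```bash
# Start the dependencies the check script waits for:
docker compose -f docker-compose.arm64.yml up zookeeper kafka clickhouse db redis
```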
137  conftest.py
@@ -1,4 +1,141 @@
import pytest
from infi.clickhouse_orm import Database

from ee.clickhouse.client import sync_execute
from ee.clickhouse.sql.dead_letter_queue import (
    DEAD_LETTER_QUEUE_TABLE_MV_SQL,
    KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL,
    TRUNCATE_DEAD_LETTER_QUEUE_TABLE_MV_SQL,
)
from posthog.settings import (
    CLICKHOUSE_DATABASE,
    CLICKHOUSE_HTTP_URL,
    CLICKHOUSE_PASSWORD,
    CLICKHOUSE_USER,
    CLICKHOUSE_VERIFY,
)
from posthog.test.base import TestMixin


def create_clickhouse_tables(num_tables: int):
    # Reset clickhouse tables to default before running test
    # Mostly so that test runs locally work correctly
    from ee.clickhouse.sql.cohort import CREATE_COHORTPEOPLE_TABLE_SQL
    from ee.clickhouse.sql.dead_letter_queue import DEAD_LETTER_QUEUE_TABLE_SQL
    from ee.clickhouse.sql.events import EVENTS_TABLE_SQL
    from ee.clickhouse.sql.groups import GROUPS_TABLE_SQL
    from ee.clickhouse.sql.person import (
        PERSON_DISTINCT_ID2_TABLE_SQL,
        PERSON_STATIC_COHORT_TABLE_SQL,
        PERSONS_DISTINCT_ID_TABLE_SQL,
        PERSONS_TABLE_SQL,
    )
    from ee.clickhouse.sql.plugin_log_entries import PLUGIN_LOG_ENTRIES_TABLE_SQL
    from ee.clickhouse.sql.session_recording_events import SESSION_RECORDING_EVENTS_TABLE_SQL

    # REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY!
    TABLES_TO_CREATE_DROP = [
        EVENTS_TABLE_SQL(),
        PERSONS_TABLE_SQL(),
        PERSONS_DISTINCT_ID_TABLE_SQL(),
        PERSON_DISTINCT_ID2_TABLE_SQL(),
        PERSON_STATIC_COHORT_TABLE_SQL(),
        SESSION_RECORDING_EVENTS_TABLE_SQL(),
        PLUGIN_LOG_ENTRIES_TABLE_SQL(),
        CREATE_COHORTPEOPLE_TABLE_SQL(),
        KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL,
        DEAD_LETTER_QUEUE_TABLE_SQL(),
        DEAD_LETTER_QUEUE_TABLE_MV_SQL,
        GROUPS_TABLE_SQL(),
    ]

    if num_tables == len(TABLES_TO_CREATE_DROP):
        return

    for item in TABLES_TO_CREATE_DROP:
        sync_execute(item)


def reset_clickhouse_tables():
    # Reset clickhouse tables to default before running test
    # Mostly so that test runs locally work correctly
    from ee.clickhouse.sql.cohort import TRUNCATE_COHORTPEOPLE_TABLE_SQL
    from ee.clickhouse.sql.dead_letter_queue import TRUNCATE_DEAD_LETTER_QUEUE_TABLE_SQL
    from ee.clickhouse.sql.events import TRUNCATE_EVENTS_TABLE_SQL
    from ee.clickhouse.sql.groups import TRUNCATE_GROUPS_TABLE_SQL
    from ee.clickhouse.sql.person import (
        TRUNCATE_PERSON_DISTINCT_ID2_TABLE_SQL,
        TRUNCATE_PERSON_DISTINCT_ID_TABLE_SQL,
        TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL,
        TRUNCATE_PERSON_TABLE_SQL,
    )
    from ee.clickhouse.sql.plugin_log_entries import TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL
    from ee.clickhouse.sql.session_recording_events import TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL

    # REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY!
    TABLES_TO_CREATE_DROP = [
        TRUNCATE_EVENTS_TABLE_SQL,
        TRUNCATE_PERSON_TABLE_SQL,
        TRUNCATE_PERSON_DISTINCT_ID_TABLE_SQL,
        TRUNCATE_PERSON_DISTINCT_ID2_TABLE_SQL,
        TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL,
        TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL,
        TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL,
        TRUNCATE_COHORTPEOPLE_TABLE_SQL,
        TRUNCATE_DEAD_LETTER_QUEUE_TABLE_SQL,
        TRUNCATE_DEAD_LETTER_QUEUE_TABLE_MV_SQL,
        TRUNCATE_GROUPS_TABLE_SQL,
    ]

    for item in TABLES_TO_CREATE_DROP:
        sync_execute(item)


@pytest.fixture(scope="package")
def django_db_setup(django_db_setup, django_db_keepdb):
    database = Database(
        CLICKHOUSE_DATABASE,
        db_url=CLICKHOUSE_HTTP_URL,
        username=CLICKHOUSE_USER,
        password=CLICKHOUSE_PASSWORD,
        verify_ssl_cert=CLICKHOUSE_VERIFY,
    )

    if not django_db_keepdb:
        try:
            database.drop_database()
        except:
            pass

    database.create_database()  # Create database if it doesn't exist
    table_count = sync_execute(
        "SELECT count() FROM system.tables WHERE database = %(database)s", {"database": CLICKHOUSE_DATABASE}
    )[0][0]
    create_clickhouse_tables(table_count)

    yield

    if django_db_keepdb:
        reset_clickhouse_tables()
    else:
        try:
            database.drop_database()
        except:
            pass


@pytest.fixture
def base_test_mixin_fixture():
    kls = TestMixin()
    kls.setUp()
    kls.setUpTestData()

    return kls


@pytest.fixture
def team(base_test_mixin_fixture):
    return base_test_mixin_fixture.team


# :TRICKY: Integrate syrupy with unittest test cases
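For orientation (not part of the diff itself): pytest-django drives the django_db_keepdb flag consumed by the fixture above, so the two teardown branches correspond to running with or without --reuse-db:

```bash
# With --reuse-db, pytest-django sets django_db_keepdb, so the fixture only
# truncates the ClickHouse tables between runs instead of dropping the database:
pytest posthog/ --reuse-db

# Without it, the ClickHouse database is dropped and recreated from scratch:
pytest posthog/
```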
@@ -8,7 +8,6 @@
     "cloud": false,
     "available_social_auth_providers": { "google-oauth2": false, "github": false, "gitlab": false },
     "ee_available": true,
-    "is_clickhouse_enabled": false,
     "db_backend": "postgres",
     "available_timezones": {
         "Africa/Abidjan": 0.0,
@@ -25,10 +25,10 @@ services:
       - '9440:9440'
       - '9009:9009'
     volumes:
-      - ./idl:/idl
-      - ../docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
-      - ../docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
-      - ../docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
+      - ./ee/idl:/idl
+      - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
+      - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
+      - ./docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
   zookeeper:
     image: zookeeper
     restart: always
@@ -43,11 +43,11 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
   worker: &worker
     build:
-      context: ../
+      context: .
       dockerfile: dev.Dockerfile
     command: ./bin/docker-worker-celery --with-scheduler
     volumes:
-      - ..:/code
+      - .:/code
     environment:
       DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
       CLICKHOUSE_HOST: 'clickhouse'
@@ -58,7 +58,6 @@ services:
       REDIS_URL: 'redis://redis:6379/'
       SECRET_KEY: 'alsdfjiosdajfklalsdjkf'
       DEBUG: 'true'
-      PRIMARY_DB: 'clickhouse'
       PGHOST: db
       PGUSER: posthog
       PGPASSWORD: posthog
@@ -80,11 +79,11 @@ services:
       - '8234:8234'
   plugins:
     build:
-      context: ../
+      context: .
       dockerfile: dev.Dockerfile
     command: ./bin/plugin-server --no-restart-loop
     volumes:
-      - ..:/code
+      - .:/code
     restart: on-failure
     environment:
       DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
@@ -3,7 +3,6 @@ version: '3'
 services:
   db:
     image: postgres:12-alpine
-    container_name: posthog_db
     environment:
       POSTGRES_USER: posthog
       POSTGRES_DB: posthog
|
||||
ports:
|
||||
- '5432:5432'
|
||||
redis:
|
||||
image: 'redis:5-alpine'
|
||||
container_name: posthog_redis
|
||||
image: 'redis:alpine'
|
||||
ports:
|
||||
- '6379:6379'
|
||||
backend: &backend
|
||||
clickhouse:
|
||||
# KEEP CLICKHOUSE-SERVER VERSION IN SYNC WITH
|
||||
# https://github.com/PostHog/charts-clickhouse/blob/main/charts/posthog/templates/clickhouse_instance.yaml#L88
|
||||
image: yandex/clickhouse-server:21.6.5
|
||||
depends_on:
|
||||
- kafka
|
||||
- zookeeper
|
||||
ports:
|
||||
- '8123:8123'
|
||||
- '9000:9000'
|
||||
- '9440:9440'
|
||||
- '9009:9009'
|
||||
volumes:
|
||||
- ./ee/idl:/idl
|
||||
- ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
|
||||
- ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
|
||||
- ./docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
|
||||
zookeeper:
|
||||
image: wurstmeister/zookeeper
|
||||
kafka:
|
||||
image: wurstmeister/kafka
|
||||
depends_on:
|
||||
- zookeeper
|
||||
ports:
|
||||
- '9092:9092'
|
||||
environment:
|
||||
KAFKA_ADVERTISED_HOST_NAME: kafka
|
||||
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
|
||||
worker: &worker
|
||||
build:
|
||||
context: .
|
||||
dockerfile: dev.Dockerfile
|
||||
command: ./bin/docker-backend
|
||||
command: ./bin/docker-worker-celery --with-scheduler
|
||||
volumes:
|
||||
- .:/code
|
||||
environment:
|
||||
DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
|
||||
CLICKHOUSE_HOST: 'clickhouse'
|
||||
CLICKHOUSE_DATABASE: 'posthog'
|
||||
CLICKHOUSE_SECURE: 'false'
|
||||
CLICKHOUSE_VERIFY: 'false'
|
||||
KAFKA_URL: 'kafka://kafka'
|
||||
REDIS_URL: 'redis://redis:6379/'
|
||||
SECRET_KEY: '<randomly generated secret key>'
|
||||
SECRET_KEY: 'alsdfjiosdajfklalsdjkf'
|
||||
DEBUG: 'true'
|
||||
PGHOST: db
|
||||
PGUSER: posthog
|
||||
@ -33,21 +64,40 @@ services:
|
||||
depends_on:
|
||||
- db
|
||||
- redis
|
||||
- clickhouse
|
||||
- kafka
|
||||
links:
|
||||
- db:db
|
||||
- redis:redis
|
||||
- clickhouse:clickhouse
|
||||
- kafka:kafka
|
||||
web:
|
||||
<<: *worker
|
||||
command: '${CH_WEB_SCRIPT:-./ee/bin/docker-ch-dev-web}'
|
||||
ports:
|
||||
- '8000:8000'
|
||||
frontend:
|
||||
<<: *backend
|
||||
command: ./bin/docker-frontend
|
||||
ports:
|
||||
- '8234:8234'
|
||||
worker:
|
||||
<<: *backend
|
||||
command: ./bin/docker-worker
|
||||
ports: []
|
||||
plugins:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: dev.Dockerfile
|
||||
command: ./bin/plugin-server --no-restart-loop
|
||||
volumes:
|
||||
- .:/code
|
||||
restart: on-failure
|
||||
environment:
|
||||
DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
|
||||
KAFKA_ENABLED: 'true'
|
||||
KAFKA_HOSTS: 'kafka:9092'
|
||||
REDIS_URL: 'redis://redis:6379/'
|
||||
CLICKHOUSE_HOST: 'clickhouse'
|
||||
depends_on:
|
||||
- db
|
||||
- redis
|
||||
- backend
|
||||
- clickhouse
|
||||
- kafka
|
||||
links:
|
||||
- db:db
|
||||
- redis:redis
|
||||
- clickhouse:clickhouse
|
||||
- kafka:kafka
|
||||
|
@@ -1,34 +0,0 @@ (entire file removed)
version: '3'

services:
  db:
    image: postgres:alpine
    container_name: posthog_db
    environment:
      POSTGRES_USER: posthog
      POSTGRES_DB: posthog
      POSTGRES_PASSWORD: posthog
  redis:
    image: 'redis:alpine'
    container_name: posthog_redis
  web:
    container_name: posthog_server
    build:
      context: .
      dockerfile: production.Dockerfile
    command: ./bin/docker & tail -f /dev/null
    ports:
      - '8000:8000'
    environment:
      DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
      REDIS_URL: 'redis://redis:6379/'
      SECRET_KEY: '<randomly generated secret key>'
      DEBUG: 1
      DISABLE_SECURE_SSL_REDIRECT: 1
      OPT_OUT_CAPTURE: 1
    depends_on:
      - db
      - redis
    links:
      - db:db
      - redis:redis
@@ -61,7 +61,6 @@ services:
       KAFKA_URL: 'kafka://kafka'
       REDIS_URL: 'redis://redis:6379/'
       SECRET_KEY: ${POSTHOG_SECRET}
-      PRIMARY_DB: 'clickhouse'
       PGHOST: db
       PGUSER: posthog
       PGPASSWORD: posthog
@@ -3,11 +3,11 @@ version: '3'
 services:
   test:
     build:
-      context: ../
+      context: .
       dockerfile: dev.Dockerfile
     command: ./ee/bin/docker-ch-test
     volumes:
-      - ..:/code
+      - .:/code
     environment:
       DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
       CLICKHOUSE_HOST: 'clickhouse'
@@ -17,7 +17,6 @@ services:
       REDIS_URL: 'redis://redis:6379/'
       SECRET_KEY: 'alsdfjiosdajfklalsdjkf'
       DEBUG: 'true'
-      PRIMARY_DB: 'clickhouse'
       TEST: 'true'
     depends_on:
       - db
@@ -11,19 +11,59 @@ services:
   redis:
     container_name: posthog_redis
     image: redis:6-alpine
+  clickhouse:
+    # KEEP CLICKHOUSE-SERVER VERSION IN SYNC WITH
+    # https://github.com/PostHog/charts-clickhouse/blob/main/charts/posthog/templates/clickhouse_instance.yaml#L88
+    image: yandex/clickhouse-server:21.6.5
+    depends_on:
+      - kafka
+      - zookeeper
+    ports:
+      - '8123:8123'
+      - '9000:9000'
+      - '9440:9440'
+      - '9009:9009'
+    volumes:
+      - ./ee/idl:/idl
+      - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
+      - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
+      - ./docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
+  zookeeper:
+    image: wurstmeister/zookeeper
+  kafka:
+    image: wurstmeister/kafka
+    depends_on:
+      - zookeeper
+    ports:
+      - '9092:9092'
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: kafka
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
   web:
     container_name: posthog_web
     depends_on:
       - db
       - redis
+      - clickhouse
+      - kafka
     environment:
       DATABASE_URL: postgres://posthog:posthog@db:5432/posthog
       REDIS_URL: redis://redis:6379/
+      KAFKA_URL: 'kafka://kafka'
+      CLICKHOUSE_HOST: 'clickhouse'
+      CLICKHOUSE_DATABASE: 'posthog'
+      CLICKHOUSE_SECURE: 'false'
+      CLICKHOUSE_VERIFY: 'false'
       SECRET_KEY: <randomly generated secret key>
       PGHOST: db
       PGUSER: posthog
       PGPASSWORD: posthog
     image: posthog/posthog:latest
     links:
       - db:db
       - redis:redis
+      - clickhouse:clickhouse
+      - kafka:kafka
     ports:
       - 8000:8000
       - 80:8000
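With ClickHouse, Zookeeper, and Kafka folded into this top-level docker-compose.yml, a hobby deployment comes up with a single command. A hedged sketch (assumes the `<randomly generated secret key>` placeholder above has been filled in first):

```bash
# Boot the full hobby stack defined above; the web container maps both
# host ports 8000 and 80 to Django's port 8000:
docker-compose up -d
curl -I http://localhost:8000/
```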
@@ -4,7 +4,6 @@ from django.urls.base import resolve
 from loginas.utils import is_impersonated_session

 from posthog.internal_metrics import incr
-from posthog.utils import is_clickhouse_enabled


 class CHQueries(object):
@@ -23,11 +22,7 @@ class CHQueries(object):
         route = resolve(request.path)
         route_id = f"{route.route} ({route.func.__name__})"
         client._request_information = {
-            "save": (
-                is_clickhouse_enabled()
-                and request.user.pk
-                and (request.user.is_staff or is_impersonated_session(request) or settings.DEBUG)
-            ),
+            "save": (request.user.pk and (request.user.is_staff or is_impersonated_session(request) or settings.DEBUG)),
             "user_id": request.user.pk,
             "kind": "request",
             "id": route_id,
@@ -22,9 +22,8 @@ from ee.kafka_client.topics import KAFKA_PERSON, KAFKA_PERSON_DISTINCT_ID, KAFKA
 from posthog.models.person import Person, PersonDistinctId
 from posthog.models.utils import UUIDT
 from posthog.settings import TEST
-from posthog.utils import is_clickhouse_enabled

-if is_clickhouse_enabled() and TEST:
+if TEST:
     # :KLUDGE: Hooks are kept around for tests. All other code goes through plugin-server or the other methods explicitly

     @receiver(post_save, sender=Person)
@@ -68,18 +68,6 @@ class TestFilters(PGTestFilters):
             {"properties": [{"type": "precalculated-cohort", "key": "id", "value": cohort.pk, "operator": None},]},
         )

-    def test_simplify_not_ee(self):
-        cohort = Cohort.objects.create(
-            team=self.team,
-            groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}],
-        )
-        filter = Filter(data={"properties": [{"type": "cohort", "key": "id", "value": cohort.pk}]})
-
-        self.assertEqual(
-            filter.simplify(self.team, is_clickhouse_enabled=False).properties_to_dict(),
-            {"properties": [{"type": "cohort", "key": "id", "value": cohort.pk, "operator": None}]},
-        )
-
     def test_simplify_static_cohort(self):
         cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True)
         filter = Filter(data={"properties": [{"type": "cohort", "key": "id", "value": cohort.pk}]})
@@ -1,5 +1,5 @@
 import json
 import urllib.parse
 import uuid
 from abc import ABC
 from typing import Any, Dict, List, Optional, Tuple, Union, cast

@@ -26,11 +26,10 @@ from posthog.constants import (
     TREND_FILTER_TYPE_ACTIONS,
 )
 from posthog.models import Entity, Filter, Team
-from posthog.queries.funnel import Funnel
 from posthog.utils import relative_date_parse


-class ClickhouseFunnelBase(ABC, Funnel):
+class ClickhouseFunnelBase(ABC):
     _filter: Filter
     _team: Team
     _include_timestamp: Optional[bool]
@@ -81,6 +80,21 @@ class ClickhouseFunnelBase(ABC, Funnel):
         results = self._exec_query()
         return self._format_results(results)

+    def _serialize_step(self, step: Entity, count: int, people: Optional[List[uuid.UUID]] = None) -> Dict[str, Any]:
+        if step.type == TREND_FILTER_TYPE_ACTIONS:
+            name = step.get_action().name
+        else:
+            name = step.id
+        return {
+            "action_id": step.id,
+            "name": name,
+            "custom_name": step.custom_name,
+            "order": step.order,
+            "people": people if people else [],
+            "count": count,
+            "type": step.type,
+        }
+
     def _update_filters(self):
         # format default dates
         data: Dict[str, Any] = {}
@@ -8,7 +8,6 @@ from ee.clickhouse.queries.trends.clickhouse_trends import ClickhouseTrends
 from posthog.constants import TRENDS_CUMULATIVE, TRENDS_PIE
 from posthog.models import Cohort, Person
 from posthog.models.filters.filter import Filter
-from posthog.queries.abstract_test.test_interval import AbstractIntervalTest
 from posthog.test.base import APIBaseTest


@@ -17,7 +16,7 @@ def _create_event(**kwargs):
     create_event(**kwargs)


-class TestFormula(AbstractIntervalTest, APIBaseTest):
+class TestFormula(APIBaseTest):
     CLASS_DATA_LEVEL_SETUP = False

     def setUp(self):
@@ -15,7 +15,6 @@ from posthog.settings import (
     CLICKHOUSE_VERIFY,
 )
 from posthog.test.base import TestMixin
-from posthog.utils import is_clickhouse_enabled


 def create_clickhouse_tables(num_tables: int):
@@ -92,39 +91,37 @@ def reset_clickhouse_tables():
         sync_execute(item)


-if is_clickhouse_enabled():
-    @pytest.fixture(scope="package")
-    def django_db_setup(django_db_setup, django_db_keepdb):
+@pytest.fixture(scope="package")
+def django_db_setup(django_db_setup, django_db_keepdb):
(the fixture body below is unchanged apart from being dedented one level; it is shown once)
    database = Database(
        CLICKHOUSE_DATABASE,
        db_url=CLICKHOUSE_HTTP_URL,
        username=CLICKHOUSE_USER,
        password=CLICKHOUSE_PASSWORD,
        verify_ssl_cert=CLICKHOUSE_VERIFY,
    )

    if not django_db_keepdb:
        try:
            database.drop_database()
        except:
            pass

    database.create_database()  # Create database if it doesn't exist
    table_count = sync_execute(
        "SELECT count() FROM system.tables WHERE database = %(database)s", {"database": CLICKHOUSE_DATABASE}
    )[0][0]
    create_clickhouse_tables(table_count)

    yield

    if django_db_keepdb:
        reset_clickhouse_tables()
    else:
        try:
            database.drop_database()
        except:
            pass


 @pytest.fixture
@@ -1,104 +0,0 @@ (entire file removed)
version: '3'

services:
  db:
    image: postgres:12-alpine
    environment:
      POSTGRES_USER: posthog
      POSTGRES_DB: posthog
      POSTGRES_PASSWORD: posthog
    ports:
      - '5432:5432'
  redis:
    image: 'redis:alpine'
    ports:
      - '6379:6379'
  clickhouse:
    # KEEP CLICKHOUSE-SERVER VERSION IN SYNC WITH
    # https://github.com/PostHog/charts-clickhouse/blob/main/charts/posthog/templates/clickhouse_instance.yaml#L88
    image: yandex/clickhouse-server:21.6.5
    depends_on:
      - kafka
      - zookeeper
    ports:
      - '8123:8123'
      - '9000:9000'
      - '9440:9440'
      - '9009:9009'
    volumes:
      - ./idl:/idl
      - ../docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
      - ../docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
      - ../docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
  zookeeper:
    image: wurstmeister/zookeeper
  kafka:
    image: wurstmeister/kafka
    depends_on:
      - zookeeper
    ports:
      - '9092:9092'
    environment:
      KAFKA_ADVERTISED_HOST_NAME: kafka
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
  worker: &worker
    build:
      context: ../
      dockerfile: dev.Dockerfile
    command: ./bin/docker-worker-celery --with-scheduler
    volumes:
      - ..:/code
    environment:
      DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
      CLICKHOUSE_HOST: 'clickhouse'
      CLICKHOUSE_DATABASE: 'posthog'
      CLICKHOUSE_SECURE: 'false'
      CLICKHOUSE_VERIFY: 'false'
      KAFKA_URL: 'kafka://kafka'
      REDIS_URL: 'redis://redis:6379/'
      SECRET_KEY: 'alsdfjiosdajfklalsdjkf'
      DEBUG: 'true'
      PRIMARY_DB: 'clickhouse'
      PGHOST: db
      PGUSER: posthog
      PGPASSWORD: posthog
    depends_on:
      - db
      - redis
      - clickhouse
      - kafka
    links:
      - db:db
      - redis:redis
      - clickhouse:clickhouse
      - kafka:kafka
  web:
    <<: *worker
    command: '${CH_WEB_SCRIPT:-./ee/bin/docker-ch-dev-web}'
    ports:
      - '8000:8000'
      - '8234:8234'
  plugins:
    build:
      context: ../
      dockerfile: dev.Dockerfile
    command: ./bin/plugin-server --no-restart-loop
    volumes:
      - ..:/code
    restart: on-failure
    environment:
      DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
      KAFKA_ENABLED: 'true'
      KAFKA_HOSTS: 'kafka:9092'
      REDIS_URL: 'redis://redis:6379/'
      CLICKHOUSE_HOST: 'clickhouse'
    depends_on:
      - db
      - redis
      - clickhouse
      - kafka
    links:
      - db:db
      - redis:redis
      - clickhouse:clickhouse
      - kafka:kafka
@@ -1,7 +1,5 @@
 from django.core.management.base import BaseCommand

-from posthog.utils import is_clickhouse_enabled


 class Command(BaseCommand):
     help = "Set up databases for non-Python tests that depend on the Django server"
@@ -13,28 +11,27 @@ class Command(BaseCommand):
         test_runner.setup_databases()
         test_runner.setup_test_environment()

-        if is_clickhouse_enabled():
(the block below is unchanged apart from being dedented one level; it is shown once)
        from infi.clickhouse_orm import Database

        from posthog.settings import (
            CLICKHOUSE_DATABASE,
            CLICKHOUSE_HTTP_URL,
            CLICKHOUSE_PASSWORD,
            CLICKHOUSE_REPLICATION,
            CLICKHOUSE_USER,
            CLICKHOUSE_VERIFY,
        )

        database = Database(
            CLICKHOUSE_DATABASE,
            db_url=CLICKHOUSE_HTTP_URL,
            username=CLICKHOUSE_USER,
            password=CLICKHOUSE_PASSWORD,
            verify_ssl_cert=CLICKHOUSE_VERIFY,
        )

        try:
            database.create_database()
        except:
            pass
        database.migrate("ee.clickhouse.migrations", replicated=CLICKHOUSE_REPLICATION)
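The invocation is unchanged; what differs is that the ClickHouse branch now always runs. As the ci-plugin-server.yml hunks above show, the command only needs the test flags in the environment:

```bash
# From ci-plugin-server.yml: set up both Postgres and ClickHouse test databases.
# SECRET_KEY is the CI placeholder and is unsafe outside tests.
SECRET_KEY='abcdef' \
DATABASE_URL='postgres://postgres:postgres@localhost:5432/posthog' \
TEST='true' \
python manage.py setup_test_environment
```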
@@ -30,7 +30,7 @@ def create_event_clickhouse(
     )


-class TestOrganizationUsageReport(ClickhouseTestMixin, factory_org_usage_report(create_person, create_event_clickhouse, send_all_org_usage_reports, {"EE_AVAILABLE": True, "USE_TZ": False, "PRIMARY_DB": AnalyticsDBMS.CLICKHOUSE})):  # type: ignore
+class TestOrganizationUsageReport(ClickhouseTestMixin, factory_org_usage_report(create_person, create_event_clickhouse, send_all_org_usage_reports, {"USE_TZ": False, "PRIMARY_DB": AnalyticsDBMS.CLICKHOUSE})):  # type: ignore
     def test_groups_usage(self):
         GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
         GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1)
@@ -106,7 +106,6 @@
     },
     "can_create_org": false,
     "ee_available": true,
-    "is_clickhouse_enabled": false,
     "db_backend": "postgres",
     "available_timezones": {},
     "opt_out_capture": false,
@@ -77,7 +77,7 @@ export const mockAPI = (
 export function defaultAPIMocks({ pathname, searchParams }: APIRoute, availableFeatures: AvailableFeature[] = []): any {
     const organization = { ...MOCK_DEFAULT_ORGANIZATION, available_features: availableFeatures }
     if (pathname === '_preflight/') {
-        return { is_clickhouse_enabled: true }
+        return {}
     } else if (pathname === 'api/users/@me/') {
         return {
             organization,
@@ -11,7 +11,6 @@ import {
     TableOutlined,
 } from '@ant-design/icons'
 import { ChartDisplayType, FilterType, FunnelVizType, InsightType } from '~/types'
-import { preflightLogic } from 'scenes/PreflightCheck/logic'
 import { ANTD_TOOLTIP_PLACEMENTS } from 'lib/utils'
 import { insightLogic } from 'scenes/insights/insightLogic'

@@ -25,7 +24,6 @@ export function ChartFilter({ filters, onChange, disabled }: ChartFilterProps):
     const { insightProps } = useValues(insightLogic)
     const { chartFilter } = useValues(chartFilterLogic(insightProps))
     const { setChartFilter } = useActions(chartFilterLogic(insightProps))
-    const { preflight } = useValues(preflightLogic)

     const cumulativeDisabled = filters.insight === InsightType.STICKINESS || filters.insight === InsightType.RETENTION
     const tableDisabled = false
@@ -58,28 +56,21 @@ export function ChartFilter({ filters, onChange, disabled }: ChartFilterProps):

     const options =
         filters.insight === InsightType.FUNNELS
-            ? preflight?.is_clickhouse_enabled
-                ? [
-                      {
-                          value: FunnelVizType.Steps,
-                          label: <Label icon={<OrderedListOutlined />}>Steps</Label>,
-                      },
-                      {
-                          value: FunnelVizType.Trends,
-                          label: (
-                              <Label icon={<LineChartOutlined />}>
-                                  Trends
-                                  <WarningTag>BETA</WarningTag>
-                              </Label>
-                          ),
-                      },
-                  ]
-                : [
-                      {
-                          value: FunnelVizType.Steps,
-                          label: <Label icon={<OrderedListOutlined />}>Steps</Label>,
-                      },
-                  ]
+            ? [
+                  {
+                      value: FunnelVizType.Steps,
+                      label: <Label icon={<OrderedListOutlined />}>Steps</Label>,
+                  },
+                  {
+                      value: FunnelVizType.Trends,
+                      label: (
+                          <Label icon={<LineChartOutlined />}>
+                              Trends
+                              <WarningTag>BETA</WarningTag>
+                          </Label>
+                      ),
+                  },
+              ]
             : [
                   {
                       label: 'Line Chart',
@@ -20,24 +20,22 @@ export const groupsAccessLogic = kea<groupsAccessLogicType<GroupsAccessStatus>>(
             teamLogic,
             ['currentTeam'],
             preflightLogic,
-            ['clickhouseEnabled', 'preflight'],
+            ['preflight'],
             userLogic,
             ['hasAvailableFeature', 'upgradeLink'],
         ],
     },
     selectors: {
-        groupsCanBeEnabled: [(s) => [s.clickhouseEnabled], (clickhouseEnabled) => clickhouseEnabled],
         groupsEnabled: [
-            (s) => [s.groupsCanBeEnabled, s.hasAvailableFeature],
-            (groupsCanBeEnabled, hasAvailableFeature) =>
-                groupsCanBeEnabled && hasAvailableFeature(AvailableFeature.GROUP_ANALYTICS),
+            (s) => [s.hasAvailableFeature],
+            (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.GROUP_ANALYTICS),
         ],
         // Used to toggle various introduction views related to groups
         groupsAccessStatus: [
-            (s) => [s.groupsCanBeEnabled, s.groupsEnabled, s.currentTeam, s.preflight],
-            (canBeEnabled, isEnabled, currentTeam, preflight): GroupsAccessStatus => {
+            (s) => [s.groupsEnabled, s.currentTeam, s.preflight],
+            (isEnabled, currentTeam, preflight): GroupsAccessStatus => {
                 const hasGroups = currentTeam?.has_group_types
-                if (!canBeEnabled || preflight?.instance_preferences?.disable_paid_fs) {
+                if (preflight?.instance_preferences?.disable_paid_fs) {
                     return GroupsAccessStatus.Hidden
                 } else if (isEnabled && hasGroups) {
                     return GroupsAccessStatus.AlreadyUsing
@@ -35,7 +35,6 @@ export const preflightLogic = kea<preflightLogicType<PreflightMode>>({
             (preflight): boolean =>
                 Boolean(preflight && Object.values(preflight.available_social_auth_providers).filter((i) => i).length),
         ],
-        clickhouseEnabled: [(s) => [s.preflight], (preflight): boolean => !!preflight?.is_clickhouse_enabled],
         realm: [
             (s) => [s.preflight],
             (preflight): Realm | null => {
@@ -82,8 +81,6 @@ export const preflightLogic = kea<preflightLogicType<PreflightMode>>({
             posthog.register({
                 posthog_version: values.preflight.posthog_version,
                 realm: values.realm,
-                is_clickhouse_enabled: values.preflight.is_clickhouse_enabled,
-                ee_available: values.preflight.ee_available,
                 email_service_available: values.preflight.email_service_available,
             })
@@ -5,7 +5,6 @@ import { router } from 'kea-router'
 import { eventsTableLogic } from 'scenes/events/eventsTableLogic'
 import { EventsTable } from 'scenes/events'
 import { urls } from 'scenes/urls'
-import { preflightLogic } from 'scenes/PreflightCheck/logic'
 import { ActionType } from '~/types'
 import { dayjs } from 'lib/dayjs'
 import { Spinner } from 'lib/components/Spinner/Spinner'
@@ -33,8 +32,6 @@ export function Action({ id }: { id?: ActionType['id'] } = {}): JSX.Element {
     )
     const { action, isComplete } = useValues(actionLogic({ id, onComplete: fetchEvents }))
     const { loadAction } = useActions(actionLogic({ id, onComplete: fetchEvents }))
-    const { preflight } = useValues(preflightLogic)
-    const isClickHouseEnabled = !!preflight?.is_clickhouse_enabled

     return (
         <div>
@@ -61,25 +58,23 @@ export function Action({ id }: { id?: ActionType['id'] } = {}): JSX.Element {
             )}
             {isComplete && (
                 <div style={{ marginTop: 86 }}>
-                    {!isClickHouseEnabled ? (
-                        <>
-                            <h2 className="subtitle">Event List</h2>
-                            <p className="text-muted">
-                                List of the events that match this action.{' '}
-                                {action && (
-                                    <>
-                                        This list was{' '}
-                                        <b>
-                                            calculated{' '}
-                                            {action.last_calculated_at
-                                                ? dayjs(action.last_calculated_at).fromNow()
-                                                : 'a while ago'}
-                                        </b>
-                                    </>
-                                )}
-                            </p>{' '}
-                        </>
-                    ) : null}
+                    <>
+                        <h2 className="subtitle">Event List</h2>
+                        <p className="text-muted">
+                            List of the events that match this action.{' '}
+                            {action && (
+                                <>
+                                    This list was{' '}
+                                    <b>
+                                        calculated{' '}
+                                        {action.last_calculated_at
+                                            ? dayjs(action.last_calculated_at).fromNow()
+                                            : 'a while ago'}
+                                    </b>
+                                </>
+                            )}
+                        </p>{' '}
+                    </>
             {id && (
                 <>
                     <PageHeader
@@ -184,7 +184,6 @@
     "can_create_org": true,
     "email_service_available": false,
     "ee_available": true,
-    "is_clickhouse_enabled": true,
     "db_backend": "clickhouse",
     "available_timezones": {
         "Africa/Abidjan": 0,
@@ -184,7 +184,6 @@
     "can_create_org": true,
     "email_service_available": false,
     "ee_available": true,
-    "is_clickhouse_enabled": true,
     "db_backend": "clickhouse",
     "available_timezones": {
         "Africa/Abidjan": 0,
@@ -2,13 +2,10 @@ import React from 'react'
 import { Row, Col } from 'antd'
 import { CohortType } from '~/types'
 import { TeamMemberID } from 'lib/components/TeamMemberID'
-import { useValues } from 'kea'
-import { preflightLogic } from 'scenes/PreflightCheck/logic'
 import { dayjs } from 'lib/dayjs'

 export function CohortDetailsRow({ cohort }: { cohort: CohortType }): JSX.Element {
-    const { preflight } = useValues(preflightLogic)
-    const columnSize = preflight?.is_clickhouse_enabled ? 12 : 8
+    const columnSize = 12
     return (
         <Row justify="space-between" align="top" className="mt text-center">
             <Col span={columnSize}>
@@ -21,13 +18,11 @@ export function CohortDetailsRow({ cohort }: { cohort: CohortType }): JSX.Elemen
                 <TeamMemberID person={cohort.created_by} />
             </div>
             </Col>
-            {!preflight?.is_clickhouse_enabled && (
-                <Col span={columnSize}>
-                    <label className="ant-form-item-label">Last calculated at</label>
+            <Col span={columnSize}>
+                <label className="ant-form-item-label">Last calculated at</label>

-                    <div>{cohort.last_calculation ? dayjs(cohort.last_calculation).fromNow() : 'in progress'}</div>
-                </Col>
-            )}
+                <div>{cohort.last_calculation ? dayjs(cohort.last_calculation).fromNow() : 'in progress'}</div>
+            </Col>
         </Row>
     )
 }
@@ -8,8 +8,6 @@ import { ACTION_TYPE, ENTITY_MATCH_TYPE, EVENT_TYPE, PROPERTY_MATCH_TYPE } from
 import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters'
 import { DeleteOutlined } from '@ant-design/icons'
 import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
-import { useValues } from 'kea'
-import { preflightLogic } from 'scenes/PreflightCheck/logic'

 const { Option } = Select

@@ -155,9 +153,6 @@ function EntityCriteriaRow({
         setOpen(false)
     }

-    const { preflight } = useValues(preflightLogic)
-    const COUNT_ENABLED = preflight?.is_clickhouse_enabled
-
     return (
         <div style={{ marginTop: 16, width: '100%' }}>
             <Row gutter={8}>
@@ -177,24 +172,20 @@ function EntityCriteriaRow({
                     </Button>
                     <CohortEntityFilterBox open={open} onSelect={onEntityChange} />
                 </Col>
-                {COUNT_ENABLED && (
-                    <>
-                        <Col span={4}>
-                            <OperatorSelect value={count_operator} onChange={onOperatorChange} />
-                        </Col>
-                        <Col span={3}>
-                            <Input
-                                required
-                                value={count}
-                                data-attr="entity-count"
-                                onChange={(e) => onEntityCountChange(parseInt(e.target.value))}
-                                placeholder="1"
-                                type="number"
-                            />
-                        </Col>
-                    </>
-                )}
-                <Col style={{ display: 'flex', alignItems: 'center' }}>{COUNT_ENABLED && 'times '}in the last</Col>
+                <Col span={4}>
+                    <OperatorSelect value={count_operator} onChange={onOperatorChange} />
+                </Col>
+                <Col span={3}>
+                    <Input
+                        required
+                        value={count}
+                        data-attr="entity-count"
+                        onChange={(e) => onEntityCountChange(parseInt(e.target.value))}
+                        placeholder="1"
+                        type="number"
+                    />
+                </Col>
+                <Col style={{ display: 'flex', alignItems: 'center' }}>times in the last</Col>
                 <Col span={4}>
                     <DateIntervalSelect value={days} onChange={onDateIntervalChange} />
                 </Col>
@@ -239,7 +239,6 @@
     "can_create_org": false,
     "email_service_available": false,
     "ee_available": true,
-    "is_clickhouse_enabled": true,
     "db_backend": "clickhouse",
     "available_timezones": {
         "Africa/Abidjan": 0,
@@ -3234,7 +3233,6 @@
     "is_demo_project": true,
     "posthog_version": "1.29.1",
     "realm": "hosted-clickhouse",
-    "is_clickhouse_enabled": true,
     "ee_available": true,
     "email_service_available": false,
     "$active_feature_flags": [

The same one-line deletion of `"is_clickhouse_enabled": true,` repeats, with identical surrounding context, in the remaining hunks of this snapshot file (at old-file offsets 3359, 3630, 3778, 3907, 4038, 4163, 4462, 4895, 5041, 5170, 5297, 5422, 5549, 5676, 5803, 5928, 6054, 6334, 6576, 6701, 6989, 7118, 7243, 7653, 7778, 8066, 8192, 8317, 8651, 8777, 8902, 9235, 9360, 9485, 9612, 9739, and 9891).
|
||||
"ee_available": true,
|
||||
"email_service_available": false,
|
||||
"$active_feature_flags": [
|
||||
@ -10035,7 +9996,6 @@
|
||||
"is_demo_project": true,
|
||||
"posthog_version": "1.29.1",
|
||||
"realm": "hosted-clickhouse",
|
||||
"is_clickhouse_enabled": true,
|
||||
"ee_available": true,
|
||||
"email_service_available": false,
|
||||
"$active_feature_flags": [
|
||||
@ -10160,7 +10120,6 @@
|
||||
"is_demo_project": true,
|
||||
"posthog_version": "1.29.1",
|
||||
"realm": "hosted-clickhouse",
|
||||
"is_clickhouse_enabled": true,
|
||||
"ee_available": true,
|
||||
"email_service_available": false,
|
||||
"$active_feature_flags": [
|
||||
@ -10301,7 +10260,6 @@
|
||||
"is_demo_project": true,
|
||||
"posthog_version": "1.29.1",
|
||||
"realm": "hosted-clickhouse",
|
||||
"is_clickhouse_enabled": true,
|
||||
"ee_available": true,
|
||||
"email_service_available": false,
|
||||
"$active_feature_flags": [
|
||||
@ -10426,7 +10384,6 @@
|
||||
"is_demo_project": true,
|
||||
"posthog_version": "1.29.1",
|
||||
"realm": "hosted-clickhouse",
|
||||
"is_clickhouse_enabled": true,
|
||||
"ee_available": true,
|
||||
"email_service_available": false,
|
||||
"$active_feature_flags": [
|
||||
@ -10551,7 +10508,6 @@
|
||||
"is_demo_project": true,
|
||||
"posthog_version": "1.29.1",
|
||||
"realm": "hosted-clickhouse",
|
||||
"is_clickhouse_enabled": true,
|
||||
"ee_available": true,
|
||||
"email_service_available": false,
|
||||
"$active_feature_flags": [
|
||||
@ -18402,16 +18358,6 @@
|
||||
"updated_by": null,
|
||||
"query_usage_30_day": 0
|
||||
},
|
||||
{
|
||||
"id": "017989da-7571-0001-bc1f-c48306dd7a5e",
|
||||
"name": "is_clickhouse_enabled",
|
||||
"description": null,
|
||||
"tags": null,
|
||||
"is_numerical": false,
|
||||
"updated_at": null,
|
||||
"updated_by": null,
|
||||
"query_usage_30_day": null
|
||||
},
|
||||
{
|
||||
"id": "01791516-32bb-0000-4094-b9829f5f9651",
|
||||
"name": "is_demo_project",
|
||||
|
@@ -104,7 +104,6 @@
},
"can_create_org": false,
"ee_available": true,
-"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@@ -854,7 +853,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -1057,7 +1055,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -1144,7 +1141,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -1233,7 +1229,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -1469,7 +1464,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -1684,7 +1678,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -1788,7 +1781,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -1885,7 +1877,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -1972,7 +1963,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -2059,7 +2049,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -2146,7 +2135,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -2235,7 +2223,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -2505,7 +2492,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -2607,7 +2593,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -2703,7 +2688,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -2790,7 +2774,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -2879,7 +2862,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -3168,7 +3150,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -3264,7 +3245,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -3351,7 +3331,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -3438,7 +3417,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -3527,7 +3505,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -3817,7 +3794,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -3920,7 +3896,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -4016,7 +3991,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -4103,7 +4077,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -4192,7 +4165,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -4494,7 +4466,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -4597,7 +4568,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -4693,7 +4663,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -4780,7 +4749,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -4867,7 +4835,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -4954,7 +4921,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -5043,7 +5009,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -5239,7 +5204,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -5539,7 +5503,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -5642,7 +5605,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": true,
"$geoip_subdivision_1_code": "NSW",
@@ -5738,7 +5700,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -5825,7 +5786,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -5913,7 +5873,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6002,7 +5961,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6091,7 +6049,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": true,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6180,7 +6137,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6269,7 +6225,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6358,7 +6313,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6447,7 +6401,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6598,7 +6551,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -6695,7 +6647,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"phjs-xhr-response-200": 7,
"phjs-batch-requests-e/": 4,
"email_service_available": false,
@@ -6787,7 +6738,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -6885,7 +6835,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -6981,7 +6930,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7069,7 +7017,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -7157,7 +7104,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7244,7 +7190,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7333,7 +7278,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7615,7 +7559,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7713,7 +7656,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -7805,7 +7747,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7892,7 +7833,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -7981,7 +7921,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -8282,7 +8221,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -8378,7 +8316,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -8475,7 +8412,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": true,
"$geoip_subdivision_1_code": "NSW",
@@ -8567,7 +8503,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -8655,7 +8590,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -8744,7 +8678,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": true,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -8833,7 +8766,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -8922,7 +8854,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -9011,7 +8942,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -9100,7 +9030,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -9251,7 +9180,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -9348,7 +9276,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"phjs-xhr-response-200": 7,
"phjs-batch-requests-e/": 4,
"email_service_available": false,
@@ -9440,7 +9367,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -9538,7 +9464,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -9640,7 +9565,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -9736,7 +9660,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -9823,7 +9746,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -9912,7 +9834,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -10216,7 +10137,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -10320,7 +10240,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": false,
"$geoip_subdivision_1_code": "NSW",
@@ -10417,7 +10336,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -10504,7 +10422,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -10591,7 +10508,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -10678,7 +10594,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -10767,7 +10682,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -11063,7 +10977,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -11161,7 +11074,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"is_first_component_load": true,
"$geoip_subdivision_1_code": "NSW",
@@ -11253,7 +11165,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -11341,7 +11252,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11430,7 +11340,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11519,7 +11428,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": true,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11608,7 +11516,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11697,7 +11604,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11786,7 +11692,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11875,7 +11780,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -11963,7 +11867,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -12118,7 +12021,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -12206,7 +12108,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"$feature_flag_response": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
@@ -12294,7 +12195,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -12383,7 +12283,6 @@
"$active_feature_flags": ["4050-query-ui-optB"],
"$geoip_continent_code": "OC",
"$geoip_continent_name": "Oceania",
-"is_clickhouse_enabled": false,
"email_service_available": false,
"$geoip_subdivision_1_code": "NSW",
"$geoip_subdivision_1_name": "New South Wales",
@@ -14023,12 +13922,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
-{
-"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
-"name": "is_clickhouse_enabled",
-"is_numerical": false,
-"query_usage_30_day": null
-},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@@ -85,9 +85,7 @@ function Bar({
const [labelPosition, setLabelPosition] = useState<LabelPosition>('inside')
const [labelVisible, setLabelVisible] = useState(true)
const LABEL_POSITION_OFFSET = 8 // Defined here and in SCSS
-const { insightProps } = useValues(insightLogic)
-const { clickhouseFeaturesEnabled } = useValues(funnelLogic(insightProps))
-const cursorType = clickhouseFeaturesEnabled && !disabled ? 'pointer' : ''
+const cursorType = !disabled ? 'pointer' : ''
const hasBreakdownSum = isBreakdown && typeof breakdownSumPercentage === 'number'
const shouldShowLabel = !isBreakdown || (hasBreakdownSum && labelVisible)

@@ -146,7 +144,7 @@ function Bar({
backgroundColor: getSeriesColor(breakdownIndex),
}}
onClick={() => {
-if (clickhouseFeaturesEnabled && !disabled && onBarClick) {
+if (!disabled && onBarClick) {
onBarClick()
}
}}
@@ -296,7 +294,6 @@ export function FunnelBarGraph({ color = 'white' }: { color?: string }): JSX.Ele
visibleStepsWithConversionMetrics: steps,
stepReference,
barGraphLayout: layout,
-clickhouseFeaturesEnabled,
aggregationTargetLabel,
isModalActive,
} = useValues(logic)
@@ -361,8 +358,7 @@ export function FunnelBarGraph({ color = 'white' }: { color?: string }): JSX.Ele
<EntityFilterInfo filter={getActionFilterFromFunnelStep(step)} />
)}
</div>
-{clickhouseFeaturesEnabled &&
-filters.funnel_order_type !== StepOrderValue.UNORDERED &&
+{filters.funnel_order_type !== StepOrderValue.UNORDERED &&
stepIndex > 0 &&
step.action_id === steps[stepIndex - 1].action_id && <DuplicateStepIndicator />}
<FunnelStepDropdown index={stepIndex} />
@@ -479,9 +475,7 @@ export function FunnelBarGraph({ color = 'white' }: { color?: string }): JSX.Ele
onClick={() => openPersonsModalForStep({ step, converted: false })} // dropoff value for steps is negative
style={{
flex: `${1 - breakdownSum / basisStep.count} 1 0`,
-cursor: `${
-clickhouseFeaturesEnabled && !dashboardItemId ? 'pointer' : ''
-}`,
+cursor: `${!dashboardItemId ? 'pointer' : ''}`,
}}
/>
</>
@@ -546,9 +540,7 @@ export function FunnelBarGraph({ color = 'white' }: { color?: string }): JSX.Ele
onClick={() => openPersonsModalForStep({ step, converted: false })} // dropoff value for steps is negative
style={{
flex: `${1 - step.conversionRates.fromBasisStep} 1 0`,
-cursor: `${
-clickhouseFeaturesEnabled && !dashboardItemId ? 'pointer' : ''
-}`,
+cursor: `${!dashboardItemId ? 'pointer' : ''}`,
}}
/>
</>

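Net effect of the Bar changes above: interactivity now depends only on the component's own disabled prop, with no backend capability check. A minimal standalone TypeScript sketch of that pattern (the `disabled` and `onBarClick` names come from the diff; the helper itself is illustrative, not code from this commit):

// Illustrative helper, not PostHog code: derives the cursor style and a
// guarded click handler the same way the simplified Bar markup does.
function barInteractivity(disabled: boolean, onBarClick?: () => void): { cursor: string; handleClick: () => void } {
    return {
        cursor: !disabled ? 'pointer' : '',
        handleClick: () => {
            // Fire only when the bar is interactive and a handler was passed.
            if (!disabled && onBarClick) {
                onBarClick()
            }
        },
    }
}
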
@@ -15,9 +15,7 @@ import { FunnelStepsPicker } from 'scenes/insights/InsightTabs/FunnelTab/FunnelS

export function FunnelCanvasLabel(): JSX.Element | null {
const { insightProps, filters, activeView } = useValues(insightLogic)
-const { conversionMetrics, clickhouseFeaturesEnabled, aggregationTargetLabel } = useValues(
-funnelLogic(insightProps)
-)
+const { conversionMetrics, aggregationTargetLabel } = useValues(funnelLogic(insightProps))
const { setChartFilter } = useActions(chartFilterLogic(insightProps))

if (activeView !== InsightType.FUNNELS) {
@@ -57,9 +55,7 @@ export function FunnelCanvasLabel(): JSX.Element | null {
<Button
type="link"
onClick={() => setChartFilter(FunnelVizType.TimeToConvert)}
-disabled={
-!clickhouseFeaturesEnabled || filters.funnel_viz_type === FunnelVizType.TimeToConvert
-}
+disabled={filters.funnel_viz_type === FunnelVizType.TimeToConvert}
>
<span className="l4">{humanFriendlyDuration(conversionMetrics.averageTime)}</span>
</Button>

@@ -249,18 +249,6 @@ describe('funnelLogic', () => {
await expectLogic(preflightLogic).toDispatchActions(['loadPreflightSuccess'])
})

-it('has clickhouse enabled once preflight loads', async () => {
-await expectLogic()
-.toDispatchActions(preflightLogic, ['loadPreflight'])
-.toMatchValues(logic, {
-clickhouseFeaturesEnabled: false,
-})
-.toDispatchActions(preflightLogic, ['loadPreflightSuccess'])
-.toMatchValues(logic, {
-clickhouseFeaturesEnabled: true,
-})
-})
-
it('sets filters after load if valid', async () => {
await expectLogic(logic)
.toDispatchActions(['loadResults'])
@@ -979,11 +967,6 @@
})

describe('is modal active', () => {
-it('modal is inactive when clickhouse is not enabled', async () => {
-await expectLogic().toDispatchActions(preflightLogic, ['loadPreflight']).toMatchValues(logic, {
-isModalActive: false,
-})
-})
it('modal is inactive when viewed on dashboard', async () => {
await expectLogic(preflightLogic).toDispatchActions(['loadPreflightSuccess'])
await router.actions.push(urls.dashboard('1'))

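The deleted specs asserted preflight-driven values that no longer exist on the logic. What remains follows the plain kea-test-utils pattern; a hedged sketch of an equivalent assertion (assumes `logic` is mounted in the suite's setup, as elsewhere in this file):

it('modal is active outside dashboards', async () => {
    // Sketch only: asserts the single input isModalActive still depends on.
    await expectLogic(logic).toMatchValues({ isModalActive: true })
})
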
@@ -37,7 +37,7 @@ import {
TrendResult,
} from '~/types'
import { BinCountAuto, FEATURE_FLAGS, FunnelLayout } from 'lib/constants'
import { preflightLogic } from 'scenes/PreflightCheck/logic'

import {
aggregateBreakdownResult,
formatDisplayPercentage,
@@ -485,11 +485,6 @@ export const funnelLogic = kea<funnelLogicType<openPersonsModelProps>>({
(filters, lastFilters): boolean => !equal(cleanFilters(filters), cleanFilters(lastFilters)),
],
barGraphLayout: [() => [selectors.filters], ({ layout }): FunnelLayout => layout || FunnelLayout.vertical],
-clickhouseFeaturesEnabled: [
-() => [preflightLogic.selectors.preflight],
-// Controls auto-calculation of results and ability to break down values
-(preflight): boolean => !!preflight?.is_clickhouse_enabled,
-],
histogramGraphData: [
() => [selectors.timeConversionResults],
(timeConversionResults: FunnelsTimeConversionBins) => {
@@ -1043,9 +1038,8 @@
},
],
correlationAnalysisAvailable: [
-(s) => [s.hasAvailableFeature, s.clickhouseFeaturesEnabled],
-(hasAvailableFeature, clickhouseFeaturesEnabled): boolean =>
-clickhouseFeaturesEnabled && hasAvailableFeature(AvailableFeature.CORRELATION_ANALYSIS),
+(s) => [s.hasAvailableFeature],
+(hasAvailableFeature): boolean => hasAvailableFeature(AvailableFeature.CORRELATION_ANALYSIS),
],
allProperties: [
(s) => [s.inversePropertyNames, s.excludedPropertyNames],
@@ -1136,10 +1130,7 @@
return count
},
],
-isModalActive: [
-(s) => [s.clickhouseFeaturesEnabled, s.isViewedOnDashboard],
-(clickhouseFeaturesEnabled, isViewedOnDashboard) => clickhouseFeaturesEnabled && !isViewedOnDashboard,
-],
+isModalActive: [(s) => [s.isViewedOnDashboard], (isViewedOnDashboard) => !isViewedOnDashboard],
incompletenessOffsetFromEnd: [
(s) => [s.steps, s.conversionWindow],
(steps, conversionWindow) => {
@@ -1174,13 +1165,6 @@
})
})

-// load the old people table
-if (!values.clickhouseFeaturesEnabled) {
-if ((values.stepsWithCount[0]?.people?.length ?? 0) > 0) {
-actions.loadPeople(values.stepsWithCount)
-}
-}
-
// load correlation table after funnel. Maybe parallel?
if (
values.correlationAnalysisAvailable &&

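All of the selector edits above share one shape: delete clickhouseFeaturesEnabled from the input-selector list and drop the `&&` gate from the transform. A self-contained sketch of that kea selector pattern (hypothetical logic, kea 2.x syntax as used in this file; not code from the commit):

import { kea } from 'kea'

const exampleLogic = kea({
    reducers: {
        isViewedOnDashboard: [false, {}],
    },
    selectors: ({ selectors }) => ({
        // With the backend gate removed, the selector keeps only its real input.
        isModalActive: [
            () => [selectors.isViewedOnDashboard],
            (isViewedOnDashboard: boolean) => !isViewedOnDashboard,
        ],
    }),
})
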
@@ -95,7 +95,6 @@
},
"can_create_org": false,
"ee_available": true,
-"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,

@@ -23,7 +23,6 @@ import {
} from '@ant-design/icons'
import { SelectGradientOverflow } from 'lib/components/SelectGradientOverflow'
import { BareEntity, entityFilterLogic } from '../entityFilterLogic'
-import { preflightLogic } from 'scenes/PreflightCheck/logic'
import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel'
import { getEventNamesForAction, pluralize } from 'lib/utils'
import { SeriesGlyph, SeriesLetter } from 'lib/components/SeriesGlyph'
@@ -471,14 +470,9 @@ function MathSelector({
const numericalNotice = `This can only be used on properties that have at least one number type occurence in your events.${
areEventPropertiesNumericalAvailable ? '' : ' None have been found yet!'
}`
-const { preflight } = useValues(preflightLogic)
const { eventMathEntries, propertyMathEntries } = useValues(mathsLogic)

-let math_entries = eventMathEntries
-
-if (!preflight?.is_clickhouse_enabled) {
-math_entries = math_entries.filter((item) => item[0] !== 'weekly_active' && item[0] !== 'monthly_active')
-}
+const math_entries = eventMathEntries

return (
<Select

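With the preflight check gone, the weekly_active and monthly_active maths are always offered, since ClickHouse is now the only backend. For reference, the removed gate reduces to a pure function; a hedged sketch with assumed types (`MathEntry` is hypothetical shorthand for the tuples mathsLogic yields):

type MathEntry = [string, unknown]

// Hypothetical mirror of the deleted filter, shown only to document the old
// behavior: active-user maths were hidden when the backend flag was off.
function filterMathEntries(entries: MathEntry[], activeUserMathAllowed: boolean): MathEntry[] {
    return activeUserMathAllowed
        ? entries
        : entries.filter(([key]) => key !== 'weekly_active' && key !== 'monthly_active')
}
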
@@ -95,7 +95,7 @@ export function InsightTimeoutState({ isLoading }: { isLoading: boolean }): JSX.
<li>Reduce the date range of your query.</li>
<li>Remove some filters.</li>
{!preflight?.cloud && <li>Increase the size of your database server.</li>}
-{!preflight?.cloud && !preflight?.is_clickhouse_enabled && (
+{!preflight?.cloud && (
<li>
<a
data-attr="insight-timeout-upgrade-to-clickhouse"
@@ -208,7 +208,7 @@ export function InsightErrorState({ excludeDetail, title }: InsightErrorStatePro

export function FunnelSingleStepState(): JSX.Element {
const { insightProps } = useValues(insightLogic)
-const { filters, clickhouseFeaturesEnabled } = useValues(funnelLogic(insightProps))
+const { filters } = useValues(funnelLogic(insightProps))
const { setFilters } = useActions(funnelLogic(insightProps))
const { addFilter } = useActions(entityFilterLogic({ setFilters, filters, typeKey: 'EditFunnel-action' }))

@@ -221,9 +221,7 @@ export function FunnelSingleStepState(): JSX.Element {
<h2 className="funnels-empty-state__title">Add another step!</h2>
<p className="funnels-empty-state__description">
You’re almost there! Funnels require at least two steps before calculating.
-{clickhouseFeaturesEnabled
-? ' Once you have two steps defined, additional changes will recalculate automatically.'
-: ''}
+{' Once you have two steps defined, additional changes will recalculate automatically.'}
</p>
<div className="mt text-center">
<Button

@@ -208,7 +208,6 @@
"can_create_org": true,
"email_service_available": false,
"ee_available": true,
-"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"US/Pacific": -8,

@@ -223,7 +223,6 @@
"can_create_org": true,
"email_service_available": false,
"ee_available": true,
-"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"US/Pacific": -8,

@@ -226,7 +226,6 @@
"can_create_org": true,
"email_service_available": false,
"ee_available": true,
-"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"US/Pacific": -8,

@@ -238,7 +238,6 @@
"can_create_org": true,
"email_service_available": false,
"ee_available": true,
-"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"US/Pacific": -8,

@@ -203,7 +203,6 @@
"can_create_org": true,
"email_service_available": false,
"ee_available": true,
-"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"US/Pacific": -8,

@@ -200,7 +200,6 @@
"can_create_org": true,
"email_service_available": false,
"ee_available": true,
-"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"US/Pacific": -8,

@@ -24,7 +24,6 @@ import {
} from 'scenes/insights/EmptyStates'
import { Loading } from 'lib/utils'
import { funnelLogic } from 'scenes/funnels/funnelLogic'
-import { preflightLogic } from 'scenes/PreflightCheck/logic'
import clsx from 'clsx'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { PathCanvasLabel } from 'scenes/paths/PathsLabel'
@@ -46,7 +45,6 @@ export function InsightContainer(
disableTable: false,
}
): JSX.Element {
-const { preflight } = useValues(preflightLogic)
const { featureFlags } = useValues(featureFlagLogic)
const {
insightProps,
@@ -103,7 +101,6 @@ export function InsightContainer(

function renderTable(): JSX.Element | null {
if (
-!preflight?.is_clickhouse_enabled &&
!showErrorMessage &&
!showTimeoutMessage &&
areFiltersValid &&
@@ -114,7 +111,6 @@ export function InsightContainer(
}

if (
-preflight?.is_clickhouse_enabled &&
activeView === InsightType.FUNNELS &&
!showErrorMessage &&
!showTimeoutMessage &&

@@ -1,4 +1,4 @@
-import React, { useEffect, useState } from 'react'
+import React from 'react'
import { useValues, useActions, useMountedLogic } from 'kea'
import clsx from 'clsx'
import { funnelLogic } from 'scenes/funnels/funnelLogic'
@@ -30,15 +30,10 @@ import { FunnelStepReferencePicker } from './FunnelStepReferencePicker'
export function FunnelTab(): JSX.Element {
const { insightProps, allEventNames } = useValues(insightLogic)
const { loadResults } = useActions(insightLogic)
-const {
-isStepsEmpty,
-filters,
-clickhouseFeaturesEnabled,
-aggregationTargetLabel,
-filterSteps,
-advancedOptionsUsedCount,
-} = useValues(funnelLogic(insightProps))
-const { clearFunnel, setFilters, toggleAdvancedMode, setStepReference } = useActions(funnelLogic(insightProps))
+const { isStepsEmpty, filters, aggregationTargetLabel, filterSteps, advancedOptionsUsedCount } = useValues(
+funnelLogic(insightProps)
+)
+const { setFilters, toggleAdvancedMode, setStepReference } = useActions(funnelLogic(insightProps))
const { featureFlags } = useValues(featureFlagLogic)
const { groupsTaxonomicTypes, showGroupsOptions } = useValues(groupsModel)
const screens = useBreakpoint()
@@ -60,7 +55,7 @@ export function FunnelTab(): JSX.Element {
<h4 className="secondary" style={{ marginBottom: 0 }}>
Query steps
</h4>
-{clickhouseFeaturesEnabled && (
+{
<div className="flex-center">
<span
style={{
@@ -74,7 +69,7 @@ export function FunnelTab(): JSX.Element {
</span>
<ToggleButtonChartFilter simpleMode />
</div>
-)}
+}
</Row>
<Card className="action-filters-bordered" bodyStyle={{ padding: 0 }}>
<ActionFilter
@@ -100,23 +95,6 @@ export function FunnelTab(): JSX.Element {
rowClassName="action-filters-bordered"
/>
<div className="mb-05" />
-{!clickhouseFeaturesEnabled && (
-<>
-<hr style={{ margin: '0', marginBottom: '0.5rem' }} />
-<Row style={{ justifyContent: 'flex-end', paddingBottom: 8, paddingRight: 8 }}>
-{!isStepsEmpty && (
-<Button
-type="link"
-onClick={(): void => clearFunnel()}
-data-attr="save-funnel-clear-button"
->
-Clear
-</Button>
-)}
-<CalculateFunnelButton style={{ marginLeft: 4 }} />
-</Row>
-</>
-)}
</Card>
</form>
</div>
@@ -168,7 +146,7 @@ export function FunnelTab(): JSX.Element {
eventNames={allEventNames}
/>

-{clickhouseFeaturesEnabled && filters.funnel_viz_type === FunnelVizType.Steps && (
+{filters.funnel_viz_type === FunnelVizType.Steps && (
<>
<hr />
<h4 className="secondary">
@@ -191,7 +169,7 @@ export function FunnelTab(): JSX.Element {
</>
)}

-{clickhouseFeaturesEnabled && (
+{
<>
<hr />
<div className="flex-center cursor-pointer" onClick={toggleAdvancedMode}>
@@ -281,50 +259,8 @@ export function FunnelTab(): JSX.Element {
</div>
)}
</>
-)}
+}
</Col>
</Row>
)
}
-
-function CalculateFunnelButton({ style }: { style: React.CSSProperties }): JSX.Element {
-const { insightProps } = useValues(insightLogic)
-const { filters, areFiltersValid, filtersDirty, clickhouseFeaturesEnabled, isLoading } = useValues(
-funnelLogic(insightProps)
-)
-const [tooltipOpen, setTooltipOpen] = useState(false)
-const shouldRecalculate = filtersDirty && areFiltersValid && !isLoading && !clickhouseFeaturesEnabled
-
-// Only show tooltip after 3s of inactivity
-useEffect(() => {
-if (shouldRecalculate) {
-const rerenderInterval = setTimeout(() => {
-setTooltipOpen(true)
-}, 3000)
-
-return () => {
-clearTimeout(rerenderInterval)
-setTooltipOpen(false)
-}
-} else {
-setTooltipOpen(false)
-}
-}, [shouldRecalculate, filters])
-
-return (
-<Tooltip
-visible={tooltipOpen}
-title="Your query has changed. Calculate your changes to see updates in the visualization."
->
-<Button
-style={style}
-type={shouldRecalculate ? 'primary' : 'default'}
-htmlType="submit"
-disabled={!areFiltersValid}
-data-attr="save-funnel-button"
->
-Calculate
-</Button>
-</Tooltip>
-)
-}

@ -21,7 +21,7 @@ export function ToggleButtonChartFilter({
simpleMode,
}: ToggleButtonChartFilterProps): JSX.Element | null {
const { insightProps } = useValues(insightLogic)
const { clickhouseFeaturesEnabled, aggregationTargetLabel } = useValues(funnelLogic(insightProps))
const { aggregationTargetLabel } = useValues(funnelLogic(insightProps))
const { chartFilter } = useValues(chartFilterLogic(insightProps))
const { setChartFilter } = useActions(chartFilterLogic(insightProps))
const defaultDisplay = FunnelVizType.Steps
@ -38,14 +38,14 @@ export function ToggleButtonChartFilter({
label: 'Time to convert',
description: `Track how long it takes for ${aggregationTargetLabel.plural} to convert`,
icon: <ClockCircleOutlined />,
hidden: !clickhouseFeaturesEnabled,
hidden: false,
},
{
key: FunnelVizType.Trends,
label: 'Historical trends',
description: "Track how this funnel's conversion rate is trending over time",
icon: <LineChartOutlined />,
hidden: !clickhouseFeaturesEnabled,
hidden: false,
},
]

@ -22,7 +22,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic'

export function RetentionTab(): JSX.Element {
const { featureFlags } = useValues(featureFlagLogic)
const { insightProps, clickhouseFeaturesEnabled, allEventNames } = useValues(insightLogic)
const { insightProps, allEventNames } = useValues(insightLogic)
const { groupsTaxonomicTypes, showGroupsOptions } = useValues(groupsModel)
const { filters, actionFilterTargetEntity, actionFilterReturningEntity } = useValues(
retentionTableLogic(insightProps)
@ -171,8 +171,7 @@ export function RetentionTab(): JSX.Element {
/>
<TestAccountFilter filters={filters} onChange={setFilters} />

{clickhouseFeaturesEnabled &&
featureFlags[FEATURE_FLAGS.RETENTION_BREAKDOWN] &&
{featureFlags[FEATURE_FLAGS.RETENTION_BREAKDOWN] &&
filters.display !== ACTIONS_LINE_GRAPH_LINEAR ? (
<>
<hr />

@ -10,7 +10,6 @@ import { trendsLogic } from '../../../trends/trendsLogic'
import { FilterType, InsightType } from '~/types'
import { Formula } from './Formula'
import { TestAccountFilter } from 'scenes/insights/TestAccountFilter'
import { preflightLogic } from 'scenes/PreflightCheck/logic'
import './TrendTab.scss'
import useBreakpoint from 'antd/lib/grid/hooks/useBreakpoint'
import { GlobalFiltersTitle } from 'scenes/insights/common'
@ -27,7 +26,6 @@ export function TrendTab({ view }: TrendTabProps): JSX.Element {
const { insightProps, allEventNames } = useValues(insightLogic)
const { filters } = useValues(trendsLogic(insightProps))
const { setFilters, toggleLifecycle } = useActions(trendsLogic(insightProps))
const { preflight } = useValues(preflightLogic)
const { groupsTaxonomicTypes } = useValues(groupsModel)
const [isUsingFormulas, setIsUsingFormulas] = useState(filters.formula ? true : false)
const lifecycles = [
@ -43,7 +41,7 @@ export function TrendTab({ view }: TrendTabProps): JSX.Element {
const screens = useBreakpoint()
const isSmallScreen = screens.xs || (screens.sm && !screens.md)
const isTrends = !filters.insight || filters.insight === InsightType.TRENDS
const formulaAvailable = isTrends && preflight?.is_clickhouse_enabled
const formulaAvailable = isTrends
const formulaEnabled = (filters.events?.length || 0) + (filters.actions?.length || 0) > 0

return (

@ -374,7 +374,6 @@ const sampleContextData = {
},
current_user: { organization: { available_features: ['correlation_analysis'] } },
preflight: {
is_clickhouse_enabled: true,
instance_preferences: { disable_paid_fs: false },
},
default_event_name: '$pageview',

@ -138,7 +138,6 @@ const sampleContextData = {
},
},
preflight: {
is_clickhouse_enabled: true,
instance_preferences: { disable_paid_fs: false },
},
default_event_name: '$pageview',

@ -132,7 +132,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"Africa/Abidjan": 0,
@ -2643,12 +2642,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@ -120,7 +120,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@ -2843,12 +2842,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@ -115,7 +115,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@ -2611,12 +2610,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@ -140,7 +140,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@ -2974,12 +2973,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@ -118,7 +118,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@ -2601,12 +2600,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@ -129,7 +129,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@ -2974,12 +2973,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7fa-0000-c78a-c7b289c94008",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "0179fcb6-f7d6-0000-2314-06df9faddf58",
"name": "is_demo_project",

@ -25,7 +25,6 @@ import { filterTrendsClientSideParams, keyForInsightLogicProps } from 'scenes/in
import { cleanFilters } from 'scenes/insights/utils/cleanFilters'
import { dashboardsModel } from '~/models/dashboardsModel'
import { pollFunnel } from 'scenes/funnels/funnelUtils'
import { preflightLogic } from 'scenes/PreflightCheck/logic'
import { extractObjectDiffKeys, findInsightFromMountedLogic, getInsightId } from './utils'
import { teamLogic } from '../teamLogic'
import { Scene } from 'scenes/sceneTypes'
@ -440,10 +439,6 @@ export const insightLogic = kea<insightLogicType>({
(s) => [s.insight, s.activeView],
({ filters }, activeView) => filters?.insight || activeView || InsightType.TRENDS,
],
clickhouseFeaturesEnabled: [
() => [preflightLogic.selectors.preflight],
(preflight) => !!preflight?.is_clickhouse_enabled,
],
filtersChanged: [
(s) => [s.savedFilters, s.filters],
(savedFilters, filters) =>

@ -116,7 +116,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"Africa/Abidjan": 0,
@ -9773,16 +9772,6 @@
"updated_by": null,
"query_usage_30_day": 0
},
{
"id": "017989da-7571-0001-bc1f-c48306dd7a5e",
"name": "is_clickhouse_enabled",
"description": null,
"tags": null,
"is_numerical": false,
"updated_at": null,
"updated_by": null,
"query_usage_30_day": null
},
{
"id": "01791516-32bb-0000-4094-b9829f5f9651",
"name": "is_demo_project",

@ -116,7 +116,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": true,
"db_backend": "clickhouse",
"available_timezones": {
"Africa/Abidjan": 0,
@ -6424,16 +6423,6 @@
"updated_by": null,
"query_usage_30_day": 0
},
{
"id": "017989da-7571-0001-bc1f-c48306dd7a5e",
"name": "is_clickhouse_enabled",
"description": null,
"tags": null,
"is_numerical": false,
"updated_at": null,
"updated_by": null,
"query_usage_30_day": null
},
{
"id": "01791516-32bb-0000-4094-b9829f5f9651",
"name": "is_demo_project",

@ -15,7 +15,6 @@ import { DateFilter } from 'lib/components/DateFilter/DateFilter'
import { Tooltip } from 'lib/components/Tooltip'
import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters'
import { preflightLogic } from 'scenes/PreflightCheck/logic'
import './SessionRecordingTable.scss'
import { LemonTable, LemonTableColumns } from 'lib/components/LemonTable'
import { TZLabel } from 'lib/components/TimezoneAware'
@ -74,7 +73,6 @@ export function SessionRecordingsTable({ personUUID, isPersonPage = false }: Ses
setDurationFilter,
enableFilter,
} = useActions(sessionRecordingsTableLogicInstance)
const { preflight } = useValues(preflightLogic)

const columns: LemonTableColumns<SessionRecordingType> = [
{
@ -151,7 +149,7 @@ export function SessionRecordingsTable({ personUUID, isPersonPage = false }: Ses
]}
/>
</div>
{!isPersonPage && preflight?.is_clickhouse_enabled && (
{!isPersonPage && (
<div className="mt-2">
<Typography.Text strong>
{`Filter by persons and cohorts `}

@ -203,7 +203,6 @@
},
"can_create_org": false,
"ee_available": true,
"is_clickhouse_enabled": false,
"db_backend": "postgres",
"available_timezones": {
"Africa/Abidjan": 0,
@ -1618,12 +1617,6 @@
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "017c49bd-c5b5-0000-2339-428a6a70e29a",
"name": "is_clickhouse_enabled",
"is_numerical": false,
"query_usage_30_day": null
},
{
"id": "017c49bd-c542-0000-ab97-5484f63a8fc3",
"name": "is_demo_project",

@ -10,7 +10,6 @@ import './PersonsModal.scss'
import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo'
import { PropertiesTable } from 'lib/components/PropertiesTable'
import { DateDisplay } from 'lib/components/DateDisplay'
import { preflightLogic } from 'scenes/PreflightCheck/logic'
import { PersonHeader } from '../persons/PersonHeader'
import api from '../../lib/api'
import { LemonTable, LemonTableColumns } from 'lib/components/LemonTable'
@ -45,7 +44,6 @@ export function PersonsModal({
firstLoadedPeople,
searchTerm,
isInitialLoad,
clickhouseFeaturesEnabled,
peopleParams,
actorLabel,
sessionRecordingId,
@ -60,7 +58,6 @@ export function PersonsModal({
openRecordingModal,
closeRecordingModal,
} = useActions(personsModalLogic)
const { preflight } = useValues(preflightLogic)
const { featureFlags } = useValues(featureFlagLogic)

const title = useMemo(
@ -96,7 +93,6 @@ export function PersonsModal({
const flaggedInsights = featureFlags[FEATURE_FLAGS.NEW_INSIGHT_COHORTS]
const isDownloadCsvAvailable: boolean = view === InsightType.TRENDS && showModalActions && !!people?.action
const isSaveAsCohortAvailable =
clickhouseFeaturesEnabled &&
(view === InsightType.TRENDS ||
view === InsightType.STICKINESS ||
(!!flaggedInsights && (view === InsightType.FUNNELS || view === InsightType.PATHS))) && // make sure flaggedInsights isn't evaluated as undefined
@ -158,25 +154,23 @@ export function PersonsModal({
) : (
people && (
<>
{!preflight?.is_clickhouse_enabled && (
<Input.Search
allowClear
enterButton
placeholder="Search for persons by email, name, or ID"
onChange={(e) => {
setSearchTerm(e.target.value)
if (!e.target.value) {
setFirstLoadedActors(firstLoadedPeople)
}
}}
value={searchTerm}
onSearch={(term) =>
term
? setPersonsModalFilters(term, people, filters)
: setFirstLoadedActors(firstLoadedPeople)
<Input.Search
allowClear
enterButton
placeholder="Search for persons by email, name, or ID"
onChange={(e) => {
setSearchTerm(e.target.value)
if (!e.target.value) {
setFirstLoadedActors(firstLoadedPeople)
}
/>
)}
}}
value={searchTerm}
onSearch={(term) =>
term
? setPersonsModalFilters(term, people, filters)
: setFirstLoadedActors(firstLoadedPeople)
}
/>
{featureFlags[FEATURE_FLAGS.MULTI_POINT_PERSON_MODAL] &&
!!people.crossDataset?.length &&
people.seriesId !== undefined && (

@ -15,7 +15,6 @@ import {
GraphDataset,
} from '~/types'
import { personsModalLogicType } from './personsModalLogicType'
import { preflightLogic } from 'scenes/PreflightCheck/logic'
import { eventUsageLogic } from 'lib/utils/eventUsageLogic'

import { TrendActors } from 'scenes/trends/types'
@ -242,10 +241,6 @@ export const personsModalLogic = kea<personsModalLogicType<LoadPeopleFromUrlProp
(s) => [s.peopleLoading, s.loadingMorePeople],
(peopleLoading, loadingMorePeople) => peopleLoading && !loadingMorePeople,
],
clickhouseFeaturesEnabled: [
() => [preflightLogic.selectors.preflight],
(preflight) => !!preflight?.is_clickhouse_enabled,
],
isGroupType: [(s) => [s.people], (people) => people?.people?.[0] && isGroupType(people.people[0])],
actorLabel: [
(s) => [s.people, s.isGroupType, s.groupTypes, s.aggregationLabel],

@ -1240,10 +1240,6 @@ export interface PreflightStatus {
/** Whether this is a managed demo environment. */
demo: boolean
celery: boolean
/** Whether EE code is available (but not necessarily a license). */
ee_available?: boolean
/** Is ClickHouse used as the analytics database instead of Postgres. */
is_clickhouse_enabled?: boolean
realm: Realm
db_backend?: 'postgres' | 'clickhouse'
available_social_auth_providers: AuthBackends

14
package.json
@ -29,8 +29,8 @@
"start-docker": "yarn start-docker:esbuild",
"start-docker:esbuild": "yarn start-http:esbuild --host 0.0.0.0",
"start-docker:webpack": "yarn start-http:webpack --host 0.0.0.0",
"start-ch-dev": "concurrently -n DOCKER,ESBUILD,TYPEGEN -c red,blue,green \"docker-compose -f ee/docker-compose.ch.yml pull && CH_WEB_SCRIPT=./ee/bin/docker-ch-dev-backend docker-compose -f ee/docker-compose.ch.yml up\" \"yarn run start-http --host 0.0.0.0\" \"yarn run typegen:watch\"",
"clear-ch-dev": "docker compose -f ee/docker-compose.ch.yml stop && docker compose -f ee/docker-compose.ch.yml rm -v && docker compose -f ee/docker-compose.ch.yml down",
"start-ch-dev": "concurrently -n DOCKER,ESBUILD,TYPEGEN -c red,blue,green \"docker-compose -f docker-compose.dev.yml pull && CH_WEB_SCRIPT=./ee/bin/docker-ch-dev-backend docker-compose -f docker-compose.dev.yml up\" \"yarn run start-http --host 0.0.0.0\" \"yarn run typegen:watch\"",
"clear-ch-dev": "docker compose -f docker-compose.dev.yml stop && docker compose -f docker-compose.dev.yml rm -v && docker compose -f docker-compose.dev.yml down",
"clean": "rm -rf frontend/dist && mkdir frontend/dist",
"build": "yarn copy-scripts && yarn build:esbuild",
"build:webpack": "echo \"Building Webpack\" && NODE_ENV=production webpack --config webpack.config.js && cp -a frontend/public/* frontend/dist/",
@ -50,12 +50,12 @@
"storybook": "start-storybook -s .storybook/public -p 6006",
"build-storybook": "build-storybook -s .storybook/public",
"arm64:build:clickhouse": "./docker/clickhouse-builder/build.sh",
"arm64:ch-dev:start": "concurrently -n DOCKER,ESBUILD,TYPEGEN -c red,blue,green \"docker-compose -f ee/docker-compose.ch.arm64.yml pull && CH_WEB_SCRIPT=./ee/bin/docker-ch-dev-backend docker-compose -f ee/docker-compose.ch.arm64.yml up\" \"yarn run start-http --host 0.0.0.0\" \"yarn run typegen:watch\"",
"arm64:ch-dev:clear": "docker compose -f ee/docker-compose.ch.arm64.yml stop && docker compose -f ee/docker-compose.ch.arm64.yml rm -v && docker compose -f ee/docker-compose.ch.arm64.yml down",
"arm64:ch-dev:start": "concurrently -n DOCKER,ESBUILD,TYPEGEN -c red,blue,green \"docker-compose -f docker-compose.arm64.yml pull && CH_WEB_SCRIPT=./ee/bin/docker-ch-dev-backend docker-compose -f docker-compose.arm64.yml up\" \"yarn run start-http --host 0.0.0.0\" \"yarn run typegen:watch\"",
"arm64:ch-dev:clear": "docker compose -f docker-compose.arm64.yml stop && docker compose -f docker-compose.arm64.yml rm -v && docker compose -f docker-compose.arm64.yml down",
"arm64:services": "yarn arm64:services:stop && yarn arm64:services:clean && yarn arm64:services:start",
"arm64:services:start": "docker-compose -f ee/docker-compose.ch.arm64.yml up zookeeper kafka clickhouse",
"arm64:services:stop": "docker-compose -f ee/docker-compose.ch.arm64.yml down",
"arm64:services:clean": "docker-compose -f ee/docker-compose.ch.arm64.yml rm -v zookeeper kafka clickhouse",
"arm64:services:start": "docker-compose -f docker-compose.arm64.yml up zookeeper kafka clickhouse",
"arm64:services:stop": "docker-compose -f docker-compose.arm64.yml down",
"arm64:services:clean": "docker-compose -f docker-compose.arm64.yml rm -v zookeeper kafka clickhouse",
"dev:migrate:postgres": "export DEBUG=1 PRIMARY_DB=clickhouse && source env/bin/activate && python manage.py migrate",
"dev:migrate:clickhouse": "export DEBUG=1 PRIMARY_DB=clickhouse && source env/bin/activate && python manage.py migrate_clickhouse"
},

@ -35,9 +35,9 @@
"setup:test:ee": "yarn setup:test:postgres && yarn setup:test:clickhouse",
"setup:test:postgres": "cd .. && python manage.py setup_test_environment",
"setup:test:clickhouse": "cd .. && unset KAFKA_URL && export TEST=1 PRIMARY_DB=clickhouse CLICKHOUSE_DATABASE=posthog_test && python manage.py migrate_clickhouse",
"services:start": "cd .. && docker-compose -f ee/docker-compose.ch.yml up zookeeper kafka clickhouse",
"services:stop": "cd .. && docker-compose -f ee/docker-compose.ch.yml down",
"services:clean": "cd .. && docker-compose -f ee/docker-compose.ch.yml rm -v zookeeper kafka clickhouse",
"services:start": "cd .. && docker-compose -f docker-compose.dev.yml up zookeeper kafka clickhouse",
"services:stop": "cd .. && docker-compose -f docker-compose.dev.yml down",
"services:clean": "cd .. && docker-compose -f docker-compose.dev.yml rm -v zookeeper kafka clickhouse",
"services": "yarn services:stop && yarn services:clean && yarn services:start"
},
"bin": {

@ -1,7 +1,6 @@
from rest_framework import decorators, exceptions

from posthog.api.routing import DefaultRouterPlusPlus
from posthog.utils import is_clickhouse_enabled

from . import (
action,
@ -100,57 +99,36 @@ router.register(r"async_migrations", async_migration.AsyncMigrationsViewset, "as
router.register(r"instance_settings", instance_settings.InstanceSettingsViewset, "instance_settings")


if is_clickhouse_enabled():
from ee.clickhouse.views.actions import ClickhouseActionsViewSet, LegacyClickhouseActionsViewSet
from ee.clickhouse.views.cohort import ClickhouseCohortViewSet, LegacyClickhouseCohortViewSet
from ee.clickhouse.views.element import ClickhouseElementViewSet, LegacyClickhouseElementViewSet
from ee.clickhouse.views.events import ClickhouseEventsViewSet, LegacyClickhouseEventsViewSet
from ee.clickhouse.views.experiments import ClickhouseExperimentsViewSet
from ee.clickhouse.views.groups import ClickhouseGroupsTypesView, ClickhouseGroupsView
from ee.clickhouse.views.insights import ClickhouseInsightsViewSet, LegacyClickhouseInsightsViewSet
from ee.clickhouse.views.paths import ClickhousePathsViewSet, LegacyClickhousePathsViewSet
from ee.clickhouse.views.person import ClickhousePersonViewSet, LegacyClickhousePersonViewSet
from ee.clickhouse.views.session_recordings import ClickhouseSessionRecordingViewSet
from ee.clickhouse.views.actions import ClickhouseActionsViewSet, LegacyClickhouseActionsViewSet
from ee.clickhouse.views.cohort import ClickhouseCohortViewSet, LegacyClickhouseCohortViewSet
from ee.clickhouse.views.element import ClickhouseElementViewSet, LegacyClickhouseElementViewSet
from ee.clickhouse.views.events import ClickhouseEventsViewSet, LegacyClickhouseEventsViewSet
from ee.clickhouse.views.experiments import ClickhouseExperimentsViewSet
from ee.clickhouse.views.groups import ClickhouseGroupsTypesView, ClickhouseGroupsView
from ee.clickhouse.views.insights import ClickhouseInsightsViewSet, LegacyClickhouseInsightsViewSet
from ee.clickhouse.views.paths import ClickhousePathsViewSet, LegacyClickhousePathsViewSet
from ee.clickhouse.views.person import ClickhousePersonViewSet, LegacyClickhousePersonViewSet
from ee.clickhouse.views.session_recordings import ClickhouseSessionRecordingViewSet

# Legacy endpoints CH (to be removed eventually)
router.register(r"action", LegacyClickhouseActionsViewSet, basename="action") # Should be completely unused now
router.register(r"event", LegacyClickhouseEventsViewSet, basename="event") # Should be completely unused now
router.register(r"insight", LegacyClickhouseInsightsViewSet, basename="insight") # Should be completely unused now
router.register(r"person", LegacyClickhousePersonViewSet, basename="person")
router.register(r"paths", LegacyClickhousePathsViewSet, basename="paths")
router.register(r"element", LegacyClickhouseElementViewSet, basename="element")
router.register(r"cohort", LegacyClickhouseCohortViewSet, basename="cohort")
# Nested endpoints CH
projects_router.register(r"actions", ClickhouseActionsViewSet, "project_actions", ["team_id"])
projects_router.register(r"events", ClickhouseEventsViewSet, "project_events", ["team_id"])
projects_router.register(r"groups", ClickhouseGroupsView, "project_groups", ["team_id"])
projects_router.register(r"groups_types", ClickhouseGroupsTypesView, "project_groups_types", ["team_id"])
projects_router.register(r"insights", ClickhouseInsightsViewSet, "project_insights", ["team_id"])
projects_router.register(r"persons", ClickhousePersonViewSet, "project_persons", ["team_id"])
projects_router.register(r"paths", ClickhousePathsViewSet, "project_paths", ["team_id"])
projects_router.register(r"elements", ClickhouseElementViewSet, "project_elements", ["team_id"])
projects_router.register(r"cohorts", ClickhouseCohortViewSet, "project_cohorts", ["team_id"])
projects_router.register(r"experiments", ClickhouseExperimentsViewSet, "project_experiments", ["team_id"])
projects_router.register(
r"session_recordings", ClickhouseSessionRecordingViewSet, "project_session_recordings", ["team_id"],
)
else:
# Legacy endpoints PG (to be removed eventually)
router.register(r"insight", insight.LegacyInsightViewSet) # Should be completely unused now
router.register(r"action", action.LegacyActionViewSet) # Should be completely unused now
router.register(r"person", person.LegacyPersonViewSet)
router.register(r"event", event.LegacyEventViewSet) # Should be completely unused now
router.register(r"paths", paths.LegacyPathsViewSet, basename="paths")
router.register(r"element", element.LegacyElementViewSet)
router.register(r"cohort", cohort.LegacyCohortViewSet)
# Nested endpoints PG
projects_router.register(r"insights", insight.LegacyInsightViewSet, "project_insights", ["team_id"])
projects_router.register(r"actions", action.ActionViewSet, "project_actions", ["team_id"])
projects_router.register(r"persons", person.LegacyPersonViewSet, "project_persons", ["team_id"])
projects_router.register(r"events", event.LegacyEventViewSet, "project_events", ["team_id"])
projects_router.register(r"paths", paths.LegacyPathsViewSet, "project_paths", ["team_id"])
projects_router.register(r"elements", element.LegacyElementViewSet, "project_elements", ["team_id"])
projects_router.register(r"cohorts", cohort.LegacyCohortViewSet, "project_cohorts", ["team_id"])
projects_router.register(
r"session_recordings", session_recording.SessionRecordingViewSet, "project_session_recordings", ["team_id"],
)
# Legacy endpoints CH (to be removed eventually)
router.register(r"action", LegacyClickhouseActionsViewSet, basename="action") # Should be completely unused now
router.register(r"event", LegacyClickhouseEventsViewSet, basename="event") # Should be completely unused now
router.register(r"insight", LegacyClickhouseInsightsViewSet, basename="insight") # Should be completely unused now
router.register(r"person", LegacyClickhousePersonViewSet, basename="person")
router.register(r"paths", LegacyClickhousePathsViewSet, basename="paths")
router.register(r"element", LegacyClickhouseElementViewSet, basename="element")
router.register(r"cohort", LegacyClickhouseCohortViewSet, basename="cohort")
# Nested endpoints CH
projects_router.register(r"actions", ClickhouseActionsViewSet, "project_actions", ["team_id"])
projects_router.register(r"events", ClickhouseEventsViewSet, "project_events", ["team_id"])
projects_router.register(r"groups", ClickhouseGroupsView, "project_groups", ["team_id"])
projects_router.register(r"groups_types", ClickhouseGroupsTypesView, "project_groups_types", ["team_id"])
projects_router.register(r"insights", ClickhouseInsightsViewSet, "project_insights", ["team_id"])
projects_router.register(r"persons", ClickhousePersonViewSet, "project_persons", ["team_id"])
projects_router.register(r"paths", ClickhousePathsViewSet, "project_paths", ["team_id"])
projects_router.register(r"elements", ClickhouseElementViewSet, "project_elements", ["team_id"])
projects_router.register(r"cohorts", ClickhouseCohortViewSet, "project_cohorts", ["team_id"])
projects_router.register(r"experiments", ClickhouseExperimentsViewSet, "project_experiments", ["team_id"])
projects_router.register(
r"session_recordings", ClickhouseSessionRecordingViewSet, "project_session_recordings", ["team_id"],
)

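Note: with the Postgres branch removed above, the ClickHouse viewsets are imported and registered unconditionally at module import time. A minimal, self-contained sketch (not PostHog code; the User viewset is a placeholder) of the same ungated DRF router registration pattern:

    # Sketch: unconditional router registration with stock DRF.
    # PostHog's DefaultRouterPlusPlus layers nested project routes on top
    # of this same register() call shape.
    from django.contrib.auth.models import User
    from rest_framework import routers, serializers, viewsets

    class UserSerializer(serializers.ModelSerializer):
        class Meta:
            model = User
            fields = ["id", "username"]

    class UserViewSet(viewsets.ModelViewSet):
        queryset = User.objects.all()
        serializer_class = UserSerializer

    router = routers.DefaultRouter()
    # No is_clickhouse_enabled() gate: one code path, wired up at import.
    router.register(r"users", UserViewSet, basename="user")
    urlpatterns = router.urls
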
@ -39,7 +39,6 @@ from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.team import Team
from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission
from posthog.queries import base, retention, stickiness, trends
from posthog.tasks.calculate_action import calculate_action
from posthog.utils import generate_cache_key, get_safe_cache, should_refresh

from .person import PersonSerializer, paginated_result
@ -125,7 +124,6 @@ class ActionSerializer(serializers.HyperlinkedModelSerializer):
action=instance, **{key: value for key, value in step.items() if key not in ("isNew", "selection")},
)

calculate_action.delay(action_id=instance.pk)
report_user_action(validated_data["created_by"], "action created", instance.get_analytics_metadata())

return instance
@ -152,7 +150,6 @@ class ActionSerializer(serializers.HyperlinkedModelSerializer):
)

instance = super().update(instance, validated_data)
calculate_action.delay(action_id=instance.pk)
instance.refresh_from_db()
report_user_action(
self.context["request"].user,

@ -9,88 +9,88 @@ from django.http import JsonResponse
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from sentry_sdk import capture_exception, configure_scope, push_scope
from sentry_sdk import capture_exception, configure_scope
from sentry_sdk.api import capture_exception
from statshog.defaults.django import statsd

from ee.kafka_client.client import KafkaProducer
from ee.kafka_client.topics import KAFKA_DEAD_LETTER_QUEUE
from ee.settings import KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC
from posthog.api.utils import get_data, get_team, get_token
from posthog.celery import app as celery_app
from posthog.exceptions import generate_exception_response
from posthog.helpers.session_recording import preprocess_session_recording_events
from posthog.models import Team
from posthog.models.feature_flag import get_overridden_feature_flags
from posthog.models.utils import UUIDT
from posthog.utils import cors_response, get_ip_address, is_clickhouse_enabled
from posthog.utils import cors_response, get_ip_address

if is_clickhouse_enabled():
from ee.kafka_client.client import KafkaProducer
from ee.kafka_client.topics import KAFKA_DEAD_LETTER_QUEUE
from ee.settings import KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC

def parse_kafka_event_data(
distinct_id: str,
ip: Optional[str],
site_url: str,
data: Dict,
team_id: Optional[int],
now: datetime,
sent_at: Optional[datetime],
event_uuid: UUIDT,
) -> Dict:
return {
"uuid": str(event_uuid),
"distinct_id": distinct_id,
"ip": ip,
"site_url": site_url,
"data": json.dumps(data),
"team_id": team_id,
"now": now.isoformat(),
"sent_at": sent_at.isoformat() if sent_at else "",
}
def parse_kafka_event_data(
distinct_id: str,
ip: Optional[str],
site_url: str,
data: Dict,
team_id: Optional[int],
now: datetime,
sent_at: Optional[datetime],
event_uuid: UUIDT,
) -> Dict:
return {
"uuid": str(event_uuid),
"distinct_id": distinct_id,
"ip": ip,
"site_url": site_url,
"data": json.dumps(data),
"team_id": team_id,
"now": now.isoformat(),
"sent_at": sent_at.isoformat() if sent_at else "",
}


def log_event(data: Dict, event_name: str) -> None:
if settings.DEBUG:
print(f"Logging event {event_name} to Kafka topic {KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC}")

# TODO: Handle Kafka being unavailable with exponential backoff retries
try:
KafkaProducer().produce(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, data=data)
except Exception as e:
statsd.incr("capture_endpoint_log_event_error")
print(f"Failed to produce event to Kafka topic {KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC} with error:", e)
raise e


def log_event_to_dead_letter_queue(
raw_payload: Dict,
event_name: str,
event: Dict,
error_message: str,
error_location: str,
topic: str = KAFKA_DEAD_LETTER_QUEUE,
):
data = event.copy()

data["failure_timestamp"] = datetime.now().isoformat()
data["error_location"] = error_location
data["error"] = error_message
data["elements_chain"] = ""
data["id"] = str(UUIDT())
data["event"] = event_name
data["raw_payload"] = json.dumps(raw_payload)
data["now"] = datetime.fromisoformat(data["now"]).replace(tzinfo=None).isoformat() if data["now"] else None

data["event_uuid"] = event["uuid"]
del data["uuid"]

try:
KafkaProducer().produce(topic=topic, data=data)
statsd.incr(settings.EVENTS_DEAD_LETTER_QUEUE_STATSD_METRIC)
except Exception as e:
capture_exception(e)
statsd.incr("events_dead_letter_queue_produce_error")

def log_event(data: Dict, event_name: str) -> None:
if settings.DEBUG:
print(f"Logging event {event_name} to Kafka topic {KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC}")

# TODO: Handle Kafka being unavailable with exponential backoff retries
try:
KafkaProducer().produce(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, data=data)
except Exception as e:
statsd.incr("capture_endpoint_log_event_error")
print(f"Failed to produce event to Kafka topic {KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC} with error:", e)
raise e

def log_event_to_dead_letter_queue(
raw_payload: Dict,
event_name: str,
event: Dict,
error_message: str,
error_location: str,
topic: str = KAFKA_DEAD_LETTER_QUEUE,
):
data = event.copy()

data["failure_timestamp"] = datetime.now().isoformat()
data["error_location"] = error_location
data["error"] = error_message
data["elements_chain"] = ""
data["id"] = str(UUIDT())
data["event"] = event_name
data["raw_payload"] = json.dumps(raw_payload)
data["now"] = datetime.fromisoformat(data["now"]).replace(tzinfo=None).isoformat() if data["now"] else None

data["event_uuid"] = event["uuid"]
del data["uuid"]

try:
KafkaProducer().produce(topic=topic, data=data)
statsd.incr(settings.EVENTS_DEAD_LETTER_QUEUE_STATSD_METRIC)
except Exception as e:
capture_exception(e)
statsd.incr("events_dead_letter_queue_produce_error")

if settings.DEBUG:
print("Failed to produce to events dead letter queue with error:", e)
print("Failed to produce to events dead letter queue with error:", e)


def _datetime_from_seconds_or_millis(timestamp: str) -> datetime:
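Note: a standalone sketch (plain Python, no PostHog imports) of the payload shape parse_kafka_event_data builds and log_event hands to the Kafka producer; the inner "data" field stays a JSON string, so consumers have to decode twice:

    import json
    import uuid
    from datetime import datetime, timezone

    event = {"event": "$pageview", "properties": {"distinct_id": "user-1"}}
    payload = {
        "uuid": str(uuid.uuid4()),  # stands in for PostHog's UUIDT
        "distinct_id": "user-1",
        "ip": "127.0.0.1",
        "site_url": "https://app.example.com",
        "data": json.dumps(event),  # note the double JSON encoding
        "team_id": 1,
        "now": datetime.now(timezone.utc).isoformat(),
        "sent_at": "",  # empty string when sent_at is None
    }
    # log_event() forwards such a dict via KafkaProducer().produce(topic=..., data=payload)
    assert json.loads(payload["data"])["event"] == "$pageview"
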
@ -173,7 +173,7 @@ def get_event(request):
return error_response

send_events_to_dead_letter_queue = False
if db_error and is_clickhouse_enabled():
if db_error:
send_events_to_dead_letter_queue = True

if isinstance(data, dict):
@ -295,23 +295,14 @@ def get_distinct_id(event):


def capture_internal(event, distinct_id, ip, site_url, now, sent_at, team_id, event_uuid=UUIDT()) -> None:
if is_clickhouse_enabled():
parsed_event = parse_kafka_event_data(
distinct_id=distinct_id,
ip=ip,
site_url=site_url,
data=event,
team_id=team_id,
now=now,
sent_at=sent_at,
event_uuid=event_uuid,
)
log_event(parsed_event, event["event"])
else:
task_name = "posthog.tasks.process_event.process_event_with_plugins"
celery_queue = settings.PLUGINS_CELERY_QUEUE
celery_app.send_task(
name=task_name,
queue=celery_queue,
args=[distinct_id, ip, site_url, event, team_id, now.isoformat(), sent_at,],
)
parsed_event = parse_kafka_event_data(
distinct_id=distinct_id,
ip=ip,
site_url=site_url,
data=event,
team_id=team_id,
now=now,
sent_at=sent_at,
event_uuid=event_uuid,
)
log_event(parsed_event, event["event"])

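Note: with the Celery fallback deleted, capture_internal is a straight parse-then-produce pipeline. A self-contained sketch (local stand-ins, not PostHog imports) of that control flow:

    import json
    from datetime import datetime, timezone
    from typing import Dict, Optional

    def parse_event(distinct_id: str, data: Dict, now: datetime, sent_at: Optional[datetime]) -> Dict:
        # Condensed stand-in for parse_kafka_event_data above.
        return {
            "distinct_id": distinct_id,
            "data": json.dumps(data),
            "now": now.isoformat(),
            "sent_at": sent_at.isoformat() if sent_at else "",
        }

    def capture_internal(event: Dict, distinct_id: str, now: datetime, sent_at: Optional[datetime]) -> Dict:
        # No is_clickhouse_enabled() branch: always build the Kafka payload.
        parsed = parse_event(distinct_id, event, now, sent_at)
        # The real code calls log_event(parsed, event["event"]) here.
        return parsed

    payload = capture_internal({"event": "$pageview"}, "user-1", datetime.now(timezone.utc), None)
    assert json.loads(payload["data"])["event"] == "$pageview"
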
@ -24,8 +24,7 @@ from posthog.queries.stickiness import (
stickiness_format_intervals,
stickiness_process_entity_type,
)
from posthog.tasks.calculate_cohort import calculate_cohort, calculate_cohort_ch, calculate_cohort_from_list
from posthog.utils import is_clickhouse_enabled
from posthog.tasks.calculate_cohort import calculate_cohort_ch, calculate_cohort_from_list


class CohortSerializer(serializers.ModelSerializer):
@ -75,10 +74,7 @@ class CohortSerializer(serializers.ModelSerializer):
if cohort.is_static:
self._handle_static(cohort, request)
else:
if is_clickhouse_enabled():
calculate_cohort_ch.delay(cohort.id)
else:
calculate_cohort.delay(cohort.id)
calculate_cohort_ch.delay(cohort.id)

report_user_action(request.user, "cohort created", cohort.get_analytics_metadata())
return cohort
@ -146,10 +142,7 @@ class CohortSerializer(serializers.ModelSerializer):
if request.FILES.get("csv"):
self._calculate_static_by_csv(request.FILES["csv"], cohort)
else:
if is_clickhouse_enabled():
calculate_cohort_ch.delay(cohort.id)
else:
calculate_cohort.delay(cohort.id)
calculate_cohort_ch.delay(cohort.id)

report_user_action(
request.user,

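Note: both serializer paths now enqueue the ClickHouse recalculation with a plain Celery .delay(). A minimal sketch (placeholder app, broker, and task body) of that fire-and-forget pattern:

    from celery import Celery

    app = Celery("sketch", broker="redis://localhost:6379/0")  # placeholder broker

    @app.task
    def calculate_cohort_ch(cohort_id: int) -> None:
        # Placeholder body; the real task recalculates the cohort in ClickHouse.
        print(f"recalculating cohort {cohort_id}")

    # .delay() serializes the args and enqueues without waiting on a result,
    # so the API handler returns immediately after scheduling the work.
    calculate_cohort_ch.delay(42)
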
@ -7,10 +7,8 @@ from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response

from posthog.async_migrations.status import async_migrations_ok
from posthog.gitsha import GIT_SHA
from posthog.internal_metrics.team import get_internal_metrics_dashboards
from posthog.models import Element, Event, SessionRecordingEvent
from posthog.permissions import OrganizationAdminAnyPermissions, SingleTenancyOrAdmin
from posthog.utils import (
dict_from_cursor_fetchall,
@ -19,9 +17,6 @@ from posthog.utils import (
get_plugin_server_version,
get_redis_info,
get_redis_queue_depth,
get_table_approx_count,
get_table_size,
is_clickhouse_enabled,
is_plugin_server_alive,
is_postgres_alive,
is_redis_alive,
@ -58,11 +53,7 @@ class InstanceStatusViewSet(viewsets.ViewSet):
)

metrics.append(
{
"key": "analytics_database",
"metric": "Analytics database in use",
"value": "ClickHouse" if is_clickhouse_enabled() else "Postgres",
}
{"key": "analytics_database", "metric": "Analytics database in use", "value": "ClickHouse",}
)

metrics.append(
@ -98,40 +89,9 @@ class InstanceStatusViewSet(viewsets.ViewSet):
}
)

# metrics.append(
# {"key": "async_migrations_ok", "metric": "Async migrations up-to-date", "value": async_migrations_ok()}
# )
from ee.clickhouse.system_status import system_status

if not is_clickhouse_enabled():
event_table_count = get_table_approx_count(Event._meta.db_table)
event_table_size = get_table_size(Event._meta.db_table)

element_table_count = get_table_approx_count(Element._meta.db_table)
element_table_size = get_table_size(Element._meta.db_table)

session_recording_event_table_count = get_table_approx_count(SessionRecordingEvent._meta.db_table)
session_recording_event_table_size = get_table_size(SessionRecordingEvent._meta.db_table)

metrics.append(
{
"metric": "Postgres elements table size",
"value": f"{element_table_count} rows (~{element_table_size})",
}
)
metrics.append(
{"metric": "Postgres events table size", "value": f"{event_table_count} rows (~{event_table_size})"}
)
metrics.append(
{
"metric": "Postgres session recording table size",
"value": f"{session_recording_event_table_count} rows (~{session_recording_event_table_size})",
}
)

if is_clickhouse_enabled():
from ee.clickhouse.system_status import system_status

metrics.extend(list(system_status()))
metrics.extend(list(system_status()))

metrics.append({"key": "redis_alive", "metric": "Redis alive", "value": redis_alive})
if redis_alive:
@ -175,11 +135,10 @@ class InstanceStatusViewSet(viewsets.ViewSet):
def queries(self, request: Request) -> Response:
queries = {"postgres_running": self.get_postgres_running_queries()}

if is_clickhouse_enabled():
from ee.clickhouse.system_status import get_clickhouse_running_queries, get_clickhouse_slow_log
from ee.clickhouse.system_status import get_clickhouse_running_queries, get_clickhouse_slow_log

queries["clickhouse_running"] = get_clickhouse_running_queries()
queries["clickhouse_slow_log"] = get_clickhouse_slow_log()
queries["clickhouse_running"] = get_clickhouse_running_queries()
queries["clickhouse_slow_log"] = get_clickhouse_slow_log()

return Response({"results": queries})

@ -190,10 +149,10 @@ class InstanceStatusViewSet(viewsets.ViewSet):
)
def analyze_ch_query(self, request: Request) -> Response:
response = {}
if is_clickhouse_enabled():
from ee.clickhouse.system_status import analyze_query

response["results"] = analyze_query(request.data["query"])
from ee.clickhouse.system_status import analyze_query

response["results"] = analyze_query(request.data["query"])

return Response(response)

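Note: the status endpoint now always pulls ClickHouse metrics. A sketch (dummy system_status generator standing in for ee.clickhouse.system_status) of the assembly pattern the viewset keeps after dropping the Postgres table-size branch:

    from typing import Dict, Generator, List

    def system_status() -> Generator[Dict, None, None]:
        # Dummy stand-in for ee.clickhouse.system_status.system_status.
        yield {"key": "clickhouse_alive", "metric": "ClickHouse alive", "value": True}

    metrics: List[Dict] = []
    metrics.append({"key": "analytics_database", "metric": "Analytics database in use", "value": "ClickHouse"})
    # system_status() yields lazily, hence the list() before extend in the view.
    metrics.extend(list(system_status()))
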
@ -8,7 +8,6 @@ from posthog.test.base import APIBaseTest


def factory_test_action_api(event_factory):
@patch("posthog.tasks.calculate_action.calculate_action.delay")
class TestActionApi(APIBaseTest):
@patch("posthog.api.action.report_user_action")
def test_create_action(self, patch_capture, *args):
@ -270,7 +269,3 @@ def factory_test_action_api(event_factory):
self.assertEqual(response, {"count": 1})

return TestActionApi


class TestAction(factory_test_action_api(Event.objects.create)): # type: ignore
pass

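Note: the removed class-level @patch is why the test method carries a *args catch-all: a class decorator appends its mock to every test method's arguments. A self-contained sketch of that mechanic (os.getcwd is just a convenient patch target):

    import os
    from unittest import TestCase, main
    from unittest.mock import patch

    @patch("os.getcwd")  # class-level: applied to every test_* method
    class ExampleTest(TestCase):
        def test_something(self, mock_getcwd, *args):
            # mock_getcwd is injected by the class decorator; removing the
            # decorator (as this commit does for calculate_action.delay)
            # also lets the extra parameter go away.
            mock_getcwd.return_value = "/tmp"
            self.assertEqual(os.getcwd(), "/tmp")

    if __name__ == "__main__":
        main()
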
@ -2,15 +2,13 @@ import json

from freezegun import freeze_time

from posthog.constants import ENTITY_ID, ENTITY_MATH, ENTITY_TYPE, TRENDS_CUMULATIVE
from posthog.constants import ENTITY_ID, ENTITY_TYPE, TRENDS_CUMULATIVE
from posthog.models import Action, ActionStep, Cohort, Event, Organization, Person
from posthog.queries.abstract_test.test_interval import AbstractIntervalTest
from posthog.tasks.calculate_action import calculate_actions_from_last_calculation
from posthog.test.base import APIBaseTest


def action_people_test_factory(event_factory, person_factory, action_factory, cohort_factory):
class TestActionPeople(AbstractIntervalTest, APIBaseTest):
class TestActionPeople(APIBaseTest):
def _create_events(self, use_time=False):
action_factory(team=self.team, name="no events")

@ -185,7 +183,7 @@ def action_people_test_factory(event_factory, person_factory, action_factory, co
event_factory(
team=self.team, event="sign up", distinct_id="person1", timestamp="2019-11-27T16:50:00Z",
)
calculate_actions_from_last_calculation()

return person1, person2, person3, person4, person5, person6, person7

def test_hour_interval(self):
@ -270,7 +268,7 @@ def action_people_test_factory(event_factory, person_factory, action_factory, co
event_factory(
team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-05T15:50:00Z",
)
calculate_actions_from_last_calculation()

# test people
action_response = self.client.get(
f"/api/projects/{self.team.id}/actions/people/",
@ -314,7 +312,7 @@ def action_people_test_factory(event_factory, person_factory, action_factory, co
event_factory(
team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-05T15:50:00Z",
)
calculate_actions_from_last_calculation()

# test people
action_response = self.client.get(
f"/api/projects/{self.team.id}/actions/people/",
@ -627,25 +625,3 @@ def action_people_test_factory(event_factory, person_factory, action_factory, co
self.assertEqual(people["results"][0]["people"][0]["id"], person2.pk)

return TestActionPeople


def _create_action(**kwargs):
team = kwargs.pop("team")
name = kwargs.pop("name")
action = Action.objects.create(team=team, name=name)
ActionStep.objects.create(action=action, event=name)
action.calculate_events()
return action


def _create_cohort(**kwargs):
team = kwargs.pop("team")
name = kwargs.pop("name")
groups = kwargs.pop("groups")
cohort = Cohort.objects.create(team=team, name=name, groups=groups)
cohort.calculate_people()
return cohort


class TestActionPeople(action_people_test_factory(Event.objects.create, Person.objects.create, _create_action, _create_cohort)): # type: ignore
pass

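Note: a compact sketch of the factory-parametrized test pattern kept above, where one test body is shared across backends by injecting creator callables (here a placeholder dict-based creator):

    import unittest

    def people_test_factory(event_factory):
        # The class closes over event_factory, so the same assertions can
        # run against different backends by swapping the creator.
        class TestPeople(unittest.TestCase):
            def test_event_created(self):
                event = event_factory(name="sign up")
                self.assertEqual(event["name"], "sign up")

        return TestPeople

    def _create_event(**kwargs):
        return dict(kwargs)  # placeholder creator

    class TestPeopleConcrete(people_test_factory(_create_event)):
        pass

    if __name__ == "__main__":
        unittest.main()
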
@ -1,7 +1,8 @@
import base64
import gzip
import json
from datetime import timedelta
from datetime import datetime, timedelta
from datetime import timezone as tz
from typing import Any, Dict, List, Union
from unittest.mock import MagicMock, call, patch
from urllib.parse import quote
@ -44,21 +45,20 @@ class TestCapture(BaseTest):
return json.loads(base64.b64decode(data))

def _to_arguments(self, patch_process_event_with_plugins: Any) -> dict:
args = patch_process_event_with_plugins.call_args[1]["args"]
distinct_id, ip, site_url, data, team_id, now, sent_at = args
args = patch_process_event_with_plugins.call_args[1]["data"]

return {
"distinct_id": distinct_id,
"ip": ip,
"site_url": site_url,
"data": data,
"team_id": team_id,
"now": now,
"sent_at": sent_at,
"distinct_id": args["distinct_id"],
"ip": args["ip"],
"site_url": args["site_url"],
"data": json.loads(args["data"]),
"team_id": args["team_id"],
"now": args["now"],
"sent_at": args["sent_at"],
}
||||
@patch("posthog.api.capture.celery_app.send_task")
|
||||
def test_capture_event(self, patch_process_event_with_plugins):
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce")
|
||||
def test_capture_event(self, kafka_produce):
|
||||
data = {
|
||||
"event": "$autocapture",
|
||||
"properties": {
|
||||
@ -75,7 +75,7 @@ class TestCapture(BaseTest):
|
||||
with self.assertNumQueries(1):
|
||||
response = self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost",)
|
||||
self.assertEqual(response.get("access-control-allow-origin"), "https://localhost")
|
||||
arguments = self._to_arguments(patch_process_event_with_plugins)
|
||||
arguments = self._to_arguments(kafka_produce)
|
||||
arguments.pop("now") # can't compare fakedate
|
||||
arguments.pop("sent_at") # can't compare fakedate
|
||||
self.assertDictEqual(
|
||||
@ -90,7 +90,7 @@ class TestCapture(BaseTest):
|
||||
)
|
||||
|
||||
@patch("posthog.api.capture.configure_scope")
|
||||
@patch("posthog.api.capture.celery_app.send_task", MagicMock())
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce", MagicMock())
|
||||
def test_capture_event_adds_library_to_sentry(self, patched_scope):
|
||||
mock_set_tag = mock_sentry_context_for_tagging(patched_scope)
|
||||
|
||||
@ -115,7 +115,7 @@ class TestCapture(BaseTest):
|
||||
mock_set_tag.assert_has_calls([call("library", "web"), call("library.version", "1.14.1")])
|
||||
|
||||
@patch("posthog.api.capture.configure_scope")
|
||||
@patch("posthog.api.capture.celery_app.send_task", MagicMock())
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce", MagicMock())
|
||||
def test_capture_event_adds_unknown_to_sentry_when_no_properties_sent(self, patched_scope):
|
||||
mock_set_tag = mock_sentry_context_for_tagging(patched_scope)
|
||||
|
||||
@ -137,8 +137,8 @@ class TestCapture(BaseTest):
|
||||
|
||||
mock_set_tag.assert_has_calls([call("library", "unknown"), call("library.version", "unknown")])
|
||||
|
||||
@patch("posthog.api.capture.celery_app.send_task")
|
||||
def test_personal_api_key(self, patch_process_event_with_plugins):
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce")
|
||||
def test_personal_api_key(self, kafka_produce):
|
||||
key = PersonalAPIKey(label="X", user=self.user)
|
||||
key.save()
|
||||
data = {
|
||||
@ -158,7 +158,7 @@ class TestCapture(BaseTest):
|
||||
with self.assertNumQueries(4):
|
||||
response = self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost",)
|
||||
self.assertEqual(response.get("access-control-allow-origin"), "https://localhost")
|
||||
arguments = self._to_arguments(patch_process_event_with_plugins)
|
||||
arguments = self._to_arguments(kafka_produce)
|
||||
arguments.pop("now") # can't compare fakedate
|
||||
arguments.pop("sent_at") # can't compare fakedate
|
||||
self.assertDictEqual(
|
||||
@ -172,8 +172,8 @@ class TestCapture(BaseTest):
|
||||
},
|
||||
)
|
||||
|
||||
@patch("posthog.api.capture.celery_app.send_task")
|
||||
def test_personal_api_key_from_batch_request(self, patch_process_event_with_plugins):
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce")
|
||||
def test_personal_api_key_from_batch_request(self, kafka_produce):
|
||||
# Originally issue POSTHOG-2P8
|
||||
key = PersonalAPIKey(label="X", user=self.user)
|
||||
key.save()
|
||||
@ -195,7 +195,7 @@ class TestCapture(BaseTest):
|
||||
response = self.client.get("/e/?data=%s" % quote(self._to_json(data)))
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
arguments = self._to_arguments(patch_process_event_with_plugins)
|
||||
arguments = self._to_arguments(kafka_produce)
|
||||
arguments.pop("now") # can't compare fakedate
|
||||
arguments.pop("sent_at") # can't compare fakedate
|
||||
self.assertDictEqual(
|
||||
@ -221,8 +221,8 @@ class TestCapture(BaseTest):
|
||||
},
|
||||
)
|
||||
|
||||
@patch("posthog.api.capture.celery_app.send_task")
|
||||
def test_multiple_events(self, patch_process_event_with_plugins):
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce")
|
||||
def test_multiple_events(self, kafka_produce):
|
||||
self.client.post(
|
||||
"/track/",
|
||||
data={
|
||||
@ -235,10 +235,10 @@ class TestCapture(BaseTest):
|
||||
"api_key": self.team.api_token,
|
||||
},
|
||||
)
|
||||
self.assertEqual(patch_process_event_with_plugins.call_count, 2)
|
||||
self.assertEqual(kafka_produce.call_count, 2)
|
||||
|
||||
@patch("posthog.api.capture.celery_app.send_task")
|
||||
def test_emojis_in_text(self, patch_process_event_with_plugins):
|
||||
@patch("ee.kafka_client.client._KafkaProducer.produce")
|
||||
def test_emojis_in_text(self, kafka_produce):
|
||||
self.team.api_token = "xp9qT2VLY76JJg"
|
||||
self.team.save()
|
||||
|
||||
@ -249,14 +249,13 @@ class TestCapture(BaseTest):
|
||||
"data": "eyJldmVudCI6ICIkd2ViX2V2ZW50IiwicHJvcGVydGllcyI6IHsiJG9zIjogIk1hYyBPUyBYIiwiJGJyb3dzZXIiOiAiQ2hyb21lIiwiJHJlZmVycmVyIjogImh0dHBzOi8vYXBwLmhpYmVybHkuY29tL2xvZ2luP25leHQ9LyIsIiRyZWZlcnJpbmdfZG9tYWluIjogImFwcC5oaWJlcmx5LmNvbSIsIiRjdXJyZW50X3VybCI6ICJodHRwczovL2FwcC5oaWJlcmx5LmNvbS8iLCIkYnJvd3Nlcl92ZXJzaW9uIjogNzksIiRzY3JlZW5faGVpZ2h0IjogMjE2MCwiJHNjcmVlbl93aWR0aCI6IDM4NDAsInBoX2xpYiI6ICJ3ZWIiLCIkbGliX3ZlcnNpb24iOiAiMi4zMy4xIiwiJGluc2VydF9pZCI6ICJnNGFoZXFtejVrY3AwZ2QyIiwidGltZSI6IDE1ODA0MTAzNjguMjY1LCJkaXN0aW5jdF9pZCI6IDYzLCIkZGV2aWNlX2lkIjogIjE2ZmQ1MmRkMDQ1NTMyLTA1YmNhOTRkOWI3OWFiLTM5NjM3YzBlLTFhZWFhMC0xNmZkNTJkZDA0NjQxZCIsIiRpbml0aWFsX3JlZmVycmVyIjogIiRkaXJlY3QiLCIkaW5pdGlhbF9yZWZlcnJpbmdfZG9tYWluIjogIiRkaXJlY3QiLCIkdXNlcl9pZCI6IDYzLCIkZXZlbnRfdHlwZSI6ICJjbGljayIsIiRjZV92ZXJzaW9uIjogMSwiJGhvc3QiOiAiYXBwLmhpYmVybHkuY29tIiwiJHBhdGhuYW1lIjogIi8iLCIkZWxlbWVudHMiOiBbCiAgICB7InRhZ19uYW1lIjogImJ1dHRvbiIsIiRlbF90ZXh0IjogIu2gve2yuyBXcml0aW5nIGNvZGUiLCJjbGFzc2VzIjogWwogICAgImJ0biIsCiAgICAiYnRuLXNlY29uZGFyeSIKXSwiYXR0cl9fY2xhc3MiOiAiYnRuIGJ0bi1zZWNvbmRhcnkiLCJhdHRyX19zdHlsZSI6ICJjdXJzb3I6IHBvaW50ZXI7IG1hcmdpbi1yaWdodDogOHB4OyBtYXJnaW4tYm90dG9tOiAxcmVtOyIsIm50aF9jaGlsZCI6IDIsIm50aF9vZl90eXBlIjogMX0sCiAgICB7InRhZ19uYW1lIjogImRpdiIsIm50aF9jaGlsZCI6IDEsIm50aF9vZl90eXBlIjogMX0sCiAgICB7InRhZ19uYW1lIjogImRpdiIsImNsYXNzZXMiOiBbCiAgICAiZmVlZGJhY2stc3RlcCIsCiAgICAiZmVlZGJhY2stc3RlcC1zZWxlY3RlZCIKXSwiYXR0cl9fY2xhc3MiOiAiZmVlZGJhY2stc3RlcCBmZWVkYmFjay1zdGVwLXNlbGVjdGVkIiwibnRoX2NoaWxkIjogMiwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgICJnaXZlLWZlZWRiYWNrIgpdLCJhdHRyX19jbGFzcyI6ICJnaXZlLWZlZWRiYWNrIiwiYXR0cl9fc3R5bGUiOiAid2lkdGg6IDkwJTsgbWFyZ2luOiAwcHggYXV0bzsgZm9udC1zaXplOiAxNXB4OyBwb3NpdGlvbjogcmVsYXRpdmU7IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiYXR0cl9fc3R5bGUiOiAib3ZlcmZsb3c6IGhpZGRlbjsiLCJudGhfY2hpbGQiOiAxLCJudGhfb2ZfdHlwZSI6IDF9LAogICAgeyJ0YWdfbmFtZSI6ICJkaXYiLCJjbGFzc2VzIjogWwogICAgIm1vZGFsLWJvZHkiCl0sImF0dHJfX2NsYXNzIjogIm1vZGFsLWJvZHkiLCJhdHRyX19zdHlsZSI6ICJmb250LXNpemU6IDE1cHg7IiwibnRoX2NoaWxkIjogMiwibnRoX29mX3R5cGUiOiAyfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgICJtb2RhbC1jb250ZW50IgpdLCJhdHRyX19jbGFzcyI6ICJtb2RhbC1jb250ZW50IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgICJtb2RhbC1kaWFsb2ciLAogICAgIm1vZGFsLWxnIgpdLCJhdHRyX19jbGFzcyI6ICJtb2RhbC1kaWFsb2cgbW9kYWwtbGciLCJhdHRyX19yb2xlIjogImRvY3VtZW50IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgICJtb2RhbCIsCiAgICAiZmFkZSIsCiAgICAic2hvdyIKXSwiYXR0cl9fY2xhc3MiOiAibW9kYWwgZmFkZSBzaG93IiwiYXR0cl9fc3R5bGUiOiAiZGlzcGxheTogYmxvY2s7IiwibnRoX2NoaWxkIjogMiwibnRoX29mX3R5cGUiOiAyfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgICJrLXBvcnRsZXRfX2JvZHkiLAogICAgIiIKXSwiYXR0cl9fY2xhc3MiOiAiay1wb3J0bGV0X19ib2R5ICIsImF0dHJfX3N0eWxlIjogInBhZGRpbmc6IDBweDsiLCJudGhfY2hpbGQiOiAyLCJudGhfb2ZfdHlwZSI6IDJ9LAogICAgeyJ0YWdfbmFtZSI6ICJkaXYiLCJjbGFzc2VzIjogWwogICAgImstcG9ydGxldCIsCiAgICAiay1wb3J0bGV0LS1oZWlnaHQtZmx1aWQiCl0sImF0dHJfX2NsYXNzIjogImstcG9ydGxldCBrLXBvcnRsZXQtLWhlaWdodC1mbHVpZCIsIm50aF9jaGlsZCI6IDEsIm50aF9vZl90eXBlIjogMX0sCiAgICB7InRhZ19uYW1lIjogImRpdiIsImNsYXNzZXMiOiBbCiAgICAiY29sLWxnLTYiCl0sImF0dHJfX2NsYXNzIjogImNvbC1sZy02IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgIC
Jyb3ciCl0sImF0dHJfX2NsYXNzIjogInJvdyIsIm50aF9jaGlsZCI6IDEsIm50aF9vZl90eXBlIjogMX0sCiAgICB7InRhZ19uYW1lIjogImRpdiIsImF0dHJfX3N0eWxlIjogInBhZGRpbmc6IDQwcHggMzBweCAwcHg7IGJhY2tncm91bmQtY29sb3I6IHJnYigyMzksIDIzOSwgMjQ1KTsgbWFyZ2luLXRvcDogLTQwcHg7IG1pbi1oZWlnaHQ6IGNhbGMoMTAwdmggLSA0MHB4KTsiLCJudGhfY2hpbGQiOiAyLCJudGhfb2ZfdHlwZSI6IDJ9LAogICAgeyJ0YWdfbmFtZSI6ICJkaXYiLCJhdHRyX19zdHlsZSI6ICJtYXJnaW4tdG9wOiAwcHg7IiwibnRoX2NoaWxkIjogMiwibnRoX29mX3R5cGUiOiAyfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiY2xhc3NlcyI6IFsKICAgICJBcHAiCl0sImF0dHJfX2NsYXNzIjogIkFwcCIsImF0dHJfX3N0eWxlIjogImNvbG9yOiByZ2IoNTIsIDYxLCA2Mik7IiwibnRoX2NoaWxkIjogMSwibnRoX29mX3R5cGUiOiAxfSwKICAgIHsidGFnX25hbWUiOiAiZGl2IiwiYXR0cl9faWQiOiAicm9vdCIsIm50aF9jaGlsZCI6IDEsIm50aF9vZl90eXBlIjogMX0sCiAgICB7InRhZ19uYW1lIjogImJvZHkiLCJudGhfY2hpbGQiOiAyLCJudGhfb2ZfdHlwZSI6IDF9Cl0sInRva2VuIjogInhwOXFUMlZMWTc2SkpnIn19"
},
)

properties = json.loads(kafka_produce.call_args[1]["data"]["data"])["properties"]
self.assertEqual(
patch_process_event_with_plugins.call_args[1]["args"][3]["properties"]["$elements"][0]["$el_text"],
"💻 Writing code",
properties["$elements"][0]["$el_text"], "💻 Writing code",
)

@patch("posthog.api.capture.celery_app.send_task")
def test_js_gzip(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_js_gzip(self, kafka_produce):
self.team.api_token = "rnEnwNvmHphTu5rFG4gWDDs49t00Vk50tDOeDdedMb4"
self.team.save()

@ -266,14 +265,16 @@ class TestCapture(BaseTest):
content_type="text/plain",
)

self.assertEqual(patch_process_event_with_plugins.call_count, 1)
self.assertEqual(patch_process_event_with_plugins.call_args[1]["args"][3]["event"], "my-event")
self.assertEqual(kafka_produce.call_count, 1)

data = json.loads(kafka_produce.call_args[1]["data"]["data"])
self.assertEqual(data["event"], "my-event")
self.assertEqual(
patch_process_event_with_plugins.call_args[1]["args"][3]["properties"]["prop"], "💻 Writing code",
data["properties"]["prop"], "💻 Writing code",
)

@patch("posthog.api.capture.celery_app.send_task")
def test_js_gzip_with_no_content_type(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_js_gzip_with_no_content_type(self, kafka_produce):
"IE11 sometimes does not send content_type"

self.team.api_token = "rnEnwNvmHphTu5rFG4gWDDs49t00Vk50tDOeDdedMb4"
@ -285,14 +286,16 @@ class TestCapture(BaseTest):
content_type="",
)

self.assertEqual(patch_process_event_with_plugins.call_count, 1)
self.assertEqual(patch_process_event_with_plugins.call_args[1]["args"][3]["event"], "my-event")
self.assertEqual(kafka_produce.call_count, 1)

data = json.loads(kafka_produce.call_args[1]["data"]["data"])
self.assertEqual(data["event"], "my-event")
self.assertEqual(
patch_process_event_with_plugins.call_args[1]["args"][3]["properties"]["prop"], "💻 Writing code",
data["properties"]["prop"], "💻 Writing code",
)

@patch("posthog.api.capture.celery_app.send_task")
def test_invalid_gzip(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_invalid_gzip(self, kafka_produce):
self.team.api_token = "rnEnwNvmHphTu5rFG4gWDDs49t00Vk50tDOeDdedMb4"
self.team.save()

@ -308,10 +311,10 @@ class TestCapture(BaseTest):
code="invalid_payload",
),
)
self.assertEqual(patch_process_event_with_plugins.call_count, 0)
self.assertEqual(kafka_produce.call_count, 0)

@patch("posthog.api.capture.celery_app.send_task")
def test_invalid_lz64(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_invalid_lz64(self, kafka_produce):
self.team.api_token = "rnEnwNvmHphTu5rFG4gWDDs49t00Vk50tDOeDdedMb4"
self.team.save()

@ -324,20 +327,21 @@ class TestCapture(BaseTest):
"Malformed request data: Failed to decompress data.", code="invalid_payload",
),
)
self.assertEqual(patch_process_event_with_plugins.call_count, 0)
self.assertEqual(kafka_produce.call_count, 0)

@patch("posthog.api.capture.celery_app.send_task")
def test_incorrect_padding(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_incorrect_padding(self, kafka_produce):
response = self.client.get(
"/e/?data=eyJldmVudCI6IndoYXRldmVmciIsInByb3BlcnRpZXMiOnsidG9rZW4iOiJ0b2tlbjEyMyIsImRpc3RpbmN0X2lkIjoiYXNkZiJ9fQ",
content_type="application/json",
HTTP_REFERER="https://localhost",
)
self.assertEqual(response.json()["status"], 1)
self.assertEqual(patch_process_event_with_plugins.call_args[1]["args"][3]["event"], "whatevefr")
data = json.loads(kafka_produce.call_args[1]["data"]["data"])
self.assertEqual(data["event"], "whatevefr")

@patch("posthog.api.capture.celery_app.send_task")
def test_empty_request_returns_an_error(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_empty_request_returns_an_error(self, kafka_produce):
"""
Empty requests that fail silently cause confusion as to whether they were successful or not.
"""
@ -345,20 +349,20 @@ class TestCapture(BaseTest):
# Empty GET
response = self.client.get("/e/?data=", content_type="application/json", HTTP_ORIGIN="https://localhost",)
self.assertEqual(response.status_code, 400)
self.assertEqual(patch_process_event_with_plugins.call_count, 0)
self.assertEqual(kafka_produce.call_count, 0)

# Empty POST
response = self.client.post("/e/", {}, content_type="application/json", HTTP_ORIGIN="https://localhost",)
self.assertEqual(response.status_code, 400)
self.assertEqual(patch_process_event_with_plugins.call_count, 0)
self.assertEqual(kafka_produce.call_count, 0)

@patch("posthog.api.capture.celery_app.send_task")
def test_batch(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_batch(self, kafka_produce):
data = {"type": "capture", "event": "user signed up", "distinct_id": "2"}
response = self.client.post(
"/batch/", data={"api_key": self.team.api_token, "batch": [data]}, content_type="application/json",
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
arguments.pop("now") # can't compare fakedate
arguments.pop("sent_at") # can't compare fakedate
self.assertDictEqual(
@ -372,8 +376,8 @@ class TestCapture(BaseTest):
},
)

@patch("posthog.api.capture.celery_app.send_task")
def test_batch_with_invalid_event(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_batch_with_invalid_event(self, kafka_produce):
data = [
{"type": "capture", "event": "event1", "distinct_id": "2"},
{"type": "capture", "event": "event2"}, # invalid
@ -387,13 +391,13 @@ class TestCapture(BaseTest):

# We should return a 200 but not process the invalid event
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(patch_process_event_with_plugins.call_count, 4)
self.assertEqual(kafka_produce.call_count, 4)

events_processed = [call.kwargs["args"][3]["event"] for call in patch_process_event_with_plugins.call_args_list]
events_processed = [json.loads(call.kwargs["data"]["data"])["event"] for call in kafka_produce.call_args_list]
self.assertEqual(events_processed, ["event1", "event3", "event4", "event5"]) # event2 not processed

@patch("posthog.api.capture.celery_app.send_task")
def test_batch_gzip_header(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_batch_gzip_header(self, kafka_produce):
data = {
"api_key": self.team.api_token,
"batch": [{"type": "capture", "event": "user signed up", "distinct_id": "2",}],
@ -407,7 +411,7 @@ class TestCapture(BaseTest):
HTTP_CONTENT_ENCODING="gzip",
)

arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
arguments.pop("now") # can't compare fakedate
arguments.pop("sent_at") # can't compare fakedate
self.assertDictEqual(
@ -421,8 +425,8 @@ class TestCapture(BaseTest):
},
)

@patch("posthog.api.capture.celery_app.send_task")
def test_batch_gzip_param(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_batch_gzip_param(self, kafka_produce):
data = {
"api_key": self.team.api_token,
"batch": [{"type": "capture", "event": "user signed up", "distinct_id": "2"}],
@ -435,7 +439,7 @@ class TestCapture(BaseTest):
content_type="application/json",
)

arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
arguments.pop("now") # can't compare fakedate
arguments.pop("sent_at") # can't compare fakedate
self.assertDictEqual(
@ -449,8 +453,8 @@ class TestCapture(BaseTest):
},
)

@patch("posthog.api.capture.celery_app.send_task")
def test_batch_lzstring(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_batch_lzstring(self, kafka_produce):
data = {
"api_key": self.team.api_token,
"batch": [{"type": "capture", "event": "user signed up", "distinct_id": "2"}],
@ -464,7 +468,7 @@ class TestCapture(BaseTest):
HTTP_CONTENT_ENCODING="lz64",
)

arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
arguments.pop("now") # can't compare fakedate
arguments.pop("sent_at") # can't compare fakedate
self.assertDictEqual(
@ -478,8 +482,8 @@ class TestCapture(BaseTest):
},
)

@patch("posthog.api.capture.celery_app.send_task")
def test_lz64_with_emoji(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_lz64_with_emoji(self, kafka_produce):
self.team.api_token = "KZZZeIpycLH-tKobLBET2NOg7wgJF2KqDL5yWU_7tZw"
self.team.save()
response = self.client.post(
@ -489,7 +493,7 @@ class TestCapture(BaseTest):
HTTP_CONTENT_ENCODING="lz64",
)
self.assertEqual(response.status_code, 200)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["data"]["event"], "🤓")

def test_batch_incorrect_token(self):
@ -546,8 +550,8 @@ class TestCapture(BaseTest):
self.assertEqual(statsd_incr_first_call.args[0], "invalid_event")
self.assertEqual(statsd_incr_first_call.kwargs, {"tags": {"error": "missing_distinct_id"}})

@patch("posthog.api.capture.celery_app.send_task")
def test_engage(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_engage(self, kafka_produce):
response = self.client.get(
"/engage/?data=%s"
% quote(
@ -564,7 +568,7 @@ class TestCapture(BaseTest):
content_type="application/json",
HTTP_ORIGIN="https://localhost",
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["data"]["event"], "$identify")
arguments.pop("now") # can't compare fakedate
arguments.pop("sent_at") # can't compare fakedate
@ -574,8 +578,8 @@ class TestCapture(BaseTest):
{"distinct_id": "3", "ip": "127.0.0.1", "site_url": "http://testserver", "team_id": self.team.pk,},
)

@patch("posthog.api.capture.celery_app.send_task")
def test_python_library(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_python_library(self, kafka_produce):
self.client.post(
"/track/",
data={
@ -583,11 +587,11 @@ class TestCapture(BaseTest):
"api_key": self.team.api_token, # main difference in this test
},
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["team_id"], self.team.pk)

@patch("posthog.api.capture.celery_app.send_task")
def test_base64_decode_variations(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_base64_decode_variations(self, kafka_produce):
base64 = "eyJldmVudCI6IiRwYWdldmlldyIsInByb3BlcnRpZXMiOnsiZGlzdGluY3RfaWQiOiJlZWVlZWVlZ8+lZWVlZWUifX0="
dict = self._dict_from_b64(base64)
self.assertDictEqual(
@ -598,7 +602,7 @@ class TestCapture(BaseTest):
self.client.post(
"/track/", data={"data": base64, "api_key": self.team.api_token,}, # main difference in this test
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["team_id"], self.team.pk)
self.assertEqual(arguments["distinct_id"], "eeeeeeegϥeeeee")

@ -607,12 +611,12 @@ class TestCapture(BaseTest):
"/track/",
data={"data": base64.replace("+", " "), "api_key": self.team.api_token,}, # main difference in this test
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["team_id"], self.team.pk)
self.assertEqual(arguments["distinct_id"], "eeeeeeegϥeeeee")

@patch("posthog.api.capture.celery_app.send_task")
def test_js_library_underscore_sent_at(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_js_library_underscore_sent_at(self, kafka_produce):
now = timezone.now()
tomorrow = now + timedelta(days=1, hours=2)
tomorrow_sent_at = now + timedelta(days=1, hours=2, minutes=10)
@ -629,19 +633,19 @@ class TestCapture(BaseTest):
HTTP_ORIGIN="https://localhost",
)

arguments = self._to_arguments(patch_process_event_with_plugins)
arguments.pop("now") # can't compare fakedate
arguments = self._to_arguments(kafka_produce)

# right time sent as sent_at to process_event

self.assertEqual(arguments["sent_at"].tzinfo, timezone.utc)
sent_at = datetime.fromisoformat(arguments["sent_at"])
self.assertEqual(sent_at.tzinfo, tz.utc)

timediff = arguments["sent_at"].timestamp() - tomorrow_sent_at.timestamp()
timediff = sent_at.timestamp() - tomorrow_sent_at.timestamp()
self.assertLess(abs(timediff), 1)
self.assertEqual(arguments["data"]["timestamp"], tomorrow.isoformat())

@patch("posthog.api.capture.celery_app.send_task")
def test_long_distinct_id(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_long_distinct_id(self, kafka_produce):
now = timezone.now()
tomorrow = now + timedelta(days=1, hours=2)
tomorrow_sent_at = now + timedelta(days=1, hours=2, minutes=10)
@ -657,11 +661,11 @@ class TestCapture(BaseTest):
content_type="application/json",
HTTP_ORIGIN="https://localhost",
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(len(arguments["distinct_id"]), 200)

@patch("posthog.api.capture.celery_app.send_task")
def test_sent_at_field(self, patch_process_event_with_plugins):
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_sent_at_field(self, kafka_produce):
now = timezone.now()
tomorrow = now + timedelta(days=1, hours=2)
tomorrow_sent_at = now + timedelta(days=1, hours=2, minutes=10)
@ -677,11 +681,10 @@ class TestCapture(BaseTest):
},
)

arguments = self._to_arguments(patch_process_event_with_plugins)
arguments.pop("now") # can't compare fakedate

arguments = self._to_arguments(kafka_produce)
sent_at = datetime.fromisoformat(arguments["sent_at"])
# right time sent as sent_at to process_event
timediff = arguments["sent_at"].timestamp() - tomorrow_sent_at.timestamp()
timediff = sent_at.timestamp() - tomorrow_sent_at.timestamp()
self.assertLess(abs(timediff), 1)
self.assertEqual(arguments["data"]["timestamp"], tomorrow.isoformat())

@ -757,8 +760,8 @@ class TestCapture(BaseTest):
self.assertEqual(statsd_incr_first_call.args[0], "invalid_event")
self.assertEqual(statsd_incr_first_call.kwargs, {"tags": {"error": "missing_event_name"}})

@patch("posthog.api.capture.celery_app.send_task")
def test_add_feature_flags_if_missing(self, patch_process_event_with_plugins) -> None:
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_add_feature_flags_if_missing(self, kafka_produce) -> None:
self.assertListEqual(self.team.event_properties_numerical, [])
FeatureFlag.objects.create(team=self.team, created_by=self.user, key="test-ff", rollout_percentage=100)
self.client.post(
@ -768,11 +771,11 @@ class TestCapture(BaseTest):
"api_key": self.team.api_token,
},
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["data"]["properties"]["$active_feature_flags"], ["test-ff"])

@patch("posthog.api.capture.celery_app.send_task")
def test_add_feature_flags_with_overrides_if_missing(self, patch_process_event_with_plugins) -> None:
@patch("ee.kafka_client.client._KafkaProducer.produce")
def test_add_feature_flags_with_overrides_if_missing(self, kafka_produce) -> None:
feature_flag_instance = FeatureFlag.objects.create(
team=self.team, created_by=self.user, key="test-ff", rollout_percentage=0
)
@ -791,7 +794,7 @@ class TestCapture(BaseTest):
"api_key": self.team.api_token,
},
)
arguments = self._to_arguments(patch_process_event_with_plugins)
arguments = self._to_arguments(kafka_produce)
self.assertEqual(arguments["data"]["properties"]["$feature/test-ff"], True)
self.assertEqual(arguments["data"]["properties"]["$active_feature_flags"], ["test-ff"])

@ -847,27 +850,3 @@ class TestCapture(BaseTest):
"attr": None,
},
)

# On CH deployments the events sent would be added to a Kafka dead letter queue
# On Postgres deployments we return a 503: Service Unavailable, and capture an
# exception in Sentry
@patch("statshog.defaults.django.statsd.incr")
@patch("sentry_sdk.capture_exception")
@patch("posthog.models.Team.objects.get_team_from_token", side_effect=mocked_get_team_from_token)
def test_fetch_team_failure(self, get_team_from_token, capture_exception, statsd_incr):
response = self.client.post(
"/track/",
data={
"data": json.dumps(
{"event": "some event", "properties": {"distinct_id": "valid id", "token": self.team.api_token,},},
),
"api_key": self.team.api_token,
},
)

# self.assertEqual(capture_exception.call_count, 1)
self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)
self.assertEqual(response.json()["code"], "fetch_team_fail")

self.assertEqual(get_team_from_token.call_args.args[0], "token123")
self.assertEqual(statsd_incr.call_args.args[0], "posthog_cloud_raw_endpoint_exception")
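
The tests above repeatedly unpack the event that the capture endpoint handed to the mocked Kafka producer. The `_to_arguments` helper they call is not part of this diff; a minimal sketch of such a helper, assuming the message layout implied by the `kafka_produce.call_args[1]["data"]["data"]` accesses above, could look like:

    import json

    def _to_arguments(kafka_produce_mock):
        # produce() is called with a "data" kwarg whose "data" field holds the
        # event serialized as JSON; the surrounding keys ("team_id",
        # "distinct_id", "now", "sent_at", ...) are passed through as-is.
        message = dict(kafka_produce_mock.call_args[1]["data"])
        message["data"] = json.loads(message["data"])
        return message
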
@ -13,7 +13,7 @@ from posthog.test.base import APIBaseTest

class TestCohort(APIBaseTest):
@patch("posthog.api.cohort.report_user_action")
@patch("posthog.tasks.calculate_cohort.calculate_cohort.delay")
@patch("posthog.tasks.calculate_cohort.calculate_cohort_ch.delay")
def test_creating_update_and_calculating(self, patch_calculate_cohort, patch_capture):
self.team.app_urls = ["http://somewebsite.com"]
self.team.save()
@ -141,7 +141,7 @@ User ID,
self.assertEqual(Cohort.objects.get(pk=response.json()["id"]).name, "test2")

@patch("posthog.tasks.calculate_cohort.calculate_cohort_from_list.delay")
@patch("posthog.tasks.calculate_cohort.calculate_cohort.delay")
@patch("posthog.tasks.calculate_cohort.calculate_cohort_ch.delay")
def test_static_cohort_to_dynamic_cohort(self, patch_calculate_cohort, patch_calculate_cohort_from_list):
self.team.app_urls = ["http://somewebsite.com"]
self.team.save()
@ -189,10 +189,8 @@ email@example.org,
)

response = self.client.get(f"/api/projects/{self.team.id}/cohorts").json()

self.assertEqual(len(response["results"]), 1)
self.assertEqual(response["results"][0]["name"], "whatever")
self.assertEqual(response["results"][0]["count"], 1)
self.assertEqual(response["results"][0]["created_by"]["id"], self.user.id)

@ -92,7 +92,3 @@ def factory_test_element(create_event: Callable) -> Callable:
self.assertEqual(len(response), 1)

return TestElement


class TestElement(factory_test_element(Event.objects.create)): # type: ignore
pass

@ -372,17 +372,15 @@ def factory_test_event_api(event_factory, person_factory, _):
)

page2 = self.client.get(response["next"]).json()
from posthog.utils import is_clickhouse_enabled

if is_clickhouse_enabled():
from ee.clickhouse.client import sync_execute
from ee.clickhouse.client import sync_execute

self.assertEqual(
sync_execute(
"select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk}
)[0][0],
250,
)
self.assertEqual(
sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk})[
0
][0],
250,
)

self.assertEqual(len(page2["results"]), 100)
self.assertEqual(
@ -428,17 +426,15 @@ def factory_test_event_api(event_factory, person_factory, _):
self.assertIn(f"after={after}", unquote(response["next"]))

page2 = self.client.get(response["next"]).json()
from posthog.utils import is_clickhouse_enabled

if is_clickhouse_enabled():
from ee.clickhouse.client import sync_execute
from ee.clickhouse.client import sync_execute

self.assertEqual(
sync_execute(
"select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk}
)[0][0],
25,
)
self.assertEqual(
sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk})[
0
][0],
25,
)

self.assertEqual(len(page2["results"]), 10)
self.assertIn(f"before=", unquote(page2["next"]))
@ -647,16 +643,3 @@ def factory_test_event_api(event_factory, person_factory, _):
self.assertEqual(response_invalid_token.status_code, 401)

return TestEvents


def _create_action(**kwargs):
team = kwargs.pop("team")
name = kwargs.pop("name")
action = Action.objects.create(team=team, name=name)
ActionStep.objects.create(action=action, event=name)
action.calculate_events()
return action


class TestEvent(factory_test_event_api(Event.objects.create, Person.objects.create, _create_action)): # type: ignore
pass

@ -20,7 +20,6 @@ from posthog.models import (
from posthog.models.organization import OrganizationMembership
from posthog.tasks.update_cache import update_dashboard_item_cache
from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries
from posthog.utils import is_clickhouse_enabled


def insight_test_factory(event_factory, person_factory):
@ -501,14 +500,11 @@ def insight_test_factory(event_factory, person_factory):
).json()

# clickhouse funnels don't have a loading system
if is_clickhouse_enabled():
self.assertEqual(len(response["result"]), 2)
self.assertEqual(response["result"][0]["name"], "user signed up")
self.assertEqual(response["result"][0]["count"], 1)
self.assertEqual(response["result"][1]["name"], "user did things")
self.assertEqual(response["result"][1]["count"], 1)
else:
self.assertEqual(response["result"]["loading"], True)
self.assertEqual(len(response["result"]), 2)
self.assertEqual(response["result"][0]["name"], "user signed up")
self.assertEqual(response["result"][0]["count"], 1)
self.assertEqual(response["result"][1]["name"], "user did things")
self.assertEqual(response["result"][1]["count"], 1)

# Tests backwards-compatibility when we changed GET to POST | GET
def test_insight_funnels_basic_get(self):
@ -519,12 +515,9 @@ def insight_test_factory(event_factory, person_factory):
).json()

# clickhouse funnels don't have a loading system
if is_clickhouse_enabled():
self.assertEqual(len(response["result"]), 2)
self.assertEqual(response["result"][0]["name"], "user signed up")
self.assertEqual(response["result"][1]["name"], "user did things")
else:
self.assertEqual(response["result"]["loading"], True)
self.assertEqual(len(response["result"]), 2)
self.assertEqual(response["result"][0]["name"], "user signed up")
self.assertEqual(response["result"][1]["name"], "user did things")

def test_insight_retention_basic(self):
person_factory(team=self.team, distinct_ids=["person1"], properties={"email": "person1@test.com"})
@ -580,7 +573,3 @@ def insight_test_factory(event_factory, person_factory):
self.assertEqual(response_invalid_token.status_code, 401)

return TestInsight


class TestInsight(insight_test_factory(Event.objects.create, Person.objects.create)): # type: ignore
pass

@ -319,9 +319,3 @@ def factory_test_person(event_factory, person_factory, get_events):
self.assertDictContainsSubset({"id": cohort3.id, "count": 1, "name": cohort3.name}, response["results"][1])

return TestPerson


class TestPerson(
factory_test_person(Event.objects.create, Person.objects.create, Event.objects.filter) # type: ignore
):
pass

@ -24,7 +24,7 @@ class TestPreflight(APIBaseTest):
For security purposes, the information contained in an unauthenticated preflight request is minimal.
"""
self.client.logout()
with self.settings(PRIMARY_DB=AnalyticsDBMS.POSTGRES, MULTI_TENANCY=False):
with self.settings(PRIMARY_DB=AnalyticsDBMS.CLICKHOUSE, MULTI_TENANCY=False):
response = self.client.get("/_preflight/")

self.assertEqual(response.status_code, status.HTTP_200_OK)
@ -39,7 +39,7 @@ class TestPreflight(APIBaseTest):
"initiated": True,
"cloud": False,
"demo": False,
"realm": "hosted",
"realm": "hosted-clickhouse",
"available_social_auth_providers": {
"google-oauth2": False,
"github": False,
@ -53,7 +53,7 @@ class TestPreflight(APIBaseTest):

def test_preflight_request(self):
with self.settings(
PRIMARY_DB=AnalyticsDBMS.POSTGRES,
PRIMARY_DB=AnalyticsDBMS.CLICKHOUSE,
MULTI_TENANCY=False,
INSTANCE_PREFERENCES=self.instance_preferences(debug_queries=True),
):
@ -73,10 +73,8 @@ class TestPreflight(APIBaseTest):
"initiated": True,
"cloud": False,
"demo": False,
"realm": "hosted",
"ee_available": settings.EE_AVAILABLE,
"is_clickhouse_enabled": False,
"db_backend": "postgres",
"realm": "hosted-clickhouse",
"db_backend": "clickhouse",
"available_social_auth_providers": {
"google-oauth2": False,
"github": False,
@ -148,8 +146,6 @@ class TestPreflight(APIBaseTest):
"cloud": True,
"demo": False,
"realm": "cloud",
"ee_available": True,
"is_clickhouse_enabled": True,
"db_backend": "clickhouse",
"available_social_auth_providers": {
"google-oauth2": False,
@ -197,8 +193,6 @@ class TestPreflight(APIBaseTest):
"cloud": True,
"demo": False,
"realm": "cloud",
"ee_available": True,
"is_clickhouse_enabled": True,
"db_backend": "clickhouse",
"available_social_auth_providers": {
"google-oauth2": True,

@ -14,8 +14,6 @@ from posthog.api.test.test_event_definition import (
create_team,
create_user,
)
from posthog.constants import TREND_FILTER_TYPE_EVENTS
from posthog.utils import is_clickhouse_enabled


def identify(
@ -33,16 +31,10 @@ def identify(
"""
properties = properties or {}

if is_clickhouse_enabled():
from ee.clickhouse.models.person import Person, PersonDistinctId
from ee.clickhouse.models.person import Person, PersonDistinctId

person = Person.objects.create(team_id=team_id, properties=properties)
PersonDistinctId.objects.create(distinct_id=distinct_id, team_id=team_id, person_id=person.id)
else:
from posthog.models.person import Person, PersonDistinctId

person = Person.objects.create(team_id=team_id, properties=properties)
PersonDistinctId.objects.create(distinct_id=distinct_id, team_id=team_id, person_id=person.id)
person = Person.objects.create(team_id=team_id, properties=properties)
PersonDistinctId.objects.create(distinct_id=distinct_id, team_id=team_id, person_id=person.id)

capture_event(
event=EventData(
@ -89,95 +81,3 @@ def get_retention(
"properties": json.dumps(properties or []),
},
)


@pytest.mark.django_db
@freeze_time("2021-08-03")
def test_insight_retention_missing_persons_gh_5443(client: Client):
"""
This is a regression test for GH-5443.

The scenario here is that, an api request is being made for person retention, specifically for:

1. a "Week" period is being requested
2. events just over a week from the first event for a user

"""

organization = create_organization(name="test org")
team = create_team(organization=organization)
user = create_user("user", "pass", organization)

identify(distinct_id="abc", team_id=team.id)

# This event will be the first event for the Person wrt the retention
# period
capture_event(
event=EventData(
event="event_name", team_id=team.id, distinct_id="abc", timestamp=datetime(2021, 3, 29), properties={},
)
)

# Create an event for just over a week from the initial identify event
capture_event(
event=EventData(
event="event_name", team_id=team.id, distinct_id="abc", timestamp=datetime(2021, 4, 5), properties={},
)
)

client.force_login(user)

# These params are taken from
# https://sentry.io/organizations/posthog/issues/2516393859/events/df790b8837a54051a140aa1fee51adfc/?project=1899813
response = get_retention(
client=client,
events=[
{
"id": "$pageview",
"math": None,
"name": "$pageview",
"type": "events",
"order": 0,
"properties": [],
"math_property": None,
}
],
date_from="-90d",
date_to="2021-03-31T18:22:50.579Z",
display="ActionsTable",
selected_interval=10,
total_intervals=11,
insight="RETENTION",
period="Week",
retention_type="retention_first_time",
target_entity={"id": "event_name", "name": "event_name", "type": "events", "order": 0},
returning_entity={
"id": "event_name",
"math": None,
"name": "event_name",
"type": "events",
"order": None,
"properties": [],
"math_property": None,
},
)

assert response.status_code == 200, response.content
data = response.json()

# NOTE: prior to the fix for GH-5443, this test would fail by returning an
# empty list. To "fix" I have made the generation of "appearances" more
# forgiving of getting too much data from the clickhouse query.
assert data["result"] == [
{
"appearances": [1],
"person": {
"created_at": "2021-08-03T00:00:00Z",
"distinct_ids": ["abc"],
"id": ANY,
"name": "abc",
"properties": {},
"uuid": ANY,
},
},
]

@ -365,7 +365,3 @@ def factory_test_session_recordings_api(session_recording_event_factory):
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

return TestSessionRecordings


class TestSessionRecordingsAPI(factory_test_session_recordings_api(SessionRecordingEvent.objects.create)): # type: ignore
pass

@ -32,7 +32,6 @@ class TestSignupAPI(APIBaseTest):
pass

@pytest.mark.skip_on_multitenancy
@patch("posthog.api.organization.settings.EE_AVAILABLE", False)
@patch("posthoganalytics.capture")
def test_api_sign_up(self, mock_capture):

@ -563,7 +562,6 @@ class TestInviteSignup(APIBaseTest):
# Signup (using invite)

@patch("posthoganalytics.capture")
@patch("posthog.api.organization.settings.EE_AVAILABLE", True)
def test_api_invite_sign_up(self, mock_capture):
invite: OrganizationInvite = OrganizationInvite.objects.create(
target_email="test+99@posthog.com", organization=self.organization,
@ -620,7 +618,6 @@ class TestInviteSignup(APIBaseTest):
self.assertTrue(user.check_password("test_password"))

@pytest.mark.ee
@patch("posthog.api.organization.settings.EE_AVAILABLE", True)
def test_api_invite_sign_up_where_there_are_no_default_non_private_projects(self):
self.client.logout()
invite: OrganizationInvite = OrganizationInvite.objects.create(
@ -645,7 +642,6 @@ class TestInviteSignup(APIBaseTest):
) # User is not assigned to a project, as there are no non-private projects
self.assertEqual(user.team, None)

@patch("posthog.api.organization.settings.EE_AVAILABLE", True)
def test_api_invite_sign_up_where_default_project_is_private(self):
self.client.logout()
self.team.access_control = True
@ -664,7 +660,6 @@ class TestInviteSignup(APIBaseTest):
self.assertEqual(user.current_team, team)
self.assertEqual(user.team, team)

@patch("posthog.api.organization.settings.EE_AVAILABLE", False)
def test_api_invite_sign_up_member_joined_email_is_not_sent_for_initial_member(self):
invite: OrganizationInvite = OrganizationInvite.objects.create(
target_email="test+100@posthog.com", organization=self.organization,
@ -679,7 +674,6 @@ class TestInviteSignup(APIBaseTest):

self.assertEqual(len(mail.outbox), 0)

@patch("posthog.api.organization.settings.EE_AVAILABLE", False)
def test_api_invite_sign_up_member_joined_email_is_sent_for_next_members(self):
initial_user = User.objects.create_and_join(self.organization, "test+420@posthog.com", None)

@ -697,7 +691,6 @@ class TestInviteSignup(APIBaseTest):
self.assertEqual(len(mail.outbox), 1)
self.assertListEqual(mail.outbox[0].to, [initial_user.email])

@patch("posthog.api.organization.settings.EE_AVAILABLE", False)
def test_api_invite_sign_up_member_joined_email_is_not_sent_if_disabled(self):
self.organization.is_member_join_email_enabled = False
self.organization.save()
@ -719,7 +712,6 @@ class TestInviteSignup(APIBaseTest):

@patch("posthoganalytics.identify")
@patch("posthoganalytics.capture")
@patch("posthog.api.organization.settings.EE_AVAILABLE", False)
def test_existing_user_can_sign_up_to_a_new_organization(self, mock_capture, mock_identify):
user = self._create_user("test+159@posthog.com", "test_password")
new_org = Organization.objects.create(name="TestCo")

@ -11,7 +11,6 @@ from posthog.api.test.test_trends import NormalizedTrendResult, get_time_series_
from posthog.constants import ENTITY_ID, ENTITY_TYPE
from posthog.models import Action, ActionStep, Event, Person
from posthog.models.team import Team
from posthog.queries.abstract_test.test_compare import AbstractCompareTest
from posthog.queries.stickiness import Stickiness
from posthog.test.base import APIBaseTest
from posthog.utils import encode_get_request_params
@ -44,7 +43,7 @@ def get_stickiness_people_ok(client: Client, team_id: int, request: Dict[str, An

# parameterize tests to reuse in EE
def stickiness_test_factory(stickiness, event_factory, person_factory, action_factory, get_earliest_timestamp):
class TestStickiness(APIBaseTest, AbstractCompareTest):
class TestStickiness(APIBaseTest):
def _create_multiple_people(self, period=timedelta(days=1), event_properties=lambda index: {}):
base_time = datetime.fromisoformat("2020-01-01T12:00:00.000000")
p1 = person_factory(team_id=self.team.id, distinct_ids=["person1"], properties={"name": "person1"})
@ -463,17 +462,3 @@ def stickiness_test_factory(stickiness, event_factory, person_factory, action_fa
self.assertEqual(response[0]["data"][6], 0)

return TestStickiness


def _create_action(**kwargs):
team = kwargs.pop("team")
name = kwargs.pop("name")
event_name = kwargs.pop("event_name")
action = Action.objects.create(team=team, name=name)
ActionStep.objects.create(action=action, event=event_name)
action.calculate_events()
return action


class DjangoStickinessTest(stickiness_test_factory(Stickiness, Event.objects.create, Person.objects.create, _create_action, Event.objects.earliest_timestamp)): # type: ignore
pass

@ -54,7 +54,7 @@ class TestUtils(BaseTest):

self.assertEqual(team, None)
self.assertEqual(db_error, "Exception('test exception')")
self.assertEqual(error_response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE) # type: ignore
self.assertEqual(error_response, None)

get_team_from_token_patcher.stop()

@ -25,7 +25,7 @@ from posthog.models.filters.filter import Filter
from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.team import Team
from posthog.models.user import User
from posthog.utils import cors_response, is_clickhouse_enabled, load_data_from_request
from posthog.utils import cors_response, load_data_from_request


class PaginationMode(Enum):
@ -179,18 +179,6 @@ def get_team(request, data, token) -> Tuple[Optional[Team], Optional[str], Optio

db_error = getattr(e, "message", repr(e))

if not is_clickhouse_enabled():
error_response = cors_response(
request,
generate_exception_response(
"capture",
"Unable to fetch team from database.",
type="server_error",
code="fetch_team_fail",
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
),
)

return None, db_error, error_response

if team is None:
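
With the Postgres-only branch gone, `get_team` no longer builds a 503 response when the team lookup fails; it records the error and returns, and unprocessable events are left to the Kafka dead letter queue downstream (see the comment in the capture tests above). A minimal sketch of the resulting control flow, with the surrounding definitions assumed rather than taken from this diff:

    def get_team_sketch(token):
        error_response = None  # after this change, never populated on a fetch failure
        try:
            team = Team.objects.get_team_from_token(token)
        except Exception as e:
            db_error = getattr(e, "message", repr(e))
            return None, db_error, error_response
        return team, None, error_response
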
@ -8,7 +8,6 @@ from semantic_version.base import Version
from posthog.async_migrations.definition import AsyncMigrationDefinition
from posthog.models.async_migration import AsyncMigration, get_all_completed_async_migrations
from posthog.settings import TEST
from posthog.utils import is_clickhouse_enabled
from posthog.version import VERSION

ALL_ASYNC_MIGRATIONS: Dict[str, AsyncMigrationDefinition] = {}
@ -24,11 +23,10 @@ POSTHOG_VERSION = Version(VERSION)
ASYNC_MIGRATIONS_MODULE_PATH = "posthog.async_migrations.migrations"
ASYNC_MIGRATIONS_EXAMPLE_MODULE_PATH = "posthog.async_migrations.examples"

if is_clickhouse_enabled():
all_migrations = import_submodules(ASYNC_MIGRATIONS_MODULE_PATH)
all_migrations = import_submodules(ASYNC_MIGRATIONS_MODULE_PATH)

for name, module in all_migrations.items():
ALL_ASYNC_MIGRATIONS[name] = module.Migration()
for name, module in all_migrations.items():
ALL_ASYNC_MIGRATIONS[name] = module.Migration()


def setup_async_migrations(ignore_posthog_version: bool = False):
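
`import_submodules` itself is not shown in this diff; a pkgutil-based walker is the usual shape of such a helper, sketched here as an assumption rather than PostHog's actual implementation:

    import importlib
    import pkgutil

    def import_submodules(package_path: str):
        # Import every direct submodule of the package and return them by name,
        # so each async migration module can be instantiated in the loop above.
        package = importlib.import_module(package_path)
        return {
            name: importlib.import_module(f"{package_path}.{name}")
            for _, name, _ in pkgutil.iter_modules(package.__path__)
        }
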
@ -12,7 +12,6 @@ from django.utils import timezone
from sentry_sdk.api import capture_exception

from posthog.redis import get_client
from posthog.utils import is_clickhouse_enabled

# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "posthog.settings")
@ -87,42 +86,39 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
name="send event usage report",
)

if is_clickhouse_enabled():
sender.add_periodic_task(120, clickhouse_lag.s(), name="clickhouse table lag")
sender.add_periodic_task(120, clickhouse_row_count.s(), name="clickhouse events table row count")
sender.add_periodic_task(120, clickhouse_part_count.s(), name="clickhouse table parts count")
sender.add_periodic_task(120, clickhouse_mutation_count.s(), name="clickhouse table mutations count")
sender.add_periodic_task(120, clickhouse_lag.s(), name="clickhouse table lag")
sender.add_periodic_task(120, clickhouse_row_count.s(), name="clickhouse events table row count")
sender.add_periodic_task(120, clickhouse_part_count.s(), name="clickhouse table parts count")
sender.add_periodic_task(120, clickhouse_mutation_count.s(), name="clickhouse table mutations count")

sender.add_periodic_task(
crontab(hour=0, minute=randrange(0, 40)), clickhouse_send_license_usage.s()
) # every day at a random minute past midnight. Randomize to avoid overloading license.posthog.com
try:
from ee.settings import MATERIALIZE_COLUMNS_SCHEDULE_CRON

minute, hour, day_of_month, month_of_year, day_of_week = MATERIALIZE_COLUMNS_SCHEDULE_CRON.strip().split(" ")

sender.add_periodic_task(
crontab(hour=0, minute=randrange(0, 40)), clickhouse_send_license_usage.s()
) # every day at a random minute past midnight. Randomize to avoid overloading license.posthog.com
try:
from ee.settings import MATERIALIZE_COLUMNS_SCHEDULE_CRON
crontab(
minute=minute,
hour=hour,
day_of_month=day_of_month,
month_of_year=month_of_year,
day_of_week=day_of_week,
),
clickhouse_materialize_columns.s(),
name="clickhouse materialize columns",
)

minute, hour, day_of_month, month_of_year, day_of_week = MATERIALIZE_COLUMNS_SCHEDULE_CRON.strip().split(
" "
)

sender.add_periodic_task(
crontab(
minute=minute,
hour=hour,
day_of_month=day_of_month,
month_of_year=month_of_year,
day_of_week=day_of_week,
),
clickhouse_materialize_columns.s(),
name="clickhouse materialize columns",
)

sender.add_periodic_task(
crontab(hour="*/4", minute=0),
clickhouse_mark_all_materialized.s(),
name="clickhouse mark all columns as materialized",
)
except Exception as err:
capture_exception(err)
print(f"Scheduling materialized column task failed: {err}")
sender.add_periodic_task(
crontab(hour="*/4", minute=0),
clickhouse_mark_all_materialized.s(),
name="clickhouse mark all columns as materialized",
)
except Exception as err:
capture_exception(err)
print(f"Scheduling materialized column task failed: {err}")

sender.add_periodic_task(120, calculate_cohort.s(), name="recalculate cohorts")

@ -137,18 +133,16 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
# Set up clickhouse query instrumentation
@task_prerun.connect
def set_up_instrumentation(task_id, task, **kwargs):
if is_clickhouse_enabled() and settings.EE_AVAILABLE:
from ee.clickhouse import client
from ee.clickhouse import client

client._request_information = {"kind": "celery", "id": task.name}
client._request_information = {"kind": "celery", "id": task.name}


@task_postrun.connect
def teardown_instrumentation(task_id, task, **kwargs):
if is_clickhouse_enabled() and settings.EE_AVAILABLE:
from ee.clickhouse import client
from ee.clickhouse import client

client._request_information = None
client._request_information = None


@app.task(ignore_result=True)
@ -172,90 +166,71 @@ if settings.CLICKHOUSE_REPLICATION:

@app.task(ignore_result=True)
def clickhouse_lag():
if is_clickhouse_enabled() and settings.EE_AVAILABLE:
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge

for table in CLICKHOUSE_TABLES:
try:
QUERY = (
"""select max(_timestamp) observed_ts, now() now_ts, now() - max(_timestamp) as lag from {table};"""
)
query = QUERY.format(table=table)
lag = sync_execute(query)[0][2]
gauge("posthog_celery_clickhouse__table_lag_seconds", lag, tags={"table": table})
except:
pass
else:
pass
for table in CLICKHOUSE_TABLES:
try:
QUERY = """select max(_timestamp) observed_ts, now() now_ts, now() - max(_timestamp) as lag from {table};"""
query = QUERY.format(table=table)
lag = sync_execute(query)[0][2]
gauge("posthog_celery_clickhouse__table_lag_seconds", lag, tags={"table": table})
except:
pass


@app.task(ignore_result=True)
def clickhouse_row_count():
if is_clickhouse_enabled() and settings.EE_AVAILABLE:
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge

for table in CLICKHOUSE_TABLES:
try:
QUERY = """select count(1) freq from {table};"""
query = QUERY.format(table=table)
rows = sync_execute(query)[0][0]
gauge(f"posthog_celery_clickhouse_table_row_count", rows, tags={"table": table})
except:
pass
else:
pass
for table in CLICKHOUSE_TABLES:
try:
QUERY = """select count(1) freq from {table};"""
query = QUERY.format(table=table)
rows = sync_execute(query)[0][0]
gauge(f"posthog_celery_clickhouse_table_row_count", rows, tags={"table": table})
except:
pass


@app.task(ignore_result=True)
def clickhouse_part_count():
if is_clickhouse_enabled() and settings.EE_AVAILABLE:
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge

QUERY = """
select table, count(1) freq
from system.parts
group by table
order by freq desc;
"""
rows = sync_execute(QUERY)
for (table, parts) in rows:
gauge(f"posthog_celery_clickhouse_table_parts_count", parts, tags={"table": table})
else:
pass
QUERY = """
select table, count(1) freq
from system.parts
group by table
order by freq desc;
"""
rows = sync_execute(QUERY)
for (table, parts) in rows:
gauge(f"posthog_celery_clickhouse_table_parts_count", parts, tags={"table": table})


@app.task(ignore_result=True)
def clickhouse_mutation_count():
if is_clickhouse_enabled() and settings.EE_AVAILABLE:
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge
from ee.clickhouse.client import sync_execute
from posthog.internal_metrics import gauge

QUERY = """
SELECT
table,
count(1) AS freq
FROM system.mutations
WHERE is_done = 0
GROUP BY table
ORDER BY freq DESC
"""
rows = sync_execute(QUERY)
for (table, muts) in rows:
gauge(f"posthog_celery_clickhouse_table_mutations_count", muts, tags={"table": table})
else:
pass
QUERY = """
SELECT
table,
count(1) AS freq
FROM system.mutations
WHERE is_done = 0
GROUP BY table
ORDER BY freq DESC
"""
rows = sync_execute(QUERY)
for (table, muts) in rows:
gauge(f"posthog_celery_clickhouse_table_mutations_count", muts, tags={"table": table})


def recompute_materialized_columns_enabled() -> bool:
if (
is_clickhouse_enabled()
and settings.EE_AVAILABLE
and getattr(config, "MATERIALIZED_COLUMNS_ENABLED")
and getattr(config, "COMPUTE_MATERIALIZED_COLUMNS_ENABLED")
):
if getattr(config, "MATERIALIZED_COLUMNS_ENABLED") and getattr(config, "COMPUTE_MATERIALIZED_COLUMNS_ENABLED"):
return True
return False

@ -278,7 +253,7 @@ def clickhouse_mark_all_materialized():

@app.task(ignore_result=True)
def clickhouse_send_license_usage():
if is_clickhouse_enabled() and not settings.MULTI_TENANCY:
if not settings.MULTI_TENANCY:
from ee.tasks.send_license_usage import send_license_usage

send_license_usage()
@ -286,14 +261,9 @@ def clickhouse_send_license_usage():

@app.task(ignore_result=True)
def send_org_usage_report():
if is_clickhouse_enabled():
from ee.tasks.org_usage_report import send_all_org_usage_reports as send_reports_clickhouse
from ee.tasks.org_usage_report import send_all_org_usage_reports as send_reports_clickhouse

send_reports_clickhouse()
else:
from posthog.tasks.org_usage_report import send_all_org_usage_reports as send_reports_postgres

send_reports_postgres()
send_reports_clickhouse()


@app.task(ignore_result=True)
@ -332,13 +302,6 @@ def status_report():
status_report()


@app.task(ignore_result=True)
def calculate_event_action_mappings():
from posthog.tasks.calculate_action import calculate_actions_from_last_calculation

calculate_actions_from_last_calculation()


@app.task(ignore_result=True)
def calculate_cohort():
from posthog.tasks.calculate_cohort import calculate_cohorts
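
The MATERIALIZE_COLUMNS_SCHEDULE_CRON handling above splits a standard five-field cron expression (minute, hour, day of month, month, day of week) into keyword arguments for Celery's crontab schedule. A standalone sketch of that parsing step (the helper name is ours, not PostHog's):

    from celery.schedules import crontab

    def crontab_from_string(cron: str) -> crontab:
        # Standard five-field cron order: minute hour day-of-month month day-of-week
        minute, hour, day_of_month, month_of_year, day_of_week = cron.strip().split(" ")
        return crontab(
            minute=minute,
            hour=hour,
            day_of_month=day_of_month,
            month_of_year=month_of_year,
            day_of_week=day_of_week,
        )

    # e.g. crontab_from_string("30 5 * * 1") runs every Monday at 05:30
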
@ -7,7 +7,7 @@ from posthog.demo.revenue_data_generator import RevenueDataGenerator
from posthog.demo.web_data_generator import WebDataGenerator
from posthog.models import Organization, Team, User
from posthog.models.event_definition import EventDefinition
from posthog.utils import is_clickhouse_enabled, render_template
from posthog.utils import render_template

ORGANIZATION_NAME = "Hogflix"
TEAM_NAME = "Hogflix Demo App"
@ -29,12 +29,11 @@ def demo(request: Request):
user.save()
EventDefinition.objects.get_or_create(team=team, name="$pageview")

if is_clickhouse_enabled(): # :TRICKY: Lazily backfill missing event data.
from ee.clickhouse.models.event import get_events_by_team
from ee.clickhouse.models.event import get_events_by_team

result = get_events_by_team(team_id=team.pk)
if not result:
create_demo_data(team, dashboards=False)
result = get_events_by_team(team_id=team.pk)
if not result:
create_demo_data(team, dashboards=False)

return render_template("demo.html", request=request, context={"api_token": team.api_token})

@ -3,7 +3,6 @@ from typing import Dict, List
|
||||
from posthog.models import Action, Event, Person, PersonDistinctId, Team
|
||||
from posthog.models.session_recording_event import SessionRecordingEvent
|
||||
from posthog.models.utils import UUIDT
|
||||
from posthog.utils import is_clickhouse_enabled
|
||||
|
||||
|
||||
class DataGenerator:
|
||||
@ -39,18 +38,17 @@ class DataGenerator:
|
||||
for person, distinct_id in zip(self.people, self.distinct_ids)
|
||||
]
|
||||
PersonDistinctId.objects.bulk_create(pids)
|
||||
if is_clickhouse_enabled():
|
||||
from ee.clickhouse.models.person import create_person, create_person_distinct_id
|
||||
from ee.clickhouse.models.person import create_person, create_person_distinct_id
|
||||
|
||||
for person in self.people:
|
||||
create_person(
|
||||
uuid=str(person.uuid),
|
||||
team_id=person.team.pk,
|
||||
properties=person.properties,
|
||||
is_identified=person.is_identified,
|
||||
)
|
||||
for pid in pids:
|
||||
create_person_distinct_id(pid.team.pk, pid.distinct_id, str(pid.person.uuid)) # use dummy number for id
|
||||
for person in self.people:
|
||||
create_person(
|
||||
uuid=str(person.uuid),
|
||||
team_id=person.team.pk,
|
||||
properties=person.properties,
|
||||
is_identified=person.is_identified,
|
||||
)
|
||||
for pid in pids:
|
||||
create_person_distinct_id(pid.team.pk, pid.distinct_id, str(pid.person.uuid)) # use dummy number for id
|
||||
|
||||
def make_person(self, index):
|
||||
return Person(team=self.team, properties={"is_demo": True})
|
||||
@ -68,16 +66,10 @@ class DataGenerator:
        pass

    def bulk_import_events(self):
        if is_clickhouse_enabled():
            from ee.clickhouse.demo import bulk_create_events, bulk_create_session_recording_events
        from ee.clickhouse.demo import bulk_create_events, bulk_create_session_recording_events

            bulk_create_events(self.events, team=self.team)
            bulk_create_session_recording_events(self.snapshots, team_id=self.team.pk)
        else:
            Event.objects.bulk_create([Event(**kw, team=self.team) for kw in self.events])
            SessionRecordingEvent.objects.bulk_create(
                [SessionRecordingEvent(**kw, team=self.team) for kw in self.snapshots]
            )
        bulk_create_events(self.events, team=self.team)
        bulk_create_session_recording_events(self.snapshots, team_id=self.team.pk)

    def add_if_not_contained(self, array, value):
        if value not in array:
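With the `else:` branch deleted, `bulk_import_events` has no Postgres path left, which is also why `is_clickhouse_enabled` could be dropped from this file's imports. The resulting method, reconstructed from the hunk above:

    def bulk_import_events(self):
        from ee.clickhouse.demo import bulk_create_events, bulk_create_session_recording_events

        # Flush the generator's accumulated events and session-recording
        # snapshots to ClickHouse in bulk.
        bulk_create_events(self.events, team=self.team)
        bulk_create_session_recording_events(self.snapshots, team_id=self.team.pk)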
@ -11,7 +11,6 @@ from django.utils import timezone
from sentry_sdk import capture_exception

from posthog.models.utils import sane_repr
from posthog.utils import is_clickhouse_enabled

from .action import Action
from .event import Event
@ -103,16 +102,11 @@ class Cohort(models.Model):
            "deleted": self.deleted,
        }

    def calculate_people(self, use_clickhouse=is_clickhouse_enabled()):
    def calculate_people(self):
        if self.is_static:
            return
        try:
            if not use_clickhouse:
                self.is_calculating = True
                self.save()
                persons_query = self._postgres_persons_query()
            else:
                persons_query = self._clickhouse_persons_query()
            persons_query = self._clickhouse_persons_query()

            try:
                sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params()
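Dropping the `use_clickhouse=is_clickhouse_enabled()` parameter also removes a classic Python pitfall: default-argument expressions are evaluated once, when the `def` statement runs, so the old signature froze the backend choice at import time instead of checking it per call. A minimal standalone illustration of that behavior:

    import time

    def stamped(ts=time.time()):  # default computed once, at definition time
        return ts

    first = stamped()
    time.sleep(0.01)
    second = stamped()
    assert first == second  # both calls see the frozen definition-time value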
@ -131,34 +125,24 @@ class Cohort(models.Model):
            cursor = connection.cursor()
            with transaction.atomic():
                cursor.execute(query, params)
                if not use_clickhouse:
                    self.last_calculation = timezone.now()
                    self.errors_calculating = 0
        except Exception as err:
            if not use_clickhouse:
                self.errors_calculating = F("errors_calculating") + 1
            raise err
        finally:
            if not use_clickhouse:
                self.is_calculating = False
                self.save()

    def calculate_people_ch(self):
        if is_clickhouse_enabled():
            from ee.clickhouse.models.cohort import recalculate_cohortpeople
            from posthog.tasks.calculate_cohort import calculate_cohort
        from ee.clickhouse.models.cohort import recalculate_cohortpeople
        from posthog.tasks.calculate_cohort import calculate_cohort

            try:
                recalculate_cohortpeople(self)
                calculate_cohort(self.id)
                self.last_calculation = timezone.now()
                self.errors_calculating = 0
            except Exception as e:
                self.errors_calculating = F("errors_calculating") + 1
                raise e
            finally:
                self.is_calculating = False
                self.save()
        try:
            recalculate_cohortpeople(self)
            calculate_cohort(self.id)
            self.last_calculation = timezone.now()
            self.errors_calculating = 0
        except Exception as e:
            self.errors_calculating = F("errors_calculating") + 1
            raise e
        finally:
            self.is_calculating = False
            self.save()

    def insert_users_by_list(self, items: List[str]) -> None:
        """
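`calculate_people_ch` keeps its bookkeeping, just dedented out of the deleted guard: success stamps `last_calculation` and resets the error counter, failure bumps `errors_calculating` and re-raises, and `is_calculating` is always cleared in `finally`. A minimal sketch of the pattern on a Django model, with `do_recalculation` as a hypothetical stand-in for the ClickHouse calls:

    from django.db.models import F
    from django.utils import timezone

    def recalculate(cohort):
        try:
            do_recalculation(cohort)  # stand-in for recalculate_cohortpeople / calculate_cohort
            cohort.last_calculation = timezone.now()
            cohort.errors_calculating = 0
        except Exception:
            # F() increments in the database, avoiding a read-modify-write race.
            cohort.errors_calculating = F("errors_calculating") + 1
            raise
        finally:
            cohort.is_calculating = False
            cohort.save()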
@ -166,9 +150,8 @@
        Important! Does not insert into clickhouse
        """
        batchsize = 1000
        use_clickhouse = is_clickhouse_enabled()
        if use_clickhouse:
            from ee.clickhouse.models.cohort import insert_static_cohort
        from ee.clickhouse.models.cohort import insert_static_cohort

        try:
            cursor = connection.cursor()
            for i in range(0, len(items), batchsize):
@ -178,8 +161,7 @@
                    .filter(Q(persondistinctid__team_id=self.team_id, persondistinctid__distinct_id__in=batch))
                    .exclude(cohort__id=self.id)
                )
                if use_clickhouse:
                    insert_static_cohort([p for p in persons_query.values_list("uuid", flat=True)], self.pk, self.team)
                insert_static_cohort([p for p in persons_query.values_list("uuid", flat=True)], self.pk, self.team)
                sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params()
                query = UPDATE_QUERY.format(
                    cohort_id=self.pk,
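`insert_users_by_list` walks the distinct IDs in batches of 1000 so a large static-cohort upload never produces one enormous IN clause. The batch slice itself falls outside the hunks shown here; a sketch of the idiom, with the slice line being an assumption rather than part of this diff:

    batchsize = 1000
    for i in range(0, len(items), batchsize):
        batch = items[i : i + batchsize]  # assumed; this line is not shown in the hunks
        ...  # filter Person rows whose distinct_id is in `batch`, then insert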
Some files were not shown because too many files have changed in this diff.