# # This is a composite action that packages our backend Django tests. # It is called by the `ci-backend.yml` job using a matrix. # name: Run Django tests inputs: python-version: required: true description: Python version, e.g. 3.11.9 clickhouse-server-image: required: true description: ClickHouse server image tag, e.g. clickhouse/clickhouse-server:latest segment: required: true description: Either 'Core' or 'Temporal' segment concurrency: required: true description: Count of concurrency groups group: required: true description: Group number person-on-events: required: true description: Whether testing with persons on events, true or false token: required: false description: GitHub token runs: using: 'composite' steps: # Pre-tests # Copies the fully versioned UDF xml file for use in CI testing - name: Stop/Start stack with Docker Compose shell: bash run: | export CLICKHOUSE_SERVER_IMAGE=${{ inputs.clickhouse-server-image }} export DOCKER_REGISTRY_PREFIX="us-east1-docker.pkg.dev/posthog-301601/mirror/" cp posthog/user_scripts/latest_user_defined_function.xml docker/clickhouse/user_defined_function.xml docker compose -f docker-compose.dev.yml down docker compose -f docker-compose.dev.yml up -d - name: Add Kafka to /etc/hosts shell: bash run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} cache: pip cache-dependency-path: '**/requirements*.txt' token: ${{ inputs.token }} # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - name: Install uv shell: bash run: pip install uv - name: Determine if hogql-parser has changed compared to master shell: bash id: hogql-parser-diff run: | git fetch --no-tags --prune --depth=1 origin master changed=$(git diff --quiet HEAD origin/master -- hogql_parser/ && echo "false" || echo "true") echo "changed=$changed" >> $GITHUB_OUTPUT - name: Install SAML (python3-saml) dependencies shell: bash run: | sudo apt-get update && sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl - name: Install pnpm uses: pnpm/action-setup@v4 - name: Set up Node.js uses: actions/setup-node@v3 with: node-version: 18.12.1 cache: pnpm # tests would intermittently fail in GH actions # with exit code 134 _after passing_ all tests # this appears to fix it # absolute wild tbh https://stackoverflow.com/a/75503402 - uses: tlambert03/setup-qt-libs@v1 - name: Install plugin-transpiler shell: bash run: | cd plugin-transpiler pnpm install pnpm run build - name: Install Python dependencies shell: bash run: | uv pip install --system -r requirements.txt -r requirements-dev.txt - name: Install the working version of hogql-parser if: steps.hogql-parser-diff.outputs.changed == 'true' shell: bash # This is not cached currently, as it's important to build the current HEAD version of hogql-parser if it has # changed (requirements.txt has the already-published version) run: | sudo apt-get install libboost-all-dev unzip cmake curl uuid pkg-config curl https://www.antlr.org/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip # Check that the downloaded archive is the expected runtime - a security measure anltr_known_md5sum="c875c148991aacd043f733827644a76f" antlr_found_ms5sum="$(md5sum antlr4-source.zip | cut -d' ' -f1)" if [[ "$anltr_known_md5sum" != "$antlr_found_ms5sum" ]]; then echo "Unexpected MD5 sum of antlr4-source.zip!" echo "Known: $anltr_known_md5sum" echo "Found: $antlr_found_ms5sum" exit 64 fi unzip antlr4-source.zip -d antlr4-source && cd antlr4-source cmake . DESTDIR=out make install sudo cp -r out/usr/local/include/antlr4-runtime /usr/include/ sudo cp out/usr/local/lib/libantlr4-runtime.so* /usr/lib/ sudo ldconfig cd .. uv pip install --system ./hogql_parser - name: Set up needed files shell: bash run: | mkdir -p frontend/dist touch frontend/dist/index.html touch frontend/dist/layout.html touch frontend/dist/exporter.html [ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb ) - name: Wait for Clickhouse & Kafka shell: bash run: bin/check_kafka_clickhouse_up - name: Wait for Temporal if: ${{ inputs.segment == 'Temporal' }} shell: bash run: | bin/check_temporal_up - name: Determine if --snapshot-update should be on # Skip on forks (due to GITHUB_TOKEN being read-only in PRs coming from them) except for persons-on-events # runs, as we want to ignore snapshots diverging there if: github.event.pull_request.head.repo.full_name == github.repository || inputs.person-on-events == 'true' shell: bash run: echo "PYTEST_ARGS=--snapshot-update" >> $GITHUB_ENV # We can only update snapshots within the PostHog org # Tests - name: Run Core tests id: run-core-tests if: ${{ inputs.segment == 'Core' }} env: PERSON_ON_EVENTS_V2_ENABLED: ${{ inputs.person-on-events }} shell: bash run: | # async_migrations covered in ci-async-migrations.yml pytest ${{ inputs.person-on-events == 'true' && './posthog/clickhouse/ ./posthog/queries/ ./posthog/api/test/test_insight* ./posthog/api/test/dashboards/test_dashboard.py' || 'posthog' }} ${{ inputs.person-on-events == 'true' && 'ee/clickhouse/' || 'ee/' }} -m "not async_migrations" \ --ignore=posthog/temporal \ --ignore=hogvm/python/test \ --splits ${{ inputs.concurrency }} --group ${{ inputs.group }} \ --durations=100 --durations-min=1.0 --store-durations \ $PYTEST_ARGS # Uncomment this code to create an ssh-able console so you can debug issues with github actions # (Consider changing the timeout in ci-backend.yml to have more time) # - name: Setup tmate session # if: failure() # uses: mxschmitt/action-tmate@v3 - name: Run /decide read replica tests id: run-decide-read-replica-tests if: ${{ inputs.segment == 'Core' && inputs.group == 1 && inputs.person-on-events != 'true' }} env: POSTHOG_DB_NAME: posthog READ_REPLICA_OPT_IN: 'decide,PersonalAPIKey, local_evaluation' POSTHOG_POSTGRES_READ_HOST: localhost POSTHOG_DB_PASSWORD: posthog POSTHOG_DB_USER: posthog shell: bash run: | pytest posthog/api/test/test_decide.py::TestDecideUsesReadReplica \ --durations=100 --durations-min=1.0 \ $PYTEST_ARGS - name: Run Temporal tests id: run-temporal-tests if: ${{ inputs.segment == 'Temporal' }} shell: bash env: AWS_S3_ALLOW_UNSAFE_RENAME: 'true' run: | pytest posthog/temporal -m "not async_migrations" \ --splits ${{ inputs.concurrency }} --group ${{ inputs.group }} \ --durations=100 --durations-min=1.0 --store-durations \ $PYTEST_ARGS # Post tests - name: Show docker compose logs on failure if: failure() && (steps.run-core-tests.outcome != 'failure' && steps.run-decide-read-replica-tests.outcome != 'failure' && steps.run-temporal-tests.outcome != 'failure') shell: bash run: docker compose -f docker-compose.dev.yml logs - name: Upload updated timing data as artifacts uses: actions/upload-artifact@v4 if: ${{ inputs.person-on-events != 'true' && inputs.clickhouse-server-image == 'clickhouse/clickhouse-server:23.12.6.19' }} with: name: timing_data-${{ inputs.segment }}-${{ inputs.group }} path: .test_durations retention-days: 2