# posthog/.github/workflows/ci-plugin-server.yml
# Mirror of https://github.com/PostHog/posthog.git (synced 2024-11-22 17:24:15 +01:00)

name: Plugin Server CI

on:
    pull_request:
        paths:
            - .github/workflows/ci-plugin-server.yml
            - 'plugin-server/**'
            - 'posthog/clickhouse/migrations/**'
            - 'ee/migrations/**'
            - 'ee/management/commands/setup_test_environment.py'
            - 'posthog/migrations/**'
            - 'posthog/plugins/**'
            - 'docker*.yml'
            - '*Dockerfile'
    push:
        branches:
            - master
        paths:
            - .github/workflows/ci-plugin-server.yml
            - 'plugin-server/**'
            - 'posthog/clickhouse/migrations/**'
            - 'ee/migrations/**'
            - 'ee/management/commands/setup_test_environment.py'
            - 'posthog/migrations/**'
            - 'posthog/plugins/**'
            - 'docker*.yml'
            - '*Dockerfile'

env:
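    # Object storage settings point at the dev stack's local object storage container
    # (assumed to be MinIO from docker-compose.dev.yml, exposed on port 19000).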
    OBJECT_STORAGE_ENABLED: true
    OBJECT_STORAGE_ENDPOINT: 'http://localhost:19000'
    OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
    OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
    OBJECT_STORAGE_SESSION_RECORDING_FOLDER: 'session_recordings'
    OBJECT_STORAGE_BUCKET: 'posthog'
    # set the max buffer size small enough that the functional tests behave the same in CI as when running locally
    SESSION_RECORDING_MAX_BUFFER_SIZE_KB: 1024
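
# Cancel any in-progress run for the same workflow and branch (head_ref for PRs,
# run_id as a unique fallback for pushes) so only the latest commit is tested.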
concurrency:
    group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
    cancel-in-progress: true

jobs:
    code-quality:
        name: Code quality
        runs-on: ubuntu-22.04
        defaults:
            run:
                working-directory: 'plugin-server'
        steps:
            - uses: actions/checkout@v3

            - name: Install pnpm
              uses: pnpm/action-setup@v2
              with:
                  version: 7.x.x

            - name: Set up Node.js
              uses: actions/setup-node@v3
              with:
                  node-version: 18
                  cache: pnpm

            - name: Install package.json dependencies with pnpm
              run: pnpm install --frozen-lockfile

            - name: Check formatting with prettier
              run: pnpm prettier:check

            - name: Lint with ESLint
              run: pnpm lint

    tests:
        name: Tests (${{matrix.shard}})
        runs-on: ubuntu-22.04
        strategy:
            fail-fast: false
            matrix:
                shard: [1/3, 2/3, 3/3]
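                # Each shard job runs one third of the Jest suite, selected via
                # --shard=${{matrix.shard}} in the 'Test with Jest' step below.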
        env:
            REDIS_URL: 'redis://localhost'
            CLICKHOUSE_HOST: 'localhost'
            CLICKHOUSE_DATABASE: 'posthog_test'
            KAFKA_HOSTS: 'kafka:9092'
        steps:
            - name: Code check out
              uses: actions/checkout@v3

            - name: Stop/Start stack with Docker Compose
              run: |
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d
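
            # KAFKA_HOSTS is set to kafka:9092, so the 'kafka' hostname must resolve to
            # localhost for tests running on the runner to reach the broker container.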
            - name: Add Kafka to /etc/hosts
              run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts

            - name: Set up Python
              uses: actions/setup-python@v4
              with:
                  python-version: 3.10.10
                  token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}

            - uses: syphar/restore-virtualenv@v1
              id: cache-backend-tests
              with:
                  custom_cache_key_element: v1-

            - uses: syphar/restore-pip-download-cache@v1
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'

            - name: Install SAML (python3-saml) dependencies
              run: |
                  sudo apt-get update
                  sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl

            - name: Install python dependencies
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'
              run: |
                  python -m pip install -r requirements-dev.txt
                  python -m pip install -r requirements.txt

            - name: Install pnpm
              uses: pnpm/action-setup@v2
              with:
                  version: 7.x.x

            - name: Set up Node.js
              uses: actions/setup-node@v3
              with:
                  node-version: 18
                  cache: pnpm
                  cache-dependency-path: plugin-server/pnpm-lock.yaml

            - name: Install package.json dependencies with pnpm
              run: cd plugin-server && pnpm i

            - name: Wait for Clickhouse & Kafka
              run: bin/check_kafka_clickhouse_up
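
            # `pnpm setup:test` is assumed to invoke the Django test setup, which creates the
            # test databases (hence the `test_` prefix on DATABASE_URL in the Jest step below).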
            - name: Set up databases
              env:
                  TEST: 'true'
                  SECRET_KEY: 'abcdef' # unsafe - for testing only
                  DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
              run: cd plugin-server && pnpm setup:test

            - name: Test with Jest
              env:
                  # Below DB name has `test_` prepended, as that's how Django (ran above) creates the test DB
                  DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_posthog'
                  REDIS_URL: 'redis://localhost'
                  NODE_OPTIONS: '--max_old_space_size=4096'
              run: cd plugin-server && pnpm test -- --runInBand --forceExit tests/ --shard=${{matrix.shard}}

    functional-tests:
        name: Functional tests (POE_EMBRACE_JOIN_FOR_TEAMS=${{matrix.POE_EMBRACE_JOIN_FOR_TEAMS}})
        runs-on: ubuntu-latest-8-cores
        strategy:
            fail-fast: false
            matrix:
                POE_EMBRACE_JOIN_FOR_TEAMS: ['', '*']
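                # The functional tests run twice: once with the flag unset ('') and once with it
                # enabled for all teams ('*'), presumably covering both persons-on-events code paths.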
        env:
            REDIS_URL: 'redis://localhost'
            CLICKHOUSE_HOST: 'localhost'
            CLICKHOUSE_DATABASE: 'posthog_test'
            KAFKA_HOSTS: 'kafka:9092'
            DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
            POE_EMBRACE_JOIN_FOR_TEAMS: ${{matrix.POE_EMBRACE_JOIN_FOR_TEAMS}}
        steps:
            - name: Code check out
              uses: actions/checkout@v3

            - name: Stop/Start stack with Docker Compose
              run: |
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d

            - name: Add Kafka to /etc/hosts
              run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts

            - name: Set up Python
              uses: actions/setup-python@v4
              with:
                  python-version: 3.10.10
                  token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}

            - uses: syphar/restore-virtualenv@v1
              id: cache-backend-tests
              with:
                  custom_cache_key_element: v1-

            - uses: syphar/restore-pip-download-cache@v1
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'

            - name: Install SAML (python3-saml) dependencies
              run: |
                  sudo apt-get update
                  sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'

            - name: Install python dependencies
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'
              run: |
                  python -m pip install -r requirements-dev.txt
                  python -m pip install -r requirements.txt

            - name: Install pnpm
              uses: pnpm/action-setup@v2
              with:
                  version: 7.x.x

            - name: Set up Node.js
              uses: actions/setup-node@v3
              with:
                  node-version: 18
                  cache: pnpm
                  cache-dependency-path: plugin-server/pnpm-lock.yaml

            - name: Install package.json dependencies with pnpm
              run: |
                  cd plugin-server
                  pnpm install --frozen-lockfile
                  pnpm build

            - name: Wait for Clickhouse & Kafka
              run: bin/check_kafka_clickhouse_up

            - name: Set up databases
              env:
                  DEBUG: 'true'
                  SECRET_KEY: 'abcdef' # unsafe - for testing only
              run: |
                  ./manage.py migrate
                  ./manage.py migrate_clickhouse

            - name: Run functional tests
              run: |
                  cd plugin-server
                  ./bin/ci_functional_tests.sh
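
            # ci_functional_tests.sh is expected to write a coverage report to
            # plugin-server/coverage, which is uploaded as an artifact below.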
            - name: Upload coverage report
              uses: actions/upload-artifact@v3
              if: always()
              with:
                  name: functional-coverage
                  if-no-files-found: warn
                  retention-days: 1
                  path: 'plugin-server/coverage'

    recording-ingestion-load-test:
        name: Recording ingestion load test
        # This job load-tests the recording ingestion pipeline using synthetic
        # recordings data. It is not meant to produce directly comparable results,
        # but rather to give a rough idea of how ingestion performance changes over time.
        runs-on: ubuntu-latest-8-cores
        env:
            DEBUG: 'true'
            KAFKA_HOSTS: 'kafka:9092'
            DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
        steps:
            - name: Code check out
              uses: actions/checkout@v3

            - name: Start Kafka, PostgreSQL and Redis
              run: |
                  # Note: we need to start Redis as well, this is just so
                  # `./manage.py setup_dev` runs.
                  docker compose -f docker-compose.dev.yml up kafka db redis -d

            - name: Add Kafka to /etc/hosts
              run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts

            - name: Set up Python
              uses: actions/setup-python@v4
              with:
                  python-version: 3.10.10
                  token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}

            - uses: syphar/restore-virtualenv@v1
              id: cache-backend-tests
              with:
                  custom_cache_key_element: v1-

            - uses: syphar/restore-pip-download-cache@v1
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'

            - name: Install SAML (python3-saml) dependencies
              run: |
                  sudo apt-get update
                  sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'

            - name: Install python dependencies
              if: steps.cache-backend-tests.outputs.cache-hit != 'true'
              run: |
                  python -m pip install -r requirements-dev.txt
                  python -m pip install -r requirements.txt

            - name: Install pnpm
              uses: pnpm/action-setup@v2
              with:
                  version: 7.x.x

            - name: Set up Node.js
              uses: actions/setup-node@v3
              with:
                  node-version: 18
                  cache: pnpm
                  cache-dependency-path: plugin-server/pnpm-lock.yaml

            - name: Install package.json dependencies with pnpm
              run: |
                  cd plugin-server
                  pnpm install --frozen-lockfile
                  pnpm build

            - name: Set up postgres
              run: |
                  ./manage.py migrate
                  ./manage.py setup_dev --no-data
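
            # SESSIONS_COUNT is assumed to control how many synthetic session recordings
            # the load-test script generates and ingests.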
            - name: Run load test
              run: |
                  cd plugin-server
                  SESSIONS_COUNT=2500 ./bin/ci_session_recordings_load_test.sh

            - name: Upload profile as artifact
              uses: actions/upload-artifact@v2
              with:
                  name: profile
                  path: |
                      plugin-server/profile