#
# This workflow runs CI E2E tests with Cypress.
#
# It relies on the container image built by 'container-images-ci.yml'.
#
name: E2E CI

on:
    pull_request:

concurrency:
    group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
    cancel-in-progress: true
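    # Note on the group key above: github.head_ref groups runs per PR branch (github.run_id is the
    # fallback for non-PR triggers), so pushing a new commit cancels any in-flight E2E run for that PR.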

jobs:
    changes:
        runs-on: ubuntu-latest
        timeout-minutes: 5
        name: Determine need to run E2E checks
        # Set job outputs to values from filter step
        outputs:
            shouldTriggerCypress: ${{ steps.changes.outputs.shouldTriggerCypress }}
        steps:
            # For pull requests it's not necessary to check out the code
            - uses: dorny/paths-filter@v2
              id: changes
              with:
                  filters: |
                      shouldTriggerCypress:
                          # Avoid running E2E tests for irrelevant changes
                          # NOTE: we are at risk of missing a dependency here. We could make
                          # the dependencies clearer if we separated the backend/frontend
                          # code completely
                          - 'ee/**'
                          - 'posthog/**'
                          - 'bin/*'
                          - frontend/**/*
                          - requirements.txt
                          - requirements-dev.txt
                          - package.json
                          - pnpm-lock.yaml
                          # Make sure we run if someone explicitly changes the workflow
                          - .github/workflows/ci-e2e.yml
                          - .github/actions/build-n-cache-image/action.yml
                          # We use docker compose for tests, so make sure we rerun on
                          # changes to docker-compose.dev.yml, e.g. dependency
                          # version changes
                          - docker-compose.dev.yml
                          - Dockerfile
                          - cypress/**
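            # The shouldTriggerCypress output above is what the later jobs and steps check in their
            # `if: needs.changes.outputs.shouldTriggerCypress == 'true'` guards, so most of the
            # workflow becomes a cheap no-op when none of these paths changed.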

    # Job that lists and chunks spec file names and caches node modules
    chunks:
        needs: changes
        name: Cypress preparation
        runs-on: ubuntu-latest
        timeout-minutes: 5
        outputs:
            chunks: ${{ steps.chunk.outputs.chunks }}
        steps:
            - name: Check out
              uses: actions/checkout@v3

            - name: Group spec files into chunks of two
              id: chunk
              run: echo "chunks=$(ls cypress/e2e/* | jq --slurp --raw-input -c 'split("\n")[:-1] | _nwise(2) | join("\n")' | jq --slurp -c .)" >> $GITHUB_OUTPUT
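            # Roughly what the command above emits (file names are illustrative): each element of the
            # JSON array is a newline-joined group of up to two spec paths, e.g.
            #   chunks=["cypress/e2e/alerts.cy.ts\ncypress/e2e/auth.cy.ts","cypress/e2e/billing.cy.ts"]
            # and that array feeds the matrix of the `cypress` job below.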

    container:
        name: Build and cache container image
        runs-on: ubuntu-latest
        timeout-minutes: 60
        needs: [changes]
        permissions:
            contents: read
            id-token: write # allow issuing OIDC tokens for this workflow run
        outputs:
            tag: ${{ steps.build.outputs.tag }}
            build-id: ${{ steps.build.outputs.build-id }}
        steps:
            - name: Checkout
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/checkout@v3
            - name: Build the Docker image with Depot
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              # Build the container image in preparation for the E2E tests
              uses: ./.github/actions/build-n-cache-image
              id: build
              with:
                  save: true
                  actions-id-token-request-url: ${{ env.ACTIONS_ID_TOKEN_REQUEST_URL }}

    cypress:
        name: Cypress E2E tests (${{ strategy.job-index }})
        runs-on: ubuntu-latest
        timeout-minutes: 60
        needs: [chunks, changes, container]
        permissions:
            id-token: write # allow issuing OIDC tokens for this workflow run

        strategy:
            # when one test fails, DO NOT cancel the other
            # containers, as there may be other spec failures
            # we want to know about.
            fail-fast: false
            matrix:
                chunk: ${{ fromJson(needs.chunks.outputs.chunks) }}
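                # Each matrix job receives one newline-separated group of spec files from the
                # `chunks` job output; strategy.job-index (used in the job name above) is its position.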

        steps:
            - name: Checkout
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/checkout@v3

            - name: Install pnpm
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: pnpm/action-setup@v4

            - name: Set up Node.js
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/setup-node@v4
              with:
                  node-version: 18.12.1

            - name: Get pnpm cache directory path
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              id: pnpm-cache-dir
              run: echo "PNPM_STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

            - name: Get cypress cache directory path
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              id: cypress-cache-dir
              run: echo "CYPRESS_BIN_PATH=$(npx cypress cache path)" >> $GITHUB_OUTPUT

            - uses: actions/cache@v4
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              id: pnpm-cache
              with:
                  path: |
                      ${{ steps.pnpm-cache-dir.outputs.PNPM_STORE_PATH }}
                      ${{ steps.cypress-cache-dir.outputs.CYPRESS_BIN_PATH }}
                  key: ${{ runner.os }}-pnpm-cypress-${{ hashFiles('**/pnpm-lock.yaml') }}
                  restore-keys: |
                      ${{ runner.os }}-pnpm-cypress-
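            # The cache key above is an exact match on the lockfile hash; the restore-key lets a run
            # with a changed pnpm-lock.yaml start from the most recent older cache instead of cold.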

            - name: Install package.json dependencies with pnpm
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: pnpm install --frozen-lockfile

            - name: Stop/Start stack with Docker Compose
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: |
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d

            - name: Wait for ClickHouse
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: ./bin/check_kafka_clickhouse_up

            - name: Install Depot CLI
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: depot/setup-action@v1

            - name: Get Docker image cached in Depot
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: depot/pull-action@v1
              with:
                  build-id: ${{ needs.container.outputs.build-id }}
                  tags: ${{ needs.container.outputs.tag }}

            - name: Write .env # This step intentionally has no if, so that GH always considers the action as having run
              run: |
                  cat <<EOT >> .env
                  SECRET_KEY=6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da
                  REDIS_URL=redis://localhost
                  DATABASE_URL=postgres://posthog:posthog@localhost:5432/posthog
                  KAFKA_HOSTS=kafka:9092
                  DISABLE_SECURE_SSL_REDIRECT=1
                  SECURE_COOKIES=0
                  OPT_OUT_CAPTURE=0
                  E2E_TESTING=1
                  SKIP_SERVICE_VERSION_REQUIREMENTS=1
                  EMAIL_HOST=email.test.posthog.net
                  SITE_URL=http://localhost:8000
                  NO_RESTART_LOOP=1
                  CLICKHOUSE_SECURE=0
                  OBJECT_STORAGE_ENABLED=1
                  OBJECT_STORAGE_ENDPOINT=http://localhost:19000
                  OBJECT_STORAGE_ACCESS_KEY_ID=object_storage_root_user
                  OBJECT_STORAGE_SECRET_ACCESS_KEY=object_storage_root_password
                  GITHUB_ACTION_RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
                  CELERY_METRICS_PORT=8999
                  CLOUD_DEPLOYMENT=E2E
                  ENCRYPTION_SALT_KEYS=00beef0000beef0000beef0000beef00
                  EOT
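                  # Note: the values above appear to be throwaway settings for this ephemeral CI stack
                  # (localhost services, SSL redirect and secure cookies disabled, E2E_TESTING=1),
                  # not production credentials.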

            - name: Start PostHog
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: |
                  mkdir -p /tmp/logs

                  echo "Starting PostHog using the container image ${{ needs.container.outputs.tag }}"
                  DOCKER_RUN="docker run --rm --network host --add-host kafka:127.0.0.1 --env-file .env ${{ needs.container.outputs.tag }}"

                  $DOCKER_RUN ./bin/migrate
                  $DOCKER_RUN python manage.py setup_dev

                  # only starts the plugin server so that the "wait for PostHog" step passes
                  $DOCKER_RUN ./bin/docker-worker &> /tmp/logs/worker.txt &
                  $DOCKER_RUN ./bin/docker-server &> /tmp/logs/server.txt &
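                  # Both processes run in the background with their output captured under /tmp/logs,
                  # which the "Show logs on failure" step uploads as an artifact if anything fails.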

            - name: Wait for PostHog
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              # this action might be abandoned - but v1 doesn't point to the latest v1 release (which it should),
              # so we pin v1.2.1 to avoid the Node version warnings that v1 produces
              # todo check https://github.com/iFaxity/wait-on-action/releases for new releases
              uses: iFaxity/wait-on-action@v1.2.1
              timeout-minutes: 3
              with:
                  verbose: true
                  log: true
                  resource: http://localhost:8000

            - name: Cypress run
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: cypress-io/github-action@v6
              with:
                  config-file: cypress.e2e.config.ts
                  config: retries=2
                  spec: ${{ matrix.chunk }}
                  install: false
                  # We were seeing surprising crashes in headless mode
                  # See https://github.com/cypress-io/cypress/issues/28893#issuecomment-1956480875
                  headed: true
              env:
                  E2E_TESTING: 1
                  OPT_OUT_CAPTURE: 0
                  GITHUB_ACTION_RUN_URL: '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
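            # About the step above: `config: retries=2` lets Cypress retry each failing test up to
            # twice before marking it failed, and `install: false` skips the action's own dependency
            # install since `pnpm install` already ran earlier in this job.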

            - name: Archive test screenshots
              uses: actions/upload-artifact@v3
              with:
                  name: screenshots
                  path: cypress/screenshots
              if: ${{ failure() }}

            - name: Archive test downloads
              uses: actions/upload-artifact@v3
              with:
                  name: downloads
                  path: cypress/downloads
              if: ${{ failure() }}

            - name: Archive test videos
              uses: actions/upload-artifact@v3
              with:
                  name: videos
                  path: cypress/videos
              if: ${{ failure() }}

            - name: Archive accessibility violations
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/upload-artifact@v3
              with:
                  name: accessibility-violations
                  path: '**/a11y/'
                  if-no-files-found: 'ignore'

            - name: Show logs on failure
              # upload an artifact here, as the output is likely too large to display in the action log
              uses: actions/upload-artifact@v3
              with:
                  name: logs-${{ strategy.job-index }}
                  path: /tmp/logs
              if: ${{ failure() }}

    calculate-running-time:
        name: Calculate running time
        runs-on: ubuntu-latest
        needs: [cypress]
        if: needs.changes.outputs.shouldTriggerCypress == 'true' &&
            github.event.pull_request.head.repo.full_name == 'PostHog/posthog'
        steps:
            - name: Calculate running time
              run: |
                  echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token
                  run_id=${GITHUB_RUN_ID}
                  repo=${GITHUB_REPOSITORY}
                  run_info=$(gh api repos/${repo}/actions/runs/${run_id})
                  echo run_info: ${run_info}
                  # name is the name of the workflow file
                  # run_started_at is the start time of the workflow
                  # we want to get the number of seconds between the start time and now
                  name=$(echo ${run_info} | jq -r '.name')
                  run_url=$(echo ${run_info} | jq -r '.url')
                  run_started_at=$(echo ${run_info} | jq -r '.run_started_at')
                  run_attempt=$(echo ${run_info} | jq -r '.run_attempt')
                  start_seconds=$(date -d "${run_started_at}" +%s)
                  now_seconds=$(date +%s)
                  duration=$((now_seconds-start_seconds))
                  echo running_time_duration_seconds=${duration} >> $GITHUB_ENV
                  echo running_time_run_url=${run_url} >> $GITHUB_ENV
                  echo running_time_run_attempt=${run_attempt} >> $GITHUB_ENV
                  echo running_time_run_id=${run_id} >> $GITHUB_ENV
                  echo running_time_run_started_at=${run_started_at} >> $GITHUB_ENV
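                  # Each line above appends KEY=value to $GITHUB_ENV, which is how the next step
                  # reads these values via ${{ env.running_time_* }}.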

            - name: Capture running time to PostHog
              if: github.event.pull_request.head.repo.full_name == 'PostHog/posthog'
              uses: PostHog/posthog-github-action@v0.1
              with:
                  posthog-token: ${{secrets.POSTHOG_API_TOKEN}}
                  event: 'posthog-ci-running-time'
                  properties: '{"duration_seconds": ${{ env.running_time_duration_seconds }}, "run_url": "${{ env.running_time_run_url }}", "run_attempt": "${{ env.running_time_run_attempt }}", "run_id": "${{ env.running_time_run_id }}", "run_started_at": "${{ env.running_time_run_started_at }}"}'