# posthog/docker-compose.hobby.yml


#
# `docker-compose` file used ONLY for hobby deployments.
#
# Please take a look at https://posthog.com/docs/self-host/deploy/hobby
# for more info.
#
# PostHog has sunset support for self-hosted K8s deployments.
# See: https://posthog.com/blog/sunsetting-helm-support-posthog
#
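#
# The `$VARIABLE` values below are interpolated from the shell environment when
# Compose runs (typically from a `.env` file generated by the hobby install
# script). A minimal sketch of such an environment; every value here is an
# illustrative assumption, not a shipped default:
#
#   DOMAIN=posthog.example.com
#   POSTHOG_SECRET=<long random string, used as the Django SECRET_KEY>
#   ENCRYPTION_SALT_KEYS=<random key for encrypting sensitive fields>
#   SENTRY_DSN=                      # may be left empty
#   REGISTRY_URL=posthog/posthog     # image repository to pull
#   POSTHOG_APP_TAG=latest-release   # image tag to deploy
#   DOCKER_REGISTRY_PREFIX=          # optional private-registry prefix for the postgres image below
#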
services:
    db:
        extends:
            file: docker-compose.base.yml
            service: db
        # Pin to postgres 12 until we have a process for pg_upgrade to postgres 15 for existing installations
        image: ${DOCKER_REGISTRY_PREFIX:-}postgres:12-alpine
        volumes:
            - postgres-data:/var/lib/postgresql/data
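    # The `postgres-data` named volume keeps the database across container
    # recreation; moving past the pinned Postgres 12 image would require a
    # manual pg_upgrade of that volume, per the note above.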
    redis:
        extends:
            file: docker-compose.base.yml
            service: redis
        volumes:
            - redis-data:/data
    redis7:
        extends:
            file: docker-compose.base.yml
            service: redis7
        volumes:
            - redis7-data:/data
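    # Two Redis instances are defined: `redis` (the main cache/broker the app
    # services depend on) and `redis7`, which the `plugins` service below is
    # pointed at via CDP_REDIS_HOST. Splitting them presumably keeps CDP traffic
    # off the primary Redis.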
    clickhouse:
        #
        # Note: please keep the default version in sync across
        # `posthog` and the `charts-clickhouse` repos
        #
        extends:
            file: docker-compose.base.yml
            service: clickhouse
        restart: on-failure
        depends_on:
            - kafka
            - zookeeper
        volumes:
            - ./posthog/posthog/idl:/idl
            - ./posthog/docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
            - ./posthog/docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
            - ./posthog/docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
            - clickhouse-data:/var/lib/clickhouse
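    # The bind mounts above assume a `posthog` checkout sitting next to this
    # compose file (the hobby installer clones the repo into ./posthog); they
    # provide the ClickHouse schema IDL files, init scripts and server config.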
    zookeeper:
        extends:
            file: docker-compose.base.yml
            service: zookeeper
        volumes:
            - zookeeper-datalog:/datalog
            - zookeeper-data:/data
            - zookeeper-logs:/logs
    kafka:
        extends:
            file: docker-compose.base.yml
            service: kafka
        depends_on:
            - zookeeper
        environment:
            KAFKA_LOG_RETENTION_MS: 3600000
            KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 300000
            KAFKA_LOG_RETENTION_HOURS: 1
        volumes:
            - kafka-data:/bitnami/kafka
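    # The retention settings above keep Kafka's footprint small on a single
    # host: log segments are kept for 3600000 ms (1 hour) and the cleanup check
    # runs every 300000 ms (5 minutes).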
    worker:
        extends:
            file: docker-compose.base.yml
            service: worker
        environment:
            SENTRY_DSN: $SENTRY_DSN
            SITE_URL: https://$DOMAIN
            SECRET_KEY: $POSTHOG_SECRET
            OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
            OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
            OBJECT_STORAGE_ENDPOINT: http://objectstorage:19000
            OBJECT_STORAGE_ENABLED: true
            ENCRYPTION_SALT_KEYS: $ENCRYPTION_SALT_KEYS
        image: $REGISTRY_URL:$POSTHOG_APP_TAG
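    # Background job runner; it shares the application image with `web` below,
    # so $REGISTRY_URL:$POSTHOG_APP_TAG should resolve to the same PostHog image
    # for both (e.g. posthog/posthog:latest-release, an illustrative value).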
    web:
        extends:
            file: docker-compose.base.yml
            service: web
        command: /compose/start
        volumes:
            - ./compose:/compose
        image: $REGISTRY_URL:$POSTHOG_APP_TAG
        environment:
            SENTRY_DSN: $SENTRY_DSN
            SITE_URL: https://$DOMAIN
            SECRET_KEY: $POSTHOG_SECRET
            OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
            OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
            OBJECT_STORAGE_ENDPOINT: http://objectstorage:19000
            OBJECT_STORAGE_ENABLED: true
            ENCRYPTION_SALT_KEYS: $ENCRYPTION_SALT_KEYS
        depends_on:
            - db
            - redis
            - clickhouse
            - kafka
            - objectstorage
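    # The Django web application (UI and API). `/compose/start` is the startup
    # script bind-mounted from ./compose. Note that this list-style depends_on
    # only waits for the data stores to be created, not to be healthy.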
    plugins:
        extends:
            file: docker-compose.base.yml
            service: plugins
        image: $REGISTRY_URL:$POSTHOG_APP_TAG
        environment:
            SENTRY_DSN: $SENTRY_DSN
            SITE_URL: https://$DOMAIN
            SECRET_KEY: $POSTHOG_SECRET
            OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
            OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
            OBJECT_STORAGE_ENDPOINT: http://objectstorage:19000
            OBJECT_STORAGE_ENABLED: true
            CDP_REDIS_HOST: redis7
            CDP_REDIS_PORT: 6379
            ENCRYPTION_SALT_KEYS: $ENCRYPTION_SALT_KEYS
        depends_on:
            - db
            - redis
            - redis7
            - clickhouse
            - kafka
            - objectstorage
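    # The plugin server / CDP service: it consumes events from Kafka and uses
    # the dedicated `redis7` instance (CDP_REDIS_HOST/PORT above) for CDP state,
    # separate from the main `redis` used by the rest of the stack.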
    caddy:
        image: caddy:2.6.1
        restart: unless-stopped
        ports:
            - '80:80'
            - '443:443'
        volumes:
            - ./Caddyfile:/etc/caddy/Caddyfile
            - caddy-data:/data
            - caddy-config:/config
        depends_on:
            - web
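    # Caddy terminates TLS for the configured domain on ports 80/443 and proxies
    # to the web container. A minimal sketch of the mounted ./Caddyfile, assuming
    # the web service listens on port 8000 inside the Compose network (the file
    # shipped by the hobby installer may differ):
    #
    #   posthog.example.com {
    #       encode gzip
    #       reverse_proxy http://web:8000
    #   }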
    objectstorage:
        extends:
            file: docker-compose.base.yml
            service: objectstorage
        restart: on-failure
        volumes:
            - objectstorage:/data
        ports:
            - '19000:19000'
            - '19001:19001'
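    # MinIO-compatible object storage (defined in docker-compose.base.yml) used
    # for assets such as session recordings. Port 19000 serves the S3 API that
    # OBJECT_STORAGE_ENDPOINT above points at; 19001 is presumably the console.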
    asyncmigrationscheck:
        extends:
            file: docker-compose.base.yml
            service: asyncmigrationscheck
        image: $REGISTRY_URL:$POSTHOG_APP_TAG
        environment:
            SENTRY_DSN: $SENTRY_DSN
            SITE_URL: https://$DOMAIN
            SECRET_KEY: $POSTHOG_SECRET
            SKIP_ASYNC_MIGRATIONS_SETUP: 0
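    # One-shot container that checks required async migrations have been applied
    # before upgrading the stack; SKIP_ASYNC_MIGRATIONS_SETUP: 0 here means the
    # setup/check is not skipped.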
    # Temporal containers
    temporal:
        extends:
            file: docker-compose.base.yml
            service: temporal
        environment:
            - ENABLE_ES=false
        ports:
            - 7233:7233
        volumes:
            - ./posthog/docker/temporal/dynamicconfig:/etc/temporal/config/dynamicconfig
    elasticsearch:
        extends:
            file: docker-compose.base.yml
            service: elasticsearch
    temporal-admin-tools:
        extends:
            file: docker-compose.base.yml
            service: temporal-admin-tools
        depends_on:
            - temporal
    temporal-ui:
        extends:
            file: docker-compose.base.yml
            service: temporal-ui
        ports:
            - 8081:8080
        depends_on:
            temporal:
                condition: service_started
            db:
                condition: service_healthy
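    # Temporal is the workflow engine PostHog uses for long-running jobs such as
    # batch exports. The gRPC frontend is published on port 7233 and the web UI
    # on host port 8081; Elasticsearch visibility is disabled (ENABLE_ES=false)
    # even though an elasticsearch service is still defined above.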
    temporal-django-worker:
        command: /compose/temporal-django-worker
        extends:
            file: docker-compose.base.yml
            service: temporal-django-worker
        volumes:
            - ./compose:/compose
        image: $REGISTRY_URL:$POSTHOG_APP_TAG
        environment:
            SENTRY_DSN: $SENTRY_DSN
            SITE_URL: https://$DOMAIN
            SECRET_KEY: $POSTHOG_SECRET
        depends_on:
            - db
            - redis
            - clickhouse
            - kafka
            - objectstorage
            - temporal
volumes:
    zookeeper-data:
    zookeeper-datalog:
    zookeeper-logs:
    objectstorage:
    postgres-data:
    clickhouse-data:
    caddy-data:
    caddy-config:
    redis-data:
    redis7-data:
    kafka-data:
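# The named volumes above hold all persistent state. With the variables sketched
# at the top of this file exported (or present in `.env`), the stack can be
# brought up with a standard Compose invocation, e.g.:
#
#   docker compose -f docker-compose.hobby.yml up -d
#
# which is roughly what the hobby install/upgrade scripts do for you.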