# posthog/docker-compose.hobby.yml (from https://github.com/PostHog/posthog.git)
version: '3'
services:
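    # PostgreSQL: stores PostHog's relational application data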
    db:
        image: postgres:12-alpine
        restart: on-failure
        environment:
            POSTGRES_USER: posthog
            POSTGRES_DB: posthog
            POSTGRES_PASSWORD: posthog
        ports:
            - '5432:5432'
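    # Redis: used for caching and queues; memory capped at 200 MB with LRU eviction (see command below)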
    redis:
        image: redis:6.2.7-alpine
        restart: on-failure
        ports:
            - '6379:6379'
        command: redis-server --maxmemory-policy allkeys-lru --maxmemory 200mb
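    # ClickHouse: columnar database that stores the analytics event data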
    clickhouse:
        #
        # Note: please keep the default version in sync across
        # `posthog` and the `charts-clickhouse` repos
        #
        image: ${CLICKHOUSE_SERVER_IMAGE:-clickhouse/clickhouse-server:21.11.11.1}
        restart: on-failure
        depends_on:
            - kafka
            - zookeeper
        ports:
            - '8123:8123'
            - '9000:9000'
            - '9440:9440'
            - '9009:9009'
        volumes:
            - ./posthog/ee/idl:/idl
            - ./posthog/docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
            - ./posthog/docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
            - ./posthog/docker/clickhouse/users.xml:/etc/clickhouse-server/users.xml
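    # ZooKeeper: coordination service required by Kafka and ClickHouse (both depend on it above/below)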
    zookeeper:
        image: zookeeper:3.7.0
        restart: on-failure
        volumes:
            - zookeeper-datalog:/datalog
            - zookeeper-data:/data
            - zookeeper-logs:/logs
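    # Kafka: event ingestion queue sitting between the app/plugin server and ClickHouse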
    kafka:
        image: bitnami/kafka:2.8.1-debian-10-r99
        restart: on-failure
        depends_on:
            - zookeeper
        ports:
            - '9092:9092'
        environment:
            KAFKA_BROKER_ID: 1001
            KAFKA_CFG_RESERVED_BROKER_MAX_ID: 1001
            KAFKA_CFG_LISTENERS: PLAINTEXT://:9092
            KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
            KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181
            ALLOW_PLAINTEXT_LISTENER: 'true'
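    # Celery worker (with the beat scheduler); the &worker YAML anchor is
    # reused below by the web and asyncmigrationscheck services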
    worker: &worker
        image: posthog/posthog:$POSTHOG_APP_TAG
        command: ./bin/docker-worker-celery --with-scheduler
        restart: on-failure
        environment:
            SENTRY_DSN: $SENTRY_DSN
            SITE_URL: https://$DOMAIN
            DISABLE_SECURE_SSL_REDIRECT: 'true'
            IS_BEHIND_PROXY: 'true'
            DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
            CLICKHOUSE_HOST: 'clickhouse'
            CLICKHOUSE_DATABASE: 'posthog'
            CLICKHOUSE_SECURE: 'false'
            CLICKHOUSE_VERIFY: 'false'
            KAFKA_URL: 'kafka://kafka'
            REDIS_URL: 'redis://redis:6379/'
            SECRET_KEY: $POSTHOG_SECRET
            PGHOST: db
            PGUSER: posthog
            PGPASSWORD: posthog
            DEPLOYMENT: hobby
        depends_on:
            - db
            - redis
            - clickhouse
            - kafka
            - object_storage
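    # Django web application, inheriting the worker's image and environment via <<: *worker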
    web:
        <<: *worker
        command: /compose/start
        ports:
            - '8000:8000'
            - '8234:8234'
        volumes:
            - ./compose:/compose
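    # Caddy: reverse proxy in front of the web service, serving HTTP/HTTPS on ports 80/443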
    caddy:
        image: caddy
        restart: unless-stopped
        ports:
            - '80:80'
            - '443:443'
        volumes:
            - ./Caddyfile:/etc/caddy/Caddyfile
        depends_on:
            - web
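    # Plugin server: consumes events from Kafka and runs PostHog apps (plugins)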
    plugins:
        image: posthog/posthog:$POSTHOG_APP_TAG
        command: ./bin/plugin-server --no-restart-loop
        restart: on-failure
        environment:
            DATABASE_URL: 'postgres://posthog:posthog@db:5432/posthog'
            KAFKA_ENABLED: 'true'
            KAFKA_HOSTS: 'kafka:9092'
            REDIS_URL: 'redis://redis:6379/'
            CLICKHOUSE_HOST: 'clickhouse'
        depends_on:
            - db
            - redis
            - clickhouse
            - kafka
            - object_storage
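    # MinIO: S3-compatible object storage (API on port 19000, console on 19001)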
    object_storage:
        image: minio/minio
        ports:
            - '19000:19000'
            - '19001:19001'
        volumes:
            - object_storage:/data
        environment:
            MINIO_ROOT_USER: object_storage_root_user
            MINIO_ROOT_PASSWORD: object_storage_root_password
        entrypoint: sh
        command: -c 'mkdir -p /data/posthog && minio server --address ":19000" --console-address ":19001" /data' # create the 'posthog' bucket before starting the service
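    # One-shot check that async migrations have completed; scale: 0 keeps it from starting by default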
    asyncmigrationscheck:
        <<: *worker
        command: python manage.py run_async_migrations --check
        restart: 'no'
        scale: 0
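# Named volumes used by the zookeeper and object_storage services above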
volumes:
    zookeeper-data:
    zookeeper-datalog:
    zookeeper-logs:
    object_storage: