From e569eb75b00075e630bcc5d7562dee9a7b7c264a Mon Sep 17 00:00:00 2001
From: Romein van Buren
Date: Sun, 4 May 2025 13:54:42 +0200
Subject: [PATCH] Fix some bugs; it is working now!

---
 Dockerfile            |  2 +-
 README.md             | 20 ++++++++++++--------
 docker-compose.yaml   | 10 ++++++++--
 src/backup.sh         |  2 +-
 test_image/Dockerfile |  4 +++-
 5 files changed, 25 insertions(+), 13 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index b3314d7..cd21100 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -24,4 +24,4 @@ ADD src/env.sh env.sh
 ADD src/backup.sh backup.sh
 # ADD src/restore.sh restore.sh -- not ready yet
 
-CMD ['sh', 'run.sh']
+CMD ["sh", "run.sh"]
diff --git a/README.md b/README.md
index 3297f4c..c1b269f 100644
--- a/README.md
+++ b/README.md
@@ -19,17 +19,21 @@ services:
   volume_backup:
     image: smartyellow/docker-volume-s3-backup
+    volumes:
+      - data:/data/folder1
     environment:
-      SCHEDULE: '@weekly'         # optional
-      BACKUP_KEEP_DAYS: 7         # optional
-      PASSPHRASE: passphrase      # optional
-      S3_REGION: region
-      S3_ACCESS_KEY_ID: key
-      S3_SECRET_ACCESS_KEY: secret
-      S3_BUCKET: my-bucket
-      S3_PREFIX: backup
+      BACKUP_NAME: myvolume         # required
+      SCHEDULE: '@weekly'           # optional
+      BACKUP_KEEP_DAYS: 7           # optional
+      PASSPHRASE: passphrase        # optional
+      S3_REGION: region             # or set S3_ENDPOINT if you do not use AWS
+      S3_ACCESS_KEY_ID: key         # alias S3_ACCESS_KEY
+      S3_SECRET_ACCESS_KEY: secret  # alias S3_SECRET_KEY
+      S3_BUCKET: my-bucket          # required
+      S3_PREFIX: backup             # required
 ```
 
+- You can mount as many folders into `/data` as you like. Everything in `/data` is copied to your S3 storage in a single `.tar.gz`.
 - The `SCHEDULE` variable determines backup frequency. See go-cron schedules documentation [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). Omit to run the backup immediately and then exit.
 - If `PASSPHRASE` is provided, the backup will be encrypted using GPG.
 - Run `docker exec sh backup.sh` to trigger a backup ad-hoc.
 
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 134056c..c79f2c7 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -4,16 +4,22 @@
 # $ docker compose up -d --build --force-recreate
 
 services:
-  test:
-    build: test_image
+  test_image:
+    build: ./test_image
     volumes:
       - data:/data
 
   backup:
     build: .
     env_file: .env
+    volumes:
+      - data:/data/testdir
     environment:
+      BACKUP_NAME: backup-test
       SCHEDULE: '* * * * *'
       BACKUP_KEEP_DAYS: 7
       PASSPHRASE: passphrase
       S3_PREFIX: backup-test
+
+volumes:
+  data:
diff --git a/src/backup.sh b/src/backup.sh
index 76815be..5bd3168 100644
--- a/src/backup.sh
+++ b/src/backup.sh
@@ -6,7 +6,7 @@ set -o pipefail
 source ./env.sh
 
 echo "Creating backup..."
-tar -xzf dump.tar.gz /data
+tar -czf dump.tar.gz /data
 
 timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
 s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${BACKUP_NAME}_${timestamp}.dump"
diff --git a/test_image/Dockerfile b/test_image/Dockerfile
index d7dea20..966673e 100644
--- a/test_image/Dockerfile
+++ b/test_image/Dockerfile
@@ -1,3 +1,5 @@
-FROM scratch
+FROM alpine:3.21
 
 COPY data/ /data
+
+ENTRYPOINT ["/bin/sh", "-c", "while sleep 3600; do :; done"]
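
The docker-compose.yaml above keeps the `backup` container alive on a `* * * * *` schedule, so the corrected `tar -czf` path can be exercised by hand. Below is a minimal sketch, not part of the patch, assuming Docker Compose v2, a populated `.env` in the repository root, and the service names used above:

#!/bin/sh
# Sketch: exercise the patched backup flow against the test stack.
set -eu

docker compose up -d --build --force-recreate   # builds test_image and backup, attaches the shared `data` volume
docker compose exec backup sh backup.sh         # ad-hoc run: archives /data with the corrected -czf flags and uploads it
docker compose logs backup                      # scheduled runs should also appear here about once per minute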
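
Until restore.sh is ready, a backup can be fetched and unpacked manually. The sketch below rests on assumptions: the object key follows the `${S3_PREFIX}/${BACKUP_NAME}_<timestamp>.dump` pattern visible in backup.sh, the example key is illustrative only, and an encrypted backup (when `PASSPHRASE` is set) is taken to be a GPG symmetric archive wrapped around the `.tar.gz`, which this patch does not confirm.

#!/bin/sh
# Sketch: manual restore of one backup. Assumes the AWS CLI is configured with
# the same credentials as .env; the object key below is a hypothetical example.
set -eu

aws s3 ls "s3://$S3_BUCKET/$S3_PREFIX/"   # list backups and pick the key you want
aws s3 cp "s3://$S3_BUCKET/$S3_PREFIX/myvolume_2025-05-04T13:54:42.dump" backup.dump

if [ -n "${PASSPHRASE:-}" ]; then
  # Assumption: encrypted backups are GPG symmetric archives of the .tar.gz.
  gpg --batch --pinentry-mode loopback --passphrase "$PASSPHRASE" -o dump.tar.gz -d backup.dump
else
  mv backup.dump dump.tar.gz
fi

mkdir -p restore
tar -xzf dump.tar.gz -C restore   # tar strips the leading '/', so files land in restore/data/...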