diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..485dee6
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.idea
diff --git a/Dockerfile b/Dockerfile
index e3552f8..4ab650b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -19,6 +19,7 @@ ENV S3_ENDPOINT ''
 ENV S3_S3V4 'no'
 ENV SCHEDULE ''
 ENV PASSPHRASE ''
+ENV BACKUP_KEEP_DAYS 7
 
 ADD src/run.sh run.sh
 ADD src/backup.sh backup.sh
diff --git a/README.md b/README.md
index 8a67c16..023df0f 100644
--- a/README.md
+++ b/README.md
@@ -24,11 +24,14 @@ pg_backup_s3:
     POSTGRES_DATABASE: dbname
     POSTGRES_USER: user
     POSTGRES_PASSWORD: password
+    BACKUP_KEEP_DAYS: 7
 ```
+
 - Images are tagged by the major PostgreSQL version they support: `10`, `11`, `12`, `13`, or `14`.
 - The `SCHEDULE` variable determines backup frequency. See go-cron schedules documentation [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). Omit to run the backup immediately and then exit.
 - If `PASSPHRASE` is provided, the backup will be encrypted using GPG.
 - Run `docker exec <container name> sh backup.sh` to trigger a backup ad-hoc
+- Set `BACKUP_KEEP_DAYS` to the number of days backups should be retained; backups older than this are deleted from the bucket after each run. Set it to `0` to keep backups indefinitely.
 
 ## Restore
 > **WARNING:** DATA LOSS! All database objects will be dropped and re-created.
diff --git a/src/backup.sh b/src/backup.sh
index e5a243d..2fece34 100644
--- a/src/backup.sh
+++ b/src/backup.sh
@@ -79,7 +79,16 @@ else
 fi
 
 echo "Uploading backup to $S3_BUCKET..."
 aws $aws_args s3 cp "$local_file" "$s3_uri"
 rm "$local_file"
 
 echo "Backup complete."
+
+if [ "$BACKUP_KEEP_DAYS" -ne 0 ]; then
+  date_from_remove=$(date -d "@$(($(date +%s) - 86400 * BACKUP_KEEP_DAYS))" +"%Y-%m-%d")
+  backups_query="Contents[?LastModified<='${date_from_remove} 00:00:00'].{Key: Key}"
+
+  echo "Removing old backups from $S3_BUCKET..."
+  aws $aws_args s3api list-objects-v2 --bucket "${S3_BUCKET}" --prefix "${S3_PREFIX}" --query "${backups_query}" --output text | xargs -n1 -t -I 'KEY' aws $aws_args s3 rm s3://"${S3_BUCKET}"/'KEY'
+  echo "Removal complete."
+fi
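
For reference, a minimal sketch of how the new retention setting would be exercised at runtime. The image name, host value, bucket name, and the S3 credential variable names below are placeholders or assumptions and do not appear in this diff; only `BACKUP_KEEP_DAYS`, `SCHEDULE`, `S3_BUCKET`, `PASSPHRASE`, and the Postgres variables are taken from it.

    # Hypothetical invocation; image name and S3 credential variable names are assumed.
    docker run -d \
      -e POSTGRES_HOST=db \
      -e POSTGRES_DATABASE=dbname \
      -e POSTGRES_USER=user \
      -e POSTGRES_PASSWORD=password \
      -e S3_ACCESS_KEY_ID=key \
      -e S3_SECRET_ACCESS_KEY=secret \
      -e S3_BUCKET=my-bucket \
      -e SCHEDULE='@daily' \
      -e BACKUP_KEEP_DAYS=7 \
      your-registry/pg-backup-s3:14
    # With BACKUP_KEEP_DAYS=7, each scheduled run uploads a new dump and then deletes
    # objects under the configured prefix whose LastModified date is at least seven
    # days in the past; BACKUP_KEEP_DAYS=0 skips the cleanup step entirely.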