1
0
mirror of https://github.com/garraflavatra/docker-volume-s3-backup.git synced 2025-06-27 13:35:10 +00:00

Compare commits

...

3 Commits

Author SHA1 Message Date
044ddcdbda Update documentation 2025-05-04 18:03:24 +02:00
cc24a7d123 Add restore script 2025-05-04 18:03:15 +02:00
b462c54de6 Fix tar.gz extension 2025-05-04 17:40:10 +02:00
4 changed files with 17 additions and 21 deletions

View File

@ -22,6 +22,6 @@ ENV BACKUP_KEEP_DAYS ''
ADD src/run.sh run.sh
ADD src/env.sh env.sh
ADD src/backup.sh backup.sh
# ADD src/restore.sh restore.sh -- not ready yet
ADD src/restore.sh restore.sh
CMD ["sh", "run.sh"]

View File

@ -42,22 +42,19 @@ services:
### Restore
> [!NOTE]
> The restore command is yet to be ported from the original restore script from [`eeshugerman/postgres-backup-s3`](https://github.com/eeshugerman/postgres-backup-s3). Contributions are welcome :)
> [!CAUTION]
> DATA LOSS! All folder content will be dropped and re-created.
> All existing files will be replaced by the backup content!
#### ... from latest backup
#### From latest backup
```sh
docker exec <container_name> sh restore.sh
```
> [!NOTE]
> If your bucket has more than 1000 files, the latest may not be restored -- only one S3 `ls` command is used
> If your bucket has over 1000 files, the latest may not be restored — the S3 `ls` command is issued only once.
#### ... from specific backup
#### From specific backup
```sh
docker exec <container_name> sh restore.sh <timestamp>
@ -65,7 +62,8 @@ docker exec <container_name> sh restore.sh <timestamp>
## Development
### Run a simple test environment with Docker Compose
You can use the Docker Compose file in the root with your own S3 bucket for development and testing.
```sh
cp .env.example .env
# fill out your secrets/params in .env

View File

@ -9,7 +9,7 @@ echo "Creating backup..."
tar -czf dump.tar.gz /data
timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${BACKUP_NAME}_${timestamp}.dump"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${BACKUP_NAME}_${timestamp}.tar.gz"
if [ -n "$PASSPHRASE" ]; then
echo "Encrypting backup..."

View File

@ -8,18 +8,18 @@ source ./env.sh
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}"
if [ -z "$PASSPHRASE" ]; then
file_type=".dump"
file_type=".tar.gz"
else
file_type=".dump.gpg"
file_type=".tar.gz.gpg"
fi
if [ $# -eq 1 ]; then
timestamp="$1"
key_suffix="${POSTGRES_DATABASE}_${timestamp}${file_type}"
key_suffix="${BACKUP_NAME}_${timestamp}${file_type}"
else
echo "Finding latest backup..."
key_suffix=$(
aws $aws_args s3 ls "${s3_uri_base}/${POSTGRES_DATABASE}" \
aws $aws_args s3 ls "${s3_uri_base}/${BACKUP_NAME}" \
| sort \
| tail -n 1 \
| awk '{ print $4 }'
@ -27,18 +27,16 @@ else
fi
echo "Fetching backup from S3..."
aws $aws_args s3 cp "${s3_uri_base}/${key_suffix}" "db${file_type}"
aws $aws_args s3 cp "${s3_uri_base}/${key_suffix}" "dump${file_type}"
if [ -n "$PASSPHRASE" ]; then
echo "Decrypting backup..."
gpg --decrypt --batch --passphrase "$PASSPHRASE" db.dump.gpg > db.dump
rm db.dump.gpg
gpg --decrypt --batch --passphrase "$PASSPHRASE" dump.tar.gz.gpg > dump.tar.gz
rm dump.tar.gz.gpg
fi
conn_opts="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DATABASE"
echo "Restoring from backup..."
pg_restore $conn_opts --clean --if-exists db.dump
rm db.dump
tar -xzf dump.tar.gz -C /data --strip-components 1
rm dump.tar.gz
echo "Restore complete."