Enhance backup functionality to support multiple databases

- Updated `backup.sh` to allow backing up multiple databases by introducing the `DATABASE_NAMES` environment variable.
- Modified `env.sh` to handle the new `DATABASE_NAMES` variable and ensure compatibility with existing `DATABASE_NAME`.
- Updated `docker-compose.yaml` and `README.md` to reflect changes and provide examples for backing up single and multiple databases.
- Added a new binary file `umb_pm_dev.bacpac` for database backup.

Signed-off-by: Shakar Bakr <5h4k4r.b4kr@gmail.com>
Shakar Bakr 2025-11-03 16:25:12 +03:00
parent 3c9af7f6c5
commit f6792ed364
5 changed files with 74 additions and 37 deletions

README.md

@@ -28,7 +28,10 @@ services:
S3_BUCKET: my-bucket
S3_PREFIX: backup
DATABASE_HOST: postgres
# Back up a single database
DATABASE_NAME: dbname
# Or back up multiple databases (comma or space separated)
# DATABASE_NAMES: "db_one,db_two"
DATABASE_USER: user
DATABASE_PASSWORD: password
DATABASE_SERVER: postgres # postgres, mariadb, or mssql
@@ -63,7 +66,7 @@ services:
S3_PREFIX: mssql-backup
DATABASE_HOST: mssql
DATABASE_PORT: 1433
DATABASE_NAME: MyDatabase
DATABASE_NAME: MyDatabase # or set DATABASE_NAMES for multiple databases
DATABASE_USER: sa
DATABASE_PASSWORD: YourStrong@Passw0rd
DATABASE_SERVER: mssql
@@ -76,6 +79,7 @@ volumes:
See [`docker-compose.yaml`](./docker-compose.yaml) for a complete working example.
- Images are tagged by the major PostgreSQL version supported: `11`, `12`, `13`, `14`, or `15`.
- To back up multiple databases in a single run, set `DATABASE_NAMES` to a comma- or space-separated list. `DATABASE_NAME` remains available for single database backups.
- The `SCHEDULE` variable determines backup frequency. See go-cron schedules documentation [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). Omit to run the backup immediately and then exit.
- If `PASSPHRASE` is provided, the backup will be encrypted using GPG.
- Run `docker exec <container name> sh backup.sh` to trigger a backup ad-hoc.
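The ad-hoc trigger in the last bullet can also exercise the new variable directly. A minimal sketch, assuming a running container named `backup-container` and placeholder database names `db_one` and `db_two`:

```sh
# Ad-hoc run that backs up two databases in one pass.
# Container and database names are illustrative; the override works because
# backup.sh sources env.sh at run time and picks up DATABASE_NAMES from the
# environment of the exec'd process.
docker exec -e DATABASE_NAMES="db_one,db_two" backup-container sh backup.sh
```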

docker-compose.yaml

@@ -104,6 +104,7 @@ services:
S3_PREFIX: mssql-backups
DATABASE_HOST: mssql
DATABASE_NAME: TestDB
# DATABASE_NAMES: "TestDB,AnotherDatabase" # Optional: back up multiple databases
DATABASE_USER: sa
DATABASE_PORT: 1433
DATABASE_SERVER: mssql

backup.sh

@@ -6,37 +6,41 @@ set -o pipefail
source ./env.sh
source ./helpers.sh
echo "Creating backup of $DATABASE_NAME database..."
backup
for CURRENT_DATABASE in $DATABASE_NAMES_LIST; do
DATABASE_NAME="$CURRENT_DATABASE"
timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
echo "Creating backup of $DATABASE_NAME database..."
backup
# MSSQL uses .bak extension, other databases use .dump
if [ "$DATABASE_SERVER" = "mssql" ]; then
local_file="${MSSQL_DATA_DIR}/db.bak"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.bak"
else
local_file="db.dump"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.dump"
fi
timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
if [ -n "$PASSPHRASE" ]; then
echo "Encrypting backup..."
gpg --symmetric --batch --passphrase "$PASSPHRASE" "$local_file"
# MSSQL uses .bak extension, other databases use .dump
if [ "$DATABASE_SERVER" = "mssql" ]; then
local_file="${MSSQL_DATA_DIR}/db.bak"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.bak"
else
local_file="db.dump"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.dump"
fi
if [ -n "${PASSPHRASE:-}" ]; then
echo "Encrypting backup..."
gpg --symmetric --batch --passphrase "$PASSPHRASE" "$local_file"
rm "$local_file"
local_file="${local_file}.gpg"
s3_uri="${s3_uri_base}.gpg"
else
s3_uri="$s3_uri_base"
fi
echo "Uploading backup of $DATABASE_NAME to $S3_BUCKET..."
aws $aws_args s3 cp "$local_file" "$s3_uri"
rm "$local_file"
local_file="${local_file}.gpg"
s3_uri="${s3_uri_base}.gpg"
else
s3_uri="$s3_uri_base"
fi
echo "Uploading backup to $S3_BUCKET..."
aws $aws_args s3 cp "$local_file" "$s3_uri"
rm "$local_file"
done
echo "Backup complete."
if [ -n "$BACKUP_KEEP_DAYS" ]; then
if [ -n "${BACKUP_KEEP_DAYS:-}" ]; then
sec=$((86400*BACKUP_KEEP_DAYS))
date_from_remove=$(date -d "@$(($(date +%s) - sec))" +%Y-%m-%d)
backups_query="Contents[?LastModified<='${date_from_remove} 00:00:00'].{Key: Key}"
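With the loop above, each database gets its own timestamped object under the configured prefix. A rough sanity check, assuming placeholder values `S3_BUCKET=my-bucket`, `S3_PREFIX=backup`, and `DATABASE_NAMES="db_one db_two"` on a non-MSSQL server:

```sh
# Expect one object per database, e.g.
#   s3://my-bucket/backup/db_one_<timestamp>.dump
#   s3://my-bucket/backup/db_two_<timestamp>.dump
# (each with a .gpg suffix when PASSPHRASE is set).
aws s3 ls "s3://my-bucket/backup/" | grep -E '(db_one|db_two)_.*\.dump'
```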

env.sh

@@ -1,39 +1,67 @@
if [ -z "$S3_BUCKET" ]; then
if [ -z "${S3_BUCKET:-}" ]; then
echo "You need to set the S3_BUCKET environment variable."
exit 1
fi
if [ -z "$DATABASE_SERVER" ]; then
if [ -z "${DATABASE_SERVER:-}" ]; then
echo "You need to set the DATABASE_SERVER environment variable. (postgres, mariadb, mssql)"
exit 1
fi
if [ -z "$DATABASE_NAME" ]; then
echo "You need to set the DATABASE_NAME environment variable."
exit 1
database_names_env="${DATABASE_NAMES:-}"
database_name_env="${DATABASE_NAME:-}"
if [ -n "$database_names_env" ]; then
sanitized_names=$(printf "%s" "$database_names_env" | tr ',' ' ')
DATABASE_NAMES_LIST=$(printf "%s" "$sanitized_names" | awk '{$1=$1; print}')
else
DATABASE_NAMES_LIST=""
fi
if [ -z "$DATABASE_HOST" ]; then
if [ -n "$database_name_env" ]; then
if [ -z "$DATABASE_NAMES_LIST" ]; then
DATABASE_NAMES_LIST="$database_name_env"
fi
else
if [ -z "$DATABASE_NAMES_LIST" ]; then
echo "You need to set the DATABASE_NAME or DATABASE_NAMES environment variable."
exit 1
fi
fi
if [ -z "$database_name_env" ]; then
for first_database in $DATABASE_NAMES_LIST; do
DATABASE_NAME="$first_database"
break
done
else
DATABASE_NAME="$database_name_env"
fi
export DATABASE_NAMES_LIST
export DATABASE_NAME
if [ -z "${DATABASE_HOST:-}" ]; then
echo "You need to set the DATABASE_HOST environment variable."
exit 1
fi
if [ -z "$DATABASE_PORT" ]; then
if [ -z "${DATABASE_PORT:-}" ]; then
echo "You need to set the DATABASE_PORT environment variable."
exit 1
fi
if [ -z "$DATABASE_USER" ]; then
if [ -z "${DATABASE_USER:-}" ]; then
echo "You need to set the DATABASE_USER environment variable."
exit 1
fi
if [ -z "$DATABASE_PASSWORD" ]; then
if [ -z "${DATABASE_PASSWORD:-}" ]; then
echo "You need to set the DATABASE_PASSWORD environment variable."
exit 1
fi
if [ -z "$S3_ENDPOINT" ]; then
if [ -z "${S3_ENDPOINT:-}" ]; then
echo "No S3_ENDPOINT set, using default aws region."
aws_args=""
else
@@ -41,10 +69,10 @@ else
fi
if [ -n "$S3_ACCESS_KEY_ID" ]; then
if [ -n "${S3_ACCESS_KEY_ID:-}" ]; then
export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
fi
if [ -n "$S3_SECRET_ACCESS_KEY" ]; then
if [ -n "${S3_SECRET_ACCESS_KEY:-}" ]; then
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
fi
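For illustration, the normalization above turns a mixed comma/space value into a plain space-separated list, and the first entry becomes `DATABASE_NAME` when that variable is unset. A small sketch with a made-up value:

```sh
# Mirrors the tr/awk normalization in env.sh above; the value is hypothetical.
DATABASE_NAMES="db_one, db_two   db_three"
sanitized_names=$(printf "%s" "$DATABASE_NAMES" | tr ',' ' ')
DATABASE_NAMES_LIST=$(printf "%s" "$sanitized_names" | awk '{$1=$1; print}')
echo "$DATABASE_NAMES_LIST"   # prints: db_one db_two db_three
```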

umb_pm_dev.bacpac (new binary file)

Binary file not shown.