Merge pull request #2 from ditkrg/add-mssql

add mssql
This commit is contained in:
Shakar Bakr 2025-11-03 16:30:26 +03:00 committed by GitHub
commit e97dc2cb3c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 108 additions and 74 deletions

View File

@@ -21,31 +21,40 @@ RUN curl -O https://download.microsoft.com/download/b/9/f/b9f3cce4-3925-46d4-9f4
apk add --allow-untrusted mssql-tools18_18.1.1.1-1_amd64.apk && \
rm msodbcsql18_18.1.1.1-1_amd64.apk mssql-tools18_18.1.1.1-1_amd64.apk
# Install go-cron for scheduled backups
# Source: https://github.com/ivoronin/go-cron
ARG TARGETARCH
RUN curl -L https://github.com/ivoronin/go-cron/releases/download/v0.0.5/go-cron_0.0.5_linux_${TARGETARCH}.tar.gz -O && \
tar xvf go-cron_0.0.5_linux_${TARGETARCH}.tar.gz && \
rm go-cron_0.0.5_linux_${TARGETARCH}.tar.gz && \
mv go-cron /usr/local/bin/go-cron && \
chmod +x /usr/local/bin/go-cron
RUN rm -rf /var/cache/apk/*
ENV PATH="${PATH}:/opt/mssql-tools18/bin"
ENV DATABASE_NAME ''
ENV DATABASE_HOST ''
ENV DATABASE_PORT ''
ENV DATABASE_USER ''
ENV DATABASE_SERVER ''
ENV DATABASE_PASSWORD ''
ENV PGDUMP_EXTRA_OPTS ''
ENV MARIADB_DUMP_EXTRA_OPTS ''
ENV MARIADB_EXTRA_OPTS ''
ENV MSSQL_EXTRA_OPTS ''
ENV MSSQL_DATA_DIR '/var/opt/mssql/data'
ENV S3_ACCESS_KEY_ID ''
ENV S3_SECRET_ACCESS_KEY ''
ENV S3_BUCKET ''
ENV S3_REGION 'us-west-1'
ENV S3_PATH 'backup'
ENV S3_ENDPOINT ''
ENV S3_S3V4 'no'
ENV SCHEDULE ''
ENV PASSPHRASE ''
ENV BACKUP_KEEP_DAYS ''
ENV DATABASE_NAME=''
ENV DATABASE_HOST=''
ENV DATABASE_PORT=''
ENV DATABASE_USER=''
ENV DATABASE_SERVER=''
ENV DATABASE_PASSWORD=''
ENV PGDUMP_EXTRA_OPTS=''
ENV MARIADB_DUMP_EXTRA_OPTS=''
ENV MARIADB_EXTRA_OPTS=''
ENV MSSQL_EXTRA_OPTS=''
ENV MSSQL_DATA_DIR='/var/opt/mssql/data'
ENV S3_ACCESS_KEY_ID=''
ENV S3_SECRET_ACCESS_KEY=''
ENV S3_BUCKET=''
ENV S3_REGION='us-west-1'
ENV S3_PATH='backup'
ENV S3_ENDPOINT=''
ENV S3_S3V4='no'
ENV SCHEDULE=''
ENV PASSPHRASE=''
ENV BACKUP_KEEP_DAYS=''
ADD src/run.sh run.sh
ADD src/env.sh env.sh

View File

@@ -28,7 +28,10 @@ services:
S3_BUCKET: my-bucket
S3_PREFIX: backup
DATABASE_HOST: postgres
# Back up a single database
DATABASE_NAME: dbname
# Or back up multiple databases (comma or space separated)
# DATABASE_NAMES: "db_one,db_two"
DATABASE_USER: user
DATABASE_PASSWORD: password
DATABASE_SERVER: postgres # postgres, mariadb, or mssql
@@ -63,7 +66,7 @@ services:
S3_PREFIX: mssql-backup
DATABASE_HOST: mssql
DATABASE_PORT: 1433
DATABASE_NAME: MyDatabase
DATABASE_NAME: MyDatabase # or set DATABASE_NAMES for multiple databases
DATABASE_USER: sa
DATABASE_PASSWORD: YourStrong@Passw0rd
DATABASE_SERVER: mssql
@@ -76,6 +79,7 @@ volumes:
See [`docker-compose.yaml`](./docker-compose.yaml) for a complete working example.
- Images are tagged by the major PostgreSQL version supported: `11`, `12`, `13`, `14`, or `15`.
- To back up multiple databases in a single run, set `DATABASE_NAMES` to a comma- or space-separated list. `DATABASE_NAME` remains available for single database backups.
- The `SCHEDULE` variable determines backup frequency. See go-cron schedules documentation [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). Omit to run the backup immediately and then exit.
- If `PASSPHRASE` is provided, the backup will be encrypted using GPG.
- Run `docker exec <container name> sh backup.sh` to trigger a backup ad-hoc.

View File

@@ -104,6 +104,7 @@ services:
S3_PREFIX: mssql-backups
DATABASE_HOST: mssql
DATABASE_NAME: TestDB
# DATABASE_NAMES: "TestDB,AnotherDatabase" # Optional: back up multiple databases
DATABASE_USER: sa
DATABASE_PORT: 1433
DATABASE_SERVER: mssql

View File

@@ -6,37 +6,41 @@ set -o pipefail
source ./env.sh
source ./helpers.sh
echo "Creating backup of $DATABASE_NAME database..."
backup
for CURRENT_DATABASE in $DATABASE_NAMES_LIST; do
DATABASE_NAME="$CURRENT_DATABASE"
timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
echo "Creating backup of $DATABASE_NAME database..."
backup
# MSSQL uses .bak extension, other databases use .dump
if [ "$DATABASE_SERVER" = "mssql" ]; then
local_file="${MSSQL_DATA_DIR}/db.bak"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.bak"
else
local_file="db.dump"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.dump"
fi
timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
if [ -n "$PASSPHRASE" ]; then
echo "Encrypting backup..."
gpg --symmetric --batch --passphrase "$PASSPHRASE" "$local_file"
# MSSQL uses .bak extension, other databases use .dump
if [ "$DATABASE_SERVER" = "mssql" ]; then
local_file="${MSSQL_DATA_DIR}/db.bak"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.bak"
else
local_file="db.dump"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.dump"
fi
if [ -n "${PASSPHRASE:-}" ]; then
echo "Encrypting backup..."
gpg --symmetric --batch --passphrase "$PASSPHRASE" "$local_file"
rm "$local_file"
local_file="${local_file}.gpg"
s3_uri="${s3_uri_base}.gpg"
else
s3_uri="$s3_uri_base"
fi
echo "Uploading backup of $DATABASE_NAME to $S3_BUCKET..."
aws $aws_args s3 cp "$local_file" "$s3_uri"
rm "$local_file"
local_file="${local_file}.gpg"
s3_uri="${s3_uri_base}.gpg"
else
s3_uri="$s3_uri_base"
fi
echo "Uploading backup to $S3_BUCKET..."
aws $aws_args s3 cp "$local_file" "$s3_uri"
rm "$local_file"
done
echo "Backup complete."
if [ -n "$BACKUP_KEEP_DAYS" ]; then
if [ -n "${BACKUP_KEEP_DAYS:-}" ]; then
sec=$((86400*BACKUP_KEEP_DAYS))
date_from_remove=$(date -d "@$(($(date +%s) - sec))" +%Y-%m-%d)
backups_query="Contents[?LastModified<='${date_from_remove} 00:00:00'].{Key: Key}"

View File

@@ -1,39 +1,67 @@
if [ -z "$S3_BUCKET" ]; then
if [ -z "${S3_BUCKET:-}" ]; then
echo "You need to set the S3_BUCKET environment variable."
exit 1
fi
if [ -z "$DATABASE_SERVER" ]; then
if [ -z "${DATABASE_SERVER:-}" ]; then
echo "You need to set the DATABASE_SERVER environment variable. (postgres, mariadb, mssql)"
exit 1
fi
if [ -z "$DATABASE_NAME" ]; then
echo "You need to set the DATABASE_NAME environment variable."
exit 1
database_names_env="${DATABASE_NAMES:-}"
database_name_env="${DATABASE_NAME:-}"
if [ -n "$database_names_env" ]; then
sanitized_names=$(printf "%s" "$database_names_env" | tr ',' ' ')
DATABASE_NAMES_LIST=$(printf "%s" "$sanitized_names" | awk '{$1=$1; print}')
else
DATABASE_NAMES_LIST=""
fi
if [ -z "$DATABASE_HOST" ]; then
if [ -n "$database_name_env" ]; then
if [ -z "$DATABASE_NAMES_LIST" ]; then
DATABASE_NAMES_LIST="$database_name_env"
fi
else
if [ -z "$DATABASE_NAMES_LIST" ]; then
echo "You need to set the DATABASE_NAME or DATABASE_NAMES environment variable."
exit 1
fi
fi
if [ -z "$database_name_env" ]; then
for first_database in $DATABASE_NAMES_LIST; do
DATABASE_NAME="$first_database"
break
done
else
DATABASE_NAME="$database_name_env"
fi
export DATABASE_NAMES_LIST
export DATABASE_NAME
if [ -z "${DATABASE_HOST:-}" ]; then
echo "You need to set the DATABASE_HOST environment variable."
exit 1
fi
if [ -z "$DATABASE_PORT" ]; then
if [ -z "${DATABASE_PORT:-}" ]; then
echo "You need to set the DATABASE_PORT environment variable."
exit 1
fi
if [ -z "$DATABASE_USER" ]; then
if [ -z "${DATABASE_USER:-}" ]; then
echo "You need to set the DATABASE_USER environment variable."
exit 1
fi
if [ -z "$DATABASE_PASSWORD" ]; then
if [ -z "${DATABASE_PASSWORD:-}" ]; then
echo "You need to set the DATABASE_PASSWORD environment variable."
exit 1
fi
if [ -z "$S3_ENDPOINT" ]; then
if [ -z "${S3_ENDPOINT:-}" ]; then
echo "No S3_ENDPOINT set, using default aws region."
aws_args=""
else
@@ -41,10 +69,10 @@ else
fi
if [ -n "$S3_ACCESS_KEY_ID" ]; then
if [ -n "${S3_ACCESS_KEY_ID:-}" ]; then
export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
fi
if [ -n "$S3_SECRET_ACCESS_KEY" ]; then
if [ -n "${S3_SECRET_ACCESS_KEY:-}" ]; then
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
fi

View File

@@ -11,22 +11,10 @@ fi
if [ -z "$SCHEDULE" ]; then
sh backup.sh
else
# For non-root users, use a writable directory for crontabs
# busybox crond supports -c option to specify crontab directory
CRON_USER=$(id -u)
CRON_DIR="${HOME}/crontabs"
# Create crontab directory
mkdir -p "$CRON_DIR"
# Write crontab entry
echo "$SCHEDULE /bin/sh $(pwd)/backup.sh" > "$CRON_DIR/$CRON_USER"
chmod 600 "$CRON_DIR/$CRON_USER"
echo "Backup schedule configured: $SCHEDULE"
echo "Crontab file: $CRON_DIR/$CRON_USER"
echo "Starting crond..."
echo "Starting go-cron..."
# Start crond in foreground mode with custom crontab directory
exec crond -f -d 8 -c "$CRON_DIR"
# Use go-cron to run backup.sh on the specified schedule
# go-cron takes schedule and command as arguments
exec go-cron "$SCHEDULE" /bin/sh "$(pwd)/backup.sh"
fi

BIN
umb_pm_dev.bacpac Normal file

Binary file not shown.