Add mysql support

This commit is contained in:
Shkar T. Noori 2023-08-10 10:16:56 +03:00
parent 2564496912
commit 943f04209e
No known key found for this signature in database
GPG Key ID: E7AD76088FB6FE02
9 changed files with 196 additions and 94 deletions

View File

@ -1,8 +1,9 @@
---
name: build and push images
on:
push:
branches: ['master']
branches: ['main']
jobs:
build-and-push-image:
@ -11,36 +12,32 @@ jobs:
strategy:
matrix:
include:
- { postgres: 11, alpine: '3.10' }
- { postgres: 12, alpine: '3.12' }
- { postgres: 13, alpine: '3.14' }
- { postgres: 14, alpine: '3.16' }
- { postgres: 15, alpine: '3.17' }
- { db: mysql, alpine: 3.18 }
- { db: postgres, alpine: 3.18 }
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v2
- name: Log in to DockerHub
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
registry: reg.dev.krd
username: ${{ secrets.HARBOR_PUBLIC_USER }}
password: ${{ secrets.HARBOR_PUBLIC_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
tags: ${{ github.repository }}:${{ matrix.postgres }}
tags: db-backup-s3/${{ matrix.db }}-backup:alpine-${{ matrix.alpine }}
build-args: |
ALPINE_VERSION=${{ matrix.alpine }}
DATABASE_SERVER=${{ matrix.db }}
platforms: |
linux/amd64
linux/arm64

View File

@ -1,16 +1,37 @@
ARG ALPINE_VERSION
# Download go-cron
ARG ALPINE_VERSION=3.18
FROM curlimages/curl AS go-cron-downloader
ARG GOCRON_VERSION=0.0.5
ARG TARGETARCH=amd64
RUN curl -sL https://github.com/ivoronin/go-cron/releases/download/v${GOCRON_VERSION}/go-cron_${GOCRON_VERSION}_linux_${TARGETARCH}.tar.gz -O
RUN tar xvf go-cron_${GOCRON_VERSION}_linux_${TARGETARCH}.tar.gz
FROM alpine:${ALPINE_VERSION}
ARG TARGETARCH
ARG TARGETARCH=amd64
ARG DATABASE_SERVER=postgres
ADD src/install.sh install.sh
RUN sh install.sh && rm install.sh
RUN apk update && \
apk add --no-cache \
gnupg \
aws-cli
ENV POSTGRES_DATABASE ''
ENV POSTGRES_HOST ''
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER ''
ENV POSTGRES_PASSWORD ''
RUN if [[ "${DATABASE_SERVER}" == "mysql" ]]; then apk add --no-cache mysql-client mariadb-connector-c; fi
RUN if [[ "${DATABASE_SERVER}" == "postgres" ]]; then apk add --no-cache postgresql-client ; fi
RUN rm -rf /var/cache/apk/*
COPY --from=go-cron-downloader /home/curl_user/go-cron /usr/local/bin/go-cron
ENV DATABASE_NAME ''
ENV DATABASE_HOST ''
ENV DATABASE_PORT ''
ENV DATABASE_USER ''
ENV DATABASE_PASSWORD ''
ENV PGDUMP_EXTRA_OPTS ''
ENV MYSQLDUMP_EXTRA_OPTS ''
ENV S3_ACCESS_KEY_ID ''
ENV S3_SECRET_ACCESS_KEY ''
ENV S3_BUCKET ''
@ -21,10 +42,12 @@ ENV S3_S3V4 'no'
ENV SCHEDULE ''
ENV PASSPHRASE ''
ENV BACKUP_KEEP_DAYS ''
ENV DATABASE_SERVER=${DATABASE_SERVER}
ADD src/run.sh run.sh
ADD src/env.sh env.sh
ADD src/backup.sh backup.sh
ADD src/helpers.sh helpers.sh
ADD src/restore.sh restore.sh
CMD ["sh", "run.sh"]

View File

@ -1,5 +1,5 @@
# Introduction
This project provides Docker images to periodically back up a PostgreSQL database to AWS S3, and to restore from the backup as needed.
This project provides Docker images to periodically back up a database to AWS S3, and to restore from the backup as needed.
# Usage
## Backup
@ -8,11 +8,11 @@ services:
postgres:
image: postgres:13
environment:
POSTGRES_USER: user
POSTGRES_PASSWORD: password
DATABASE_USER: user
DATABASE_PASSWORD: password
backup:
image: eeshugerman/postgres-backup-s3:15
image: reg.dev.krd/db-backup-s3/postgres-backup:alpine-3.18
environment:
SCHEDULE: '@weekly' # optional
BACKUP_KEEP_DAYS: 7 # optional
@ -22,10 +22,10 @@ services:
S3_SECRET_ACCESS_KEY: secret
S3_BUCKET: my-bucket
S3_PREFIX: backup
POSTGRES_HOST: postgres
POSTGRES_DATABASE: dbname
POSTGRES_USER: user
POSTGRES_PASSWORD: password
DATABASE_HOST: postgres
DATABASE_NAME: dbname
DATABASE_USER: user
DATABASE_PASSWORD: password
```
- Images are tagged by the major PostgreSQL version supported: `11`, `12`, `13`, `14`, or `15`.
@ -61,22 +61,7 @@ docker compose up -d
```
# Acknowledgements
This project is a fork and re-structuring of @schickling's [postgres-backup-s3](https://github.com/schickling/dockerfiles/tree/master/postgres-backup-s3) and [postgres-restore-s3](https://github.com/schickling/dockerfiles/tree/master/postgres-restore-s3).
This project is a fork and re-structuring of @eeshugerman's fork of @schickling's [postgres-backup-s3](https://github.com/schickling/dockerfiles/tree/master/postgres-backup-s3) and [postgres-restore-s3](https://github.com/schickling/dockerfiles/tree/master/postgres-restore-s3).
## Fork goals
These changes would have been difficult or impossible to merge into @schickling's repo or similarly-structured forks.
- dedicated repository
- automated builds
- support multiple PostgreSQL versions
- backup and restore with one image
## Other changes and features
- some environment variables renamed or removed
- uses `pg_dump`'s `custom` format (see [docs](https://www.postgresql.org/docs/10/app-pgdump.html))
- drop and re-create all database objects on restore
- backup blobs and all schemas by default
- no Python 2 dependencies
- filter backups on S3 by database name
- support encrypted (password-protected) backups
- support for restoring from a specific backup by timestamp
- support for auto-removal of old backups
The fork by @eeshugerman works very well for PostgreSQL databases; this repo is intended to add support for additional database servers.

View File

@ -3,26 +3,69 @@
services:
postgres:
image: postgres:14
image: postgres:15-alpine
environment:
POSTGRES_USER: user
POSTGRES_PASSWORD: password
backup:
mysql:
image: mysql:8.0
environment:
MYSQL_USER: user
MYSQL_PASSWORD: password
MYSQL_DATABASE: database
MYSQL_ROOT_PASSWORD: root_password
minio:
image: bitnami/minio
ports:
- 9000:9000
- 9001:9001
environment:
MINIO_ROOT_USER: miniouser
MINIO_ROOT_PASSWORD: minioroot
MINIO_DEFAULT_BUCKETS: backups
backup-postgres:
build:
context: .
args:
ALPINE_VERSION: '3.16'
ALPINE_VERSION: '3.18'
DATABASE_SERVER: postgres
environment:
SCHEDULE: '@weekly' # optional
# SCHEDULE: '@weekly' # optional
BACKUP_KEEP_DAYS: 7 # optional
PASSPHRASE: passphrase # optional
S3_REGION:
S3_ACCESS_KEY_ID:
S3_SECRET_ACCESS_KEY:
S3_BUCKET:
S3_PREFIX: backup
POSTGRES_HOST: postgres
POSTGRES_DATABASE: postgres
POSTGRES_USER: user
POSTGRES_PASSWORD: password
# S3_REGION:
S3_ENDPOINT: http://minio:9000
S3_ACCESS_KEY_ID: miniouser
S3_SECRET_ACCESS_KEY: minioroot
S3_BUCKET: backups
S3_PREFIX: postgres-backups
DATABASE_HOST: postgres
DATABASE_NAME: user
DATABASE_USER: user
DATABASE_PORT: 5432
DATABASE_PASSWORD: password
backup-mysql:
build:
context: .
args:
ALPINE_VERSION: '3.18'
DATABASE_SERVER: mysql
environment:
# SCHEDULE: '@weekly' # optional
BACKUP_KEEP_DAYS: 7 # optional
PASSPHRASE: passphrase # optional
# S3_REGION:
S3_ENDPOINT: http://minio:9000
S3_ACCESS_KEY_ID: miniouser
S3_SECRET_ACCESS_KEY: minioroot
S3_BUCKET: backups
S3_PREFIX: mysql-backups
DATABASE_HOST: mysql
DATABASE_NAME: database
DATABASE_USER: root
DATABASE_PORT: 3306
DATABASE_PASSWORD: root_password

View File

@ -4,18 +4,13 @@ set -eu
set -o pipefail
source ./env.sh
source ./helpers.sh
echo "Creating backup of $POSTGRES_DATABASE database..."
pg_dump --format=custom \
-h $POSTGRES_HOST \
-p $POSTGRES_PORT \
-U $POSTGRES_USER \
-d $POSTGRES_DATABASE \
$PGDUMP_EXTRA_OPTS \
> db.dump
echo "Creating backup of $DATABASE_NAME database..."
backup
timestamp=$(date +"%Y-%m-%dT%H:%M:%S")
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${POSTGRES_DATABASE}_${timestamp}.dump"
s3_uri_base="s3://${S3_BUCKET}/${S3_PREFIX}/${DATABASE_NAME}_${timestamp}.dump"
if [ -n "$PASSPHRASE" ]; then
echo "Encrypting backup..."

View File

@ -3,33 +3,33 @@ if [ -z "$S3_BUCKET" ]; then
exit 1
fi
if [ -z "$POSTGRES_DATABASE" ]; then
echo "You need to set the POSTGRES_DATABASE environment variable."
if [ -z "$DATABASE_NAME" ]; then
echo "You need to set the DATABASE_NAME environment variable."
exit 1
fi
if [ -z "$POSTGRES_HOST" ]; then
# https://docs.docker.com/network/links/#environment-variables
if [ -n "$POSTGRES_PORT_5432_TCP_ADDR" ]; then
POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
else
echo "You need to set the POSTGRES_HOST environment variable."
exit 1
fi
fi
if [ -z "$POSTGRES_USER" ]; then
echo "You need to set the POSTGRES_USER environment variable."
if [ -z "$DATABASE_HOST" ]; then
echo "You need to set the DATABASE_HOST environment variable."
exit 1
fi
if [ -z "$POSTGRES_PASSWORD" ]; then
echo "You need to set the POSTGRES_PASSWORD environment variable."
if [ -z "$DATABASE_PORT" ]; then
echo "You need to set the DATABASE_PORT environment variable."
exit 1
fi
if [ -z "$DATABASE_USER" ]; then
echo "You need to set the DATABASE_USER environment variable."
exit 1
fi
if [ -z "$DATABASE_PASSWORD" ]; then
echo "You need to set the DATABASE_PASSWORD environment variable."
exit 1
fi
if [ -z "$S3_ENDPOINT" ]; then
echo "No S3_ENDPOINT set, using default aws region."
aws_args=""
else
aws_args="--endpoint-url $S3_ENDPOINT"
@ -42,5 +42,6 @@ fi
if [ -n "$S3_SECRET_ACCESS_KEY" ]; then
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
fi
export AWS_DEFAULT_REGION=$S3_REGION
export PGPASSWORD=$POSTGRES_PASSWORD
export PGPASSWORD=$DATABASE_PASSWORD

58
src/helpers.sh Normal file
View File

@ -0,0 +1,58 @@
# Dispatch to the matching backup implementation based on $DATABASE_SERVER.
# Exits 1 on an unrecognized value.
# NOTE: uses POSIX `[ ... ]` rather than the bash-only `[[ ... ]]`, because
# the image runs these scripts with `sh` (busybox ash on Alpine).
backup() {
    if [ "$DATABASE_SERVER" = "postgres" ]; then
        backup_postgres
    elif [ "$DATABASE_SERVER" = "mysql" ]; then
        backup_mysql
    else
        echo "Unknown database server: $DATABASE_SERVER"
        exit 1
    fi
}
# Dispatch to the matching restore implementation based on $DATABASE_SERVER.
# Exits 1 on an unrecognized value.
# A POSIX `case` is used instead of the bash-only `[[ ... ]]` so this works
# under busybox ash (`sh`), which is how the image invokes these scripts.
restore() {
    case "$DATABASE_SERVER" in
        postgres)
            restore_postgres
            ;;
        mysql)
            restore_mysql
            ;;
        *)
            echo "Unknown database server: $DATABASE_SERVER"
            exit 1
            ;;
    esac
}
# Restore the Postgres database from ./db.dump (pg_dump custom format).
# --clean --if-exists drops existing objects before re-creating them.
# Authentication relies on PGPASSWORD exported by env.sh.
restore_postgres() {
    # Quote each expansion so values containing spaces or glob characters
    # are passed as single arguments (the old unquoted $conn_opts would split).
    pg_restore --clean --if-exists \
        -h "$DATABASE_HOST" \
        -p "$DATABASE_PORT" \
        -U "$DATABASE_USER" \
        -d "$DATABASE_NAME" \
        db.dump
}
# Dump the Postgres database to ./db.dump using pg_dump's custom format.
# Authentication relies on PGPASSWORD exported by env.sh.
backup_postgres() {
    # Connection parameters are quoted to prevent word splitting/globbing.
    # $PGDUMP_EXTRA_OPTS is intentionally left unquoted so that multiple
    # extra options word-split into separate arguments.
    pg_dump --format=custom \
        -h "$DATABASE_HOST" \
        -p "$DATABASE_PORT" \
        -U "$DATABASE_USER" \
        -d "$DATABASE_NAME" \
        $PGDUMP_EXTRA_OPTS > db.dump
}
# Dump the MySQL database to ./db.dump as plain SQL.
backup_mysql() {
    # "$DATABASE_NAME" is quoted (it was not before) so a name containing
    # shell-special characters is passed as a single argument.
    # $MYSQLDUMP_EXTRA_OPTS is intentionally unquoted to allow word splitting
    # of multiple extra options.
    mysqldump \
        --host "$DATABASE_HOST" \
        --port "$DATABASE_PORT" \
        --user "$DATABASE_USER" \
        --password="$DATABASE_PASSWORD" $MYSQLDUMP_EXTRA_OPTS \
        "$DATABASE_NAME" > db.dump
}
# Restore the MySQL database from ./db.dump (plain SQL produced by mysqldump).
# NOTE: this function no longer echoes a progress message or removes db.dump;
# the caller (restore.sh) already prints "Restoring from backup..." and runs
# `rm db.dump` afterwards — deleting the file here made that second `rm`
# fail under `set -eu`.
restore_mysql() {
    # All expansions quoted to avoid word splitting of connection parameters.
    mysql \
        -h "$DATABASE_HOST" \
        -P "$DATABASE_PORT" \
        -u "$DATABASE_USER" \
        --password="$DATABASE_PASSWORD" \
        "$DATABASE_NAME" < db.dump
}

View File

@ -15,11 +15,11 @@ fi
if [ $# -eq 1 ]; then
timestamp="$1"
key_suffix="${POSTGRES_DATABASE}_${timestamp}${file_type}"
key_suffix="${DATABASE_NAME}_${timestamp}${file_type}"
else
echo "Finding latest backup..."
key_suffix=$(
aws $aws_args s3 ls "${s3_uri_base}/${POSTGRES_DATABASE}" \
aws $aws_args s3 ls "${s3_uri_base}/${DATABASE_NAME}" \
| sort \
| tail -n 1 \
| awk '{ print $4 }'
@ -35,10 +35,8 @@ if [ -n "$PASSPHRASE" ]; then
rm db.dump.gpg
fi
conn_opts="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DATABASE"
echo "Restoring from backup..."
pg_restore $conn_opts --clean --if-exists db.dump
restore
rm db.dump
echo "Restore complete."

View File

@ -2,6 +2,8 @@
set -eu
sh env.sh
if [ "$S3_S3V4" = "yes" ]; then
aws configure set default.s3.signature_version s3v4
fi