From 71256554dc6ee128cd6e7bbad55959ed82ecaeb2 Mon Sep 17 00:00:00 2001 From: pilotso11 Date: Sat, 19 Jul 2025 17:17:15 +0100 Subject: [PATCH 1/6] 1. Add support for password as a file. 2. Add support for backup of multiple databases. 3. Improve build and push script. --- .github/workflows/docker-build-push.yml | 59 +++++ .github/workflows/test.yml | 301 ------------------------ .gitignore | 5 + 10/Dockerfile | 14 -- 10/backup.py | 157 ------------ 10/entrypoint.sh | 12 - 10/restore.py | 81 ------- 10/restore.sh | 3 - 11/backup.py | 157 ------------ 11/entrypoint.sh | 12 - 11/restore.py | 81 ------- 12/Dockerfile | 14 -- 12/backup.py | 157 ------------ 12/entrypoint.sh | 12 - 12/restore.py | 81 ------- 13/Dockerfile | 14 -- 13/backup.py | 157 ------------ 13/entrypoint.sh | 12 - 13/restore.py | 81 ------- 13/restore.sh | 3 - 14/Dockerfile | 14 -- 14/backup.py | 157 ------------ 14/entrypoint.sh | 12 - 14/restore.py | 81 ------- 15/Dockerfile | 14 -- 15/backup.py | 157 ------------ 15/entrypoint.sh | 12 - 15/restore.py | 81 ------- 16/Dockerfile | 14 -- 16/backup.py | 157 ------------ 16/entrypoint.sh | 12 - 16/restore.py | 81 ------- Makefile | 2 - README.md | 5 +- {11 => template}/Dockerfile | 3 +- template/Dockerfile.template | 14 -- template/backup.py | 51 ++-- template/restore.py | 16 +- template/run.sh | 4 + test.sh | 21 ++ update.py | 26 -- 41 files changed, 137 insertions(+), 2210 deletions(-) create mode 100644 .github/workflows/docker-build-push.yml delete mode 100644 .github/workflows/test.yml create mode 100644 .gitignore delete mode 100644 10/Dockerfile delete mode 100644 10/backup.py delete mode 100755 10/entrypoint.sh delete mode 100644 10/restore.py delete mode 100755 10/restore.sh delete mode 100644 11/backup.py delete mode 100755 11/entrypoint.sh delete mode 100644 11/restore.py delete mode 100644 12/Dockerfile delete mode 100644 12/backup.py delete mode 100755 12/entrypoint.sh delete mode 100644 12/restore.py delete mode 100644 13/Dockerfile delete mode 100644 13/backup.py delete mode 100755 13/entrypoint.sh delete mode 100644 13/restore.py delete mode 100755 13/restore.sh delete mode 100644 14/Dockerfile delete mode 100644 14/backup.py delete mode 100755 14/entrypoint.sh delete mode 100644 14/restore.py delete mode 100644 15/Dockerfile delete mode 100644 15/backup.py delete mode 100755 15/entrypoint.sh delete mode 100644 15/restore.py delete mode 100644 16/Dockerfile delete mode 100644 16/backup.py delete mode 100755 16/entrypoint.sh delete mode 100644 16/restore.py delete mode 100644 Makefile rename {11 => template}/Dockerfile (78%) delete mode 100644 template/Dockerfile.template create mode 100644 template/run.sh create mode 100755 test.sh delete mode 100644 update.py diff --git a/.github/workflows/docker-build-push.yml b/.github/workflows/docker-build-push.yml new file mode 100644 index 0000000..38f791d --- /dev/null +++ b/.github/workflows/docker-build-push.yml @@ -0,0 +1,59 @@ +name: Docker Build and Push + +on: + push: + branches: [ main ] + workflow_dispatch: # Allow manual triggering + +jobs: + build-and-push: + name: Build and Push PostgreSQL Backup Images + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + postgres-version: ['10', '11', '12', '13', '14', '15', '16', '17'] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to DockerHub + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + username: ${{ 
secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ github.repository }} + tags: | + type=ref,event=branch-pg${{ matrix.postgres-version }} + type=ref,event=pr-pg${{ matrix.postgres-version }} + type=semver,pattern={{version}}-pg${{ matrix.postgres-version }} + type=semver,pattern={{major}}.{{minor}}-pg${{ matrix.postgres-version }} + type=semver,pattern={{major}}-pg${{ matrix.postgres-version }} + type=sha-pg${{ matrix.postgres-version }} + + - name: Prepare build context + run: | + mkdir -p build/${{ matrix.postgres-version }} + cp template/* build/${{ matrix.postgres-version }}/ + sed "s/%VERSION%/${{ matrix.postgres-version }}/g" template/Dockerfile > build/${{ matrix.postgres-version }}/Dockerfile + + - name: Build and push + uses: docker/build-push-action@v6 + with: + context: build/${{ matrix.postgres-version }} + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 61855fa..0000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,301 +0,0 @@ -name: Test -on: [push, pull_request] - -jobs: - tests: - name: ${{ matrix.version }} - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - include: - - {version: '16', os: ubuntu-latest} - - {version: '15', os: ubuntu-latest} - - {version: '14', os: ubuntu-latest} - - {version: '13', os: ubuntu-latest} - - {version: '12', os: ubuntu-latest} - - {version: '11', os: ubuntu-latest} - - {version: '10', os: ubuntu-latest} - - services: - postgres: - image: postgres:${{ matrix.version }} - env: - POSTGRES_PASSWORD: test - POSTGRES_USER: test - POSTGRES_DB: test_${{ matrix.version }} - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 10 - s3: - image: zenko/cloudserver - env: - ENDPOINT: s3 - S3BACKEND: mem - REMOTE_MANAGEMENT_DISABLE: 1 - SCALITY_ACCESS_KEY_ID: access_key - SCALITY_SECRET_ACCESS_KEY: secret - - steps: - - name: Create Test Data - uses: addnab/docker-run-action@v3 - with: - image: postgres:${{ matrix.version }} - run: > - psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' - CREATE TABLE books ( - id serial PRIMARY KEY, - name VARCHAR ( 128 ) UNIQUE NOT NULL, - author VARCHAR (128 ) NOT NULL - ); - INSERT INTO books (name, author) VALUES - ($$Fittstim$$, $$Linda Skugge$$), - ($$DSM-5$$, $$American Psychiatric Association$$); - - CREATE TABLE movies ( - id serial PRIMARY KEY, - name VARCHAR ( 128 ) UNIQUE NOT NULL, - director VARCHAR (128 ) NOT NULL - ); - INSERT INTO movies (name, director) VALUES - ($$Beau Travail$$, $$Claire Denis$$), - ($$Reservoir Dogs$$, $$Quentin Tarantino$$); - ' - options: > - -e PGPASSWORD=test - - - name: Create S3 bucket - uses: addnab/docker-run-action@v3 - with: - image: amazon/aws-cli - run: aws --endpoint-url=http://s3:8000 s3api create-bucket --bucket test-postgresql-backup; aws --endpoint-url=http://s3:8000 s3 ls - options: > - -e AWS_EC2_METADATA_DISABLED=true - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - - - uses: actions/checkout@v2 - - - name: Build Docker Image - uses: docker/build-push-action@v2 - with: - tags: heyman/postgresql-backup:latest - push: false - context: 
${{ matrix.version }} - - - name: Take Backup - uses: addnab/docker-run-action@v3 - with: - image: heyman/postgresql-backup:latest - run: python3 -u /backup/backup.py - options: > - -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' - -e DB_HOST=postgres - -e DB_PASS=test - -e DB_USER=test - -e DB_NAME=test_${{ matrix.version }} - -e S3_PATH=s3://test-postgresql-backup/backups - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - -e AWS_DEFAULT_REGION=us-east-1 - -e FILENAME=test_${{ matrix.version }} - - - name: Take Backup (using DB_USE_ENV) - uses: addnab/docker-run-action@main - with: - image: heyman/postgresql-backup:latest - run: python3 -u /backup/backup.py - options: > - -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' - -e DB_USE_ENV=True - -e PGHOST=postgres - -e PGPASSWORD=test - -e PGUSER=test - -e PGDATABASE=test_${{ matrix.version }} - -e S3_PATH=s3://test-postgresql-backup/backups - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - -e AWS_DEFAULT_REGION=us-east-1 - -e FILENAME=test_${{ matrix.version }}_env - - - name: Take Backup (using PG_DUMP_EXTRA_OPTIONS) - uses: addnab/docker-run-action@main - with: - image: heyman/postgresql-backup:latest - run: python3 -u /backup/backup.py - options: > - -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' - -e DB_USE_ENV=True - -e PGHOST=postgres - -e PGPASSWORD=test - -e PGUSER=test - -e PGDATABASE=test_${{ matrix.version }} - -e S3_PATH=s3://test-postgresql-backup/backups - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - -e AWS_DEFAULT_REGION=us-east-1 - -e FILENAME=test_${{ matrix.version }}_exclude - -e PG_DUMP_EXTRA_OPTIONS='--exclude-table=movies' - - - name: Check equality - uses: addnab/docker-run-action@main - with: - image: amazon/aws-cli - entryPoint: /bin/bash - run: | - aws s3 --endpoint-url=http://s3:8000 cp s3://test-postgresql-backup/backups/test_${{ matrix.version }} . - aws s3 --endpoint-url=http://s3:8000 cp s3://test-postgresql-backup/backups/test_${{ matrix.version }}_env . 
- diff test_${{ matrix.version }} test_${{ matrix.version }}_env - echo "$( md5sum test_${{ matrix.version }} |awk '{print $1}') test_${{ matrix.version }}_env"|md5sum -c - options: > - -e AWS_EC2_METADATA_DISABLED=true - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - - - name: Clear DB table - uses: addnab/docker-run-action@v3 - with: - image: postgres:${{ matrix.version }} - run: > - psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' - DROP TABLE books; - DROP TABLE movies; - ' - options: > - -e PGPASSWORD=test - - - name: Check that table was actually removed - uses: addnab/docker-run-action@v3 - with: - image: postgres:${{ matrix.version }} - shell: bash - run: > - [[ "0" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; - '` ]] - options: > - -e PGPASSWORD=test - - - name: Restore Backup - uses: addnab/docker-run-action@v3 - with: - image: heyman/postgresql-backup:latest - run: python3 -u /backup/restore.py test_${{ matrix.version }} - options: > - -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' - -e DB_HOST=postgres - -e DB_PASS=test - -e DB_USER=test - -e DB_NAME=test_${{ matrix.version }} - -e S3_PATH=s3://test-postgresql-backup/backups - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - -e AWS_DEFAULT_REGION=us-east-1 - - - name: Check that table got imported - uses: addnab/docker-run-action@v3 - with: - image: postgres:${{ matrix.version }} - shell: bash - run: > - [[ "1" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; - '` ]] && [[ "Fittstim" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT name FROM books WHERE author=$$Linda Skugge$$; - '` ]] - options: > - -e PGPASSWORD=test - - - name: Clear DB table - uses: addnab/docker-run-action@main - with: - image: postgres:${{ matrix.version }} - shell: bash - run: > - psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' - DROP TABLE books; - DROP TABLE movies; - ' && [[ "0" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; - '` ]] - options: > - -e PGPASSWORD=test - - - name: Restore Backup (DB_USE_ENV) - uses: addnab/docker-run-action@main - with: - image: heyman/postgresql-backup:latest - run: python3 -u /backup/restore.py test_${{ matrix.version }}_env - options: > - -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' - -e DB_USE_ENV=True - -e PGHOST=postgres - -e PGPASSWORD=test - -e PGUSER=test - -e PGDATABASE=test_${{ matrix.version }} - -e S3_PATH=s3://test-postgresql-backup/backups - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - -e AWS_DEFAULT_REGION=us-east-1 - - - name: Check that table got imported - uses: addnab/docker-run-action@main - with: - image: postgres:${{ matrix.version }} - shell: bash - run: > - [[ "1" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; - '` ]] && [[ "Fittstim" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ 
job.services.postgres.ports[5432] }} -A -t -c ' - SELECT name FROM books WHERE author=$$Linda Skugge$$; - '` ]] - options: > - -e PGPASSWORD=test - - - name: Clear DB table - uses: addnab/docker-run-action@main - with: - image: postgres:${{ matrix.version }} - shell: bash - run: > - psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' - DROP TABLE books; - DROP TABLE movies; - ' && [[ "0" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; - '` ]] - options: > - -e PGPASSWORD=test - - - name: Restore Backup (PG_DUMP_EXTRA_OPTIONS) - uses: addnab/docker-run-action@main - with: - image: heyman/postgresql-backup:latest - run: python3 -u /backup/restore.py test_${{ matrix.version }}_exclude - options: > - -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' - -e DB_HOST=postgres - -e DB_PASS=test - -e DB_USER=test - -e DB_NAME=test_${{ matrix.version }} - -e S3_PATH=s3://test-postgresql-backup/backups - -e AWS_ACCESS_KEY_ID=access_key - -e AWS_SECRET_ACCESS_KEY=secret - -e AWS_DEFAULT_REGION=us-east-1 - - - name: Check that table got imported (PG_DUMP_EXTRA_OPTIONS) - uses: addnab/docker-run-action@main - with: - image: postgres:${{ matrix.version }} - shell: bash - run: > - [[ "1" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; - '` ]] && [[ "Fittstim" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT name FROM books WHERE author=$$Linda Skugge$$; - '` ]] && [[ "0" == `psql -d test_${{ matrix.version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' - SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$movies$$; - '` ]] - options: > - -e PGPASSWORD=test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6a07a48 --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +/.venv/ +/.secrets/ +/tmp/ +/out/ +.idea/ diff --git a/10/Dockerfile b/10/Dockerfile deleted file mode 100644 index a746444..0000000 --- a/10/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:10-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . 
/backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/10/backup.py b/10/backup.py deleted file mode 100644 index f34014c..0000000 --- a/10/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. 
Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: %s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/10/entrypoint.sh b/10/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/10/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" 
- exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/10/restore.py b/10/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/10/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/10/restore.sh b/10/restore.sh deleted file mode 100755 index 5c9b302..0000000 --- a/10/restore.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -python -u /backup/restore.py $1 diff --git a/11/backup.py b/11/backup.py deleted file mode 100644 index f34014c..0000000 --- a/11/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - 
DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: 
%s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/11/entrypoint.sh b/11/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/11/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" - exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/11/restore.py b/11/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/11/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/12/Dockerfile b/12/Dockerfile deleted file mode 100644 index f3ac6f0..0000000 --- a/12/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:12-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . 
/backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/12/backup.py b/12/backup.py deleted file mode 100644 index f34014c..0000000 --- a/12/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. 
Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: %s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/12/entrypoint.sh b/12/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/12/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" 
- exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/12/restore.py b/12/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/12/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/13/Dockerfile b/13/Dockerfile deleted file mode 100644 index 7b00064..0000000 --- a/13/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:13-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . 
/backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/13/backup.py b/13/backup.py deleted file mode 100644 index f34014c..0000000 --- a/13/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. 
Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: %s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/13/entrypoint.sh b/13/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/13/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" 
- exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/13/restore.py b/13/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/13/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/13/restore.sh b/13/restore.sh deleted file mode 100755 index 5c9b302..0000000 --- a/13/restore.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -python -u /backup/restore.py $1 diff --git a/14/Dockerfile b/14/Dockerfile deleted file mode 100644 index ea0387c..0000000 --- a/14/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:14-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . 
/backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/14/backup.py b/14/backup.py deleted file mode 100644 index f34014c..0000000 --- a/14/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. 
Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: %s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/14/entrypoint.sh b/14/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/14/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" 
- exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/14/restore.py b/14/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/14/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/15/Dockerfile b/15/Dockerfile deleted file mode 100644 index 8f63985..0000000 --- a/15/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:15-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . 
/backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/15/backup.py b/15/backup.py deleted file mode 100644 index f34014c..0000000 --- a/15/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. 
Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: %s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/15/entrypoint.sh b/15/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/15/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" 
- exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/15/restore.py b/15/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/15/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/16/Dockerfile b/16/Dockerfile deleted file mode 100644 index 4d89164..0000000 --- a/16/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:16-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . 
/backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/16/backup.py b/16/backup.py deleted file mode 100644 index f34014c..0000000 --- a/16/backup.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -dt = datetime.now() - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") -S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -MAIL_TO = os.environ.get("MAIL_TO") -MAIL_FROM = os.environ.get("MAIL_FROM") -WEBHOOK = os.environ.get("WEBHOOK") -WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD") -WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") -WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") -KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") -PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") - -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -if WEBHOOK_DATA and not WEBHOOK_METHOD: - WEBHOOK_METHOD = 'POST' -else: - WEBHOOK_METHOD = WEBHOOK_METHOD or 'GET' - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. 
Output:", - "-"*80, - e.output, - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def take_backup(): - env = os.environ.copy() - if DB_USE_ENV: - env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - # trigger postgres-backup - command = [ - "pg_dump", - "-Fc", - ] - if PG_DUMP_EXTRA_OPTIONS: - command.append(PG_DUMP_EXTRA_OPTIONS) - command.append("> %s" % backup_file) - cmd(" ".join(command), env=env) - -def upload_backup(): - opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) - cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) - -def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) - -def send_email(to_address, from_address, subject, body): - """ - Super simple, doesn't do any escaping - """ - cmd("""aws --region us-east-1 ses send-email --from %(from)s --destination '{"ToAddresses":["%(to)s"]}' --message '{"Subject":{"Data":"%(subject)s","Charset":"UTF-8"},"Body":{"Text":{"Data":"%(body)s","Charset":"UTF-8"}}}'""" % { - "to": to_address, - "from": from_address, - "subject": subject, - "body": body, - }) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def pretty_bytes(num): - for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: - if num < 1024.0: - return "%3.1f %s" % (num, x) - num /= 1024.0 - -def main(): - start_time = datetime.now() - log("Dumping database") - take_backup() - backup_size=os.path.getsize(backup_file) - - if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: - log("Uploading to S3") - upload_backup() - else: - log("Skipping S3 upload, no AWS credentials provided") - - log("Pruning local backup copies") - prune_local_backup_files() - end_time = datetime.now() - - meta = { - "filename": file_name, - "date": end_time.strftime("%Y-%m-%d"), - "time": end_time.strftime('%H:%M:%S'), - "duration": "%.2f" % ((end_time - start_time).total_seconds()), - "size": pretty_bytes(backup_size) - } - - if MAIL_TO and MAIL_FROM: - log("Sending mail to %s" % MAIL_TO) - send_email( - MAIL_TO, - MAIL_FROM, - "Backup complete: %s" % DB_NAME, - "Took %(duration)s seconds" % meta, - ) - - if WEBHOOK: - if WEBHOOK_DATA: - opts = "%s -d '%s'" % (WEBHOOK_CURL_OPTIONS, WEBHOOK_DATA % meta) - else: - opts = WEBHOOK_CURL_OPTIONS - - log("Making HTTP %s request to webhook: %s" % (WEBHOOK_METHOD, WEBHOOK)) - cmd("curl -X %s %s %s" % (WEBHOOK_METHOD, opts, WEBHOOK)) - - log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) - - -if __name__ == "__main__": - main() diff --git a/16/entrypoint.sh b/16/entrypoint.sh deleted file mode 100755 index 78d2a8c..0000000 --- a/16/entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -if [[ $@ == *crond* ]] && [ -z "$CRON_SCHEDULE" ]; then - echo "ERROR: \$CRON_SCHEDULE not set!" 
- exit 1 -fi - -# Write cron schedule -echo "$CRON_SCHEDULE python3 -u /backup/backup.py > /dev/stdout" | crontab - - -exec "$@" diff --git a/16/restore.py b/16/restore.py deleted file mode 100644 index 4abc06d..0000000 --- a/16/restore.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import sys -from datetime import datetime - -BACKUP_DIR = os.environ["BACKUP_DIR"] - -S3_PATH = os.environ.get("S3_PATH", "") -S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "") - -DB_USE_ENV = os.environ.get("DB_USE_ENV", False) -DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") - -if not DB_NAME: - raise Exception("DB_NAME must be set") - -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") - -file_name = sys.argv[1] -backup_file = os.path.join(BACKUP_DIR, file_name) - -if not S3_PATH.endswith("/"): - S3_PATH = S3_PATH + "/" - -def cmd(command, **kwargs): - try: - subprocess.check_output([command], shell=True, stderr=subprocess.STDOUT, **kwargs) - except subprocess.CalledProcessError as e: - sys.stderr.write("\n".join([ - "Command execution failed. Output:", - "-"*80, - e.output.decode(), - "-"*80, - "" - ])) - raise - -def backup_exists(): - return os.path.exists(backup_file) - -def restore_backup(): - if not backup_exists(): - sys.stderr.write("Backup file doesn't exists!\n") - sys.exit(1) - - # restore postgres-backup - env = os.environ.copy() - if DB_USE_ENV: - env.update({ key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) - else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) - - cmd("pg_restore -Fc -d %s %s" % (DB_NAME, backup_file), env=env) - -def download_backup(): - cmd("aws s3 cp %s %s%s %s" % (S3_EXTRA_OPTIONS, S3_PATH, file_name, backup_file)) - -def log(msg): - print("[%s]: %s" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg)) - -def main(): - start_time = datetime.now() - if backup_exists(): - log("Backup file already exists in filesystem %s" % backup_file) - else: - log("Downloading database dump") - download_backup() - - log("Restoring database") - restore_backup() - - log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds()) - -if __name__ == "__main__": - main() diff --git a/Makefile b/Makefile deleted file mode 100644 index aee3226..0000000 --- a/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -update: - python3 update.py diff --git a/README.md b/README.md index e935f06..ac6b46d 100644 --- a/README.md +++ b/README.md @@ -94,8 +94,10 @@ The following environment variables are required: * `DB_HOST`: Postgres hostname * `DB_PASS`: Postgres password +* `DB_PASS_FILE`: Path to a file containing the Postgres password. If this is set, `DB_PASS` will be ignored. * `DB_USER`: Postgres username * `DB_NAME`: Name of database to import into +* `DB_NAMES`: Comma separated list of database names to restore. If not set, the first database in the backup file will be restored. The following environment variables are required if the file to restore is not already in the backup volume: @@ -123,7 +125,8 @@ image. 
The following docker tags are available for this image, and they are based on the corresponding official postgres alpine image: -* `16`, `latest` +* `17`, `latest` +* `16` * `15` * `14` * `13` diff --git a/11/Dockerfile b/template/Dockerfile similarity index 78% rename from 11/Dockerfile rename to template/Dockerfile index 01d9dac..d17280e 100644 --- a/11/Dockerfile +++ b/template/Dockerfile @@ -1,4 +1,4 @@ -FROM postgres:11-alpine +FROM postgres:%VERSION%-alpine # Install dependencies RUN apk add --no-cache -uv curl aws-cli python3 @@ -8,6 +8,7 @@ VOLUME ["/data/backups"] ENV BACKUP_DIR=/data/backups ADD . /backup +RUN chmod +x /backup/*.sh ENTRYPOINT ["/backup/entrypoint.sh"] diff --git a/template/Dockerfile.template b/template/Dockerfile.template deleted file mode 100644 index 1bc8c65..0000000 --- a/template/Dockerfile.template +++ /dev/null @@ -1,14 +0,0 @@ -FROM postgres:%(VERSION)s-alpine - -# Install dependencies -RUN apk add --no-cache -uv curl aws-cli python3 - -VOLUME ["/data/backups"] - -ENV BACKUP_DIR=/data/backups - -ADD . /backup - -ENTRYPOINT ["/backup/entrypoint.sh"] - -CMD ["crond", "-f", "-l", "2"] diff --git a/template/backup.py b/template/backup.py index f34014c..366cc26 100644 --- a/template/backup.py +++ b/template/backup.py @@ -17,15 +17,21 @@ DB_USE_ENV = os.environ.get("DB_USE_ENV", False) DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE") +DB_NAMES = os.environ.get("DB_NAMES") -if not DB_NAME: - raise Exception("DB_NAME must be set") +if not DB_NAME and not DB_NAMES: + raise Exception("DB_NAME or DB_NAMES must be set") if not DB_USE_ENV: DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] + DB_PASS = os.environ.get("DB_PASS", "") DB_USER = os.environ["DB_USER"] DB_PORT = os.environ.get("DB_PORT", "5432") + DB_PASS_FILE = os.environ.get("DB_PASS_FILE") + if DB_PASS_FILE: + print(f"Reading password from: {DB_PASS_FILE}") + with open(DB_PASS_FILE, "r") as f: + DB_PASS = f.read().strip() MAIL_TO = os.environ.get("MAIL_TO") MAIL_FROM = os.environ.get("MAIL_FROM") @@ -34,12 +40,8 @@ WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA") WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "") KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7)) -FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d") PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "") -file_name = dt.strftime(FILENAME) -backup_file = os.path.join(BACKUP_DIR, file_name) - if not S3_PATH.endswith("/"): S3_PATH = S3_PATH + "/" @@ -55,21 +57,21 @@ def cmd(command, **kwargs): sys.stderr.write("\n".join([ "Command execution failed. 
Output:", "-"*80, - e.output, + e.output.decode('utf-8'), # Convert bytes to string "-"*80, "" ])) raise -def backup_exists(): +def backup_exists(backup_file: str): return os.path.exists(backup_file) -def take_backup(): +def take_backup(backup_file: str, db_name: str): env = os.environ.copy() if DB_USE_ENV: env.update({key: os.environ[key] for key in os.environ.keys() if key.startswith('PG') }) else: - env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': DB_NAME, 'PGPORT': DB_PORT}) + env.update({'PGPASSWORD': DB_PASS, 'PGHOST': DB_HOST, 'PGUSER': DB_USER, 'PGDATABASE': db_name, 'PGPORT': DB_PORT}) # trigger postgres-backup command = [ @@ -81,12 +83,12 @@ def take_backup(): command.append("> %s" % backup_file) cmd(" ".join(command), env=env) -def upload_backup(): +def upload_backup(backup_file: str): opts = "--storage-class=%s %s" % (S3_STORAGE_CLASS, S3_EXTRA_OPTIONS) cmd("aws s3 cp %s %s %s" % (opts, backup_file, S3_PATH)) def prune_local_backup_files(): - cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) + cmd("find %s -type f -prune -mtime +%i -exec rm -f {} \\;" % (BACKUP_DIR, KEEP_BACKUP_DAYS)) def send_email(to_address, from_address, subject, body): """ @@ -108,15 +110,20 @@ def pretty_bytes(num): return "%3.1f %s" % (num, x) num /= 1024.0 -def main(): +def main_one(db_name: str): + def_file_name = db_name + "_%Y-%m-%d" + filename = os.environ.get("FILENAME", def_file_name) + file_name = dt.strftime(filename) + backup_file = os.path.join(BACKUP_DIR, file_name) + start_time = datetime.now() - log("Dumping database") - take_backup() + log(f"Dumping database: {db_name}") + take_backup(backup_file, db_name) backup_size=os.path.getsize(backup_file) if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY: log("Uploading to S3") - upload_backup() + upload_backup(backup_file) else: log("Skipping S3 upload, no AWS credentials provided") @@ -137,7 +144,7 @@ def main(): send_email( MAIL_TO, MAIL_FROM, - "Backup complete: %s" % DB_NAME, + "Backup complete: %s" % db_name, "Took %(duration)s seconds" % meta, ) @@ -152,6 +159,12 @@ def main(): log("Backup complete, took %(duration)s seconds, size %(size)s" % meta) +def main(): + if not DB_NAMES: + main_one(DB_NAME) + else: + for name in DB_NAMES.split(","): + main_one(name.strip()) if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/template/restore.py b/template/restore.py index 4abc06d..0d66e1a 100644 --- a/template/restore.py +++ b/template/restore.py @@ -16,11 +16,15 @@ if not DB_NAME: raise Exception("DB_NAME must be set") -if not DB_USE_ENV: - DB_HOST = os.environ["DB_HOST"] - DB_PASS = os.environ["DB_PASS"] - DB_USER = os.environ["DB_USER"] - DB_PORT = os.environ.get("DB_PORT", "5432") +DB_HOST = os.environ["DB_HOST"] +DB_PASS = os.environ.get("DB_PASS", "") +DB_USER = os.environ["DB_USER"] +DB_PORT = os.environ.get("DB_PORT", "5432") +DB_PASS_FILE = os.environ.get("DB_PASS_FILE") +if DB_PASS_FILE: + print(f"Reading password from: {DB_PASS_FILE}") + with open(DB_PASS_FILE, "r") as f: + DB_PASS = f.read().strip() file_name = sys.argv[1] backup_file = os.path.join(BACKUP_DIR, file_name) @@ -35,7 +39,7 @@ def cmd(command, **kwargs): sys.stderr.write("\n".join([ "Command execution failed. 
Output:", "-"*80, - e.output.decode(), + e.output.decode("utf-8"), "-"*80, "" ])) diff --git a/template/run.sh b/template/run.sh new file mode 100644 index 0000000..05a953d --- /dev/null +++ b/template/run.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -e + +python3 -u /backup/backup.py diff --git a/test.sh b/test.sh new file mode 100755 index 0000000..76539da --- /dev/null +++ b/test.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +mkdir -p ./tmp +mkdir -p ./out +cp template/* tmp/ +sed s/%VERSION%/17/g template/Dockerfile > tmp/Dockerfile +docker build -t postgressql-backup:test-17 tmp +docker run --rm -it \ + -e DB_HOST=192.168.2.5 \ + -e DB_PORT=5432 \ + -e DB_USER=pg \ + -e DB_PASS_FILE=/secrets/pg_secret.txt \ + -e DB_NAMES="postgres, gitea" \ + -e KEEP_BACKUP_DAYS=1 \ + -v ./.secrets:/secrets \ + -v ./out:/data/backups \ + --entrypoint "/bin/bash" \ + --entrypoint "/backup/run.sh" \ + postgressql-backup:test-17 + + diff --git a/update.py b/update.py deleted file mode 100644 index ab2d103..0000000 --- a/update.py +++ /dev/null @@ -1,26 +0,0 @@ -import os -import re -import shutil -import subprocess - - -# versions will be a list of all #, ## and ##.## directories -versions = [p for p in os.listdir() if os.path.isdir(p) and re.match(r"^\d+(\.\d+)?$", p)] - -with open(os.path.join("template", "Dockerfile.template"), "r", encoding="utf-8") as f: - dockerfile_template = f.read() - -for version in versions: - # write Dockerfile in version directory - with open(os.path.join(version, "Dockerfile"), "w", encoding="utf-8") as f: - f.write(dockerfile_template % {"VERSION":version}) - - # copy other files into version directory - for file_name in os.listdir("template"): - if file_name == "Dockerfile.template": - continue - - # we use system cp in order to preserve file permissions - p = subprocess.Popen(['cp', os.path.join("template", file_name), os.path.join(version, file_name)]) - p.wait() - From 6e130e57abdce51a145ddf6bc21b54e7be5e847e Mon Sep 17 00:00:00 2001 From: pilotso11 Date: Sat, 19 Jul 2025 17:44:01 +0100 Subject: [PATCH 2/6] Add GitHub Actions workflow for PostgreSQL backup tests (#1) * Add GitHub Actions workflow to test PostgreSQL backup functionality * fix docker-build-push.yml * fix docker-build-push.yml on events * Add - to flavor * add schedule to test --- .github/workflows/docker-build-push.yml | 20 ++++-- .github/workflows/test.yml | 91 +++++++++++++++++++++++++ 2 files changed, 104 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/docker-build-push.yml b/.github/workflows/docker-build-push.yml index 38f791d..0df91e9 100644 --- a/.github/workflows/docker-build-push.yml +++ b/.github/workflows/docker-build-push.yml @@ -3,7 +3,10 @@ name: Docker Build and Push on: push: branches: [ main ] - workflow_dispatch: # Allow manual triggering + tags: [ 'v*', 'latest' ] + pull_request: + branches: [ main ] + workflow_dispatch: # Allows manual triggering of the workflow jobs: build-and-push: @@ -35,12 +38,15 @@ jobs: with: images: ${{ github.repository }} tags: | - type=ref,event=branch-pg${{ matrix.postgres-version }} - type=ref,event=pr-pg${{ matrix.postgres-version }} - type=semver,pattern={{version}}-pg${{ matrix.postgres-version }} - type=semver,pattern={{major}}.{{minor}}-pg${{ matrix.postgres-version }} - type=semver,pattern={{major}}-pg${{ matrix.postgres-version }} - type=sha-pg${{ matrix.postgres-version }} + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + 
type=semver,pattern={{major}} + type=sha + flavor: | + latest=auto + suffix=-pg${{ matrix.postgres-version }},onlatest=true - name: Prepare build context run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..38b6733 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,91 @@ +name: Test Backup Functionality + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + workflow_dispatch: # Allows manual triggering of the workflow + +jobs: + test-backup: + name: Test PostgreSQL Backup + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Create backup directory + run: mkdir -p ./backup-output + + - name: Prepare build context + run: | + mkdir -p build/16 + cp template/* build/16/ + sed "s/%VERSION%/16/g" template/Dockerfile > build/16/Dockerfile + + - name: Build test image + uses: docker/build-push-action@v6 + with: + context: build/16 + push: false + tags: postgresql-backup:test-16 + load: true + + - name: Start PostgreSQL container + run: | + docker run -d \ + --name postgres-test \ + -e POSTGRES_PASSWORD=postgres \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_DB=testdb \ + postgres:16-alpine + + - name: Wait for PostgreSQL to start + run: | + echo "Waiting for PostgreSQL to start..." + timeout 30s bash -c 'until docker exec postgres-test pg_isready -U postgres; do sleep 1; done' + echo "PostgreSQL is ready!" + + - name: Run backup + run: | + docker run --rm \ + --link postgres-test:db \ + -e DB_HOST=db \ + -e DB_USER=postgres \ + -e DB_PASS=postgres \ + -e DB_NAME=testdb \ + -v ${{ github.workspace }}/backup-output:/data/backups \ + --entrypoint "/backup/run.sh" \ + postgresql-backup:test-16 + + - name: Check backup was created + run: | + echo "Checking for backup files..." + ls -la ${{ github.workspace }}/backup-output + + # Count backup files + BACKUP_COUNT=$(find ${{ github.workspace }}/backup-output -type f | wc -l) + + if [ "$BACKUP_COUNT" -eq 0 ]; then + echo "Error: No backup files were created!" 
+ exit 1 + else + echo "Success: Found $BACKUP_COUNT backup file(s)" + fi + + # Verify backup file format + BACKUP_FILE=$(find ${{ github.workspace }}/backup-output -type f | head -n 1) + echo "Backup file: $BACKUP_FILE" + + # Check if the backup file is a valid PostgreSQL dump + if file "$BACKUP_FILE" | grep -q "PostgreSQL"; then + echo "Success: Backup file is a valid PostgreSQL dump" + else + echo "Warning: Backup file format could not be verified" + file "$BACKUP_FILE" + fi From 1c85ffd0ff124bb26772ca8e43745c85484d1e31 Mon Sep 17 00:00:00 2001 From: pilotso11 Date: Sat, 19 Jul 2025 19:15:24 +0100 Subject: [PATCH 3/6] Fix and enhance PostgreSQL backup functionality (#2) * Add GitHub Actions workflow to test PostgreSQL backup functionality * fix docker-build-push.yml * fix docker-build-push.yml on events * Add - to flavor * add schedule to test * fix backup to use local env * fix backup to use local env and add db name * Try again * add original test back * fix formatting * remove wait * use service * add github_network * add github host network * add github ${{ job.container.network }} * move to build new image * fix restore with DB_USE_ENV * fix restore with DB_USE_ENV * try different naming for versions * fix name to postgres-version --- .github/workflows/smoketest.yml | 88 ++++++++ .github/workflows/test.yml | 350 ++++++++++++++++++++++++++------ template/restore.py | 19 +- 3 files changed, 382 insertions(+), 75 deletions(-) create mode 100644 .github/workflows/smoketest.yml diff --git a/.github/workflows/smoketest.yml b/.github/workflows/smoketest.yml new file mode 100644 index 0000000..0497f9d --- /dev/null +++ b/.github/workflows/smoketest.yml @@ -0,0 +1,88 @@ +name: Smoke-test on pg16 + +on: + push: + branches: [main] + pull_request: + branches: [main] + workflow_dispatch: # Allows manual triggering of the workflow + +jobs: + test-backup: + name: Test PostgreSQL Backup + runs-on: ubuntu-latest + + services: + postgres: + image: postgres:16-alpine + env: + POSTGRES_PASSWORD: test + POSTGRES_USER: test + POSTGRES_DB: testdb + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 10 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Create backup directory + run: mkdir -p ./backup-output + + - name: Prepare build context + run: | + mkdir -p build/16 + cp template/* build/16/ + sed "s/%VERSION%/16/g" template/Dockerfile > build/16/Dockerfile + + - name: Build test image + uses: docker/build-push-action@v6 + with: + context: build/16 + push: false + tags: postgresql-backup:test-16 + load: true + + - name: Run backup + run: | + docker run --rm \ + --network=${{ job.container.network }} \ + -e DB_HOST=postgres \ + -e DB_PORT=5432 \ + -e DB_PASS=test \ + -e DB_USER=test \ + -e DB_NAME=testdb \ + -v ${{ github.workspace }}/backup-output:/data/backups \ + --entrypoint "/backup/run.sh" \ + postgresql-backup:test-16 + + - name: Check backup was created + run: | + echo "Checking for backup files..." + ls -la ${{ github.workspace }}/backup-output + + # Count backup files + BACKUP_COUNT=$(find ${{ github.workspace }}/backup-output -type f | wc -l) + + if [ "$BACKUP_COUNT" -eq 0 ]; then + echo "Error: No backup files were created!" 
+ exit 1 + else + echo "Success: Found $BACKUP_COUNT backup file(s)" + fi + + # Verify backup file format + BACKUP_FILE=$(find ${{ github.workspace }}/backup-output -type f | head -n 1) + echo "Backup file: $BACKUP_FILE" + + # Check if the backup file is a valid PostgreSQL dump + if file "$BACKUP_FILE" | grep -q "PostgreSQL"; then + echo "Success: Backup file is a valid PostgreSQL dump" + else + echo "Warning: Backup file format could not be verified" + file "$BACKUP_FILE" + fi diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 38b6733..0c8fd65 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,91 +1,309 @@ -name: Test Backup Functionality +name: Full Tests on: push: branches: [ main ] + tags: [ 'v*', 'latest' ] pull_request: branches: [ main ] workflow_dispatch: # Allows manual triggering of the workflow jobs: - test-backup: - name: Test PostgreSQL Backup + tests: + name: pg-${{ matrix.postgres-version }} runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + postgres-version: ['10', '11', '12', '13', '14', '15', '16', '17'] + + services: + postgres: + image: postgres:${{ matrix.postgres-version }}-alpine + env: + POSTGRES_PASSWORD: test + POSTGRES_USER: test + POSTGRES_DB: test_${{ matrix.postgres-version }} + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 10 + s3: + image: zenko/cloudserver + env: + ENDPOINT: s3 + S3BACKEND: mem + REMOTE_MANAGEMENT_DISABLE: 1 + SCALITY_ACCESS_KEY_ID: access_key + SCALITY_SECRET_ACCESS_KEY: secret + steps: - - name: Checkout code - uses: actions/checkout@v4 + - name: Create Test Data + uses: addnab/docker-run-action@v3 + with: + image: postgres:${{ matrix.postgres-version }}-alpine + run: > + psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' + CREATE TABLE books ( + id serial PRIMARY KEY, + name VARCHAR ( 128 ) UNIQUE NOT NULL, + author VARCHAR (128 ) NOT NULL + ); + INSERT INTO books (name, author) VALUES + ($$Fittstim$$, $$Linda Skugge$$), + ($$DSM-5$$, $$American Psychiatric Association$$); + + CREATE TABLE movies ( + id serial PRIMARY KEY, + name VARCHAR ( 128 ) UNIQUE NOT NULL, + director VARCHAR (128 ) NOT NULL + ); + INSERT INTO movies (name, director) VALUES + ($$Beau Travail$$, $$Claire Denis$$), + ($$Reservoir Dogs$$, $$Quentin Tarantino$$); + ' + options: > + -e PGPASSWORD=test - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + - name: Create S3 bucket + uses: addnab/docker-run-action@v3 + with: + image: amazon/aws-cli + run: aws --endpoint-url=http://s3:8000 s3api create-bucket --bucket test-postgresql-backup; aws --endpoint-url=http://s3:8000 s3 ls + options: > + -e AWS_EC2_METADATA_DISABLED=true + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret - - name: Create backup directory - run: mkdir -p ./backup-output + - uses: actions/checkout@v2 - name: Prepare build context run: | - mkdir -p build/16 - cp template/* build/16/ - sed "s/%VERSION%/16/g" template/Dockerfile > build/16/Dockerfile + mkdir -p build/${{ matrix.postgres-version }} + cp template/* build/${{ matrix.postgres-version }}/ + sed "s/%VERSION%/${{ matrix.postgres-version }}/g" template/Dockerfile > build/${{ matrix.postgres-version }}/Dockerfile - name: Build test image uses: docker/build-push-action@v6 with: - context: build/16 + context: build/${{ matrix.postgres-version }} push: false - tags: postgresql-backup:test-16 + tags: 
postgresql-backup:test-${{ matrix.postgres-version }} load: true - - name: Start PostgreSQL container - run: | - docker run -d \ - --name postgres-test \ - -e POSTGRES_PASSWORD=postgres \ - -e POSTGRES_USER=postgres \ - -e POSTGRES_DB=testdb \ - postgres:16-alpine - - - name: Wait for PostgreSQL to start - run: | - echo "Waiting for PostgreSQL to start..." - timeout 30s bash -c 'until docker exec postgres-test pg_isready -U postgres; do sleep 1; done' - echo "PostgreSQL is ready!" - - name: Run backup - run: | - docker run --rm \ - --link postgres-test:db \ - -e DB_HOST=db \ - -e DB_USER=postgres \ - -e DB_PASS=postgres \ - -e DB_NAME=testdb \ - -v ${{ github.workspace }}/backup-output:/data/backups \ - --entrypoint "/backup/run.sh" \ - postgresql-backup:test-16 - - - name: Check backup was created - run: | - echo "Checking for backup files..." - ls -la ${{ github.workspace }}/backup-output - - # Count backup files - BACKUP_COUNT=$(find ${{ github.workspace }}/backup-output -type f | wc -l) - - if [ "$BACKUP_COUNT" -eq 0 ]; then - echo "Error: No backup files were created!" - exit 1 - else - echo "Success: Found $BACKUP_COUNT backup file(s)" - fi - - # Verify backup file format - BACKUP_FILE=$(find ${{ github.workspace }}/backup-output -type f | head -n 1) - echo "Backup file: $BACKUP_FILE" - - # Check if the backup file is a valid PostgreSQL dump - if file "$BACKUP_FILE" | grep -q "PostgreSQL"; then - echo "Success: Backup file is a valid PostgreSQL dump" - else - echo "Warning: Backup file format could not be verified" - file "$BACKUP_FILE" - fi + - name: Take Backup + uses: addnab/docker-run-action@v3 + with: + image: postgresql-backup:test-${{ matrix.postgres-version }} + run: python3 -u /backup/backup.py + options: > + -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' + -e DB_HOST=postgres + -e DB_PASS=test + -e DB_USER=test + -e DB_NAME=test_${{ matrix.postgres-version }} + -e S3_PATH=s3://test-postgresql-backup/backups + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + -e AWS_DEFAULT_REGION=us-east-1 + -e FILENAME=test_${{ matrix.postgres-version }} + + - name: Take Backup (using DB_USE_ENV) + uses: addnab/docker-run-action@main + with: + image: postgresql-backup:test-${{ matrix.postgres-version }} + run: python3 -u /backup/backup.py + options: > + -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' + -e DB_USE_ENV=True + -e PGHOST=postgres + -e PGPASSWORD=test + -e PGUSER=test + -e PGDATABASE=test_${{ matrix.postgres-version }} + -e S3_PATH=s3://test-postgresql-backup/backups + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + -e AWS_DEFAULT_REGION=us-east-1 + -e FILENAME=test_${{ matrix.postgres-version }}_env + + - name: Take Backup (using PG_DUMP_EXTRA_OPTIONS) + uses: addnab/docker-run-action@main + with: + image: postgresql-backup:test-${{ matrix.postgres-version }} + run: python3 -u /backup/backup.py + options: > + -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' + -e DB_USE_ENV=True + -e PGHOST=postgres + -e PGPASSWORD=test + -e PGUSER=test + -e PGDATABASE=test_${{ matrix.postgres-version }} + -e S3_PATH=s3://test-postgresql-backup/backups + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + -e AWS_DEFAULT_REGION=us-east-1 + -e FILENAME=test_${{ matrix.postgres-version }}_exclude + -e PG_DUMP_EXTRA_OPTIONS='--exclude-table=movies' + + - name: Check equality + uses: addnab/docker-run-action@main + with: + image: amazon/aws-cli + entryPoint: /bin/bash + run: | + aws s3 --endpoint-url=http://s3:8000 cp 
s3://test-postgresql-backup/backups/test_${{ matrix.postgres-version }} . + aws s3 --endpoint-url=http://s3:8000 cp s3://test-postgresql-backup/backups/test_${{ matrix.postgres-version }}_env . + diff test_${{ matrix.postgres-version }} test_${{ matrix.postgres-version }}_env + echo "$( md5sum test_${{ matrix.postgres-version }} |awk '{print $1}') test_${{ matrix.postgres-version }}_env"|md5sum -c + options: > + -e AWS_EC2_METADATA_DISABLED=true + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + + - name: Clear DB table + uses: addnab/docker-run-action@v3 + with: + image: postgres:${{ matrix.postgres-version }}-alpine + run: > + psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' + DROP TABLE books; + DROP TABLE movies; + ' + options: > + -e PGPASSWORD=test + + - name: Check that table was actually removed + uses: addnab/docker-run-action@v3 + with: + image: postgres:${{ matrix.postgres-version }}-alpine + shell: bash + run: > + [[ "0" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; + '` ]] + options: > + -e PGPASSWORD=test + + - name: Restore Backup + uses: addnab/docker-run-action@v3 + with: + image: postgresql-backup:test-${{ matrix.postgres-version }} + run: python3 -u /backup/restore.py test_${{ matrix.postgres-version }} + options: > + -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' + -e DB_HOST=postgres + -e DB_PASS=test + -e DB_USER=test + -e DB_NAME=test_${{ matrix.postgres-version }} + -e S3_PATH=s3://test-postgresql-backup/backups + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + -e AWS_DEFAULT_REGION=us-east-1 + + - name: Check that table got imported + uses: addnab/docker-run-action@v3 + with: + image: postgres:${{ matrix.postgres-version }}-alpine + shell: bash + run: > + [[ "1" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; + '` ]] && [[ "Fittstim" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT name FROM books WHERE author=$$Linda Skugge$$; + '` ]] + options: > + -e PGPASSWORD=test + + - name: Clear DB table + uses: addnab/docker-run-action@main + with: + image: postgres:${{ matrix.postgres-version }}-alpine + shell: bash + run: > + psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' + DROP TABLE books; + DROP TABLE movies; + ' && [[ "0" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; + '` ]] + options: > + -e PGPASSWORD=test + + - name: Restore Backup (DB_USE_ENV) + uses: addnab/docker-run-action@main + with: + image: postgresql-backup:test-${{ matrix.postgres-version }} + run: python3 -u /backup/restore.py test_${{ matrix.postgres-version }}_env + options: > + -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' + -e DB_USE_ENV=True + -e PGHOST=postgres + -e PGPASSWORD=test + -e PGUSER=test + -e PGDATABASE=test_${{ matrix.postgres-version }} + -e S3_PATH=s3://test-postgresql-backup/backups + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + -e AWS_DEFAULT_REGION=us-east-1 + + - 
name: Check that table got imported + uses: addnab/docker-run-action@main + with: + image: postgres:${{ matrix.postgres-version }}-alpine + shell: bash + run: > + [[ "1" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; + '` ]] && [[ "Fittstim" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT name FROM books WHERE author=$$Linda Skugge$$; + '` ]] + options: > + -e PGPASSWORD=test + + - name: Clear DB table + uses: addnab/docker-run-action@main + with: + image: postgres:${{ matrix.postgres-version }}-alpine + shell: bash + run: > + psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -c ' + DROP TABLE books; + DROP TABLE movies; + ' && [[ "0" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; + '` ]] + options: > + -e PGPASSWORD=test + + - name: Restore Backup (PG_DUMP_EXTRA_OPTIONS) + uses: addnab/docker-run-action@main + with: + image: postgresql-backup:test-${{ matrix.postgres-version }} + run: python3 -u /backup/restore.py test_${{ matrix.postgres-version }}_exclude + options: > + -e S3_EXTRA_OPTIONS='--endpoint-url=http://s3:8000' + -e DB_HOST=postgres + -e DB_PASS=test + -e DB_USER=test + -e DB_NAME=test_${{ matrix.postgres-version }} + -e S3_PATH=s3://test-postgresql-backup/backups + -e AWS_ACCESS_KEY_ID=access_key + -e AWS_SECRET_ACCESS_KEY=secret + -e AWS_DEFAULT_REGION=us-east-1 + + - name: Check that table got imported (PG_DUMP_EXTRA_OPTIONS) + uses: addnab/docker-run-action@main + with: + image: postgres:${{ matrix.postgres-version }}-alpine + shell: bash + run: > + [[ "1" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$books$$; + '` ]] && [[ "Fittstim" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT name FROM books WHERE author=$$Linda Skugge$$; + '` ]] && [[ "0" == `psql -d test_${{ matrix.postgres-version }} -U test -h postgres -p ${{ job.services.postgres.ports[5432] }} -A -t -c ' + SELECT count(*) FROM pg_catalog.pg_tables WHERE tablename=$$movies$$; + '` ]] + options: > + -e PGPASSWORD=test diff --git a/template/restore.py b/template/restore.py index 0d66e1a..a6ce0a2 100644 --- a/template/restore.py +++ b/template/restore.py @@ -16,15 +16,16 @@ if not DB_NAME: raise Exception("DB_NAME must be set") -DB_HOST = os.environ["DB_HOST"] -DB_PASS = os.environ.get("DB_PASS", "") -DB_USER = os.environ["DB_USER"] -DB_PORT = os.environ.get("DB_PORT", "5432") -DB_PASS_FILE = os.environ.get("DB_PASS_FILE") -if DB_PASS_FILE: - print(f"Reading password from: {DB_PASS_FILE}") - with open(DB_PASS_FILE, "r") as f: - DB_PASS = f.read().strip() +if not DB_USE_ENV: + DB_HOST = os.environ["DB_HOST"] + DB_PASS = os.environ.get("DB_PASS", "") + DB_USER = os.environ["DB_USER"] + DB_PORT = os.environ.get("DB_PORT", "5432") + DB_PASS_FILE = os.environ.get("DB_PASS_FILE") + if DB_PASS_FILE: + print(f"Reading password from: {DB_PASS_FILE}") + with open(DB_PASS_FILE, "r") as f: + DB_PASS = f.read().strip() file_name = sys.argv[1] backup_file = os.path.join(BACKUP_DIR, 
file_name) From 64f8dc254695d6edd7c3451c422397810a88fd92 Mon Sep 17 00:00:00 2001 From: pilotso11 Date: Sat, 19 Jul 2025 19:18:42 +0100 Subject: [PATCH 4/6] remove tests on tag --- .github/workflows/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0c8fd65..23253b9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -3,7 +3,6 @@ name: Full Tests on: push: branches: [ main ] - tags: [ 'v*', 'latest' ] pull_request: branches: [ main ] workflow_dispatch: # Allows manual triggering of the workflow From 99ca663aa21db85f3b468d16ed6ae31f6eaf24e9 Mon Sep 17 00:00:00 2001 From: pilotso11 Date: Sat, 19 Jul 2025 19:33:35 +0100 Subject: [PATCH 5/6] Readme updates --- README.md | 10 ++++++---- template/backup.py | 2 ++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index ac6b46d..36a1046 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,9 @@ docker run -it --rm --name=pgbackup \ -e DB_HOST=the.db.host \ -e DB_USER=username \ -e DB_PASS=password \ + -e DB_PASS_FILE=/run/secrets/db_password \ -e DB_NAME=database_name \ + -e DB_NAMES="db1,db2,db3" \ -e S3_PATH='s3://my-bucket/backups/' \ -e AWS_ACCESS_KEY_ID='[aws key id]' \ -e AWS_SECRET_ACCESS_KEY='[aws secret key]' \ @@ -26,8 +28,10 @@ docker run -it --rm --name=pgbackup \ * `CRON_SCHEDULE`: The time schedule part of a crontab file (e.g: `15 3 * * *` for every night 03:15) * `DB_HOST`: Postgres hostname * `DB_PASS`: Postgres password +* `DB_PASS_FILE`: Path to a file containing the Postgres password. If set, overrides `DB_PASS`. * `DB_USER`: Postgres username * `DB_NAME`: Name of database +* `DB_NAMES`: Comma-separated list of database names to back up. If set, overrides `DB_NAME` and backs up each listed database. ## Optional environment variables @@ -93,11 +97,9 @@ To do this, we run the container with the command: `python -u /backup/restore.py The following environment variables are required: * `DB_HOST`: Postgres hostname -* `DB_PASS`: Postgres password -* `DB_PASS_FILE`: Path to a file containing the Postgres password. If this is set, `DB_PASS` will be ignored. +* `DB_PASS` or `DB_PASS_FILE`: Postgres password * `DB_USER`: Postgres username -* `DB_NAME`: Name of database to import into -* `DB_NAMES`: Comma separated list of database names to restore. If not set, the first database in the backup file will be restored. 
+* `DB_NAME` or `DB_NAMES`: Name of database to import into The following environment variables are required if the file to restore is not already in the backup volume: diff --git a/template/backup.py b/template/backup.py index 366cc26..1abf864 100644 --- a/template/backup.py +++ b/template/backup.py @@ -161,6 +161,8 @@ def main_one(db_name: str): def main(): if not DB_NAMES: + if not DB_NAME: + raise Exception("DB_NAME must be set") main_one(DB_NAME) else: for name in DB_NAMES.split(","): From cededfc58b21fee10311a7a6e1ae03fc01c2e3bb Mon Sep 17 00:00:00 2001 From: pilotso11 Date: Sat, 19 Jul 2025 20:31:42 +0100 Subject: [PATCH 6/6] Readme updates --- README.md | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 36a1046..a517163 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # Docker PostgreSQL Backup -[![Build Status](https://github.com/heyman/postgresql-backup/workflows/Test/badge.svg)](https://github.com/heyman/postgresql-backup/actions?query=workflow%3ATest) +[![Build Status](https://github.com/pilotso11/postgresql-backup/workflows/Test/badge.svg)](https://github.com/pilotso11/postgresql-backup/actions?query=workflow%3ATest) Docker image that periodically dumps a Postgres database, and optionally uploads it to an Amazon S3 bucket. -Available on Docker Hub: [heyman/postgresql-backup](https://hub.docker.com/r/heyman/postgresql-backup) +Available on Docker Hub: [pilotso11/postgresql-backup](https://hub.docker.com/r/pilotso11/postgresql-backup) ## Example @@ -20,7 +20,7 @@ docker run -it --rm --name=pgbackup \ -e S3_PATH='s3://my-bucket/backups/' \ -e AWS_ACCESS_KEY_ID='[aws key id]' \ -e AWS_SECRET_ACCESS_KEY='[aws secret key]' \ - heyman/postgresql-backup:15 + pilotso11/postgresql-backup:latest-pg15 ``` ## Required environment variables @@ -127,11 +127,13 @@ image. The following docker tags are available for this image, and they are based on the corresponding official postgres alpine image: -* `17`, `latest` -* `16` -* `15` -* `14` -* `13` -* `12` -* `11` -* `10` +* `latest-pg17` +* `latest-pg16` +* `latest-pg15` +* `latest-pg14` +* `latest-pg13` +* `latest-pg12` +* `latest-pg11` +* `latest-pg10` + +Previous versions are available as well, but latest is recommended.
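
As a purely illustrative sketch (not part of the patches above), the new `DB_PASS_FILE`, `DB_NAMES`, and `latest-pgXX` tag scheme could be wired together in a Compose file along these lines; the service names, secret path, database names, and schedule below are placeholder assumptions, not values taken from this series:

```yaml
# Illustrative sketch only — service names, paths, and values are assumptions.
services:
  db:
    image: postgres:16-alpine
    environment:
      POSTGRES_PASSWORD_FILE: /run/secrets/pg_password
    secrets:
      - pg_password

  pgbackup:
    image: pilotso11/postgresql-backup:latest-pg16   # tag scheme documented above
    environment:
      CRON_SCHEDULE: "15 3 * * *"                # nightly at 03:15
      DB_HOST: db
      DB_USER: postgres
      DB_PASS_FILE: /run/secrets/pg_password     # read instead of DB_PASS
      DB_NAMES: "app,gitea"                      # each listed database is dumped separately
      KEEP_BACKUP_DAYS: "7"
    secrets:
      - pg_password
    volumes:
      - ./backups:/data/backups

secrets:
  pg_password:
    file: ./pg_password.txt
```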