Add support for single DB upload
nihaldivyam committed Mar 28, 2024
1 parent d84adbb commit 8059688
Showing 3 changed files with 131 additions and 10 deletions.
46 changes: 46 additions & 0 deletions .github/workflows/logical-backup.yml
@@ -0,0 +1,46 @@
name: Build Logical backup

on:
  push:
    branches: [ "fix-logical-backup" ]
    paths: 'logical-backup/**'
    tags: [ "v*" ]

jobs:
  build-and-push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Generate container metadata
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: ghcr.io/obmondo/logical-backup
          tags: |
            type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
            type=raw,value=latest,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
            type=ref,event=tag
          flavor: |
            latest=false
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build & push container image
        id: docker_build
        uses: docker/[email protected]
        with:
          file: "./logical-backup/Dockerfile"
          context: .
          labels: ${{ steps.meta.outputs.labels }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
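
For orientation, here is a rough sketch of the image references the tag rules above should yield. This is a best-effort reading of the docker/metadata-action configuration, not verified output; the version tag v1.2.3 below is a hypothetical example:

# Push to the feature branch "fix-logical-backup" (not the default branch):
# the branch-name rule is enabled, so the image is tagged with the branch name.
docker pull ghcr.io/obmondo/logical-backup:fix-logical-backup

# Push to the default branch: only the raw "latest" rule is enabled
# (flavor latest=false keeps the action from adding "latest" anywhere else).
docker pull ghcr.io/obmondo/logical-backup:latest

# Push of a git tag such as v1.2.3: the tag rule applies.
docker pull ghcr.io/obmondo/logical-backup:v1.2.3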
2 changes: 1 addition & 1 deletion logical-backup/Dockerfile
@@ -39,4 +39,4 @@ COPY ./* ./

ENV PG_DIR=/usr/lib/postgresql

ENTRYPOINT ["/dump.sh"]
ENTRYPOINT ["/dump.sh"]
93 changes: 84 additions & 9 deletions logical-backup/dump.sh
@@ -1,6 +1,6 @@
#! /usr/bin/env bash

set -eou pipefail
set -x
IFS=$'\n\t'

## NOTE, these env needs to be passed in the cronjob
@@ -13,25 +13,50 @@ ERRORCOUNT=0
POSTGRES_OPERATOR=spilo
LOGICAL_BACKUP_PROVIDER=${LOGICAL_BACKUP_PROVIDER:="s3"}
LOGICAL_BACKUP_S3_RETENTION_TIME=${LOGICAL_BACKUP_S3_RETENTION_TIME:=""}
LOGICAL_BACKUP_AZURE_RETENTION_TIME=${LOGICAL_BACKUP_AZURE_RETENTION_TIME:=""}

PG_DUMP_EXTRA_ARGUMENTS=${PG_DUMP_EXTRA_ARGUMENTS:=""}
PG_DUMPALL_EXTRA_ARGUMENTS=${PG_DUMPALL_EXTRA_ARGUMENTS:=""}
PG_DUMP_NJOBS=${PG_DUMP_NJOBS:-4}
PG_DUMP_COMPRESS=${PG_DUMP_COMPRESS:-6}

function estimate_size {
    "$PG_BIN"/psql -tqAc "${ALL_DB_SIZE_QUERY}"
}

function dump_db_multi {
    echo "Taking dump of $1 from ${PGHOST}"
    # settings are taken from the environment
    "$PG_BIN"/pg_dump --file "/tmp/db-$1" "$1"
}

function dump {
    echo "Taking dump of ${PGDATABASE} from ${PGHOST}"
    # settings are taken from the environment
    "$PG_BIN"/pg_dump
}

function dump_global {
    "$PG_BIN"/pg_dumpall --globals-only $PG_DUMPALL_EXTRA_ARGUMENTS
}

function list_databases {
    # SQL from dumpall
    psql -c "SELECT datname FROM pg_database d WHERE datallowconn AND datconnlimit != -2 ORDER BY (datname <> 'template1'), datname" --csv | tail -n +2
}

function compress {
    pigz
}

function az_upload {
    PATH_TO_BACKUP="${LOGICAL_BACKUP_S3_BUCKET}/${POSTGRES_OPERATOR}/${LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX}/logical_backups/$(date +%s).sql.gz"
    PATH_TO_BACKUP="${LOGICAL_BACKUP_AZURE_CONTAINER}/${POSTGRES_OPERATOR}/${LOGICAL_BACKUP_AZURE_SCOPE_SUFFIX}/${PGUSER}/logical_backups/$(date +%s).sql.gz"

    az storage blob upload --file "${1}" --account-name "${LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_NAME}" --account-key "${LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_KEY}" -c "${LOGICAL_BACKUP_AZURE_CONTAINER}" -n "${PATH_TO_BACKUP}"
}

    az storage blob upload --file "${1}" --account-name "${LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_NAME}" --account-key "${LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_KEY}" -c "${LOGICAL_BACKUP_AZURE_STORAGE_CONTAINER}" -n "${PATH_TO_BACKUP}"
function az_upload_dir {
    az storage blob upload-batch --account-name "$LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_NAME" --account-key "$LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_KEY" -d "$LOGICAL_BACKUP_AZURE_STORAGE_CONTAINER" --destination-path $2 -s "$1"
}

function aws_delete_objects {
@@ -91,7 +116,7 @@ function aws_upload
    # mimic bucket setup from Spilo
    # to keep logical backups at the same path as WAL
    # NB: $LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX already contains the leading "/" when set by the Postgres Operator
    PATH_TO_BACKUP=s3://${LOGICAL_BACKUP_S3_BUCKET}"/"${POSTGRES_OPERATOR}"/"${LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX}"/logical_backups/"$(date +%s).sql.gz
    PATH_TO_BACKUP="s3://${LOGICAL_BACKUP_S3_BUCKET}/${POSTGRES_OPERATOR}/${LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX}/${PGUSER}/logical_backups/$(date +%s).sql.gz"

    args=()

@@ -103,8 +128,26 @@ function aws_upload
    aws s3 cp - "$PATH_TO_BACKUP" "${args[@]//\'/}"
}

function aws_upload_dir {
    declare -r EXPECTED_SIZE="$1"

    # mimic bucket setup from Spilo
    # to keep logical backups at the same path as WAL
    # NB: $LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX already contains the leading "/" when set by the Postgres Operator
    PATH_TO_BACKUP="s3://${LOGICAL_BACKUP_S3_BUCKET}/${POSTGRES_OPERATOR}/${LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX}/${PGUSER}/logical_backups/$(date +%Y-%m-%d)"

    args=()

    [[ ! -z "${EXPECTED_SIZE}" ]] && args+=("--expected-size=${EXPECTED_SIZE}")
    [[ ! -z "${LOGICAL_BACKUP_S3_ENDPOINT}" ]] && args+=("--endpoint-url=${LOGICAL_BACKUP_S3_ENDPOINT}")
    [[ ! -z "${LOGICAL_BACKUP_S3_REGION}" ]] && args+=("--region=${LOGICAL_BACKUP_S3_REGION}")

    echo "Uploading dump dir to s3"
    aws s3 cp "$1" "$PATH_TO_BACKUP"/$dbname.gz
}

function gcs_upload {
    PATH_TO_BACKUP=gs://${LOGICAL_BACKUP_S3_BUCKET}"/"${POSTGRES_OPERATOR}"/"${LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX}"/logical_backups/"$(date +%s).sql.gz
    PATH_TO_BACKUP=gs://${LOGICAL_BACKUP_S3_BUCKET}"/"${POSTGRES_OPERATOR}"/"${LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX}"/logical_backups/"$(date +%Y-%m-%d).sql.gz

    gsutil -o Credentials:gs_service_key_file=${LOGICAL_BACKUP_GOOGLE_APPLICATION_CREDENTIALS} cp - "${PATH_TO_BACKUP}"
}
@@ -118,15 +161,47 @@ function upload
            aws_upload $(($(estimate_size) / DUMP_SIZE_COEFF))
            aws_delete_outdated
            ;;
        "az")
            az_upload
            ;;
    esac
}

if [ "$LOGICAL_BACKUP_PROVIDER" == "az" ]; then
dump | compress > /tmp/azure-backup.sql.gz
az_upload /tmp/azure-backup.sql.gz
if [[ "$LOGICAL_BACKUP_PROVIDER" == "s3" && -z "$PGDATABASE" ]]; then
set +x

list_databases > /tmp/database-list
while read dbname; do
echo "Dumping $dbname..."
dump_db_multi $dbname
echo "Uploading directory /tmp/db-$dbname to $PATH_TO_BACKUP/$dbname..."
pigz "/tmp/db-$dbname"
aws_upload_dir "/tmp/db-$dbname.gz" "$PATH_TO_BACKUP/$dbname"
echo "Cleaning up /tmp/db-$dbname..."
rm -rf "/tmp/db-$dbname.gz"
done < /tmp/database-list
rm /tmp/database-list

exit $ERRORCOUNT
elif [[ "$LOGICAL_BACKUP_PROVIDER" == "az" && -z "$PGDATABASE" ]]; then
set +x

list_databases > /tmp/database-list
while read dbname; do
echo "Dumping $dbname..."
dump_db_multi $dbname
echo "Uploading directory /tmp/db-$dbname to $PATH_TO_BACKUP/$dbname..."
pigz "/tmp/db-$dbname"
az_upload_dir "/tmp/db-$dbname" "$PATH_TO_BACKUP/$dbname"
echo "Cleaning up /tmp/db-$dbname..."
rm -rf "/tmp/db-$dbname"
done < /tmp/database-list
rm /tmp/database-list

exit $ERRORCOUNT
else
dump | compress | upload
[[ ${PIPESTATUS[0]} != 0 || ${PIPESTATUS[1]} != 0 || ${PIPESTATUS[2]} != 0 ]] && (( ERRORCOUNT += 1 ))
set +x
exit $ERRORCOUNT
fi
fi
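
For context, a minimal invocation sketch of the two code paths introduced above. The environment variable names are taken from the script itself; the image reference, host name, bucket name, and credential handling (e.g. PGPASSWORD) are illustrative assumptions, not something this commit defines:

# Per-database mode: leave PGDATABASE unset so the script lists all databases
# and dumps/uploads each one under .../<PGUSER>/logical_backups/<YYYY-MM-DD>/<dbname>.gz
docker run --rm \
  -e LOGICAL_BACKUP_PROVIDER=s3 \
  -e LOGICAL_BACKUP_S3_BUCKET=my-backup-bucket \
  -e PGHOST=db.example.com -e PGUSER=postgres -e PGPASSWORD=... \
  ghcr.io/obmondo/logical-backup:latest

# Single-database mode: set PGDATABASE so execution falls through to the
# original "dump | compress | upload" pipeline for that one database.
docker run --rm \
  -e PGDATABASE=mydb \
  -e LOGICAL_BACKUP_PROVIDER=s3 \
  -e LOGICAL_BACKUP_S3_BUCKET=my-backup-bucket \
  -e PGHOST=db.example.com -e PGUSER=postgres -e PGPASSWORD=... \
  ghcr.io/obmondo/logical-backup:latest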
