Merge pull request #104 from kartoza/fixes
update action and improve logging
NyakudyaA authored Nov 5, 2024
2 parents 19623a3 + 8e8ec77 commit e32d3fd
Showing 9 changed files with 183 additions and 114 deletions.
83 changes: 45 additions & 38 deletions .github/workflows/build-latest.yaml
@@ -13,7 +13,7 @@ on:
branches:
- master
jobs:
run-scenario-tests:
build-backup-image:
if: |
github.actor != 'dependabot[bot]' &&
!(
@@ -29,9 +29,6 @@ jobs:
- 3
postgisMinorRelease:
- 5
scenario:
- restore
- s3
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
@@ -47,16 +44,47 @@ jobs:
push: false
load: true
tags: kartoza/pg-backup:manual-build
outputs: type=docker,dest=/tmp/pg-backup.tar
build-args: |
POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
POSTGIS_MAJOR_VERSION=${{ matrix.postgisMajorVersion }}
POSTGIS_MINOR_VERSION=${{ matrix.postgisMinorRelease }}
cache-from: |
type=gha,scope=test
type=gha,scope=prod
type=gha,scope=base
type=gha,scope=test
type=gha,scope=prod
type=gha,scope=base
cache-to: type=gha,scope=test
target: postgis-backup-test
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: kartoza-pg-backup
path: /tmp/pg-backup.tar

run-scenario-tests:
if: |
github.actor != 'dependabot[bot]' &&
!(
contains(github.event.pull_request.title, '[skip-release]') ||
contains(github.event.comment.body, '/skiprelease')
)
runs-on: ubuntu-latest
needs: [ build-backup-image ]
strategy:
matrix:
scenario:
- restore
- s3
steps:
- uses: actions/checkout@v4
- name: Download artifact
uses: actions/download-artifact@v4
with:
name: kartoza-pg-backup
path: /tmp
- name: Load image
run: |
docker load --input /tmp/pg-backup.tar
- name: Run scenario test ${{ matrix.scenario }}
working-directory: scenario_tests/${{ matrix.scenario }}
@@ -76,21 +104,17 @@ jobs:
contains(github.event.comment.body, '/skiprelease')
)
runs-on: ubuntu-latest
needs: [ run-scenario-tests ]
strategy:
matrix:
postgresMajorVersion:
- 17
postgisMajorVersion:
- 3
postgisMinorRelease:
- 5
needs: [ build-backup-image ]
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Download artifact
uses: actions/download-artifact@v4
with:
name: kartoza-pg-backup
path: /tmp
- name: Load image
run: |
docker load --input /tmp/pg-backup.tar
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -106,21 +130,4 @@ jobs:
type=ref,event=branch
type=ref,event=pr
- name: Build image for testing
id: docker_build_testing_image
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile
push: true
tags: |
${{ steps.docker_meta.outputs.tags }}-${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}
build-args: |
POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
POSTGIS_MAJOR_VERSION=${{ matrix.postgisMajorVersion }}
POSTGIS_MINOR_VERSION=${{ matrix.postgisMinorRelease }}
cache-from: |
type=gha,scope=test
type=gha,scope=prod
cache-to: type=gha,scope=test
target: postgis-backup-test
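
The build is now split so the test image is produced once and reused: build-backup-image exports it to /tmp/pg-backup.tar (outputs: type=docker,dest=...), uploads the tar as the kartoza-pg-backup artifact, and both run-scenario-tests and the push job download and docker load it instead of rebuilding. A minimal local sketch of the same save/load round trip, useful to confirm the archive carries the manual-build tag; the paths and build args mirror the matrix values and are illustrative, not part of the workflow:

# Build the test target once, as the workflow does, then export and re-import it.
docker build --target postgis-backup-test -t kartoza/pg-backup:manual-build \
  --build-arg POSTGRES_MAJOR_VERSION=17 \
  --build-arg POSTGIS_MAJOR_VERSION=3 \
  --build-arg POSTGIS_MINOR_VERSION=5 .
docker save kartoza/pg-backup:manual-build -o /tmp/pg-backup.tar
# A downstream job (or another machine) restores the identical image from the archive.
docker load --input /tmp/pg-backup.tar
docker image ls kartoza/pg-backup:manual-build
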
10 changes: 6 additions & 4 deletions .github/workflows/deploy-image.yaml
@@ -47,9 +47,11 @@ jobs:
- name: Check if image exists on Docker Hub
id: check_hub_image_exists
run: |
docker login --username ${{ secrets.DOCKERHUB_USERNAME }} --password ${{ secrets.DOCKERHUB_PASSWORD }}
echo ${{ secrets.DOCKERHUB_PASSWORD }} > /tmp/credentials.txt
cat /tmp/credentials.txt | docker login --username ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin
TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.DOCKERHUB_PASSWORD }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token)
check_image=$(curl --silent -f --head -lL https://hub.docker.com/v2/repositories/kartoza/pg-backup/tags/${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}/ | head -n 1 | cut -d ' ' -f2) >> $GITHUB_OUTPUT
rm /tmp/credentials.txt
- name: Build prod image
id: docker_build_prod
@@ -61,7 +63,7 @@ jobs:
push: true
tags: |
${{ secrets.DOCKERHUB_REPO }}/pg-backup
${{ steps.check_hub_image_exists.outputs.check_image == 200 && format('{0}/pg-backup:{1}-{2}.{3}', secrets.DOCKERHUB_REPO, matrix.postgresMajorVersion, matrix.postgisMajorVersion, matrix.postgisMinorRelease) || null}}
${{ secrets.DOCKERHUB_REPO }}/pg-backup:${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}
${{ secrets.DOCKERHUB_REPO }}/pg-backup:${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--v${{ steps.current_date.outputs.formatted }}
build-args: |
POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
@@ -100,7 +102,7 @@ jobs:

- name: Get Current Date
id: current_date
run: echo "formatted=$(date -u +%Y.%m.%d)" >> $GITHUB_OUTPUT
run: echo "formatted=$(date -u +%Y-%m-%d)" >> $GITHUB_OUTPUT

- name: Get Latest Commit Hash
id: latest_commit_hash
@@ -109,6 +111,6 @@ jobs:
- name: publish_release
id: tag_releases
run: |
gh release create v${{ matrix.postgresMajorVersion }}.${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--v${{ steps.current_date.outputs.formatted }}--${{ steps.latest_commit_hash.outputs.commit }} --notes ${{ steps.latest_commit_hash.outputs.commit }} --target master --repo $GITHUB_REPOSITORY
gh release create v${{ matrix.postgresMajorVersion }}.${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--${{ steps.current_date.outputs.formatted }}--${{ steps.latest_commit_hash.outputs.commit }} --notes ${{ steps.latest_commit_hash.outputs.commit }} --target master --repo $GITHUB_REPOSITORY
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
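
Two changes in deploy-image.yaml are worth noting: the login step now pipes the password to docker login --password-stdin rather than passing it as a command-line argument, and the workflow queries the Docker Hub API to see whether the version tag already exists before composing the tags to push. A hedged, stand-alone sketch of that existence check follows; the repository and tag values are placeholders, while the workflow derives them from its matrix:

#!/usr/bin/env bash
set -euo pipefail
REPO="kartoza/pg-backup"   # placeholder; the workflow builds its own repo/tag strings
TAG="17-3.5"
# Obtain a short-lived JWT from the Docker Hub login endpoint.
TOKEN=$(curl -s -H "Content-Type: application/json" -X POST \
  -d "{\"username\": \"${DOCKERHUB_USERNAME}\", \"password\": \"${DOCKERHUB_PASSWORD}\"}" \
  https://hub.docker.com/v2/users/login/ | jq -r .token)
# HTTP 200 means the tag already exists; 404 means it does not.
STATUS=$(curl -s -o /dev/null -w '%{http_code}' -H "Authorization: JWT ${TOKEN}" \
  "https://hub.docker.com/v2/repositories/${REPO}/tags/${TAG}/")
echo "HTTP ${STATUS} for ${REPO}:${TAG}"
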
2 changes: 1 addition & 1 deletion Dockerfile
@@ -18,7 +18,7 @@ ENV \

ADD build_data /build_data
ADD scripts /backup-scripts
RUN chmod 0755 /backup-scripts/*.sh
RUN echo ${POSTGRES_MAJOR_VERSION} > /tmp/pg_version.txt && chmod 0755 /backup-scripts/*.sh
RUN sed -i 's/PostGIS/PgBackup/' ~/.bashrc

WORKDIR /backup-scripts
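
The Dockerfile change records the build-time POSTGRES_MAJOR_VERSION in /tmp/pg_version.txt so scripts in the running container can read it back. A hedged sketch of how a runtime script could use that file; the Debian-style binary path is an assumption, not something this diff shows:

# Recover the major version recorded at image build time.
POSTGRES_MAJOR_VERSION=$(cat /tmp/pg_version.txt)
# Assumption: Debian-style layout for versioned PostgreSQL client binaries.
export PATH="/usr/lib/postgresql/${POSTGRES_MAJOR_VERSION}/bin:${PATH}"
pg_dump --version
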
2 changes: 1 addition & 1 deletion build_data/backups-cron
@@ -1,4 +1,4 @@
# Run the backups at 11pm each night
${CRON_SCHEDULE} /backup-scripts/backups.sh > /var/log/cron.out 2>&1
${CRON_SCHEDULE} /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}

# We need a blank line here for it to be a valid cron file
2 changes: 1 addition & 1 deletion build_data/backups-cron-default
@@ -1,4 +1,4 @@
# Run the backups at 11pm each night
0 23 * * * /backup-scripts/backups.sh > /var/log/cron.out 2>&1
0 23 * * * /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}

# We need a blank line here for it to be a valid cron file
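
Both cron templates now redirect to ${CONSOLE_LOGGING_OUTPUT} instead of the hard-coded /var/log/cron.out, so the log destination can be chosen when the container starts. A hedged sketch of how an entrypoint might set the variable and render the template; the /proc/1/fd/1 target (the container's stdout, visible via docker logs) and the exact mechanism are assumptions, not taken from this diff:

# Assumption: entrypoint logic selecting where cron output should go.
if [[ "${CONSOLE_LOGGING:-}" =~ [Tt][Rr][Uu][Ee] ]]; then
  export CONSOLE_LOGGING_OUTPUT="/proc/1/fd/1"      # PID 1's stdout -> `docker logs`
else
  export CONSOLE_LOGGING_OUTPUT="/var/log/cron.out"
fi
# Substitute only the expected variables into the cron template, then install it.
envsubst '${CRON_SCHEDULE} ${CONSOLE_LOGGING_OUTPUT}' < /build_data/backups-cron > /tmp/crontab.txt
crontab /tmp/crontab.txt
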
4 changes: 3 additions & 1 deletion docker-compose.yml
@@ -29,8 +29,10 @@ services:
- POSTGRES_PASS=docker
- POSTGRES_PORT=5432
- RUN_AS_ROOT=true
#- CRON_SCHEDULE="*/5 * * * *"
- CRON_SCHEDULE="*/5 * * * *"
#- CONSOLE_LOGGING=TRUE
#- DB_DUMP_ENCRYPTION=true
#- DB_TABLES=TRUE
restart: on-failure
depends_on:
db:
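
Un-commenting CRON_SCHEDULE in docker-compose.yml switches the container from the default nightly run (0 23 * * *, from backups-cron-default) to an every-five-minutes schedule, which is convenient when exercising the new logging. An illustrative breakdown of the five cron fields:

# Cron fields: minute hour day-of-month month day-of-week
# "*/5 * * * *" -> every 5 minutes (test value);  "0 23 * * *" -> 23:00 daily (image default).
echo "${CRON_SCHEDULE:-0 23 * * *}" | awk '{print "minute="$1, "hour="$2, "dom="$3, "month="$4, "dow="$5}'
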
126 changes: 77 additions & 49 deletions scripts/backups.sh
@@ -2,17 +2,25 @@

source /backup-scripts/pgenv.sh

# Env variables
MYDATE=$(date +%d-%B-%Y)
MONTH=$(date +%B)
YEAR=$(date +%Y)
MYBASEDIR=/${BUCKET}
MYBACKUPDIR=${MYBASEDIR}/${YEAR}/${MONTH}
mkdir -p ${MYBACKUPDIR}
pushd ${MYBACKUPDIR} || exit

function s3_config() {
if [[ ! -f /root/.s3cfg ]]; then
# If it doesn't exists, copy from ${EXTRA_CONF_DIR} directory if exists
if [[ -f ${EXTRA_CONFIG_DIR}/s3cfg ]]; then
cp -f ${EXTRA_CONFIG_DIR}/s3cfg /root/.s3cfg
else
# default value
envsubst < /build_data/s3cfg > /root/.s3cfg
fi
# If it doesn't exists, copy from ${EXTRA_CONF_DIR} directory if exists
if [[ -f ${EXTRA_CONFIG_DIR}/s3cfg ]]; then
cp -f ${EXTRA_CONFIG_DIR}/s3cfg /root/.s3cfg
else
# default value
envsubst < /build_data/s3cfg > /root/.s3cfg
fi


}

# Cleanup S3 bucket
@@ -38,37 +46,56 @@ function clean_s3bucket() {
}

function dump_tables() {
DATABASE=$1
DATABASE_DUMP_OPTIONS=$2
TIME_STAMP=$3
DATA_PATH=$4
array=($(PGPASSWORD=${POSTGRES_PASS} psql ${PG_CONN_PARAMETERS} -d ${DATABASE} -At --field-separator '.' -c "SELECT table_schema,table_name FROM information_schema.tables
where table_schema not in ('information_schema','pg_catalog','topology') and table_name
not in ('raster_columns','raster_overviews','spatial_ref_sys', 'geography_columns', 'geometry_columns')
ORDER BY table_schema,table_name;"))
for i in "${array[@]}"; do
IFS='.'
read -a strarr <<< "$i"
SCHEMA_NAME="${strarr[0]}"
TABLE_NAME="${strarr[1]}"
# names and schema names
if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} -d ${DATABASE} ${DATABASE_DUMP_OPTIONS} -t ${SCHEMA_NAME}."${TABLE_NAME}" | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out $DATA_PATH/${DATABASE}_${SCHEMA_NAME}_"${TABLE_NAME}"_${TIME_STAMP}.dmp
else
PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} -d ${DATABASE} ${DATABASE_DUMP_OPTIONS} -t ${SCHEMA_NAME}."${TABLE_NAME}" >$DATA_PATH/${DATABASE}_${SCHEMA_NAME}_"${TABLE_NAME}"_${TIME_STAMP}.dmp

fi
done
DATABASE=$1

# Retrieve table names
array=($(PGPASSWORD=${POSTGRES_PASS} psql ${PG_CONN_PARAMETERS} -d ${DATABASE} -At -F '.' -c "SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN ('information_schema', 'pg_catalog', 'topology') AND table_name NOT IN ('raster_columns', 'raster_overviews', 'spatial_ref_sys', 'geography_columns', 'geometry_columns') ORDER BY table_schema, table_name;"))

for i in "${array[@]}"; do

IFS='.' read -r -a strarr <<< "$i"
SCHEMA_NAME="${strarr[0]}"
TABLE_NAME="${strarr[1]}"

# Combine schema and table name
DB_TABLE="${SCHEMA_NAME}.${TABLE_NAME}"
# Check dump format
if [[ ${DUMP_ARGS} == '-Fc' ]]; then
FORMAT='dmp'
else
FORMAT='sql'
fi

# Construct filename
FILENAME="${DUMPPREFIX}_${DB_TABLE}_${MYDATE}.${FORMAT}"

# Log the backup start time
echo -e "Backup of \e[1;31m ${DB_TABLE} \033[0m from DATABASE \e[1;31m ${DATABASE} \033[0m starting at \e[1;31m $(date) \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}

export PGPASSWORD=${POSTGRES_PASS}

# Dump command
if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]]; then
# Encrypted backup
pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DATABASE}" -t "${DB_TABLE}" | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out "${FILENAME}"
if [[ $? -ne 0 ]];then
echo -e "Backup of \e[0;32m ${DB_TABLE} \033[0m from DATABASE \e[0;32m ${DATABASE} \033[0m failed" >> ${CONSOLE_LOGGING_OUTPUT}
fi
else
# Plain backup
pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DATABASE}" -t "${DB_TABLE}" > "${FILENAME}"
if [[ $? -ne 0 ]];then
echo -e "Backup of \e[0;32m ${DB_TABLE} \033[0m from DATABASE \e[0;32m ${DATABASE} \033[0m failed" >> ${CONSOLE_LOGGING_OUTPUT}
fi
fi

# Log the backup completion time
echo -e "Backup of \e[1;33m ${DB_TABLE} \033[0m from DATABASE \e[1;33m ${DATABASE} \033[0m completed at \e[1;33m $(date) \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}

done
}

# Env variables
MYDATE=$(date +%d-%B-%Y)
MONTH=$(date +%B)
YEAR=$(date +%Y)
MYBASEDIR=/${BUCKET}
MYBACKUPDIR=${MYBASEDIR}/${YEAR}/${MONTH}
mkdir -p ${MYBACKUPDIR}
pushd ${MYBACKUPDIR} || exit

function backup_db() {
EXTRA_PARAMS=''
@@ -81,22 +108,25 @@ function backup_db() {
else
export FILENAME=${MYBASEDIR}/"${ARCHIVE_FILENAME}.${DB}.dmp"
fi
echo "Backing up $DB" >>/var/log/cron.log
if [ -z "${DB_TABLES:-}" ]; then

if [[ "${DB_TABLES}" =~ [Ff][Aa][Ll][Ss][Ee] ]]; then
export PGPASSWORD=${POSTGRES_PASS}
echo -e "Backup of \e[1;31m ${DB} \033[0m starting at \e[1;31m $(date) \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}
if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out ${FILENAME}
pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out ${FILENAME}
else
PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} > ${FILENAME}
pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} > ${FILENAME}
fi
echo "Backing up $FILENAME done" >>/var/log/cron.log
echo -e "Backup of \e[1;33m ${DB} \033[0m completed at \e[1;33m $(date) \033[0m and dump located at \e[1;33m ${FILENAME} \033[0m " >> ${CONSOLE_LOGGING_OUTPUT}
if [[ ${STORAGE_BACKEND} == "S3" ]]; then
gzip $FILENAME
echo "Backing up $FILENAME to s3://${BUCKET}/" >>/var/log/cron.log
gzip ${FILENAME}
echo -e "Pushing database backup \e[1;31m ${FILENAME} \033[0m to \e[1;31m s3://${BUCKET}/ \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}
${EXTRA_PARAMS}
rm ${MYBACKUPDIR}/*.dmp.gz
fi
else
dump_tables ${DB} ${DUMP_ARGS} ${MYDATE} ${MYBACKUPDIR}

dump_tables ${DB}
if [[ ${STORAGE_BACKEND} == "S3" ]]; then
${EXTRA_PARAMS}
rm ${MYBACKUPDIR}/*
@@ -118,7 +148,7 @@ if [[ ${STORAGE_BACKEND} == "S3" ]]; then

# Backup globals Always get the latest
PGPASSWORD=${POSTGRES_PASS} pg_dumpall ${PG_CONN_PARAMETERS} --globals-only | s3cmd put - s3://${BUCKET}/globals.sql
echo "Sync globals.sql to ${BUCKET} bucket " >>/var/log/cron.log
echo "Sync globals.sql to ${BUCKET} bucket " >> ${CONSOLE_LOGGING_OUTPUT}
backup_db "s3cmd sync -r ${MYBASEDIR}/* s3://${BUCKET}/"

elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then
@@ -129,14 +159,12 @@ elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then

fi

echo "Backup running to $MYBACKUPDIR" >>/var/log/cron.log


if [ "${REMOVE_BEFORE:-}" ]; then
TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))
if [[ ${STORAGE_BACKEND} == "FILE" ]]; then
echo "Removing following backups older than ${REMOVE_BEFORE} days" >>/var/log/cron.log
find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>>/var/log/cron.log
echo "Removing following backups older than ${REMOVE_BEFORE} days" >> ${CONSOLE_LOGGING_OUTPUT}
find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete & >> ${CONSOLE_LOGGING_OUTPUT}
elif [[ ${STORAGE_BACKEND} == "S3" ]]; then
# Credits https://shout.setfive.com/2011/12/05/deleting-files-older-than-specified-time-with-s3cmd-and-bash/
clean_s3bucket "${BUCKET}" "${REMOVE_BEFORE} days"
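
In the retention block, REMOVE_BEFORE is given in days and converted to minutes so find -mmin can prune old dumps on the FILE backend. A small worked example of the same arithmetic; /backups is a placeholder for the script's ${MYBASEDIR}:

# 7 days -> 7 * 24 * 60 = 10080 minutes older than "now".
REMOVE_BEFORE=7
TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))
# Placeholder path; the script itself prunes under ${MYBASEDIR} (i.e. /${BUCKET}).
find /backups -type f -mmin +"${TIME_MINUTES}" -print -delete
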