Database Backup and Restore #1372

name: Database Backup and Restore

on:
  schedule: # 01:00 UTC
    - cron: "0 1 * * *"
  workflow_dispatch:
    inputs:
      overwriteThisMorningsBackup:
        required: true
        type: boolean
        default: false
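
# The backup job dumps the production database, sanitises it in a throwaway
# Postgres service container and uploads both the raw and the sanitised dumps
# to the database-backup container in Azure blob storage.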
jobs:
  backup:
    name: Sanitise Production Database Backup
    if: ${{ github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.overwriteThisMorningsBackup == 'true') }}
    runs-on: ubuntu-latest
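    # Local Postgres service container used as the intermediate database for
    # restoring and sanitising the production dump.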
    services:
      postgres:
        image: postgres:14.7
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
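
      # Read the production Key Vault name from the Terraform workspace variables.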
      - name: Set KV environment variables
        run: |
          tf_vars_file=terraform/aks/workspace_variables/production.tfvars.json
          echo "key_vault_name=$(jq -r '.key_vault_name' ${tf_vars_file})" >> $GITHUB_ENV
      - uses: azure/login@v2
        with:
          creds: ${{ secrets.AZURE_CREDENTIALS_PRODUCTION }}

      - name: Install kubectl
        uses: DFE-Digital/github-actions/set-kubectl@master

      - uses: DFE-Digital/github-actions/set-kubelogin-environment@master
        with:
          azure-credentials: ${{ secrets.AZURE_CREDENTIALS_PRODUCTION }}
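
      # Fetch cluster credentials and install konduit, which provides a
      # connection to the production database through the cluster so pg_dump
      # can run from the runner.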
      - name: K8s & konduit setup
        shell: bash
        run: |
          make ci production get-cluster-credentials
          make install-konduit
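
      # Dump the production database through konduit and compress it; the file
      # name is exported for the later sanitise and upload steps.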
      - name: Backup Publish Teacher Training Prod Database
        run: |
          now=$(date +"%F")
          PROD_BACKUP=prod_backup-$now.sql
          bin/konduit.sh publish-production -- pg_dump -E utf8 --clean --no-owner --if-exists -f $PROD_BACKUP
          tar -cvzf ${PROD_BACKUP}.tar.gz ${PROD_BACKUP}
          echo "PROD_BACKUP=$PROD_BACKUP" >> $GITHUB_ENV
      - name: Set Connection String
        run: |
          STORAGE_CONN_STR="$(az keyvault secret show --name PUBLISH-STORAGE-ACCOUNT-CONNECTION-STRING-PRODUCTION --vault-name ${{ env.key_vault_name }} | jq -r .value)"
          echo "::add-mask::$STORAGE_CONN_STR"
          echo "STORAGE_CONN_STR=$STORAGE_CONN_STR" >> $GITHUB_ENV
      - name: Upload Backup to Azure Storage
        run: |
          az storage blob upload --container-name database-backup \
            --file ${PROD_BACKUP}.tar.gz --name ${PROD_BACKUP}.tar.gz \
            --connection-string '${{ env.STORAGE_CONN_STR }}' --overwrite

      - name: Set sanitised backup file name
        run: echo "SANITISED_FILE_NAME=publish_sanitised_$(date +"%F")" >> $GITHUB_ENV
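
      # Restore the production dump into the local Postgres service, clear user
      # data with db/scripts/sanitise.sql, apply the integration setup script,
      # then produce a compressed sanitised dump.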
      - name: Sanitise the Database backup
        run: |
          echo "::group::Restore backup to intermediate database"
          createdb ${DATABASE_NAME} && psql -f ${{ env.PROD_BACKUP }} -d ${DATABASE_NAME}
          echo "::endgroup::"

          echo "::group::Clear user data"
          psql -d ${DATABASE_NAME} -f db/scripts/sanitise.sql
          echo "::endgroup::"

          echo "::group::Integration setup"
          psql -d ${DATABASE_NAME} -f db/scripts/integration_setup.sql
          echo "::endgroup::"

          echo "::debug::Remove ${{ env.PROD_BACKUP }}"
          rm ${{ env.PROD_BACKUP }}

          echo "::group::Backup Sanitised Database"
          pg_dump --compress=1 --encoding utf8 --clean --no-owner --if-exists -d ${DATABASE_NAME} -f ${SANITISED_FILE_NAME}.sql.gz
          echo "::endgroup::"
        env:
          DATABASE_NAME: teacher_training_api
          PGUSER: postgres
          PGPASSWORD: postgres
          PGHOST: localhost
          PGPORT: 5432
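
      # Upload the sanitised dump and remove the local copy from the runner.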
      - name: Upload Sanitised Backup to Azure Storage
        run: |
          az storage blob upload --container-name database-backup \
            --file ${SANITISED_FILE_NAME}.sql.gz --name ${SANITISED_FILE_NAME}.sql.gz --overwrite \
            --connection-string '${{ env.STORAGE_CONN_STR }}'
          rm ${SANITISED_FILE_NAME}.sql.gz
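
  # Once the sanitised backup is available, restore it to the non-production
  # environments in parallel.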
  restore:
    needs: [backup]
    runs-on: ubuntu-latest
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    strategy:
      fail-fast: false
      matrix:
        environment: [qa, staging]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
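
      # The local composite action at .github/actions/restore is given the
      # target environment's credentials plus the production credentials
      # (presumably so it can read the sanitised backup from the production
      # storage account) and restores the database for that environment.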
      - name: Restore database
        id: restore
        uses: ./.github/actions/restore/
        with:
          azure-credentials: ${{ secrets[format('AZURE_CREDENTIALS_{0}', matrix.environment)] }}
          prod-credentials: ${{ secrets.AZURE_CREDENTIALS_PRODUCTION }}
          environment: ${{ matrix.environment }}