Database Backup and Restore #1207

name: Database Backup and Restore

on:
  schedule: # 01:00 UTC
    - cron: "0 1 * * *"
  workflow_dispatch:
    inputs:
      overwriteThisMorningsBackup:
        required: true
        type: boolean
        default: false
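
# The backup job dumps the production database, sanitises it in a throwaway
# Postgres service container and publishes the result; the restore-aks job
# then loads the sanitised dump into the lower AKS environments.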
jobs:
  backup:
    name: Sanitise Production Database Backup
    if: ${{ github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.overwriteThisMorningsBackup == 'true') }}
    runs-on: ubuntu-latest
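    # Throwaway Postgres service container used as the intermediate database
    # when restoring and sanitising the production dump.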
    services:
      postgres:
        image: postgres:14.7
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
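      # Read the production key vault name from the Terraform variables file so
      # later steps can pull secrets from it.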
      - name: Set KV environment variables
        run: |
          tf_vars_file=terraform/aks/workspace_variables/production_aks.tfvars.json
          echo "key_vault_name=$(jq -r '.key_vault_name' ${tf_vars_file})" >> $GITHUB_ENV
      - uses: azure/login@v2
        with:
          creds: ${{ secrets.AZURE_CREDENTIALS_PRODUCTION_AKS }}
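      # Install kubectl and configure kubelogin so the runner can authenticate
      # against the AKS cluster.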
      - name: Install kubectl
        uses: azure/setup-kubectl@v4
        with:
          version: "v1.26.1" # default is latest stable
      - uses: DFE-Digital/github-actions/set-kubelogin-environment@master
        with:
          azure-credentials: ${{ secrets.AZURE_CREDENTIALS_PRODUCTION_AKS }}
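      # Fetch cluster credentials and install konduit, the script used below to
      # run pg_dump against the production database.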
      - name: K8s & konduit setup
        shell: bash
        run: |
          make ci production_aks get-cluster-credentials
          make install-konduit
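      # Dump the production database through konduit, compress it, and export
      # the filename for the upload and sanitise steps.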
      - name: Backup Publish Teacher Training Prod Database
        run: |
          now=$(date +"%F")
          PROD_BACKUP=prod_backup-$now.sql
          bin/konduit.sh publish-production -- pg_dump -E utf8 --clean --no-owner --if-exists -f $PROD_BACKUP
          tar -cvzf ${PROD_BACKUP}.tar.gz ${PROD_BACKUP}
          echo "PROD_BACKUP=$PROD_BACKUP" >> $GITHUB_ENV
      - name: Set Connection String
        run: |
          STORAGE_CONN_STR="$(az keyvault secret show --name PUBLISH-STORAGE-ACCOUNT-CONNECTION-STRING-PRODUCTION --vault-name ${{ env.key_vault_name }} | jq -r .value)"
          echo "::add-mask::$STORAGE_CONN_STR"
          echo "STORAGE_CONN_STR=$STORAGE_CONN_STR" >> $GITHUB_ENV
      - name: Upload Backup to Azure Storage
        run: |
          az storage blob upload --container-name database-backup \
            --file ${PROD_BACKUP}.tar.gz --name ${PROD_BACKUP}.tar.gz \
            --connection-string '${{ env.STORAGE_CONN_STR }}'
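      # Restore the dump into the service container, clear user data with
      # db/scripts/sanitise.sql, apply the integration setup script, then
      # re-dump the sanitised database.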
      - name: Sanitise the Database backup
        run: |
          echo "::group::Restore backup to intermediate database"
          createdb ${DATABASE_NAME} && psql -f ${{ env.PROD_BACKUP }} -d ${DATABASE_NAME}
          echo "::endgroup::"
          echo "::group::Clear user data"
          psql -d ${DATABASE_NAME} -f db/scripts/sanitise.sql
          echo "::endgroup::"
          echo "::group::Integration setup"
          psql -d ${DATABASE_NAME} -f db/scripts/integration_setup.sql
          echo "::endgroup::"
          echo "::debug::Remove ${{ env.PROD_BACKUP }}"
          rm ${{ env.PROD_BACKUP }}
          echo "::group::Backup Sanitised Database"
          pg_dump --encoding utf8 --clean --no-owner --if-exists -d ${DATABASE_NAME} -f backup_sanitised.sql
          echo "::endgroup::"
        env:
          DATABASE_NAME: teacher_training_api
          PGUSER: postgres
          PGPASSWORD: postgres
          PGHOST: localhost
          PGPORT: 5432
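      # Publish the sanitised dump as a workflow artifact, retained for seven
      # days, once the backup job completes.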
      - name: Upload Sanitised Backup
        uses: actions/upload-artifact@v4
        with:
          name: backup_sanitised
          path: backup_sanitised.sql
          retention-days: 7
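
  # Restore the sanitised backup into each lower environment (one matrix job
  # per environment) using the repository's local restore action.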
  restore-aks:
    needs: [backup]
    runs-on: ubuntu-latest
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    strategy:
      fail-fast: false
      matrix:
        environment: [qa_aks, staging_aks]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Restore AKS database
        id: restore_aks
        uses: ./.github/actions/restore/
        with:
          azure-credentials: ${{ secrets[format('AZURE_CREDENTIALS_{0}', matrix.environment)] }}
          environment: ${{ matrix.environment }}