Commit

Adjust needs for build / deploy to streamline / separate cleanup from deployment steps

Adjust the test for whether file migration is required (near-empty destination dir)
Refactor the cleanup workflow
warrenchristian1telus committed Oct 9, 2024
1 parent a5dea3a commit 0872e78
Showing 3 changed files with 113 additions and 60 deletions.
46 changes: 21 additions & 25 deletions .github/workflows/build.yml
@@ -124,6 +124,25 @@ jobs:
echo "-----------------------"
echo URL: https://${{ env.APP_NAME }}-${{ github.ref_name }}.apps.silver.devops.gov.bc.ca
# Clean up resources if CLEAN_BUILDS is set to "YES"
call-cleanup-workflow:
name: 🧹️ Clean-up
uses: ./.github/workflows/cleanup.yml
secrets: inherit
needs: [checkEnv]
if: needs.checkEnv.outputs.CLEAN_BUILDS == 'YES'
with:
APP_NAME: ${{ needs.checkEnv.outputs.APP_NAME }}
APP_HOST_URL: ${{ needs.checkEnv.outputs.APP_HOST_URL }}
DEPLOY_NAMESPACE: ${{ needs.checkEnv.outputs.DEPLOY_NAMESPACE }}
DB_NAME: ${{ needs.checkEnv.outputs.DB_NAME }}
WEB_NAME: ${{ needs.checkEnv.outputs.WEB_NAME }}
PHP_NAME: ${{ needs.checkEnv.outputs.PHP_NAME }}
CRON_NAME: ${{ needs.checkEnv.outputs.CRON_NAME }}
REDIS_NAME: ${{ needs.checkEnv.outputs.REDIS_NAME }}
DB_BACKUP_DEPLOYMENT_NAME: ${{ needs.checkEnv.outputs.DB_BACKUP_DEPLOYMENT_NAME }}
CLEAN_PVC: ${{ needs.checkEnv.outputs.CLEAN_PVC }}

# Build Images and deploy to JFrog
db:
name: 🔨 DB
@@ -162,39 +181,16 @@ jobs:
uses: ./.github/workflows/performance.yml
secrets: inherit

call-cleanup-workflow:
name: 🧹️ Clean-up
uses: ./.github/workflows/cleanup.yml
secrets: inherit
needs: [checkEnv]
if: needs.checkEnv.outputs.CLEAN_BUILDS == 'YES'
with:
APP_NAME: ${{ needs.checkEnv.outputs.APP_NAME }}
APP_HOST_URL: ${{ needs.checkEnv.outputs.APP_HOST_URL }}
DEPLOY_NAMESPACE: ${{ needs.checkEnv.outputs.DEPLOY_NAMESPACE }}
DB_NAME: ${{ needs.checkEnv.outputs.DB_NAME }}
WEB_NAME: ${{ needs.checkEnv.outputs.WEB_NAME }}
PHP_NAME: ${{ needs.checkEnv.outputs.PHP_NAME }}
CRON_NAME: ${{ needs.checkEnv.outputs.CRON_NAME }}
REDIS_NAME: ${{ needs.checkEnv.outputs.REDIS_NAME }}
DB_BACKUP_DEPLOYMENT_NAME: ${{ needs.checkEnv.outputs.DB_BACKUP_DEPLOYMENT_NAME }}
CLEAN_PVC: ${{ needs.checkEnv.outputs.CLEAN_PVC }}

call-deploy-workflow:
name: 🚀 Deploy
uses: ./.github/workflows/deploy.yml
secrets: inherit
needs: [checkEnv, db, php, cron, build, web, call-cleanup-workflow]
needs: [checkEnv, db, php, cron, build, web]
if: |
(always() && needs.checkEnv.outputs.SKIP_DEPLOY == 'NO') &&
needs.checkEnv.result == 'success' &&
!contains(needs.*.result, 'failure') &&
!contains(needs.*.result, 'cancelled') &&
(
(needs.checkEnv.outputs.CLEAN_BUILDS.CLEAN_BUILDS == 'YES' &&
needs.call-cleanup-workflow.result == 'success') ||
needs.checkEnv.outputs.CLEAN_BUILDS.CLEAN_BUILDS != 'YES'
)
!contains(needs.*.result, 'cancelled')
with:
CLEAN_PVC: ${{ needs.checkEnv.outputs.CLEAN_PVC }}
APP_NAME: ${{ needs.checkEnv.outputs.APP_NAME }}
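
As a quick way to eyeball the reshuffled job dependencies after a change like this, a one-liner such as the following prints each job's needs list from the workflow file. It is only an illustration and not part of the commit; it assumes the Go implementation of yq (v4) is on PATH and that every needs value in this workflow is an array, which holds for the jobs shown above.

# Hypothetical helper, not part of the commit: list "job -> needs" pairs from build.yml.
# Assumes mikefarah/yq v4 is installed and each `needs` value is an array.
yq '.jobs | to_entries | .[] | .key + " -> " + ((.value.needs // []) | join(", "))' .github/workflows/build.yml
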
109 changes: 82 additions & 27 deletions openshift/scripts/deploy-database-backups.sh
@@ -43,7 +43,7 @@ list_backups() {
}

# Function to restore the backup by filename
restore_backup() {
restore_backup_from_file() {
local FILENAME=$1
echo "Restoring backup from file: $FILENAME"

@@ -56,7 +56,65 @@ restore_backup() {
oc exec $(oc get pod -l app.kubernetes.io/name=backup-storage -o jsonpath='{.items[0].metadata.name}') -- bash -c "mysql -h $DB_HOST -u root performance < $FILENAME"
else
echo "Unsupported file type: $FILENAME"
exit 1
fi

echo "Backup restoration process completed."
}

# Function to list available backups
list_backups() {
echo "Listing available backups..."

# Connect to the backup pod and list available backups
BACKUP_LIST=$(oc exec $(oc get pod -l app.kubernetes.io/name=backup-storage -o jsonpath='{.items[0].metadata.name}') -- ./backup.sh -l)

# Parse the backup list into an array
IFS=$'\n' read -rd '' -a BACKUP_ARRAY <<< "$BACKUP_LIST"

# Filter and sort backups
FILTERED_SORTED_BACKUPS=$(for line in "${BACKUP_ARRAY[@]}"; do
# Extract size, date, and filename
SIZE=$(echo "$line" | awk '{print $1}')
DATE=$(echo "$line" | awk '{print $2 " " $3}')
FILENAME=$(echo "$line" | awk '{print $4}')
# Convert size to bytes for comparison
SIZE_IN_BYTES=$(echo "$SIZE" | awk '
/M$/ { printf "%.0f\n", $1 * 1024 * 1024 }
/K$/ { printf "%.0f\n", $1 * 1024 }
/G$/ { printf "%.0f\n", $1 * 1024 * 1024 * 1024 }
!/[KMG]$/ { print $1 }
')
# Only include entries with size > 1M
if [ "$SIZE_IN_BYTES" -gt $((1 * 1024 * 1024)) ]; then
echo "$SIZE $DATE $FILENAME"
fi
done | sort -k2,3r)

# Select the latest backup
LATEST_BACKUP=$(echo "$FILTERED_SORTED_BACKUPS" | head -n 1)

# Output the size, date, and filename for the selected entry
echo "Selected Backup:"
echo "$LATEST_BACKUP"

# Return the filename of the selected backup
echo "$LATEST_BACKUP" | awk '{print $3}'
}

restore_database_from_backup() {
echo "Attempting to restore the database from the latest backup..."

# List backups and get the filename of the latest backup
LATEST_BACKUP_FILENAME=$(list_backups)

# Check if the file exists and has a .gz or .sql extension
if [[ -f "$LATEST_BACKUP_FILENAME" ]]; then
# Restore the backup using the filename
restore_backup_from_file "$LATEST_BACKUP_FILENAME"
else
echo "Backup file: $LATEST_BACKUP_FILENAME does not exist. Skipping restore."
fi
}

@@ -159,7 +217,9 @@ done

echo "Database pod found and running: $DB_POD_NAME."

TOTAL_USER_COUNT=0
ATTEMPTS=0
OUTPUT=""
until [ $ATTEMPTS -eq $MAX_ATTEMPTS ]; do
ATTEMPTS=$(( $ATTEMPTS + 1 ))
echo "Waiting for database to come online... $(($ATTEMPTS * $WAIT_TIME)) seconds..."
@@ -170,32 +230,27 @@ until [ $ATTEMPTS -eq $MAX_ATTEMPTS ]; do
# Check if the output contains an error
if echo "$OUTPUT" | grep -qi "error"; then
echo "❌ Database error: $OUTPUT"
# exit 1
fi

# Extract the user count from the output
CURRENT_USER_COUNT=$(echo "$OUTPUT" | grep -oP '\d+')

# Check if CURRENT_USER_COUNT is set and greater than 0
if [ -n "$CURRENT_USER_COUNT" ] && [ "$CURRENT_USER_COUNT" -gt 0 ]; then
echo "Database is online and contains $CURRENT_USER_COUNT users."
echo "No further action required."
break
elif [ -n "$CURRENT_USER_COUNT" ] && [ "$CURRENT_USER_COUNT" -eq 0 ]; then
echo "Database is online but contains no users."

# Main script execution
echo "Starting backup restoration process..."
# List backups and get the filename of the latest backup
LATEST_BACKUP_FILENAME=$(list_backups)
# Restore the backup using the filename
restore_backup "$LATEST_BACKUP_FILENAME"
echo "Backup restoration process completed."

break
else
# Current user count is 0 or not set
# echo "Database appears to be offline. Attempt $ATTEMPTS of $MAX_ATTEMPTS."
sleep $WAIT_TIME
# Extract the user count from the output
CURRENT_USER_COUNT=$(echo "$OUTPUT" | grep -oP '\d+')

# Check if CURRENT_USER_COUNT is set and greater than 0
if [ -n "$CURRENT_USER_COUNT" ] && [ "$CURRENT_USER_COUNT" -gt 0 ]; then
echo "Database is online and contains $CURRENT_USER_COUNT users."
echo "No further action required."
TOTAL_USER_COUNT=$CURRENT_USER_COUNT
break
else
# Current user count is 0 or not set
echo "Database appears to be offline. Attempt $ATTEMPTS of $MAX_ATTEMPTS."
fi
fi
sleep $WAIT_TIME
done

if [ $TOTAL_USER_COUNT -eq 0 ]; then
echo "Database is offline or does not contain any users."
echo "Attempting to restore the database from the latest backup..."
restore_database_from_backup
fi
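
The size-filtering logic in the new list_backups() is the least obvious piece of this change, so here is a standalone sketch of just that selection step, runnable outside the cluster. The sample listing is hypothetical stand-in data; in the real script the lines come from ./backup.sh -l inside the backup-storage pod.

#!/usr/bin/env bash
# Sketch only: mirrors the size filter and "newest usable backup" selection above.
# SAMPLE_LIST is made-up data in the same "SIZE DATE TIME FILENAME" shape.
SAMPLE_LIST=$'12K 2024-10-01 02:00 /backups/daily/performance-small.sql.gz\n198M 2024-10-07 02:00 /backups/daily/performance-prev.sql.gz\n214M 2024-10-08 02:00 /backups/daily/performance-full.sql.gz'

IFS=$'\n' read -rd '' -a BACKUP_ARRAY <<< "$SAMPLE_LIST"

FILTERED_SORTED_BACKUPS=$(for line in "${BACKUP_ARRAY[@]}"; do
  SIZE=$(echo "$line" | awk '{print $1}')
  DATE=$(echo "$line" | awk '{print $2 " " $3}')
  FILENAME=$(echo "$line" | awk '{print $4}')
  # Convert the human-readable size (K/M/G suffix) to bytes so it can be compared numerically
  SIZE_IN_BYTES=$(echo "$SIZE" | awk '
    /M$/ { printf "%.0f\n", $1 * 1024 * 1024 }
    /K$/ { printf "%.0f\n", $1 * 1024 }
    /G$/ { printf "%.0f\n", $1 * 1024 * 1024 * 1024 }
    !/[KMG]$/ { print $1 }
  ')
  # Backups of 1M or less are treated as unusable (likely empty dumps) and skipped
  if [ "$SIZE_IN_BYTES" -gt $((1 * 1024 * 1024)) ]; then
    echo "$SIZE $DATE $FILENAME"
  fi
done | sort -k2,3r)

# The first line after the reverse date sort is the newest backup that passed the filter
echo "$FILTERED_SORTED_BACKUPS" | head -n 1
# Expected output with the sample data:
# 214M 2024-10-08 02:00 /backups/daily/performance-full.sql.gz
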
18 changes: 10 additions & 8 deletions openshift/scripts/migrate-build-files.sh
@@ -16,11 +16,16 @@ dest_date_latest=${dest_date_latest:-0}
src_date_readable=$(date -d @$src_date_latest +"%Y-%m-%d %H:%M:%S")
dest_date_readable=$(date -d @$dest_date_latest +"%Y-%m-%d %H:%M:%S")

# Use find with -not -name to exclude directories from the file count
initial_file_count=$(find ${dest_dir} -not -name '.*' | wc -l)
echo "Initial file count: $initial_file_count"

echo "Latest source file modification date: $src_date_readable"
echo "Latest destination file modification date: $dest_date_readable"

# If the source directory has been modified more recently than the destination directory, proceed with the migration
if [ $src_date_latest -gt $dest_date_latest ]; then
# Check if src_date_latest is greater than dest_date_latest
# OR initial_file_count is less than 100 (project was likely uninstalled)
if [ $src_date_latest -gt $dest_date_latest ] || [ $initial_file_count -lt 100 ]; then
echo "Source directory has been modified more recently than the destination directory."
echo "Proceeding with migration..."
else
@@ -36,9 +41,6 @@ sleep 10

# Delete all files - including hidden ones
echo "Deleting all files in ${dest_dir}..."
# Use find with -not -name to exclude directories from the file count
initial_count=$(find ${dest_dir} -not -name '.*' | wc -l)
echo "Initial file count: $initial_count"
# Delete all files, including hidden files and directories
find ${dest_dir} -mindepth 1 -delete

@@ -49,11 +51,11 @@ final_count=$(find ${dest_dir} -not -name '.*' | wc -l)
echo "Final file count: $final_count"

# Calculate the number of files deleted
deleted_count=$((initial_count - final_count))
echo "Deleted $deleted_count of $initial_count files."
deleted_count=$((initial_file_count - final_count))
echo "Deleted $deleted_count of $initial_file_count files."

# Count the number of files remaining in the destination directory
remaining_count=$((initial_count - deleted_count))
remaining_count=$((initial_file_count - deleted_count))

# Check if all files have been deleted
if [ $((remaining_count)) -eq 0 ]; then
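
For completeness, a self-contained sketch of the new migration gate: proceed when the source tree is newer than the destination, or when the destination holds fewer than 100 non-hidden entries (the "project was likely uninstalled" case). The directory paths and the GNU find -printf timestamp derivation are assumptions for illustration; only the decision itself mirrors the script above.

#!/usr/bin/env bash
# Sketch only: the two paths below are placeholders, and the timestamp derivation
# (GNU find -printf '%T@') is an assumption; the real script computes
# src_date_latest/dest_date_latest further up the file than this diff shows.
src_dir=${src_dir:-/tmp/example-src}
dest_dir=${dest_dir:-/tmp/example-dest}

src_date_latest=$(find "$src_dir" -type f -printf '%T@\n' 2>/dev/null | sort -n | tail -1 | cut -d. -f1)
dest_date_latest=$(find "$dest_dir" -type f -printf '%T@\n' 2>/dev/null | sort -n | tail -1 | cut -d. -f1)
src_date_latest=${src_date_latest:-0}
dest_date_latest=${dest_date_latest:-0}

# Count non-hidden entries in the destination, as the script above does
initial_file_count=$(find "$dest_dir" -not -name '.*' 2>/dev/null | wc -l)

if [ "$src_date_latest" -gt "$dest_date_latest" ] || [ "$initial_file_count" -lt 100 ]; then
  echo "Would migrate: source is newer, or destination only has $initial_file_count entries."
else
  echo "Would skip: destination is current and holds $initial_file_count entries."
fi
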
