Skip to content

Commit

Permalink
Merge branch 'main' into dev
Browse files Browse the repository at this point in the history
  • Loading branch information
dweinholz committed Oct 7, 2024
2 parents cca9e91 + c45a91d commit aace9b9
Show file tree
Hide file tree
Showing 16 changed files with 179 additions and 13 deletions.
48 changes: 45 additions & 3 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,6 @@ on:
push:
branches:
- 'main'
# - 'staging'
# - 'dev'
# - 'hotfix/**'
jobs:
build:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -43,3 +40,48 @@ jobs:
file: Dockerfile
push: true
tags: quay.io/denbicloud/cron-backup:${{ steps.tag.outputs.TAG }}

special_builds:
  runs-on: ubuntu-latest
  needs: build
  strategy:
    fail-fast: false
    matrix:
      type: [ postgresql, mysql, mongodb ]
  steps:
    - name: Workflow run cleanup action
      uses: rokroskar/[email protected]
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    # Pin checkout to a released tag instead of the mutable 'master' ref.
    - uses: actions/checkout@v4
    - name: Extract branch name
      shell: bash
      # '::set-output' is deprecated and disabled on current runners;
      # write to $GITHUB_OUTPUT instead.
      run: echo "branch=${GITHUB_REF#refs/heads/}" >> "$GITHUB_OUTPUT"
      id: extract_branch
    - name: Set tag
      # Replace '/' with '-' so branch names like 'hotfix/foo' are valid
      # image tags. The original piped a '::set-output' string through sed,
      # which only printed it — on runners with deprecated workflow commands
      # disabled, the TAG output was never set at all.
      run: echo "TAG=$(sed 's/\//-/g' <<< '${{ steps.extract_branch.outputs.branch }}')" >> "$GITHUB_OUTPUT"
      id: tag
    - name: Get tag
      run: echo "The selected tag is ${{ steps.tag.outputs.TAG }}"
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@v3
    - name: Login to Quay.io
      uses: docker/login-action@v3
      with:
        registry: quay.io
        username: ${{ secrets.QUAY_USERNAME }}
        password: ${{ secrets.QUAY_TOKEN }}
    - name: Build and publish image to Quay
      uses: docker/build-push-action@v5
      env:
        BASE_TAG: ${{ steps.tag.outputs.TAG }}
      with:
        file: ${{ matrix.type }}/Dockerfile
        context: ${{ matrix.type }}
        push: true
        build-args: BASE_TAG
        tags: quay.io/denbicloud/cron-backup:${{ matrix.type }}-${{ steps.tag.outputs.TAG }}
8 changes: 6 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,21 +7,25 @@ RUN apk add --update --no-cache \
fdupes \
python3 \
py3-pip \
curl \
busybox-extras \
ssmtp

RUN pip3 install --upgrade pip
RUN pip3 install s3cmd
RUN pip3 install --upgrade pip --break-system-packages
RUN pip3 install s3cmd --break-system-packages

RUN touch /var/log/cron.log

COPY ./prepare-cron.sh /prepare-cron.sh
COPY ./backup/rotate_backup.sh /rotate_backup.sh
COPY ./backup/backup-cron /backup-cron
COPY ./backup/notify_uptime_kuma.sh /notify_uptime_kuma.sh

COPY ./s3/s3_backup.sh /s3_backup.sh
COPY ./s3/s3_backup-cron /s3_backup-cron
COPY ./s3/s3_backup_rotation.sh /s3_backup_rotation.sh
COPY ./s3/s3_backup_rotation-cron /s3_backup_rotation-cron
COPY ./s3/s3_notify_uptime_kuma.sh /s3_notify_uptime_kuma.sh

RUN chmod +x /prepare-cron.sh

Expand Down
5 changes: 5 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,12 @@ base image with a shell script to prepare and run cron jobs. To use this you nee
- S3_CONFIGS_PATH - Directory of the different site configs with variables see below (should be mounted)
- S3_BACKUP_ROTATION_ENABLED - must be set to true to activate backup rotation in S3
- S3_BACKUP_ROTATION_TIME_LIMIT - expiration time in days for backups - if rotation is enabled, uploads older than this limit will be removed (global - can be overwritten per site.cfg)
- S3_KUMA_STATUS_ENDPOINT - when provided, will be called after a successful backup

In addition, a cfg must be specified for each site to which the backups are to be pushed - with the following content [example](s3/configs/example.site.cfg):
6. An endpoint can be provided for status updates for [Uptime Kuma](https://github.com/louislam/uptime-kuma). The following env variables must be set:

- KUMA_STATUS_ENDPOINT -- when provided, will be called after a successful backup
~~~Bash
S3_HASHDIR=DIR #should be mounted - stores local checksum of pushed non-encrypted files (in S3 they are encrypted thus different checksum)
S3_OBJECT_STORAGE_EP=SITE_SPECIFIC_OBJECT_STORAGE EP (e.g openstack.cebitec.uni-bielefeld.de:8080)
Expand All @@ -52,10 +55,12 @@ Next use this image with your docker-compose.yml (here an example for a limesurv
- ${general_PERSISTENT_PATH}backup/limesurvey:/etc/backup
environment:
- LIMESURVEY_DB_PASSWORD
- KUMA_STATUS_ENDPOINT
- BACKUP_ROTATION_ENABLED=true
- BACKUP_ROTATION_MAX_SIZE=10
- BACKUP_ROTATION_CUT_SIZE=5
- BACKUP_ROTATION_SIZE_TYPE=GiB
- S3_KUMA_STATUS_ENDPOINT
- S3_BACKUP_ENABLED=true
- S3_PATH=limesurvey
- S3_CONFIGS_DIR=~/configs
Expand Down
18 changes: 18 additions & 0 deletions backup/notify_uptime_kuma.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
#!/bin/bash

# Notify an Uptime Kuma status endpoint that a backup completed.
# Reads KUMA_STATUS_ENDPOINT from the environment; when unset or empty
# the notification is skipped. Best-effort: never fails the caller.

# notify_kuma: perform the GET request and report the outcome on stdout.
notify_kuma() {
    local endpoint="${KUMA_STATUS_ENDPOINT:-}"

    if [ -z "$endpoint" ]; then
        echo "INFO: KUMA_STATUS_ENDPOINT is not set. Skipping."
        return 0
    fi

    # Capture curl's exit status immediately. The original tested
    # [ $? -eq 0 ] and then printed $? again in the else branch, which
    # by that point held the status of the test itself (always 1),
    # not curl's real exit code.
    local response rc=0
    response=$(curl -s -X GET "$endpoint") || rc=$?

    if [ "$rc" -eq 0 ]; then
        echo "Status endpoint responded successfully: $response"
    else
        echo "Error: Failed to push status from $endpoint. Status code: $rc"
    fi
    return 0
}

notify_kuma
Empty file modified backup/rotate_backup.sh
100644 → 100755
Empty file.
Empty file modified mongodb/install-packages.sh
100644 → 100755
Empty file.
14 changes: 14 additions & 0 deletions mongodb/mongodb-backup.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
log() {
echo "[$(date +"%Y-%m-%d %H:%M:%S")] - $1"
}
trap 'log "Error occurred, exiting script"; exit 1' ERR

NOW=$(date '+%y-%m-%d-%H%M')
FILE="/etc/backup/${MONGODB_DB}-${NOW}.dump.gz"
Expand All @@ -18,3 +19,16 @@ fi
# Strip any :port suffix from the host; the port is carried in the URI.
MONGODB_HOST=$(echo "$MONGODB_HOST" | cut -d: -f1)

# Dump the database as a gzip-compressed archive.
mongodump --archive="$FILE" --gzip --uri="$URI"

# Sanity check: the backup file must be non-empty and at least 10KB.
# (An unquoted $(stat ...) would be subject to word splitting — SC2046.)
MIN_SIZE=$((1024 * 10)) # 10KB minimum size
if [ ! -s "$FILE" ] || [ "$(stat -c%s "$FILE")" -lt "$MIN_SIZE" ]; then
    log "Backup file $FILE is too small (${MIN_SIZE}B required), aborting script"
    exit 1
fi

# Send a notification using the notify_uptime_kuma.sh script
# (installed at / by the Dockerfile); failure to notify is non-fatal.
if ! /notify_uptime_kuma.sh; then
    log "Failed to send notification"
fi
log "Backup completed successfully"
Empty file modified mysql/install-packages.sh
100644 → 100755
Empty file.
14 changes: 14 additions & 0 deletions mysql/mysql-backup.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,23 @@
log() {
    echo "[$(date +"%Y-%m-%d %H:%M:%S")] - $1"
}
trap 'log "Error occurred, exiting script"; exit 1' ERR
# Without pipefail the pipeline's status is gzip's (almost always 0),
# so a failed mysqldump would slip past the ERR trap unnoticed.
set -o pipefail

NOW=$(date '+%y-%m-%d-%H%M')
FILE="/etc/backup/${MYSQL_HOST}-${NOW}.sql.gz"
log "Create Backup $FILE"

# Dump all databases and compress on the fly.
mysqldump -h "${MYSQL_HOST}" -u "${MYSQL_USER}" --password="${MYSQL_PASSWORD}" --all-databases | gzip > "$FILE"

# Check if the backup file is not empty and has a reasonable size
MIN_SIZE=$((1024 * 10)) # 10KB minimum size
if [ ! -s "$FILE" ] || [ "$(stat -c%s "$FILE")" -lt "$MIN_SIZE" ]; then
    log "Backup file $FILE is too small (${MIN_SIZE}B required), aborting script"
    exit 1
fi

# Send a notification using the notify_uptime_kuma.sh script.
# Use the absolute path: the Dockerfile installs the script at
# /notify_uptime_kuma.sh, and a relative ./ path depends on cron's
# working directory (the sibling backup scripts call it absolutely too).
if ! /notify_uptime_kuma.sh; then
    log "Failed to send notification"
fi
log "Backup completed successfully"
Empty file modified postgresql/install-packages.sh
100644 → 100755
Empty file.
58 changes: 51 additions & 7 deletions postgresql/postgresql-backup.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,18 +1,62 @@
#!/bin/sh

# Dump a PostgreSQL database to a compressed file under /etc/backup,
# verify the result has a plausible size, and ping Uptime Kuma.
# Required env: POSTGRES_HOST, POSTGRES_DB, POSTGRES_USER, POSTGRES_PASSWORD.
# Optional env: POSTGRES_PORT (defaults to 5432).

# Define a logging function
log() {
    echo "[$(date +"%Y-%m-%d %H:%M:%S")] - $1"
}

# Set up an error trap to exit the script on any error.
# NOTE(review): 'trap ... ERR' is a bash/ash extension, not POSIX sh —
# confirm the image's /bin/sh supports it.
trap 'log "Error occurred, exiting script"; exit 1' ERR

# Create the pgpass file if it doesn't exist.
# (The original tested [ ! -f "~/.pgpass" ]; a quoted ~ is never
# tilde-expanded, so that check compared against a literal "~/.pgpass"
# path and always succeeded.)
if [ ! -f "$HOME/.pgpass" ]; then
    touch "$HOME/.pgpass"
    log "Created ~/.pgpass file"
fi

# Set the PostgreSQL connection details as environment variables
POSTGRES_HOST=${POSTGRES_HOST:-}
POSTGRES_PORT=${POSTGRES_PORT:-5432} # default to 5432 if not set
POSTGRES_DB=${POSTGRES_DB:-}
POSTGRES_USER=${POSTGRES_USER:-}
POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-}

# Check that all required environment variables are set
if [ -z "$POSTGRES_HOST" ] || [ -z "$POSTGRES_DB" ] || [ -z "$POSTGRES_USER" ] || [ -z "$POSTGRES_PASSWORD" ]; then
    log "Error: Missing PostgreSQL connection details, exiting script"
    exit 1
fi

# Point libpq at the credentials file. Using $HOME keeps this consistent
# with the file written below (the original hard-coded /root/.pgpass,
# which only matches ~/.pgpass when the script runs as root).
PGPASSFILE="$HOME/.pgpass"
export PGPASSFILE

# Write the PostgreSQL connection details to the pgpass file
echo "${POSTGRES_HOST}:${POSTGRES_PORT}:${POSTGRES_DB}:${POSTGRES_USER}:${POSTGRES_PASSWORD}" > "$PGPASSFILE"

# Restrict permissions — libpq ignores a group/world-readable pgpass file
chmod 600 "$PGPASSFILE"

# Create a timestamp for the backup file name
NOW=$(date '+%y-%m-%d-%H%M')

# Define the backup file path and name
FILE="/etc/backup/${POSTGRES_DB}-${NOW}.dump.gz"

log "Creating Backup $FILE"

# Perform the PostgreSQL database dump (-Z 9: maximum compression)
pg_dump -h "${POSTGRES_HOST}" -U "${POSTGRES_USER}" "${POSTGRES_DB}" -Z 9 > "$FILE"

# Check if the backup file is not empty and has a reasonable size
MIN_SIZE=$((1024 * 10)) # 10KB minimum size
if [ ! -s "$FILE" ] || [ "$(stat -c%s "$FILE")" -lt "$MIN_SIZE" ]; then
    log "Backup file $FILE is too small (${MIN_SIZE}B required), aborting script"
    exit 1
fi

# Send a notification using the notify_uptime_kuma.sh script;
# failure to notify is non-fatal.
if ! /notify_uptime_kuma.sh; then
    log "Failed to send notification"
fi

log "Backup completed successfully"
Empty file modified postgresql/postgresql-cron
100644 → 100755
Empty file.
Empty file modified prepare-cron.sh
100644 → 100755
Empty file.
8 changes: 7 additions & 1 deletion s3/s3_backup.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
log() {
echo "[$(date +"%Y-%m-%d %H:%M:%S")] - $1"
}

trap 'log "Error occurred, exiting script"; exit 1' ERR
log "Starting backup script"

basedir="/etc/backup"
Expand Down Expand Up @@ -89,3 +89,9 @@ find "$S3_CONFIGS_PATH" -type f -name "*.cfg" | while read -r env_data; do
log "Removing temp config and password files"
rm -f "$tmp_conf"
done


# Send a notification using the s3_notify_uptime_kuma.sh script
if ! /s3_notify_uptime_kuma.sh; then
log "Failed to send notification"
fi
Empty file modified s3/s3_backup_rotation.sh
100644 → 100755
Empty file.
19 changes: 19 additions & 0 deletions s3/s3_notify_uptime_kuma.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#!/bin/bash

# Notify an Uptime Kuma status endpoint after an S3 backup run.
# Reads S3_KUMA_STATUS_ENDPOINT from the environment; when unset or
# empty the notification is skipped. Best-effort: never fails the caller.

# s3_notify_kuma: perform the POST request and report the outcome on stdout.
s3_notify_kuma() {
    local endpoint="${S3_KUMA_STATUS_ENDPOINT:-}"

    if [ -z "$endpoint" ]; then
        echo "INFO: S3_KUMA_STATUS_ENDPOINT is not set. Skipping."
        return 0
    fi

    # Keep the curl call on one line. The original ended the line with a
    # doubled backslash ("\\"), which is an escaped literal backslash —
    # NOT a line continuation — so the URL on the next line was executed
    # as a separate command.
    # Also capture curl's exit status immediately; the original re-read
    # $? after the [ $? -eq 0 ] test, which had already clobbered it.
    local response rc=0
    response=$(curl -s -X POST "$endpoint") || rc=$?

    if [ "$rc" -eq 0 ]; then
        echo "Status endpoint responded successfully: $response"
    else
        echo "Error: Failed to send status update to $endpoint. Status code: $rc"
    fi
    return 0
}

s3_notify_kuma

0 comments on commit aace9b9

Please sign in to comment.