postgres-backup/backup.sh
#!/bin/bash
set -euo pipefail
# Configuration from environment variables
POSTGRES_HOST="${POSTGRES_HOST:-localhost}"
POSTGRES_PORT="${POSTGRES_PORT:-5432}"
POSTGRES_USER="${POSTGRES_USER:-postgres}"
POSTGRES_PASSWORD="${POSTGRES_PASSWORD:-}"
POSTGRES_DB="${POSTGRES_DB:-postgres}"
POSTGRES_DATABASES="${POSTGRES_DATABASES:-}" # Comma-separated list of databases, if empty backs up all
S3_BUCKET="${S3_BUCKET:-}"                       # Required; validated in main()
S3_PREFIX="${S3_PREFIX:-postgres-backups}"
S3_ENDPOINT="${S3_ENDPOINT:-}"                   # Required for third-party S3 services
S3_ACCESS_KEY_ID="${S3_ACCESS_KEY_ID:-}"         # Required; validated in main()
S3_SECRET_ACCESS_KEY="${S3_SECRET_ACCESS_KEY:-}" # Required; validated in main()
S3_REGION="${S3_REGION:-auto}"                   # Default to 'auto' for S3-compatible services
BACKUP_RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-7}"
HEALTHCHECKS_URL="${HEALTHCHECKS_URL:-}"         # Optional healthchecks.io ping URL
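# Example invocation (illustrative values only):
#   POSTGRES_HOST=db POSTGRES_PASSWORD=secret \
#   S3_BUCKET=my-backups S3_ENDPOINT=https://s3.example.com \
#   S3_ACCESS_KEY_ID=AKIA... S3_SECRET_ACCESS_KEY=... \
#   ./backup.sh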
# Generate timestamp
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
# Function to log messages
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}
# Function to setup rclone configuration
setup_rclone() {
    log "Setting up rclone configuration for S3-compatible storage"
    # Create rclone config directory
    mkdir -p ~/.config/rclone
    # Create rclone configuration (heredoc body must stay unindented)
    cat > ~/.config/rclone/rclone.conf << EOF
[s3remote]
type = s3
provider = Other
access_key_id = ${S3_ACCESS_KEY_ID}
secret_access_key = ${S3_SECRET_ACCESS_KEY}
endpoint = ${S3_ENDPOINT}
region = ${S3_REGION}
force_path_style = true
acl = private
EOF
}
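# Note: rclone can also pick up an equivalent remote definition from
# environment variables (e.g. RCLONE_CONFIG_S3REMOTE_TYPE=s3), which would
# avoid writing credentials to disk; the config-file approach above is kept
# here for simplicity.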
# Function to get list of databases
get_databases() {
    if [[ -n "${POSTGRES_DATABASES}" ]]; then
        echo "${POSTGRES_DATABASES}" | tr ',' '\n'
    else
        PGPASSWORD="${POSTGRES_PASSWORD}" psql \
            -h "${POSTGRES_HOST}" -p "${POSTGRES_PORT}" -U "${POSTGRES_USER}" -d postgres \
            -t -c "SELECT datname FROM pg_database WHERE datistemplate = false;" \
            | grep -v '^$' | xargs
    fi
}
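# For example, POSTGRES_DATABASES="app,analytics" backs up only those two
# databases; leaving it empty backs up every non-template database on the server.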
# Function to backup a single database
backup_database() {
    local db_name="$1"
    local backup_file="/backups/${db_name}_${TIMESTAMP}.sql.gz"
    log "Starting backup of database: ${db_name}"
    log "Creating compressed SQL dump"
    # Ensure the local staging directory exists
    mkdir -p "$(dirname "${backup_file}")"
    # Create database dump with gzip compression; check the pipeline status
    # explicitly, because errexit is suspended while this function runs as an
    # `if` condition in main(), and a failed dump must not be uploaded
    if ! PGPASSWORD="${POSTGRES_PASSWORD}" pg_dump \
        -h "${POSTGRES_HOST}" \
        -p "${POSTGRES_PORT}" \
        -U "${POSTGRES_USER}" \
        -d "${db_name}" \
        --no-password \
        --format=plain \
        --clean \
        --if-exists \
        --no-privileges \
        --no-owner \
        | gzip --rsyncable > "${backup_file}"; then
        log "ERROR: pg_dump failed for ${db_name}"
        rm -f "${backup_file}"
        return 1
    fi
    # Upload to S3 with flat structure
    local s3_key="${S3_PREFIX}/$(basename "${backup_file}")"
    log "Uploading backup to S3: s3://${S3_BUCKET}/${s3_key}"
    rclone copy "${backup_file}" "s3remote:${S3_BUCKET}/${S3_PREFIX}/" --progress
    # Verify upload: lsf prints the object name only if it actually exists
    if [[ -z "$(rclone lsf "s3remote:${S3_BUCKET}/${s3_key}" 2>/dev/null)" ]]; then
        log "ERROR: Failed to verify backup upload"
        return 1
    fi
    log "Successfully uploaded backup for ${db_name}"
    # Clean up local file
    rm -f "${backup_file}"
}
# Function to cleanup old backups
cleanup_old_backups() {
    local db_name="$1"
    local cutoff_date
    # The "N days ago" syntax requires GNU date
    cutoff_date=$(date -d "${BACKUP_RETENTION_DAYS} days ago" +%Y%m%d_%H%M%S)
    log "Cleaning up backups older than ${BACKUP_RETENTION_DAYS} days for database: ${db_name}"
    # List and delete old backups using rclone with flat structure
    rclone lsf "s3remote:${S3_BUCKET}/${S3_PREFIX}/" --include "${db_name}_*.sql.gz" | while read -r backup_file; do
        # Extract timestamp from filename
        backup_date=$(echo "$backup_file" | grep -o '[0-9]\{8\}_[0-9]\{6\}' || true)
        # Fixed-width YYYYMMDD_HHMMSS timestamps sort chronologically, so a
        # plain string comparison is sufficient here
        if [[ -n "$backup_date" && "$backup_date" < "$cutoff_date" ]]; then
            log "Deleting old backup file: ${backup_file}"
            rclone delete "s3remote:${S3_BUCKET}/${S3_PREFIX}/${backup_file}" || log "Failed to delete ${backup_file}"
        fi
    done
}
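# Worked example (illustrative): with BACKUP_RETENTION_DAYS=7 on 2025-07-30,
# the cutoff is 20250723_HHMMSS, so a file named mydb_20250720_153000.sql.gz
# sorts below the cutoff and is deleted, while mydb_20250725_020000.sql.gz is kept.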
# Function to send notification to healthchecks.io
send_notification() {
    local status="$1"
    local message="$2"
    if [[ -n "${HEALTHCHECKS_URL:-}" ]]; then
        case "$status" in
            "start")
                # Ping start endpoint
                curl -fsS -m 10 --retry 5 "${HEALTHCHECKS_URL}/start" > /dev/null || log "Failed to send start notification"
                ;;
            "success")
                # Ping success endpoint (default)
                curl -fsS -m 10 --retry 5 "${HEALTHCHECKS_URL}" > /dev/null || log "Failed to send success notification"
                ;;
            "error"|"fail")
                # Ping fail endpoint with log data
                curl -fsS -m 10 --retry 5 --data-raw "$message" "${HEALTHCHECKS_URL}/fail" > /dev/null || log "Failed to send failure notification"
                ;;
        esac
    fi
}
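# HEALTHCHECKS_URL is expected to be a healthchecks.io ping URL, e.g.
# https://hc-ping.com/<your-check-uuid>; the bare URL signals success, while
# the /start and /fail suffixes are that service's standard start and failure
# endpoints.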
# Main execution
main() {
    log "Starting PostgreSQL backup process"
    # Send start notification
    send_notification "start" "PostgreSQL backup process started"
    # Validate required environment variables
    if [[ -z "${S3_BUCKET}" || -z "${S3_ACCESS_KEY_ID}" || -z "${S3_SECRET_ACCESS_KEY}" || -z "${S3_ENDPOINT}" ]]; then
        log "ERROR: Missing required environment variables (S3_BUCKET, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY, S3_ENDPOINT)"
        send_notification "fail" "Missing required environment variables"
        exit 1
    fi
    # Setup rclone
    setup_rclone
    # Test database connection
    log "Testing database connection"
    if ! PGPASSWORD="${POSTGRES_PASSWORD}" psql -h "${POSTGRES_HOST}" -p "${POSTGRES_PORT}" -U "${POSTGRES_USER}" -d postgres -c "SELECT 1" > /dev/null 2>&1; then
        log "ERROR: Cannot connect to PostgreSQL database"
        send_notification "fail" "Cannot connect to PostgreSQL database"
        exit 1
    fi
    # Get list of databases to backup
    databases=$(get_databases)
    log "Databases to backup: ${databases}"
    # Backup each database
    backup_success=true
    for db in ${databases}; do
        if backup_database "${db}"; then
            cleanup_old_backups "${db}"
        else
            log "ERROR: Failed to backup database: ${db}"
            backup_success=false
        fi
    done
    if [[ "${backup_success}" == "true" ]]; then
        log "All database backups completed successfully"
        send_notification "success" "All PostgreSQL database backups completed successfully"
    else
        log "Some database backups failed"
        send_notification "fail" "Some PostgreSQL database backups failed"
        exit 1
    fi
}
# Execute main function
main "$@"