All checks were successful
Basil CI/CD Pipeline / Shared Package Tests (push) Successful in 1m10s
Basil CI/CD Pipeline / Code Linting (push) Successful in 1m18s
Basil CI/CD Pipeline / Web Tests (push) Successful in 1m29s
Basil CI/CD Pipeline / Security Scanning (push) Successful in 1m14s
Basil CI/CD Pipeline / API Tests (push) Successful in 1m45s
Basil CI/CD Pipeline / Trigger Deployment (push) Successful in 12s
Basil CI/CD Pipeline / Build All Packages (push) Successful in 1m31s
Basil CI/CD Pipeline / E2E Tests (push) Has been skipped
Basil CI/CD Pipeline / Build & Push Docker Images (push) Successful in 14m27s
Added production-grade backup and restore scripts for PostgreSQL servers
that can backup all databases automatically with retention management.
New scripts:
- scripts/backup-all-postgres-databases.sh - Backs up all databases on a
PostgreSQL server with automatic retention, compression, verification,
and notification support
- scripts/restore-postgres-database.sh - Restores individual databases
with safety backups and verification
- scripts/README-POSTGRES-BACKUP.md - Complete documentation with examples,
best practices, and troubleshooting
Features:
- Automatic detection and backup of all user databases
- Excludes system databases (postgres, template0, template1)
- Backs up global objects (roles, tablespaces)
- Optional gzip compression (80-90% space savings)
- Automatic retention management (configurable days)
- Integrity verification (gzip -t for compressed files)
- Safety backups before restore operations
- Detailed logging with color-coded output
- Backup summary reporting
- Email/Slack notification support (optional)
- Interactive restore with confirmation prompts
- Force mode for automation
- Verbose debugging mode
- Comprehensive error handling
Backup directory structure:
/var/backups/postgresql/YYYYMMDD/
- globals_YYYYMMDD_HHMMSS.sql.gz
- database1_YYYYMMDD_HHMMSS.sql.gz
- database2_YYYYMMDD_HHMMSS.sql.gz
Usage examples:
# Backup all databases with compression
./backup-all-postgres-databases.sh -c
# Custom configuration
./backup-all-postgres-databases.sh -h db.server.com -U backup_user -d /mnt/backups -r 60 -c
# Restore database
./restore-postgres-database.sh /var/backups/postgresql/20260120/mydb_20260120_020001.sql.gz
# Force restore (skip confirmation)
./restore-postgres-database.sh backup.sql.gz -d mydb -f
Automation:
# Add to crontab for daily backups at 2 AM
0 2 * * * /path/to/backup-all-postgres-databases.sh -c >> /var/log/postgres-backup.log 2>&1
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
403 lines
12 KiB
Bash
Executable File
403 lines
12 KiB
Bash
Executable File
#!/bin/bash
#
# backup-all-postgres-databases.sh
#
# Dumps every user database on a PostgreSQL server with pg_dump into a
# dated directory, with optional gzip compression and retention cleanup.
#
# Usage:
#   ./backup-all-postgres-databases.sh [options]
#
# Options:
#   -h HOST          Database host (default: localhost)
#   -p PORT          Database port (default: 5432)
#   -U USER          Database user (default: postgres)
#   -d BACKUP_DIR    Backup directory (default: /var/backups/postgresql)
#   -r DAYS          Retention days (default: 30)
#   -c               Enable compression (gzip)
#   -v               Verbose output
#   -H               Show this help
#
# Cron example (daily at 2 AM):
#   0 2 * * * /path/to/backup-all-postgres-databases.sh -c >> /var/log/postgres-backup.log 2>&1

# Abort on any unhandled error, including failures inside pipelines.
set -e
set -o pipefail

# --- Default configuration (overridable via command-line flags) ---
DB_HOST="localhost"
DB_PORT="5432"
DB_USER="postgres"
BACKUP_DIR="/var/backups/postgresql"
RETENTION_DAYS=30
COMPRESS=false
VERBOSE=false

# --- ANSI color codes used by the logging helpers ---
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# --- Command-line parsing ---
# NOTE: -h is the host (matching psql's convention); help is -H.
while getopts "h:p:U:d:r:cvH" opt; do
    case $opt in
        h) DB_HOST="$OPTARG" ;;
        p) DB_PORT="$OPTARG" ;;
        U) DB_USER="$OPTARG" ;;
        d) BACKUP_DIR="$OPTARG" ;;
        r) RETENTION_DAYS="$OPTARG" ;;
        c) COMPRESS=true ;;
        v) VERBOSE=true ;;
        H)
            echo "PostgreSQL All Databases Backup Script"
            echo ""
            echo "Usage: $0 [options]"
            echo ""
            echo "Options:"
            echo " -h HOST Database host (default: localhost)"
            echo " -p PORT Database port (default: 5432)"
            echo " -U USER Database user (default: postgres)"
            echo " -d BACKUP_DIR Backup directory (default: /var/backups/postgresql)"
            echo " -r DAYS Retention days (default: 30)"
            echo " -c Enable compression (gzip)"
            echo " -v Verbose output"
            echo " -H Show this help"
            echo ""
            exit 0
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            exit 1
            ;;
    esac
done
|
|
|
|
# Logging helpers.
#
# log_info prints to stdout: its output is part of the summary text that
# main() captures and passes to send_notification, so it must not move.
# log_warn and log_debug print to stderr so that functions whose stdout is
# captured by a caller (create_backup_dirs, get_databases) are not polluted
# by log lines; otherwise a verbose run would treat "[DEBUG] ..." text as a
# directory path or database name. log_error already went to stderr.
log_info() {
    echo -e "${GREEN}[INFO]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1"
}

log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1" >&2
}

log_error() {
    echo -e "${RED}[ERROR]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1" >&2
}

log_debug() {
    # Only emitted when -v (VERBOSE) was requested.
    if [ "$VERBOSE" = true ]; then
        echo -e "${BLUE}[DEBUG]${NC} $(date '+%Y-%m-%d %H:%M:%S') - $1" >&2
    fi
}
|
|
|
|
# Verify that the required client tools exist before doing any work.
# Exits 1 (via log_error) if psql or pg_dump is missing, or if compression
# was requested but gzip is unavailable.
check_dependencies() {
    log_debug "Checking dependencies..."

    local tool
    for tool in psql pg_dump; do
        if ! command -v "$tool" &> /dev/null; then
            log_error "$tool not found. Please install PostgreSQL client tools."
            exit 1
        fi
    done

    # gzip is only required when -c was passed.
    if [ "$COMPRESS" = true ] && ! command -v gzip &> /dev/null; then
        log_error "gzip not found. Please install gzip or disable compression."
        exit 1
    fi

    log_debug "All dependencies satisfied"
}
|
|
|
|
# Fail fast if the PostgreSQL server is unreachable or credentials are bad.
# Connects to the maintenance database "postgres" and runs a trivial query;
# exits 1 with diagnostics on failure.
test_connection() {
    log_debug "Testing database connection..."

    if psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d postgres \
            -c "SELECT version();" &> /dev/null; then
        log_debug "Database connection successful"
        return 0
    fi

    log_error "Cannot connect to PostgreSQL server at $DB_HOST:$DB_PORT"
    log_error "Check credentials, network connectivity, and pg_hba.conf settings"
    exit 1
}
|
|
|
|
# Create today's dated backup subdirectory and print its path on stdout.
#
# The caller captures this function's stdout ($(create_backup_dirs)), so
# log output is explicitly redirected to stderr here — otherwise a verbose
# run would capture the "[DEBUG] ..." line as part of the directory path.
#
# Globals:   BACKUP_DIR (read)
# Outputs:   absolute path of the created directory on stdout
# Returns:   0 on success; exits 1 if the directory is not writable
create_backup_dirs() {
    local timestamp
    timestamp=$(date +%Y%m%d)
    local backup_subdir="$BACKUP_DIR/$timestamp"

    log_debug "Creating backup directory: $backup_subdir" >&2

    mkdir -p "$backup_subdir"

    if [ ! -w "$backup_subdir" ]; then
        log_error "Backup directory is not writable: $backup_subdir"
        exit 1
    fi

    # Only the path itself goes to stdout.
    echo "$backup_subdir"
}
|
|
|
|
# Print the names of all user databases, one per line, on stdout.
#
# System databases (postgres, template0, template1) and templates are
# excluded. The caller captures this function's stdout, so log output is
# redirected to stderr — otherwise the "[WARN] no databases" line would be
# captured and mistaken for a database name by main().
#
# Globals:   DB_HOST, DB_PORT, DB_USER (read)
# Outputs:   database names on stdout
# Returns:   0 if any database was found, 1 otherwise
get_databases() {
    log_debug "Retrieving database list..." >&2

    local databases
    databases=$(psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d postgres -t -c \
        "SELECT datname FROM pg_database
         WHERE datname NOT IN ('postgres', 'template0', 'template1')
         AND datistemplate = false
         ORDER BY datname;")

    if [ -z "$databases" ]; then
        log_warn "No user databases found to backup" >&2
        return 1
    fi

    echo "$databases"
}
|
|
|
|
#######################################
# Dump a single database to a timestamped .sql (or .sql.gz) file.
#
# Globals:   DB_HOST, DB_PORT, DB_USER, COMPRESS (read)
# Arguments: $1 - database name; $2 - destination directory
# Outputs:   progress via log_info / log_error
# Returns:   0 on a verified backup, 1 on failure (partial file removed)
#######################################
backup_database() {
    local db_name="$1"
    local backup_dir="$2"
    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_file="$backup_dir/${db_name}_${timestamp}.sql"
    local status="SUCCESS"

    log_info "Backing up database: $db_name"

    if [ "$COMPRESS" = true ]; then
        backup_file="${backup_file}.gz"
    fi

    local start_time
    start_time=$(date +%s)

    # pipefail (set at the top of the script) makes the compressed pipeline
    # fail if pg_dump itself fails, not only if gzip fails. The "|| status="
    # form keeps a dump failure from tripping set -e before cleanup runs.
    if [ "$COMPRESS" = true ]; then
        pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$db_name" \
            --no-owner --no-privileges --create --clean | gzip > "$backup_file" \
            || status="FAILED"
    else
        pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$db_name" \
            --no-owner --no-privileges --create --clean > "$backup_file" \
            || status="FAILED"
    fi

    local end_time
    end_time=$(date +%s)
    local duration=$((end_time - start_time))

    if [ "$status" != "SUCCESS" ]; then
        log_error "✗ $db_name backup failed"
        # Remove failed backup file so a partial dump is never mistaken
        # for a good one.
        rm -f "$backup_file"
        return 1
    fi

    # Verify backup file exists and has content.
    if [ ! -s "$backup_file" ]; then
        log_error "Backup file is empty: $backup_file"
        return 1
    fi

    # Verify compressed file integrity if compression is enabled.
    if [ "$COMPRESS" = true ] && ! gzip -t "$backup_file" 2>/dev/null; then
        log_error "Backup file is corrupted: $backup_file"
        return 1
    fi

    local file_size
    file_size=$(du -h "$backup_file" | cut -f1)
    log_info "✓ $db_name backup completed - Size: $file_size, Duration: ${duration}s"
    log_debug "  File: $backup_file"
    return 0
}
|
|
|
|
#######################################
# Dump cluster-wide objects (roles, tablespaces) via pg_dumpall --globals-only.
#
# Globals:   DB_HOST, DB_PORT, DB_USER, COMPRESS (read)
# Arguments: $1 - destination directory
# Returns:   0 on success, 1 on failure (partial file removed)
#######################################
backup_globals() {
    local backup_dir="$1"
    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_file="$backup_dir/globals_${timestamp}.sql"
    local status="SUCCESS"

    log_info "Backing up global objects (roles, tablespaces)..."

    # "|| status=" keeps a dump failure from tripping set -e; pipefail
    # (set at the top of the script) covers the pg_dumpall stage too.
    if [ "$COMPRESS" = true ]; then
        backup_file="${backup_file}.gz"
        pg_dumpall -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" --globals-only \
            | gzip > "$backup_file" || status="FAILED"
    else
        pg_dumpall -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" --globals-only \
            > "$backup_file" || status="FAILED"
    fi

    if [ "$status" != "SUCCESS" ]; then
        log_error "✗ Global objects backup failed"
        rm -f "$backup_file"
        return 1
    fi

    local file_size
    file_size=$(du -h "$backup_file" | cut -f1)
    log_info "✓ Global objects backup completed - Size: $file_size"
    return 0
}
|
|
|
|
# Delete dated backup directories older than RETENTION_DAYS.
#
# Only direct children of BACKUP_DIR whose names are exactly 8 characters
# (the YYYYMMDD pattern) are considered.
#
# Globals:   BACKUP_DIR, RETENTION_DAYS (read)
# Returns:   0
cleanup_old_backups() {
    log_info "Cleaning up backups older than $RETENTION_DAYS days..."

    local deleted_count=0
    local old_dir

    while IFS= read -r old_dir; do
        log_debug "Deleting old backup directory: $old_dir"
        rm -rf "$old_dir"
        # NB: ((deleted_count++)) returns status 1 on the first increment
        # (post-increment of 0 evaluates to 0), which aborts the whole
        # script under set -e. Plain arithmetic expansion is safe.
        deleted_count=$((deleted_count + 1))
    done < <(find "$BACKUP_DIR" -maxdepth 1 -type d -name "????????" -mtime "+$RETENTION_DAYS" 2>/dev/null)

    if [ "$deleted_count" -gt 0 ]; then
        log_info "Deleted $deleted_count old backup directories"
    else
        log_debug "No old backups to delete"
    fi
}
|
|
|
|
# Print a human-readable run summary through log_info.
#
# NOTE: this output goes to stdout (via log_info) because main() captures
# it with $(generate_summary ...) to build the notification text.
#
# Arguments: $1 - backup directory; $2 - total db count;
#            $3 - successful count; $4 - failed count
generate_summary() {
    local backup_dir="$1" total_dbs="$2" successful_dbs="$3" failed_dbs="$4"
    local total_size
    total_size=$(du -sh "$backup_dir" 2>/dev/null | cut -f1)

    local compression
    if [ "$COMPRESS" = true ]; then
        compression="Enabled"
    else
        compression="Disabled"
    fi

    local line
    echo ""
    for line in \
        "================================================" \
        "Backup Summary" \
        "================================================" \
        "Backup Directory: $backup_dir" \
        "Total Databases: $total_dbs" \
        "Successful: $successful_dbs" \
        "Failed: $failed_dbs" \
        "Total Size: $total_size" \
        "Retention: $RETENTION_DAYS days" \
        "Compression: $compression" \
        "================================================"
    do
        log_info "$line"
    done
    echo ""
}
|
|
|
|
# Notification hook — a deliberate no-op until one of the examples below
# is uncommented and configured.
#
# Arguments: $1 - status string (e.g. "SUCCESS"); $2 - summary text
send_notification() {
    local status="$1"
    local summary="$2"

    # Email example — uncomment and set the recipient address to enable:
    # if command -v mail &> /dev/null; then
    #     echo "$summary" | mail -s "PostgreSQL Backup $status - $(hostname)" your-email@example.com
    # fi

    # Slack example — export SLACK_WEBHOOK_URL and uncomment to enable:
    # if [ -n "$SLACK_WEBHOOK_URL" ]; then
    #     curl -X POST "$SLACK_WEBHOOK_URL" \
    #         -H 'Content-Type: application/json' \
    #         -d "{\"text\":\"PostgreSQL Backup $status\n$summary\"}"
    # fi
    :
}
|
|
|
|
#######################################
# Orchestrate the full backup run: checks, globals dump, per-database
# dumps, retention cleanup, summary, and notification.
#
# Globals:   DB_HOST, DB_PORT, DB_USER, BACKUP_DIR, COMPRESS,
#            RETENTION_DAYS (read)
# Returns:   exits 0 when every database backed up, 1 if any failed
#######################################
main() {
    local start_time
    start_time=$(date +%s)

    log_info "================================================"
    log_info "PostgreSQL All Databases Backup Script"
    log_info "================================================"
    log_info "Host: $DB_HOST:$DB_PORT"
    log_info "User: $DB_USER"
    log_info "Backup Directory: $BACKUP_DIR"
    log_info "Compression: $([ "$COMPRESS" = true ] && echo "Enabled" || echo "Disabled")"
    log_info "Retention: $RETENTION_DAYS days"
    log_info "================================================"
    echo ""

    # Both of these exit the script themselves on failure.
    check_dependencies
    test_connection

    local backup_subdir
    backup_subdir=$(create_backup_dirs)

    # get_databases returns 1 when there is nothing to back up; mask that
    # deliberately so the friendly "No databases" path below runs instead
    # of set -e aborting here.
    local databases
    databases=$(get_databases || true)

    if [ -z "$databases" ]; then
        log_warn "No databases to backup. Exiting."
        exit 0
    fi

    # Backup global objects (roles, tablespaces) first.
    backup_globals "$backup_subdir"

    local total_dbs=0
    local successful_dbs=0
    local failed_dbs=0
    local db

    while IFS= read -r db; do
        # Trim psql's padding whitespace around the database name.
        db=$(echo "$db" | xargs)

        if [ -n "$db" ]; then
            # NB: ((var++)) returns status 1 when var is 0 (post-increment
            # of 0 evaluates to 0) and would abort the whole script under
            # set -e on the very first database. Use assignments instead.
            total_dbs=$((total_dbs + 1))

            if backup_database "$db" "$backup_subdir"; then
                successful_dbs=$((successful_dbs + 1))
            else
                failed_dbs=$((failed_dbs + 1))
            fi
        fi
    done <<< "$databases"

    cleanup_old_backups

    local end_time
    end_time=$(date +%s)
    local total_duration=$((end_time - start_time))

    generate_summary "$backup_subdir" "$total_dbs" "$successful_dbs" "$failed_dbs"

    log_info "Total execution time: ${total_duration}s"

    # Exit status reflects whether any individual backup failed.
    if [ "$failed_dbs" -gt 0 ]; then
        send_notification "COMPLETED WITH ERRORS" "$(generate_summary "$backup_subdir" "$total_dbs" "$successful_dbs" "$failed_dbs")"
        exit 1
    else
        send_notification "SUCCESS" "$(generate_summary "$backup_subdir" "$total_dbs" "$successful_dbs" "$failed_dbs")"
        log_info "All backups completed successfully! ✓"
        exit 0
    fi
}
|
|
|
|
# Entry point. Arguments are forwarded for future use; option parsing
# itself already happened via getopts at the top of the script.
main "$@"