- Add comprehensive health check system with multiple endpoints
- Add Prometheus metrics endpoint
- Add production logging configurations (5 strategies)
- Add complete deployment documentation suite:
  * QUICKSTART.md - 30-minute deployment guide
  * DEPLOYMENT_CHECKLIST.md - Printable verification checklist
  * DEPLOYMENT_WORKFLOW.md - Complete deployment lifecycle
  * PRODUCTION_DEPLOYMENT.md - Comprehensive technical reference
  * production-logging.md - Logging configuration guide
  * ANSIBLE_DEPLOYMENT.md - Infrastructure as Code automation
  * README.md - Navigation hub
  * DEPLOYMENT_SUMMARY.md - Executive summary
- Add deployment scripts and automation
- Add DEPLOYMENT_PLAN.md - Concrete plan for immediate deployment
- Update README with production-ready features

All production infrastructure is now complete and ready for deployment.
359 lines | 8.7 KiB | Bash | Executable File
#!/bin/bash
#
# Production Backup Script
# Automated backup for database, Vault, and critical files
#
# Usage:
#   ./scripts/backup.sh [--full|--database-only|--vault-only] [--encrypt]
#
# Options:
#   --full            Full backup (database + vault + files)
#   --database-only   Database backup only
#   --vault-only      Vault backup only
#   --encrypt         Encrypt backup files with GPG
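#
# Example invocations (illustrative, derived from the usage line above):
#   ./scripts/backup.sh --full --encrypt      # everything, GPG-encrypted
#   ./scripts/backup.sh --database-only       # PostgreSQL dump only
#
# Note: --encrypt uses gpg --symmetric, which prompts interactively for a
# passphrase, so unattended (cron) runs should either omit --encrypt or
# provide a GPG batch/loopback passphrase setup.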
set -euo pipefail

PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
BACKUP_DIR="${PROJECT_ROOT}/../backups"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_PATH="${BACKUP_DIR}/${TIMESTAMP}"

# Default options
BACKUP_TYPE="full"
ENCRYPT=false

# Parse arguments
for arg in "$@"; do
    case $arg in
        --full)
            BACKUP_TYPE="full"
            ;;
        --database-only)
            BACKUP_TYPE="database"
            ;;
        --vault-only)
            BACKUP_TYPE="vault"
            ;;
        --encrypt)
            ENCRYPT=true
            ;;
    esac
done
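
# Note: unrecognized options are silently ignored by the loop above, so a
# mistyped flag falls back to the default full, unencrypted backup.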

# Colors
GREEN="\e[32m"
YELLOW="\e[33m"
RED="\e[31m"
BLUE="\e[34m"
RESET="\e[0m"

# Logging functions
log() {
    echo -e "${BLUE}[$(date +'%H:%M:%S')]${RESET} $1"
}

success() {
    echo -e "${GREEN}✅ $1${RESET}"
}

warning() {
    echo -e "${YELLOW}⚠️ $1${RESET}"
}

error() {
    echo -e "${RED}❌ $1${RESET}"
    exit 1
}

# Create backup directory
prepare_backup_dir() {
    log "Preparing backup directory..."

    mkdir -p "$BACKUP_PATH"
    success "Backup directory created: $BACKUP_PATH"
}

# Backup database
backup_database() {
    log "Backing up database..."

    cd "$PROJECT_ROOT"

    # Check if database is running
    if ! docker compose ps db | grep -q "Up"; then
        error "Database container is not running"
    fi

    # Dump database
    local db_backup="${BACKUP_PATH}/database.sql"

    if docker compose exec -T db pg_dump -U postgres michaelschiemer_prod > "$db_backup"; then
        # Compress
        gzip -f "$db_backup"
        local size=$(du -h "${db_backup}.gz" | cut -f1)

        success "Database backup created: database.sql.gz ($size)"

        # Encrypt if requested
        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "${db_backup}.gz"
        fi
    else
        error "Database backup failed"
    fi
}

# Backup Vault secrets
backup_vault() {
    log "Backing up Vault secrets..."

    cd "$PROJECT_ROOT"

    local vault_backup="${BACKUP_PATH}/vault_secrets.sql"

    # Export Vault tables
    if docker compose exec -T db pg_dump -U postgres michaelschiemer_prod \
        -t vault_secrets -t vault_audit > "$vault_backup"; then

        # Compress
        gzip -f "$vault_backup"
        local size=$(du -h "${vault_backup}.gz" | cut -f1)

        success "Vault backup created: vault_secrets.sql.gz ($size)"

        # Encrypt (recommended for Vault backups)
        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "${vault_backup}.gz"
        else
            warning "Vault backup is not encrypted - consider using --encrypt"
        fi
    else
        error "Vault backup failed"
    fi
}

# Backup environment configuration
backup_environment() {
    log "Backing up environment configuration..."

    if [[ -f "$PROJECT_ROOT/.env.production" ]]; then
        cp "$PROJECT_ROOT/.env.production" "${BACKUP_PATH}/env.production"
        success "Environment configuration backed up"

        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "${BACKUP_PATH}/env.production"
        fi
    else
        warning ".env.production not found"
    fi
}

# Backup storage directory
backup_storage() {
    log "Backing up storage directory..."

    if [[ -d "$PROJECT_ROOT/storage" ]]; then
        local storage_backup="${BACKUP_PATH}/storage.tar.gz"

        tar -czf "$storage_backup" -C "$PROJECT_ROOT" storage

        local size=$(du -h "$storage_backup" | cut -f1)
        success "Storage backup created: storage.tar.gz ($size)"

        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "$storage_backup"
        fi
    else
        warning "Storage directory not found"
    fi
}

# Backup uploaded files
backup_uploads() {
    log "Backing up uploaded files..."

    if [[ -d "$PROJECT_ROOT/public/uploads" ]]; then
        local uploads_backup="${BACKUP_PATH}/uploads.tar.gz"

        tar -czf "$uploads_backup" -C "$PROJECT_ROOT/public" uploads

        local size=$(du -h "$uploads_backup" | cut -f1)
        success "Uploads backup created: uploads.tar.gz ($size)"

        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "$uploads_backup"
        fi
    else
        log "No uploads directory found (skipping)"
    fi
}

# Encrypt file with GPG
encrypt_file() {
    local file="$1"

    log "Encrypting $file..."

    # Check if GPG is available
    if ! command -v gpg &> /dev/null; then
        warning "GPG not installed - skipping encryption"
        return 0  # return success so set -e does not abort the whole backup
    fi

    # Encrypt with symmetric encryption (password-based)
    if gpg --symmetric --cipher-algo AES256 "$file"; then
        rm -f "$file"  # Remove unencrypted file
        success "File encrypted: ${file}.gpg"
    else
        error "Encryption failed"
    fi
}
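
# Restoring an encrypted backup (illustrative; gpg prompts for the passphrase
# chosen at backup time):
#   gpg --decrypt database.sql.gz.gpg > database.sql.gz
#   gunzip -c database.sql.gz | docker compose exec -T db psql -U postgres michaelschiemer_prod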

# Create backup manifest
create_manifest() {
    log "Creating backup manifest..."

    local manifest="${BACKUP_PATH}/MANIFEST.txt"

    {
        echo "Backup Manifest"
        echo "==============="
        echo ""
        echo "Timestamp: $(date -Iseconds)"
        echo "Backup Type: $BACKUP_TYPE"
        echo "Encrypted: $ENCRYPT"
        echo ""
        echo "Contents:"
        echo ""
        find "$BACKUP_PATH" -type f -exec du -h {} \; | sort -rh
        echo ""
        echo "Total Size: $(du -sh "$BACKUP_PATH" | cut -f1)"
    } > "$manifest"

    success "Backup manifest created"
}

# Cleanup old backups
cleanup_old_backups() {
    log "Cleaning up old backups..."

    # Keep last 7 days of backups
    find "$BACKUP_DIR" -maxdepth 1 -type d -name "20*" -mtime +7 -exec rm -rf {} \;

    success "Old backups cleaned up (kept last 7 days)"
}

# Verify backup integrity
verify_backup() {
    log "Verifying backup integrity..."

    local all_valid=true

    # Verify gzip files
    for file in "$BACKUP_PATH"/*.gz; do
        if [[ -f "$file" ]]; then
            if gzip -t "$file" 2>/dev/null; then
                log "✓ $file is valid"
            else
                # warn instead of exiting so every file is checked before failing
                warning "✗ $file is corrupted"
                all_valid=false
            fi
        fi
    done

    # Verify tar.gz files
    for file in "$BACKUP_PATH"/*.tar.gz; do
        if [[ -f "$file" ]]; then
            if tar -tzf "$file" &>/dev/null; then
                log "✓ $file is valid"
            else
                warning "✗ $file is corrupted"
                all_valid=false
            fi
        fi
    done

    if [[ "$all_valid" == "true" ]]; then
        success "All backup files verified successfully"
    else
        error "Some backup files are corrupted"
    fi
}

# Display backup summary
display_summary() {
    echo ""
    echo -e "${GREEN}========================================${RESET}"
    echo -e "${GREEN}  Backup Summary${RESET}"
    echo -e "${GREEN}========================================${RESET}"
    echo ""
    echo "📋 Backup Type: $BACKUP_TYPE"
    echo "⏰ Timestamp: $(date)"
    echo "📁 Location: $BACKUP_PATH"
    echo "🔒 Encrypted: $ENCRYPT"
    echo ""
    echo "📦 Backup Contents:"
    echo ""
    find "$BACKUP_PATH" -type f -exec du -h {} \; | sort -rh | head -10
    echo ""
    echo "💾 Total Size: $(du -sh "$BACKUP_PATH" | cut -f1)"
    echo ""
    echo "📝 Restoration Commands:"
    echo ""

    if [[ -f "${BACKUP_PATH}/database.sql.gz" ]]; then
        echo "  Database:"
        echo "    gunzip -c database.sql.gz | docker compose exec -T db psql -U postgres michaelschiemer_prod"
        echo ""
    fi

    if [[ -f "${BACKUP_PATH}/vault_secrets.sql.gz" ]]; then
        echo "  Vault:"
        echo "    gunzip -c vault_secrets.sql.gz | docker compose exec -T db psql -U postgres michaelschiemer_prod"
        echo ""
    fi

    if [[ -f "${BACKUP_PATH}/storage.tar.gz" ]]; then
        echo "  Storage:"
        echo "    tar -xzf storage.tar.gz -C /path/to/project"
        echo ""
    fi

    echo -e "${GREEN}========================================${RESET}"
}

# Main backup execution
main() {
    log "🔐 Starting production backup (type: $BACKUP_TYPE)..."
    echo ""

    prepare_backup_dir

    case "$BACKUP_TYPE" in
        full)
            backup_database
            backup_vault
            backup_environment
            backup_storage
            backup_uploads
            ;;
        database)
            backup_database
            ;;
        vault)
            backup_vault
            ;;
    esac

    create_manifest
    verify_backup
    cleanup_old_backups
    display_summary

    success "🎉 Backup completed successfully!"
}

# Run main
main "$@"