#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# Payload CMS Database Backup
# Creates a timestamped pg_dump of the Payload Postgres database.
# Usage: npm run backup:db
#
# Environment (all optional, read from .env if present):
#   POSTGRES_DB    database name        (default: payload)
#   POSTGRES_USER  database user        (default: postgres)
#   PROJECT_NAME   compose project name (default: mb-grid-solutions-production)
#   DB_CONTAINER   explicit container   (default: ${PROJECT_NAME}-mb-grid-db-1)
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail

die() { printf '%s\n' "$*" >&2; exit 1; }

# Load environment variables: set -a exports everything .env defines so the
# defaults below pick them up.
if [ -f .env ]; then
  set -a
  # shellcheck disable=SC1091
  source .env
  set +a
fi

# Fallbacks for local development if not in .env
DB_NAME="${POSTGRES_DB:-payload}"
DB_USER="${POSTGRES_USER:-postgres}"

# For production we need the container name. Derive it from PROJECT_NAME
# (compose naming convention) unless DB_CONTAINER is set explicitly.
PROJECT_NAME="${PROJECT_NAME:-mb-grid-solutions-production}"
DB_CONTAINER="${DB_CONTAINER:-${PROJECT_NAME}-mb-grid-db-1}"

readonly BACKUP_DIR="./backups"
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
readonly BACKUP_FILE="${BACKUP_DIR}/payload_${TIMESTAMP}.sql.gz"

command -v docker >/dev/null || die "❌ docker is not installed or not in PATH."

# Ensure backup directory exists
mkdir -p "$BACKUP_DIR"

# If anything fails past this point (e.g. pg_dump dies mid-stream under
# pipefail), remove the partial archive so a truncated, corrupt .sql.gz is
# never mistaken for a valid backup.
trap 'rm -f -- "$BACKUP_FILE"' ERR

# Check if container is running. -F: fixed string (no regex surprises from
# the name), -x: whole-line match so "foo" does not match "foo-old".
if ! docker ps --format '{{.Names}}' | grep -Fxq -- "$DB_CONTAINER"; then
  echo "❌ Database container '$DB_CONTAINER' is not running."
  echo "   Check your docker-compose status."
  exit 1
fi

echo "📦 Backing up Payload database..."
echo "   Container: $DB_CONTAINER"
echo "   Database:  $DB_NAME"
echo "   Output:    $BACKUP_FILE"

# Run pg_dump inside the container and compress on the host.
# --clean --if-exists makes the dump safe to restore over an existing schema.
docker exec "$DB_CONTAINER" pg_dump -U "$DB_USER" -d "$DB_NAME" --clean --if-exists \
  | gzip > "$BACKUP_FILE"

# Show result
SIZE=$(du -h -- "$BACKUP_FILE" | cut -f1)
echo ""
echo "✅ Backup complete: $BACKUP_FILE ($SIZE)"
echo ""

# Show existing backups. Glob instead of parsing `ls` output; nullglob makes
# an empty backup dir expand to nothing rather than the literal pattern.
echo "📋 Available backups:"
shopt -s nullglob
for backup in "$BACKUP_DIR"/*.sql.gz; do
  printf '  %s (%s)\n' "$backup" "$(du -h -- "$backup" | cut -f1)"
done
shopt -u nullglob