#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# CMS Data Sync Tool
# Safely syncs Payload CMS data (DB + media) between environments.
#
# Usage:
#   cms:push:testing  – Push local → testing
#   cms:push:staging  – Push local → staging
#   cms:push:prod     – Push local → production
#   cms:pull:testing  – Pull testing → local
#   cms:pull:staging  – Pull staging → local
#   cms:pull:prod     – Pull production → local
# ────────────────────────────────────────────────────────────────────────────

set -euo pipefail

# Mutable state read by the EXIT trap: set to "true" only after a sync fully
# completes; the backup paths are filled in once a safety backup exists.
SYNC_SUCCESS="false"
LOCAL_BACKUP_FILE=""
REMOTE_BACKUP_FILE=""

#######################################
# EXIT-trap handler. If the script is exiting non-zero before the sync
# finished, restore whichever database was being overwritten from the safety
# backup taken earlier in this run. Rollback is best-effort (|| echo) so a
# failed rollback does not mask the original exit code.
# Globals: SYNC_SUCCESS, DIRECTION, TARGET, SSH_HOST, *_BACKUP_FILE,
#          REMOTE_DB_*, LOCAL_DB_*
#######################################
cleanup_on_exit() {
  local exit_code=$?
  if [ "$SYNC_SUCCESS" != "true" ] && [ $exit_code -ne 0 ]; then
    echo ""
    echo "❌ Sync aborted or failed! (Exit code: $exit_code)"
    if [ "${DIRECTION:-}" = "push" ] && [ -n "${REMOTE_BACKUP_FILE:-}" ]; then
      # REMOTE_BACKUP_FILE is only non-empty after backup_remote_db ran, so
      # the remote credentials are guaranteed to be resolved here.
      echo "🔄 Rolling back $TARGET database..."
      ssh "$SSH_HOST" "gunzip -c $REMOTE_BACKUP_FILE | docker exec -i $REMOTE_DB_CONTAINER psql -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --quiet" || echo "⚠️ Rollback failed"
      echo "✅ Rollback complete."
    elif [ "${DIRECTION:-}" = "pull" ] && [ -n "${LOCAL_BACKUP_FILE:-}" ]; then
      echo "🔄 Rolling back local database..."
      gunzip -c "$LOCAL_BACKUP_FILE" | docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --quiet || echo "⚠️ Rollback failed"
      echo "✅ Rollback complete."
    fi
  fi
}
trap 'cleanup_on_exit' EXIT

# Load environment variables (exported so child processes see them).
if [ -f .env ]; then
  set -a; source .env; set +a
fi

# ── Configuration ──────────────────────────────────────────────────────────
DIRECTION="${1:-}"   # push | pull
TARGET="${2:-}"      # testing | staging | prod

SSH_HOST="root@alpha.mintel.me"

LOCAL_DB_USER="${PAYLOAD_DB_USER:-payload}"
LOCAL_DB_NAME="${PAYLOAD_DB_NAME:-payload}"
LOCAL_DB_CONTAINER="klz-2026-klz-db-1"
LOCAL_MEDIA_DIR="./public/media"

BACKUP_DIR="./backups"
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")

# Remote credentials (resolved per-target from server env files)
REMOTE_DB_USER=""
REMOTE_DB_NAME=""

# Migration names to insert after restore (keeps Payload from prompting).
# Format: "<migration_name>:<batch_number>".
MIGRATIONS=(
  "20260223_195005_products_collection:1"
  "20260223_195151_remove_sku_unique:2"
  "20260225_003500_add_pages_collection:3"
)

#######################################
# Map $TARGET onto remote compose project / container / volume / site paths,
# then auto-detect the remote DB credentials from the server's .env files.
# Exits 1 on an unknown target.
# Globals (written): REMOTE_PROJECT, REMOTE_DB_CONTAINER, REMOTE_APP_CONTAINER,
#                    REMOTE_MEDIA_VOLUME, REMOTE_SITE_DIR,
#                    REMOTE_DB_USER, REMOTE_DB_NAME
#######################################
resolve_target() {
  case "$TARGET" in
    testing)
      REMOTE_PROJECT="klz-testing"
      REMOTE_DB_CONTAINER="klz-testing-klz-db-1"
      REMOTE_APP_CONTAINER="klz-testing-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-testing_klz_media_data/_data"
      REMOTE_SITE_DIR="/home/deploy/sites/testing.klz-cables.com"
      ;;
    staging)
      REMOTE_PROJECT="klz-staging"
      REMOTE_DB_CONTAINER="klz-staging-klz-db-1"
      REMOTE_APP_CONTAINER="klz-staging-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-staging_klz_media_data/_data"
      REMOTE_SITE_DIR="/home/deploy/sites/staging.klz-cables.com"
      ;;
    prod|production)
      REMOTE_PROJECT="klz-cablescom"
      REMOTE_DB_CONTAINER="klz-cablescom-klz-db-1"
      REMOTE_APP_CONTAINER="klz-cablescom-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-cablescom_klz_media_data/_data"
      REMOTE_SITE_DIR="/home/deploy/sites/klz-cables.com"
      ;;
    *)
      echo "❌ Unknown target: $TARGET"
      echo "   Valid targets: testing, staging, prod"
      exit 1
      ;;
  esac

  # Auto-detect remote DB credentials from the env file on the server.
  # `tail -1` keeps the last match so .env.local-style overrides win;
  # `|| echo ""` keeps set -e from aborting when no env file matches.
  echo "🔍 Detecting $TARGET database credentials..."
  REMOTE_DB_USER=$(ssh "$SSH_HOST" "grep -h '^PAYLOAD_DB_USER=' $REMOTE_SITE_DIR/.env* 2>/dev/null | tail -1 | cut -d= -f2" || echo "")
  REMOTE_DB_NAME=$(ssh "$SSH_HOST" "grep -h '^PAYLOAD_DB_NAME=' $REMOTE_SITE_DIR/.env* 2>/dev/null | tail -1 | cut -d= -f2" || echo "")
  REMOTE_DB_USER="${REMOTE_DB_USER:-payload}"
  REMOTE_DB_NAME="${REMOTE_DB_NAME:-payload}"
  echo "   User: $REMOTE_DB_USER | DB: $REMOTE_DB_NAME"
}

#######################################
# Start the local DB container if it is not running, then wait (up to 10s)
# for Postgres to accept connections. Exits 1 on timeout.
# Globals (read): LOCAL_DB_CONTAINER, LOCAL_DB_USER
#######################################
ensure_local_db() {
  # -x: exact whole-line match, so e.g. "klz-2026-klz-db-1-old" is not
  # mistaken for the container we need.
  if ! docker ps --format '{{.Names}}' | grep -qx "$LOCAL_DB_CONTAINER"; then
    echo "⏳ Local DB container not running. Starting..."
    docker compose up -d klz-db
    echo "⏳ Waiting for local DB to be ready..."
    for _ in {1..10}; do
      if docker exec "$LOCAL_DB_CONTAINER" pg_isready -U "$LOCAL_DB_USER" -q 2>/dev/null; then
        echo "✅ Local DB is ready."
        return
      fi
      sleep 1
    done
    echo "❌ Local DB failed to start."
    exit 1
  fi
}

#######################################
# Rewrite the payload_migrations table after a restore so Payload does not
# prompt to re-run migrations: drop stub rows (batch = -1) and insert each
# known migration if missing.
# Arguments:
#   $1 - DB container name
#   $2 - DB user
#   $3 - DB name
#   $4 - "true" to run via ssh on the remote host, "false" for local docker
#######################################
sanitize_migrations() {
  local container="$1"
  local db_user="$2"
  local db_name="$3"
  local is_remote="$4"   # "true" or "false"

  echo "🔧 Sanitizing payload_migrations table..."
  local SQL="DELETE FROM payload_migrations WHERE batch = -1;"
  for entry in "${MIGRATIONS[@]}"; do
    local name="${entry%%:*}"
    local batch="${entry##*:}"
    # INSERT … WHERE NOT EXISTS keeps the statement idempotent.
    SQL="$SQL INSERT INTO payload_migrations (name, batch) SELECT '$name', $batch WHERE NOT EXISTS (SELECT 1 FROM payload_migrations WHERE name = '$name');"
  done

  if [ "$is_remote" = "true" ]; then
    ssh "$SSH_HOST" "docker exec $container psql -U $db_user -d $db_name -c \"$SQL\""
  else
    docker exec "$container" psql -U "$db_user" -d "$db_name" -c "$SQL"
  fi
}

# ── Safety: Create backup before overwriting ───────────────────────────────

#######################################
# Dump the local DB to a timestamped gzip file under $BACKUP_DIR and record
# its path in LOCAL_BACKUP_FILE for the rollback trap.
#######################################
backup_local_db() {
  mkdir -p "$BACKUP_DIR"
  local file="$BACKUP_DIR/payload_pre_sync_${TIMESTAMP}.sql.gz"
  echo "📦 Creating safety backup of local DB → $file"
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --clean --if-exists | gzip > "$file"
  echo "✅ Backup: $file ($(du -h "$file" | cut -f1))"
  LOCAL_BACKUP_FILE="$file"
}

#######################################
# Dump the remote DB to a timestamped gzip file in the remote /tmp and record
# its path in REMOTE_BACKUP_FILE for the rollback trap.
# NOTE(review): the /tmp filename is predictable; acceptable on a
# single-admin host, but mktemp on the remote would be safer — confirm.
#######################################
backup_remote_db() {
  local file="/tmp/payload_pre_sync_${TIMESTAMP}.sql.gz"
  echo "📦 Creating safety backup of $TARGET DB → $SSH_HOST:$file"
  ssh "$SSH_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --clean --if-exists | gzip > $file"
  echo "✅ Remote backup: $file"
  REMOTE_BACKUP_FILE="$file"
}

# ── PUSH: local → remote ──────────────────────────────────────────────────
do_push() {
  echo ""
  echo "┌──────────────────────────────────────────────────┐"
  echo "│  📤 PUSH: local → $TARGET                         "
  echo "│  This will OVERWRITE the $TARGET database!        "
  echo "│  A safety backup will be created first.           "
  echo "└──────────────────────────────────────────────────┘"
  echo ""
  read -p "Are you sure? (y/N) " -n 1 -r
  echo ""
  [[ ! $REPLY =~ ^[Yy]$ ]] && { echo "Cancelled."; exit 0; }

  # 0. Ensure local DB is running
  ensure_local_db

  # 1. Safety backup of remote
  backup_remote_db

  # 2. Dump local DB
  echo "📤 Dumping local database..."
  local dump="/tmp/payload_push_${TIMESTAMP}.sql.gz"
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --clean --if-exists | gzip > "$dump"

  # 3. Transfer and restore
  echo "📤 Transferring to $SSH_HOST..."
  scp "$dump" "$SSH_HOST:/tmp/payload_push.sql.gz"
  echo "🔄 Restoring database on $TARGET..."
  # ON_ERROR_STOP=1: without it psql exits 0 even when the restore fails
  # partway, which would skip the rollback trap and falsely report success.
  ssh "$SSH_HOST" "gunzip -c /tmp/payload_push.sql.gz | docker exec -i $REMOTE_DB_CONTAINER psql -U $REMOTE_DB_USER -d $REMOTE_DB_NAME -v ON_ERROR_STOP=1 --quiet"

  # 4. Sanitize migrations
  sanitize_migrations "$REMOTE_DB_CONTAINER" "$REMOTE_DB_USER" "$REMOTE_DB_NAME" "true"

  # 5. Sync media
  echo "🖼️ Syncing media files..."
  rsync -az --delete --progress "$LOCAL_MEDIA_DIR/" "$SSH_HOST:$REMOTE_MEDIA_VOLUME/"
  # Fix ownership: rsync preserves local UID, but container runs as nextjs (1001)
  echo "🔑 Fixing media file permissions..."
  ssh "$SSH_HOST" "docker exec -u 0 $REMOTE_APP_CONTAINER chown -R 1001:65533 /app/public/media/ 2>/dev/null || true"

  # 6. Restart app
  echo "🔄 Restarting $TARGET app container..."
  ssh "$SSH_HOST" "docker restart $REMOTE_APP_CONTAINER"

  # Cleanup
  rm -f "$dump"
  ssh "$SSH_HOST" "rm -f /tmp/payload_push.sql.gz"

  SYNC_SUCCESS="true"
  echo ""
  echo "✅ Push to $TARGET complete!"
}

# ── PULL: remote → local ──────────────────────────────────────────────────
do_pull() {
  echo ""
  echo "┌──────────────────────────────────────────────────┐"
  echo "│  📥 PULL: $TARGET → local                         "
  echo "│  This will OVERWRITE your local database!         "
  echo "│  A safety backup will be created first.           "
  echo "└──────────────────────────────────────────────────┘"
  echo ""
  read -p "Are you sure? (y/N) " -n 1 -r
  echo ""
  [[ ! $REPLY =~ ^[Yy]$ ]] && { echo "Cancelled."; exit 0; }

  # 0. Ensure local DB is running
  ensure_local_db

  # 1. Safety backup of local
  backup_local_db

  # 2. Dump remote DB
  echo "📥 Dumping $TARGET database..."
  ssh "$SSH_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --clean --if-exists | gzip > /tmp/payload_pull.sql.gz"

  # 3. Transfer and restore
  echo "📥 Downloading from $SSH_HOST..."
  scp "$SSH_HOST:/tmp/payload_pull.sql.gz" "/tmp/payload_pull.sql.gz"
  echo "🔄 Restoring database locally..."
  # ON_ERROR_STOP=1: make a partial/failed restore exit non-zero so the
  # rollback trap can restore the safety backup (see do_push).
  gunzip -c "/tmp/payload_pull.sql.gz" | docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" -v ON_ERROR_STOP=1 --quiet

  # 4. Sync media
  echo "🖼️ Syncing media files..."
  mkdir -p "$LOCAL_MEDIA_DIR"
  rsync -az --delete --progress "$SSH_HOST:$REMOTE_MEDIA_VOLUME/" "$LOCAL_MEDIA_DIR/"

  # Cleanup
  rm -f "/tmp/payload_pull.sql.gz"
  ssh "$SSH_HOST" "rm -f /tmp/payload_pull.sql.gz"

  SYNC_SUCCESS="true"
  echo ""
  echo "✅ Pull from $TARGET complete! Restart dev server to see changes."
}

# ── Main ───────────────────────────────────────────────────────────────────
if [ -z "$DIRECTION" ] || [ -z "$TARGET" ]; then
  echo "📦 CMS Data Sync Tool"
  echo ""
  echo "Usage:"
  echo "  pnpm cms:push:testing   Push local DB + media → testing"
  echo "  pnpm cms:push:staging   Push local DB + media → staging"
  echo "  pnpm cms:push:prod      Push local DB + media → production"
  echo "  pnpm cms:pull:testing   Pull testing DB + media → local"
  echo "  pnpm cms:pull:staging   Pull staging DB + media → local"
  echo "  pnpm cms:pull:prod      Pull production DB + media → local"
  echo ""
  echo "Safety: A backup is always created before overwriting."
  exit 1
fi

resolve_target

case "$DIRECTION" in
  push) do_push ;;
  pull) do_pull ;;
  *)
    echo "❌ Unknown direction: $DIRECTION (use 'push' or 'pull')"
    exit 1
    ;;
esac