feat: add CMS data sync scripts (push/pull for testing + prod)
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 12s
Build & Deploy / 🧪 QA (push) Successful in 1m48s
Build & Deploy / 🏗️ Build (push) Successful in 3m54s
Build & Deploy / 🚀 Deploy (push) Failing after 29s
Build & Deploy / 🧪 Post-Deploy Verification (push) Has been skipped
Build & Deploy / ⚡ Performance & Accessibility (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 3s

This commit is contained in:
2026-02-25 00:57:02 +01:00
parent f0f840ad5a
commit 1e9cf7d9ab
2 changed files with 210 additions and 0 deletions

View File

@@ -124,6 +124,10 @@
"pagespeed:urls": "tsx -e \"import sitemap from './app/sitemap'; sitemap().then(urls => console.log(urls.map(u => u.url).join('\\n')))\"", "pagespeed:urls": "tsx -e \"import sitemap from './app/sitemap'; sitemap().then(urls => console.log(urls.map(u => u.url).join('\\n')))\"",
"backup:db": "bash ./scripts/backup-db.sh", "backup:db": "bash ./scripts/backup-db.sh",
"restore:db": "bash ./scripts/restore-db.sh", "restore:db": "bash ./scripts/restore-db.sh",
"cms:push:testing": "bash ./scripts/cms-sync.sh push testing",
"cms:push:prod": "bash ./scripts/cms-sync.sh push prod",
"cms:pull:testing": "bash ./scripts/cms-sync.sh pull testing",
"cms:pull:prod": "bash ./scripts/cms-sync.sh pull prod",
"prepare": "husky", "prepare": "husky",
"preinstall": "npx only-allow pnpm" "preinstall": "npx only-allow pnpm"
}, },

206
scripts/cms-sync.sh Executable file
View File

@@ -0,0 +1,206 @@
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# CMS Data Sync Tool
# Safely syncs Payload CMS data (DB + media) between environments.
#
# Usage:
# cms:push:testing Push local → testing
# cms:push:prod Push local → production
# cms:pull:testing Pull testing → local
# cms:pull:prod Pull production → local
# ────────────────────────────────────────────────────────────────────────────
# Strict mode: abort on command failure, unset variables, and pipeline errors.
set -euo pipefail
# Load environment variables from .env if present. `set -a` auto-exports every
# assignment made while sourcing, so the vars reach child processes too.
if [ -f .env ]; then
set -a; source .env; set +a
fi
# ── Configuration ──────────────────────────────────────────────────────────
DIRECTION="${1:-}" # push | pull
TARGET="${2:-}" # testing | prod
# Remote host that runs both the testing and production docker stacks.
SSH_HOST="root@alpha.mintel.me"
# DB credentials fall back to Payload defaults when not provided via .env.
DB_USER="${PAYLOAD_DB_USER:-payload}"
DB_NAME="${PAYLOAD_DB_NAME:-payload}"
# Local docker-compose Postgres container plus local media/backup locations.
LOCAL_DB_CONTAINER="klz-2026-klz-db-1"
LOCAL_MEDIA_DIR="./public/media"
BACKUP_DIR="./backups"
# Single timestamp used to name every backup artifact created by this run.
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
# Migration names to insert after restore (keeps Payload from prompting).
# Entry format: "<migration_name>:<batch>" — parsed by sanitize_migrations.
MIGRATIONS=(
"20260223_195005_products_collection:1"
"20260223_195151_remove_sku_unique:2"
"20260225_003500_add_pages_collection:3"
)
# ── Resolve target environment ─────────────────────────────────────────────
# Map $TARGET (testing | prod/production) onto the remote compose project,
# DB/app container names, and the media volume path on the remote host.
# Sets: REMOTE_PROJECT, REMOTE_DB_CONTAINER, REMOTE_APP_CONTAINER,
#       REMOTE_MEDIA_VOLUME. Exits 1 on an unknown target.
resolve_target() {
  case "$TARGET" in
    testing)
      REMOTE_PROJECT="klz-testing"
      REMOTE_DB_CONTAINER="klz-testing-klz-db-1"
      REMOTE_APP_CONTAINER="klz-testing-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-testing_klz_media_data/_data"
      ;;
    prod|production)
      REMOTE_PROJECT="klz-cablescom"
      REMOTE_DB_CONTAINER="klz-cablescom-klz-db-1"
      REMOTE_APP_CONTAINER="klz-cablescom-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-cablescom_klz_media_data/_data"
      ;;
    *)
      # Diagnostics belong on stderr so they never pollute captured stdout.
      echo "❌ Unknown target: $TARGET" >&2
      echo "   Valid targets: testing, prod" >&2
      exit 1
      ;;
  esac
}
# ── Sanitize migrations table ──────────────────────────────────────────────
# Rebuild payload_migrations bookkeeping rows after a restore so Payload
# does not re-prompt for migrations that are already applied.
#   $1 - database container name
#   $2 - exec prefix: "" runs locally, "ssh $SSH_HOST" runs on the remote
sanitize_migrations() {
  local db_container="$1"
  local runner="$2"
  echo "🔧 Sanitizing payload_migrations table..."
  # Build one SQL batch: drop placeholder rows, then idempotently insert
  # each known migration record (skipped when the name already exists).
  local sql="DELETE FROM payload_migrations WHERE batch = -1;"
  local spec mig_name mig_batch
  for spec in "${MIGRATIONS[@]}"; do
    mig_name="${spec%%:*}"
    mig_batch="${spec##*:}"
    sql="$sql INSERT INTO payload_migrations (name, batch) SELECT '$mig_name', $mig_batch WHERE NOT EXISTS (SELECT 1 FROM payload_migrations WHERE name = '$mig_name');"
  done
  if [ -n "$runner" ]; then
    $runner "docker exec $db_container psql -U $DB_USER -d $DB_NAME -c \"$sql\""
  else
    docker exec "$db_container" psql -U "$DB_USER" -d "$DB_NAME" -c "$sql"
  fi
}
# ── Safety: Create backup before overwriting ───────────────────────────────
# Dump the local Payload DB into a timestamped gzip file under $BACKUP_DIR
# so any destructive restore can be rolled back.
backup_local_db() {
  mkdir -p "$BACKUP_DIR"
  local dump_path
  dump_path="$BACKUP_DIR/payload_pre_sync_${TIMESTAMP}.sql.gz"
  echo "📦 Creating safety backup of local DB → $dump_path"
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$DB_USER" -d "$DB_NAME" --clean --if-exists | gzip > "$dump_path"
  echo "✅ Backup: $dump_path ($(du -h "$dump_path" | cut -f1))"
}
# Dump the remote ($TARGET) Payload DB to a timestamped gzip file in the
# remote host's /tmp before it gets overwritten by a push.
backup_remote_db() {
  local remote_path
  remote_path="/tmp/payload_pre_sync_${TIMESTAMP}.sql.gz"
  echo "📦 Creating safety backup of $TARGET DB → $SSH_HOST:$remote_path"
  ssh "$SSH_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $DB_USER -d $DB_NAME --clean --if-exists | gzip > $remote_path"
  echo "✅ Remote backup: $remote_path"
}
# ── PUSH: local → remote ──────────────────────────────────────────────────
# Overwrite the $TARGET environment's Payload DB and media with local data.
# Flow: confirm → backup remote DB → dump local DB → scp + restore →
# fix payload_migrations bookkeeping → rsync media → restart remote app.
do_push() {
echo ""
echo "┌──────────────────────────────────────────────────┐"
echo "│  📤 PUSH: local → $TARGET                         "
echo "│  This will OVERWRITE the $TARGET database!        "
echo "│  A safety backup will be created first.           "
echo "└──────────────────────────────────────────────────┘"
echo ""
# Interactive confirmation: one keypress, anything but y/Y aborts cleanly.
read -p "Are you sure? (y/N) " -n 1 -r
echo ""
[[ ! $REPLY =~ ^[Yy]$ ]] && { echo "Cancelled."; exit 0; }
# 1. Safety backup of remote (written to remote /tmp, kept after the run)
backup_remote_db
# 2. Dump local DB. --clean --if-exists emits DROP ... IF EXISTS statements
# so the restore fully replaces the remote objects.
echo "📤 Dumping local database..."
local dump="/tmp/payload_push_${TIMESTAMP}.sql.gz"
docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$DB_USER" -d "$DB_NAME" --clean --if-exists | gzip > "$dump"
# 3. Transfer and restore on the remote DB container
echo "📤 Transferring to $SSH_HOST..."
scp "$dump" "$SSH_HOST:/tmp/payload_push.sql.gz"
echo "🔄 Restoring database on $TARGET..."
ssh "$SSH_HOST" "gunzip -c /tmp/payload_push.sql.gz | docker exec -i $REMOTE_DB_CONTAINER psql -U $DB_USER -d $DB_NAME --quiet"
# 4. Sanitize migrations (runs remotely via the "ssh ..." exec prefix)
sanitize_migrations "$REMOTE_DB_CONTAINER" "ssh $SSH_HOST"
# 5. Sync media. Trailing slashes sync directory *contents*; --delete
# removes remote files that no longer exist locally.
echo "🖼️  Syncing media files..."
rsync -az --delete --info=progress2 "$LOCAL_MEDIA_DIR/" "$SSH_HOST:$REMOTE_MEDIA_VOLUME/"
# 6. Restart app so it serves the restored data
echo "🔄 Restarting $TARGET app container..."
ssh "$SSH_HOST" "docker restart $REMOTE_APP_CONTAINER"
# Cleanup temporary dump files on both ends (safety backup is kept)
rm -f "$dump"
ssh "$SSH_HOST" "rm -f /tmp/payload_push.sql.gz"
echo ""
echo "✅ Push to $TARGET complete!"
}
# ── PULL: remote → local ──────────────────────────────────────────────────
# Overwrite the local Payload DB and media with the $TARGET environment's
# data. A safety backup of the local DB lands in $BACKUP_DIR first.
# NOTE(review): unlike do_push, this does NOT run sanitize_migrations after
# the restore — presumably the remote migrations table is already correct;
# confirm this is intentional.
do_pull() {
echo ""
echo "┌──────────────────────────────────────────────────┐"
echo "│  📥 PULL: $TARGET → local                         "
echo "│  This will OVERWRITE your local database!         "
echo "│  A safety backup will be created first.           "
echo "└──────────────────────────────────────────────────┘"
echo ""
# Interactive confirmation: one keypress, anything but y/Y aborts cleanly.
read -p "Are you sure? (y/N) " -n 1 -r
echo ""
[[ ! $REPLY =~ ^[Yy]$ ]] && { echo "Cancelled."; exit 0; }
# 1. Safety backup of local (timestamped file under $BACKUP_DIR)
backup_local_db
# 2. Dump remote DB to remote /tmp (--clean --if-exists so the restore
# drops and recreates local objects)
echo "📥 Dumping $TARGET database..."
ssh "$SSH_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $DB_USER -d $DB_NAME --clean --if-exists | gzip > /tmp/payload_pull.sql.gz"
# 3. Transfer and restore into the local DB container
echo "📥 Downloading from $SSH_HOST..."
scp "$SSH_HOST:/tmp/payload_pull.sql.gz" "/tmp/payload_pull.sql.gz"
echo "🔄 Restoring database locally..."
gunzip -c "/tmp/payload_pull.sql.gz" | docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" -d "$DB_NAME" --quiet
# 4. Sync media. Trailing slashes sync directory contents; --delete removes
# local files that no longer exist remotely.
echo "🖼️  Syncing media files..."
mkdir -p "$LOCAL_MEDIA_DIR"
rsync -az --delete --info=progress2 "$SSH_HOST:$REMOTE_MEDIA_VOLUME/" "$LOCAL_MEDIA_DIR/"
# Cleanup temporary dump files on both ends (safety backup is kept)
rm -f "/tmp/payload_pull.sql.gz"
ssh "$SSH_HOST" "rm -f /tmp/payload_pull.sql.gz"
echo ""
echo "✅ Pull from $TARGET complete! Restart dev server to see changes."
}
# ── Main ───────────────────────────────────────────────────────────────────
# Validate arguments, resolve the target environment, then dispatch to the
# push or pull workflow. All error-path output goes to stderr so stdout
# stays clean when the script is driven from other tooling.
if [ -z "$DIRECTION" ] || [ -z "$TARGET" ]; then
  # Missing arguments is an error (exit 1), so usage is printed on stderr.
  {
    echo "📦 CMS Data Sync Tool"
    echo ""
    echo "Usage:"
    echo "  pnpm cms:push:testing   Push local DB + media → testing"
    echo "  pnpm cms:push:prod      Push local DB + media → production"
    echo "  pnpm cms:pull:testing   Pull testing DB + media → local"
    echo "  pnpm cms:pull:prod      Pull production DB + media → local"
    echo ""
    echo "Safety: A backup is always created before overwriting."
  } >&2
  exit 1
fi
resolve_target
case "$DIRECTION" in
  push) do_push ;;
  pull) do_pull ;;
  *)
    echo "❌ Unknown direction: $DIRECTION (use 'push' or 'pull')" >&2
    exit 1
    ;;
esac