All checks were successful
Build & Deploy KLZ Cables / 🔍 Prepare Environment (push) Successful in 14s
Build & Deploy KLZ Cables / 🧪 Quality Assurance (push) Has been skipped
Build & Deploy KLZ Cables / 🏗️ Build App (push) Has been skipped
Build & Deploy KLZ Cables / 🚀 Deploy (push) Has been skipped
Build & Deploy KLZ Cables / ⚡ PageSpeed (push) Has been skipped
Build & Deploy KLZ Cables / 🔔 Notifications (push) Successful in 2s
147 lines
5.3 KiB
Bash
Executable File
147 lines
5.3 KiB
Bash
Executable File
#!/bin/bash
#
# sync-directus.sh — sync the Directus database and uploads between the
# local dev environment and a remote environment.
#
# Usage: ./scripts/sync-directus.sh [push|pull] [testing|staging|production]

# Fail fast: abort on command errors, unset variables and failed pipeline
# stages, so a failed dump can never be restored over a live database.
set -euo pipefail

# Configuration
readonly REMOTE_HOST="root@alpha.mintel.me"
readonly REMOTE_DIR="/home/deploy/sites/klz-cables.com"

# DB Details (matching docker-compose defaults)
readonly DB_USER="directus"
readonly DB_NAME="directus"

# Positional arguments; default to empty so the usage check below still
# fires (instead of a `set -u` abort) when they are omitted.
ACTION=${1:-}
ENV=${2:-}

# Legacy compose project name; only assigned for production below.
# Initialised here so later references are safe under `set -u`.
OLD_PROJECT_NAME=""
# Help: print usage and bail out when either argument is missing.
if [[ -z "$ACTION" || -z "$ENV" ]]; then
  cat <<'USAGE'
Usage: ./scripts/sync-directus.sh [push|pull] [testing|staging|production]

Commands:
 push Sync LOCAL data -> REMOTE
 pull Sync REMOTE data -> LOCAL

Environments:
 testing, staging, production
USAGE
  exit 1
fi
# Map Environment to Compose Project Name (and per-env dotenv file).
case "$ENV" in
  testing)
    PROJECT_NAME="klz-cables-testing"
    ENV_FILE=".env.testing"
    ;;
  staging)
    PROJECT_NAME="klz-cables-staging"
    ENV_FILE=".env.staging"
    ;;
  production)
    PROJECT_NAME="klz-cables-prod"
    # Fallback to older project name if prod-specific one isn't found
    # later in the script.
    OLD_PROJECT_NAME="klz-cablescom"
    ENV_FILE=".env.prod"
    ;;
  *)
    # Diagnostics go to stderr so they are never mistaken for output.
    echo "❌ Invalid environment: $ENV. Use testing, staging, or production." >&2
    exit 1
    ;;
esac
# Detect local container
echo "🔍 Detecting local database..."
# `docker compose ps -q` resolves the service container even when several
# compose projects exist locally; it prints nothing when it isn't running.
# The `|| :` guard keeps a docker failure (daemon down, not installed) from
# aborting under `set -e` before the friendly message below is printed.
LOCAL_DB_CONTAINER=$(docker compose ps -q directus-db) || LOCAL_DB_CONTAINER=""
if [ -z "$LOCAL_DB_CONTAINER" ]; then
  echo "❌ Local directus-db container not found. Is it running? (npm run dev)" >&2
  exit 1
fi
# --- Helpers -----------------------------------------------------------------

# Print an error to stderr and abort the script.
die() {
  echo "❌ $*" >&2
  exit 1
}

# Locate the remote directus-db container for $PROJECT_NAME, falling back to
# $OLD_PROJECT_NAME (set for production only). Prints the container id on
# stdout; prints nothing when no container is found.
find_remote_db_container() {
  local container
  container=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-db")
  if [ -z "$container" ] && [ -n "${OLD_PROJECT_NAME:-}" ]; then
    # Warning goes to stderr so it is not captured by the caller's $(...).
    echo "⚠️ $PROJECT_NAME not found, trying fallback $OLD_PROJECT_NAME..." >&2
    container=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $OLD_PROJECT_NAME ps -q directus-db")
  fi
  echo "$container"
}

if [ "$ACTION" == "push" ]; then
  echo "🚀 Pushing Local Data to $ENV ($PROJECT_NAME)..."

  # 1. DB Dump
  echo "📦 Dumping local database..."
  # --no-owner --no-privileges ensures the restore works on the remote with a
  # different user setup. Abort on failure: the remote schema is dropped
  # before restore, so shipping an empty/partial dump would destroy data.
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$DB_USER" --clean --if-exists --no-owner --no-privileges "$DB_NAME" > dump.sql \
    || die "Local pg_dump failed; aborting before touching the remote database."

  # 2. Upload Dump
  echo "📤 Uploading dump to remote server..."
  scp dump.sql "$REMOTE_HOST:$REMOTE_DIR/dump.sql" || die "Upload of dump.sql failed."

  # 3. Restore on Remote
  echo "🔄 Restoring dump on $ENV..."
  REMOTE_DB_CONTAINER=$(find_remote_db_container)
  [ -n "$REMOTE_DB_CONTAINER" ] || die "Remote $ENV-db container not found!"

  # Wipe remote DB clean before restore to avoid constraint errors.
  echo "🧹 Wiping remote database schema..."
  ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'" \
    || die "Failed to wipe remote schema."

  echo "⚡ Restoring database..."
  ssh "$REMOTE_HOST" "docker exec -i $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME < $REMOTE_DIR/dump.sql" \
    || die "Remote restore failed."

  # 4. Sync Uploads
  echo "📁 Syncing uploads (Local -> $ENV)..."
  # Note: all environments currently share the same host directory, so this
  # rsync updates the shared folder for every environment at once.
  rsync -avz --progress ./directus/uploads/ "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/"

  # Clean up (-f: don't fail if a previous step already removed the file)
  rm -f -- dump.sql
  ssh "$REMOTE_HOST" "rm -f $REMOTE_DIR/dump.sql"

  # 5. Restart Directus to trigger migrations and refresh the schema cache.
  echo "🔄 Restarting remote Directus to apply migrations..."
  ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME restart directus"

  echo "✨ Push to $ENV complete!"

elif [ "$ACTION" == "pull" ]; then
  echo "📥 Pulling $ENV Data to Local..."

  # 1. DB Dump on Remote
  echo "📦 Dumping remote database ($ENV)..."
  REMOTE_DB_CONTAINER=$(find_remote_db_container)
  [ -n "$REMOTE_DB_CONTAINER" ] || die "Remote $ENV-db container not found!"

  # Abort on failure so a broken dump is never restored over the local DB.
  ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $DB_USER --clean --if-exists --no-owner --no-privileges $DB_NAME > $REMOTE_DIR/dump.sql" \
    || die "Remote pg_dump failed; aborting before touching the local database."

  # 2. Download Dump
  echo "📥 Downloading dump..."
  scp "$REMOTE_HOST:$REMOTE_DIR/dump.sql" dump.sql || die "Download of dump.sql failed."

  # Wipe local DB clean before restore to avoid constraint errors.
  echo "🧹 Wiping local database schema..."
  docker exec "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;' \
    || die "Failed to wipe local schema."

  echo "⚡ Restoring database locally..."
  docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" < dump.sql \
    || die "Local restore failed."

  # 4. Sync Uploads
  echo "📁 Syncing uploads ($ENV -> Local)..."
  rsync -avz --progress "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/" ./directus/uploads/

  # Clean up
  rm -f -- dump.sql
  ssh "$REMOTE_HOST" "rm -f $REMOTE_DIR/dump.sql"

  echo "✨ Pull to Local complete!"

else
  echo "Invalid action: $ACTION. Use push or pull." >&2
  exit 1
fi