chore: remove Directus CMS and related dependencies
All checks were successful
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 3s
Monorepo Pipeline / 🧹 Lint (push) Successful in 1m19s
Monorepo Pipeline / 🧪 Test (push) Successful in 1m5s
Monorepo Pipeline / 🏗️ Build (push) Successful in 1m26s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Image Processor (push) Has been skipped
Monorepo Pipeline / 🐳 Build Directus (Base) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
All checks were successful
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 3s
Monorepo Pipeline / 🧹 Lint (push) Successful in 1m19s
Monorepo Pipeline / 🧪 Test (push) Successful in 1m5s
Monorepo Pipeline / 🏗️ Build (push) Successful in 1m26s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Image Processor (push) Has been skipped
Monorepo Pipeline / 🐳 Build Directus (Base) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
This commit is contained in:
@@ -1,77 +0,0 @@
|
||||
#!/bin/bash
# cms-apply.sh — apply the local Directus schema snapshot to an environment.
#
# Usage: ./scripts/cms-apply.sh [local|infra]
#   local - apply the snapshot to the local docker-compose Directus container
#   infra - inject and apply the snapshot on the remote production server
set -euo pipefail

# Configuration
PROJECT="infra-cms"
LOCAL_SCHEMA_PATH="./packages/cms-infra/schema/snapshot.yaml"
REMOTE_HOST="root@infra.mintel.me"
REMOTE_DIR="/opt/infra/directus"

# Target environment (default empty so the usage check fires under `set -u`)
ENV=${1:-}

if [ -z "$ENV" ]; then
  echo "Usage: ./scripts/cms-apply.sh [local|infra]"
  exit 1
fi

case $ENV in
  local)
    PROJECT="infra-cms"
    # Derive monorepo root from this script's location so the script works
    # regardless of the caller's working directory.
    SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
    ROOT_DIR="$( dirname "$SCRIPT_DIR" )"
    CMD_PREFIX="docker compose -f $ROOT_DIR/packages/cms-infra/docker-compose.yml"

    LOCAL_CONTAINER=$($CMD_PREFIX ps -q "$PROJECT")
    if [ -z "$LOCAL_CONTAINER" ]; then
      echo "❌ Local $PROJECT container not found. Is it running?"
      exit 1
    fi

    echo "🧹 Reconciling database metadata..."
    # Use the derived script dir instead of a cwd-relative ./scripts/ path.
    "$SCRIPT_DIR/cms-reconcile.sh"

    echo "🚀 Applying schema to LOCAL $PROJECT..."
    docker exec "$LOCAL_CONTAINER" npx directus schema apply -y /directus/schema/snapshot.yaml
    ;;
  infra)
    # 'infra' is the remote production server for at-mintel
    PROJECT="directus" # Remote project name

    echo "🔍 Detecting remote container..."
    REMOTE_CONTAINER=$(ssh "$REMOTE_HOST" "docker ps --filter label=com.docker.compose.project=$PROJECT --filter label=com.docker.compose.service=directus -q")

    if [ -z "$REMOTE_CONTAINER" ]; then
      # Fallback to older fixed container name if compose labels fail
      REMOTE_CONTAINER=$(ssh "$REMOTE_HOST" "docker ps -f name=directus-directus-1 -q")
    fi

    if [ -z "$REMOTE_CONTAINER" ]; then
      echo "❌ Remote container for $ENV not found."
      exit 1
    fi

    echo "📦 Syncing extensions to REMOTE $ENV..."
    # Ensure remote directory exists
    ssh "$REMOTE_HOST" "mkdir -p $REMOTE_DIR/extensions"
    rsync -avz --delete ./packages/cms-infra/extensions/ "$REMOTE_HOST:$REMOTE_DIR/extensions/"

    echo "📤 Injecting snapshot directly into container $REMOTE_CONTAINER..."
    # Inject file via stdin to avoid needing a host-side mount or scp path matching
    ssh "$REMOTE_HOST" "docker exec -i $REMOTE_CONTAINER sh -c 'cat > /tmp/snapshot.yaml'" < "$LOCAL_SCHEMA_PATH"

    echo "🚀 Applying schema to REMOTE $ENV..."
    ssh "$REMOTE_HOST" "docker exec $REMOTE_CONTAINER npx directus schema apply -y /tmp/snapshot.yaml"

    echo "🔄 Restarting remote Directus to clear cache..."
    ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose restart directus"

    # Cleanup the injected snapshot inside the container
    ssh "$REMOTE_HOST" "docker exec $REMOTE_CONTAINER rm /tmp/snapshot.yaml"
    ;;
  *)
    echo "❌ Invalid environment: $ENV. Supported: local, infra."
    exit 1
    ;;
esac

echo "✨ Schema apply complete!"
@@ -1,56 +0,0 @@
|
||||
#!/bin/bash
# cms-reconcile.sh — register columns that exist in the SQLite database but
# are missing from Directus' directus_fields metadata table, so they become
# visible in the admin app.
set -euo pipefail

# Configuration
# Derive monorepo root from this script's location
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$( dirname "$SCRIPT_DIR" )"
DB_PATH="$ROOT_DIR/packages/cms-infra/database/data.db"

if [ ! -f "$DB_PATH" ]; then
  echo "❌ Database not found at $DB_PATH"
  exit 1
fi

#######################################
# Register any SQLite columns of a table missing from directus_fields.
# Globals:   DB_PATH (read)
# Arguments: $1 - table (collection) name
#######################################
reconcile_table() {
  local TABLE=$1
  local COLUMNS COL EXISTS INTERFACE
  echo "🔍 Reconciling table: $TABLE"

  # 1. Get all column names from SQLite (2nd pipe-separated field of PRAGMA output)
  COLUMNS=$(sqlite3 "$DB_PATH" "PRAGMA table_info($TABLE);" | cut -d'|' -f2)

  for COL in $COLUMNS; do
    # 2. Check if the field is already registered in directus_fields
    EXISTS=$(sqlite3 "$DB_PATH" "SELECT count(*) FROM directus_fields WHERE collection = '$TABLE' AND field = '$COL';")

    if [ "$EXISTS" -eq 0 ]; then
      echo "➕ Registering missing field: $TABLE.$COL"

      # Determine a basic interface from the column name (very simplified).
      # NOTE: exact-name arms must come BEFORE the glob arms — previously
      # 'feedback_id' was captured by '*id)' and registered as 'numeric'
      # instead of the intended m2o dropdown.
      INTERFACE="input"
      case $COL in
        company|person|user_created|user_updated|feedback_id) INTERFACE="select-dropdown-m2o" ;;
        date_created|date_updated) INTERFACE="datetime" ;;
        screenshot|logo) INTERFACE="file" ;;
        status|type) INTERFACE="select-dropdown" ;;
        *id) INTERFACE="numeric" ;;
        *text) INTERFACE="input-multiline" ;;
      esac

      sqlite3 "$DB_PATH" "INSERT INTO directus_fields (collection, field, interface) VALUES ('$TABLE', '$COL', '$INTERFACE');"
    else
      echo "✅ Field already registered: $TABLE.$COL"
    fi
  done
}

# Run for known problematic tables
reconcile_table "visual_feedback"
reconcile_table "visual_feedback_comments"
reconcile_table "people"
reconcile_table "leads"
reconcile_table "client_users"
reconcile_table "companies"

echo "✨ SQL Reconciliation complete!"
@@ -1,23 +0,0 @@
|
||||
#!/bin/bash
# cms-snapshot.sh — snapshot the local Directus schema into the repo and
# repair the snapshot for Postgres compatibility.
set -euo pipefail

# Configuration
PROJECT="infra-cms"
SCHEMA_PATH="./packages/cms-infra/schema/snapshot.yaml"
# Use the `docker compose` plugin (v2), consistent with the sibling scripts,
# instead of the legacy standalone `docker-compose` binary.
CMD_PREFIX="docker compose -f packages/cms-infra/docker-compose.yml"

# Detect local container
LOCAL_CONTAINER=$($CMD_PREFIX ps -q "$PROJECT")

if [ -z "$LOCAL_CONTAINER" ]; then
  echo "❌ Local $PROJECT container not found. Is it running?"
  exit 1
fi

echo "📸 Creating schema snapshot for local $PROJECT..."
# Note: we save it to the mounted volume path inside the container
docker exec "$LOCAL_CONTAINER" npx directus schema snapshot -y /directus/schema/snapshot.yaml

echo "🛠️ Repairing snapshot for Postgres compatibility..."
python3 ./scripts/fix_snapshot_v3.py

echo "✅ Snapshot saved and repaired at $SCHEMA_PATH"
@@ -1,27 +0,0 @@
|
||||
#!/bin/bash
# start-cms.sh — build extensions, start the CMS stack, and patch Directus.
# All arguments are forwarded to sync-extensions.sh; everything except
# --link is also forwarded to `docker compose up`.
set -euo pipefail

# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

echo "🚀 Starting CMS infrastructure..."

# 1. Build extensions (pass all arguments to handle flags like --link).
# Under `set -e` a failed build now aborts instead of launching containers
# with stale extension artifacts.
"$SCRIPT_DIR/sync-extensions.sh" "$@"

# Filter out --link before passing to docker compose
DOCKER_ARGS=()
for arg in "$@"; do
  if [ "$arg" != "--link" ]; then
    DOCKER_ARGS+=("$arg")
  fi
done

# 2. Docker compose up with arguments
# (the ${arr[@]+...} guard keeps an empty array safe under `set -u` on bash < 4.4)
cd "$REPO_ROOT/packages/cms-infra"
docker compose up -d ${DOCKER_ARGS[@]+"${DOCKER_ARGS[@]}"}

# 3. Apply core patch
"$SCRIPT_DIR/patch-cms.sh"

echo "✨ CMS is up and patched!"
@@ -1,96 +0,0 @@
|
||||
#!/usr/bin/env python3
"""Repair a Directus schema snapshot for Postgres compatibility.

Normalizes char/varchar UUID columns to the ``uuid`` type, injects the
``visual_feedback_comments.feedback_id`` field when the snapshot is missing
it, and forces columns near a primary-key marker to be non-nullable.
"""
import os
import sys

# Fields whose schema-level data type must be normalized to uuid
FIX_FIELDS = {'id', 'company', 'user_created', 'user_updated', 'screenshot', 'logo', 'feedback_id'}
# Fields whose top-level `type:` must become uuid
UUID_FIELDS = {'id', 'company', 'user_created', 'user_updated'}

# NOTE(review): the exact indentation of this injected YAML block (and of the
# max_length replacement below) could not be recovered from the mangled diff
# view — confirm against a freshly generated snapshot.yaml.
FEEDBACK_ID_BLOCK = """  - collection: visual_feedback_comments
    field: feedback_id
    type: integer
    meta:
      collection: visual_feedback_comments
      field: feedback_id
      interface: select-dropdown-m2o
      required: true
      sort: 4
      width: full
    schema:
      name: feedback_id
      table: visual_feedback_comments
      data_type: integer
      is_nullable: false
      is_indexed: true
      foreign_key_table: visual_feedback
      foreign_key_column: id
"""


def normalize_types(lines):
    """First pass: convert string/char/varchar UUID columns to ``uuid``.

    Args:
        lines: snapshot file content as a list of lines (with newlines).

    Returns:
        (new_lines, has_feedback_id) — the rewritten lines and whether
        visual_feedback_comments.feedback_id already exists in the snapshot.
    """
    new_lines = []
    current_collection = None
    current_field = None
    in_schema = False
    has_feedback_id = False

    for line in lines:
        stripped = line.strip()

        if stripped.startswith('- collection:'):
            current_collection = stripped.split(':')[-1].strip()
            in_schema = False
        elif stripped.startswith('field:'):
            current_field = stripped.split(':')[-1].strip()
            if current_collection == 'visual_feedback_comments' and current_field == 'feedback_id':
                has_feedback_id = True
        elif stripped == 'schema:':
            in_schema = True
        elif stripped == 'meta:' or (not line.startswith(' ') and line.strip() and not line.startswith('-')):
            # Leaving the schema block. (A '- collection:' line is already
            # handled by the first branch, so the duplicate check the
            # original carried here was dead code.)
            in_schema = False

        # Top-level field type
        if not in_schema and stripped.startswith('type:') and current_field in UUID_FIELDS:
            line = line.replace('type: string', 'type: uuid')

        # Schema data type
        if in_schema and current_field in FIX_FIELDS:
            if 'data_type: char' in line or 'data_type: varchar' in line:
                line = line.replace('data_type: char', 'data_type: uuid').replace('data_type: varchar', 'data_type: uuid')
            if 'max_length:' in line:
                # uuid columns carry no max_length
                line = '      max_length: null\n'

        new_lines.append(line)

    return new_lines, has_feedback_id


def inject_feedback_id(lines):
    """Insert FEEDBACK_ID_BLOCK once, just before the first 'systemFields:' line."""
    result = []
    injected = False
    for line in lines:
        if 'systemFields:' in line and not injected:
            result.append(FEEDBACK_ID_BLOCK)
            injected = True
        result.append(line)
    return result


def fix_primary_keys(lines):
    """Force ``is_nullable: false`` within 10 lines of any primary-key marker.

    Mutates ``lines`` in place (matching the original script's in-place
    second pass) and returns it.
    """
    for i, line in enumerate(lines):
        if 'is_primary_key: true' in line:
            # Search backwards and forwards around the marker
            for j in range(max(0, i - 10), min(len(lines), i + 10)):
                if 'is_nullable: true' in lines[j]:
                    lines[j] = lines[j].replace('is_nullable: true', 'is_nullable: false')
    return lines


def transform(lines):
    """Apply all snapshot repairs and return the new line list."""
    new_lines, has_feedback_id = normalize_types(lines)
    if not has_feedback_id:
        new_lines = inject_feedback_id(new_lines)
    return fix_primary_keys(new_lines)


def main():
    # Derive the snapshot path from the repo layout relative to this script
    # instead of hard-coding an absolute, machine-specific path.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    path = os.path.join(script_dir, '..', 'packages', 'cms-infra', 'schema', 'snapshot.yaml')
    if not os.path.exists(path):
        print(f"File not found: {path}")
        sys.exit(1)

    with open(path, 'r') as f:
        lines = f.readlines()

    with open(path, 'w') as f:
        f.writelines(transform(lines))

    print("SUCCESS: Full normalization and field injection complete.")


if __name__ == '__main__':
    main()
@@ -1,116 +0,0 @@
|
||||
#!/bin/bash
# patch-cms.sh — apply in-place core patches to any running Directus
# containers (extension entrypoint filter, admin HTML injection, and CSP
# relaxation). A container is restarted only when the embedded node script
# reports that it actually modified files (exit code 100).

# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONTAINERS=("cms-infra-infra-cms-1" "at-mintel-directus-1")

echo "🔧 Checking for Directus containers to patch..."

for container in "${CONTAINERS[@]}"; do
  # Only act on containers that are actually running
  if [ -n "$(docker ps -q -f "name=^/${container}$")" ]; then
    echo "🔧 Applying core patches to: $container..."

    # Capture output so we can decide whether a restart is needed
    patch_log=$(docker exec -i "$container" node << 'EOF'
const fs = require("node:fs");
const { execSync } = require("node:child_process");
let patched = false;

try {
  // 1. Patch @directus/extensions node.js (Entrypoints)
  const findNodeCmd = "find /directus/node_modules -path \"*/@directus/extensions/dist/node.js\"";
  const nodePaths = execSync(findNodeCmd).toString().trim().split("\n").filter(Boolean);

  nodePaths.forEach(targetPath => {
    let content = fs.readFileSync(targetPath, "utf8");
    let modified = false;

    const filterPatch = 'extension.host === "app" && (extension.entrypoint.app || extension.entrypoint)';

    // Only replace if the OLD pattern exists
    if (content.includes('extension.host === "app" && !!extension.entrypoint.app')) {
      content = content.replace(/extension\.host === "app" && !!extension\.entrypoint\.app/g, filterPatch);
      modified = true;
    }

    // Only replace if the OLD pattern exists for entrypoint
    // We check if "extension.entrypoint.app" is present but NOT part of our patch
    // This is a simple heuristic: if the patch string is NOT present, but the target IS.
    if (!content.includes("(extension.entrypoint.app || extension.entrypoint)")) {
      if (content.includes("extension.entrypoint.app")) {
        content = content.replace(/extension\.entrypoint\.app/g, "(extension.entrypoint.app || extension.entrypoint)");
        modified = true;
      }
    }

    if (modified) {
      fs.writeFileSync(targetPath, content);
      console.log(`✅ Entrypoint patched.`);
      patched = true;
    }
  });

  // 2. Patch @directus/api manager.js (HTML Injection)
  const findManagerCmd = "find /directus/node_modules -path \"*/@directus/api/dist/extensions/manager.js\"";
  const managerPaths = execSync(findManagerCmd).toString().trim().split("\n").filter(Boolean);

  managerPaths.forEach(targetPath => {
    let content = fs.readFileSync(targetPath, "utf8");

    const original = "head: wrapEmbeds('Custom Embed Head', this.hookEmbedsHead),";
    const injection = "head: '<script type=\"module\" src=\"/extensions/sources/index.js\"></script>\\n' + wrapEmbeds('Custom Embed Head', this.hookEmbedsHead),";

    if (content.includes(original) && !content.includes("/extensions/sources/index.js")) {
      content = content.replace(original, injection);
      fs.writeFileSync(targetPath, content);
      console.log(`✅ Injection patched.`);
      patched = true;
    }
  });

  // 3. Patch @directus/api app.js (CSP for unsafe-inline)
  const findAppCmd = "find /directus/node_modules -path \"*/@directus/api/dist/app.js\"";
  const appPaths = execSync(findAppCmd).toString().trim().split("\n").filter(Boolean);

  appPaths.forEach(targetPath => {
    let content = fs.readFileSync(targetPath, "utf8");
    let modified = false;

    const original = "scriptSrc: [\"'self'\", \"'unsafe-eval'\"],";
    const patchedStr = "scriptSrc: [\"'self'\", \"'unsafe-eval'\", \"'unsafe-inline'\"],";

    if (content.includes(original)) {
      content = content.replace(original, patchedStr);
      modified = true;
    }

    if (modified) {
      fs.writeFileSync(targetPath, content);
      console.log(`✅ CSP patched in app.js.`);
      patched = true;
    }
  });

  if (patched) process.exit(100); // Signal restart needed

} catch (error) {
  console.error("❌ Error applying patch:", error.message);
  process.exit(1);
}
EOF
)
    status=$?
    echo "$patch_log"

    # Exit code 100 is the node script's "files were modified" signal
    if [ "$status" -eq 100 ]; then
      echo "🔄 Patches applied. Restarting Directus container: $container..."
      docker restart "$container"
    else
      echo "✅ Container $container is already patched. No restart needed."
    fi
  else
    echo "ℹ️ Container $container not found. Skipping."
  fi
done

echo "✨ All patches check complete."
@@ -1,123 +0,0 @@
|
||||
#!/bin/bash
# sync-directus.sh — push/pull the Directus database and uploads between the
# local dev stack and a remote environment.
set -euo pipefail

# Configuration
REMOTE_HOST="root@infra.mintel.me"
REMOTE_DIR="/opt/infra/directus"

# DB Details (matching docker-compose defaults)
DB_USER="directus"
DB_NAME="directus"

# Defaults keep the usage check reachable under `set -u`
ACTION=${1:-}
ENV=${2:-}

# Help
if [ -z "$ACTION" ] || [ -z "$ENV" ]; then
  echo "Usage: ./scripts/sync-directus.sh [push|pull] [infra|testing|staging|production]"
  echo ""
  echo "Commands:"
  echo "  push   Sync LOCAL data -> REMOTE"
  echo "  pull   Sync REMOTE data -> LOCAL"
  echo ""
  echo "Environments:"
  echo "  infra  (infra.mintel.me)"
  exit 1
fi

# Map Environment
case $ENV in
  infra)
    PROJECT_NAME="directus"
    ;;
  *)
    echo "❌ Invalid environment: $ENV. Only 'infra' is currently configured for monorepo sync."
    exit 1
    ;;
esac

# Detect local containers
echo "🔍 Detecting local database..."
LOCAL_DB_CONTAINER=$(docker compose ps -q at-mintel-directus-db)
if [ -z "$LOCAL_DB_CONTAINER" ]; then
  echo "❌ Local directus-db container not found. Is it running? (npm run dev)"
  exit 1
fi

# Remove the transient local dump on ANY exit path — previously a failed
# scp/restore left dump.sql (full DB contents) behind in the repo root.
cleanup() { rm -f dump.sql; }
trap cleanup EXIT

if [ "$ACTION" == "push" ]; then
  echo "🚀 Pushing Local Data to $ENV..."

  # 1. DB Dump
  echo "📦 Dumping local database..."
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$DB_USER" --clean --if-exists --no-owner --no-privileges "$DB_NAME" > dump.sql

  # 2. Upload Dump
  echo "📤 Uploading dump to remote server..."
  scp dump.sql "$REMOTE_HOST:$REMOTE_DIR/dump.sql"

  # 3. Restore on Remote
  echo "🔄 Restoring dump on $ENV..."
  REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-postgres")

  if [ -z "$REMOTE_DB_CONTAINER" ]; then
    echo "❌ Remote $ENV-db container not found!"
    exit 1
  fi

  # Wipe remote DB clean before restore to avoid constraint errors
  echo "🧹 Wiping remote database schema..."
  ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'"

  echo "⚡ Restoring database..."
  ssh "$REMOTE_HOST" "docker exec -i $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME < $REMOTE_DIR/dump.sql"

  # 4. Sync Uploads
  echo "📁 Syncing uploads (Local -> $ENV)..."
  rsync -avz --progress ./directus/uploads/ "$REMOTE_HOST:$REMOTE_DIR/uploads/"

  # Clean up (the local dump is removed by the EXIT trap)
  ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"

  # 5. Restart Directus to trigger migrations and refresh schema cache
  echo "🔄 Restarting remote Directus to apply migrations..."
  ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME restart directus"

  echo "✨ Push to $ENV complete!"

elif [ "$ACTION" == "pull" ]; then
  echo "📥 Pulling $ENV Data to Local..."

  # 1. DB Dump on Remote
  echo "📦 Dumping remote database ($ENV)..."
  REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-postgres")

  if [ -z "$REMOTE_DB_CONTAINER" ]; then
    echo "❌ Remote $ENV-db container not found!"
    exit 1
  fi
  ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $DB_USER --clean --if-exists --no-owner --no-privileges $DB_NAME > $REMOTE_DIR/dump.sql"

  # 2. Download Dump
  echo "📥 Downloading dump..."
  scp "$REMOTE_HOST:$REMOTE_DIR/dump.sql" dump.sql

  # 3. Restore locally
  # Wipe local DB clean before restore to avoid constraint errors
  echo "🧹 Wiping local database schema..."
  docker exec "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'

  echo "⚡ Restoring database locally..."
  docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" < dump.sql

  # 4. Sync Uploads
  echo "📁 Syncing uploads ($ENV -> Local)..."
  rsync -avz --progress "$REMOTE_HOST:$REMOTE_DIR/uploads/" ./directus/uploads/

  # Clean up (the local dump is removed by the EXIT trap)
  ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"

  echo "✨ Pull to Local complete!"
else
  echo "Invalid action: $ACTION. Use push or pull."
  exit 1
fi
@@ -1,138 +0,0 @@
|
||||
#!/bin/bash
# sync-extensions.sh — build every Directus extension package and sync its
# artifacts into the flat extension directories that the local and legacy
# Directus instances scan.
#
# Flags:
#   --link   symlink each package's dist/ into the target instead of copying
set -euo pipefail

# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
EXTENSIONS_ROOT="$REPO_ROOT/packages"

# Strict local targets for bombproof isolation
TARGET_DIRS=(
  "$REPO_ROOT/packages/cms-infra/extensions"
  "$REPO_ROOT/directus/extensions"
)

# List of extension packages to sync
EXTENSION_PACKAGES=(
  "acquisition"
  "acquisition-manager"
  "company-manager"
  "customer-manager"
  "feedback-commander"
  "people-manager"
  "unified-dashboard"
)

# Parse flags
LINK_MODE=false
for arg in "$@"; do
  if [ "$arg" == "--link" ]; then
    LINK_MODE=true
  fi
done

echo "🚀 Starting isolated extension sync..."

# Ensure target directories exist
for TARGET in "${TARGET_DIRS[@]}"; do
  mkdir -p "$TARGET"
done

# Build the acquisition library if it exists
if [ -d "$REPO_ROOT/packages/acquisition" ]; then
  echo "📦 Building acquisition..."
  (cd "$REPO_ROOT/packages/acquisition" && pnpm build)
fi

for PKG in "${EXTENSION_PACKAGES[@]}"; do
  PKG_PATH="$EXTENSIONS_ROOT/$PKG"

  if [ -d "$PKG_PATH" ]; then
    echo "📦 Processing $PKG..."

    # 1. Build the extension. Under `set -e` a failed build aborts the sync
    # instead of silently shipping stale artifacts.
    (cd "$PKG_PATH" && pnpm build)

    EXT_NAME="$PKG"
    echo "🚚 Syncing $EXT_NAME..."

    # 2. Sync to each target directory
    for TARGET_BASE in "${TARGET_DIRS[@]}"; do
      # FLAT STRUCTURE: Directus 11.15.x local scanner is FLAT.
      FINAL_TARGET="$TARGET_BASE/$EXT_NAME"

      echo "🚚 Syncing $EXT_NAME to $FINAL_TARGET..."

      # Clean target first to avoid ghost files (the :? guard aborts if the
      # variable were ever empty, preventing an accidental `rm -rf /*`)
      mkdir -p "$FINAL_TARGET"
      rm -rf "${FINAL_TARGET:?}"/*

      # Copy build artifacts
      if [ -f "$PKG_PATH/dist/index.js" ]; then
        cp "$PKG_PATH/dist/index.js" "$FINAL_TARGET/index.js"
      elif [ -f "$PKG_PATH/index.js" ]; then
        cp "$PKG_PATH/index.js" "$FINAL_TARGET/index.js"
      fi

      if [ -f "$PKG_PATH/package.json" ]; then
        # We ALWAYS copy and patch package.json to avoid messing with source
        cp "$PKG_PATH/package.json" "$FINAL_TARGET/"
        # Force the registration path to index.js and ensure host/source are
        # set. The file path is passed via argv instead of being interpolated
        # into the JS source, so paths with spaces/quotes cannot break it.
        node -e "
          const fs = require('fs');
          const pkgPath = process.argv[1];
          const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
          if (!pkg['directus:extension']) pkg['directus:extension'] = {};

          // Standard metadata for Directus 11.15.x (with core patch applied)
          pkg['directus:extension'].path = 'index.js';
          if (!pkg['directus:extension'].host) {
            pkg['directus:extension'].host = pkg['directus:extension'].type === 'endpoint' ? 'api' : 'app';
          }
          if (!pkg['directus:extension'].source) {
            pkg['directus:extension'].source = 'src/index.ts';
          }

          fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2));
        " "$FINAL_TARGET/package.json"
      fi

      if [ -d "$PKG_PATH/dist" ]; then
        if [ "$LINK_MODE" = true ]; then
          # Pass paths via argv instead of interpolating them into Python
          REL_PATH=$(python3 -c "import os, sys; print(os.path.relpath(sys.argv[1], sys.argv[2]))" "$PKG_PATH/dist" "$FINAL_TARGET")
          ln -sf "$REL_PATH" "$FINAL_TARGET/dist"
        else
          cp -r "$PKG_PATH/dist" "$FINAL_TARGET/"
        fi
      fi
    done

    echo "✅ $PKG synced."
  else
    echo "❌ Extension source not found: $PKG_PATH"
  fi
done

# Cleanup: remove anything from extensions root that isn't in our whitelist
WHITELIST=("${EXTENSION_PACKAGES[@]}" "sources" "endpoints" "hooks" "layouts" "modules" "operations" "panels" "displays" "interfaces")

for TARGET_BASE in "${TARGET_DIRS[@]}"; do
  echo "🧹 Cleaning up $TARGET_BASE..."
  for ITEM in "$TARGET_BASE"/*; do
    [ -e "$ITEM" ] || continue
    BN=$(basename "$ITEM")
    IS_ALLOWED=false
    for W in "${WHITELIST[@]}"; do
      if [[ "$BN" == "$W" ]]; then IS_ALLOWED=true; break; fi
    done

    if [ "$IS_ALLOWED" = false ]; then
      echo "  🗑️ Removing extra/legacy item: $BN"
      rm -rf "$ITEM"
    fi
  done
done

# Container patching is now handled by scripts/patch-cms.sh
# which should be run AFTER the containers are up.

echo "✨ Sync complete! Extensions are in packages/cms-infra/extensions."
@@ -1,91 +0,0 @@
|
||||
#!/bin/bash
# Validate that the Directus core patches and the sidebar configuration are
# intact inside the local CMS container; self-heals the sidebar module bar
# when manager modules are missing or disabled.

# Configuration
CONTAINER="cms-infra-infra-cms-1"

echo "🔍 Validating Directus Extension Stability..."

# 1. Verify Patches
echo "🛠️ Checking Core Patches..."
if ! docker exec -i "$CONTAINER" node << 'EOF'
const fs = require('node:fs');
const { execSync } = require('node:child_process');

let failures = 0;

// Check Node.js patch
const findNode = 'find /directus/node_modules -path "*/@directus/extensions/dist/node.js"';
const nodePaths = execSync(findNode).toString().trim().split('\n').filter(Boolean);
nodePaths.forEach(p => {
  const c = fs.readFileSync(p, 'utf8');
  if (!c.includes('(extension.entrypoint.app || extension.entrypoint)')) {
    console.error('❌ Missing node.js patch at ' + p);
    failures++;
  }
});

// Check Manager.js patch
const findManager = 'find /directus/node_modules -path "*/@directus/api/dist/extensions/manager.js"';
const managerPaths = execSync(findManager).toString().trim().split('\n').filter(Boolean);
managerPaths.forEach(p => {
  const c = fs.readFileSync(p, 'utf8');
  if (!c.includes('/extensions/sources/index.js')) {
    console.error('❌ Missing manager.js patch at ' + p);
    failures++;
  }
});

if (failures === 0) {
  console.log('✅ Core patches are healthy.');
}
process.exit(failures > 0 ? 1 : 0);
EOF
then
  echo "⚠️ Core patches missing! Run 'bash scripts/patch-cms.sh' to fix."
fi

# 2. Verify Module Bar
echo "📋 Checking Sidebar Configuration..."
docker exec -i "$CONTAINER" node << 'EOF'
const sqlite3 = require('/directus/node_modules/.pnpm/sqlite3@5.1.7/node_modules/sqlite3');
const db = new sqlite3.Database('/directus/database/data.db');
const managerIds = ["unified-dashboard", "acquisition-manager", "company-manager", "customer-manager", "feedback-commander", "people-manager"];

db.get('SELECT module_bar FROM directus_settings WHERE id = 1', (err, row) => {
  if (err) { console.error('❌ DB Error:', err.message); process.exit(1); }

  let mb = [];
  try { mb = JSON.parse(row.module_bar || '[]'); } catch(e) { mb = []; }

  const existingIds = mb.map(m => m.id);
  const missing = managerIds.filter(id => !existingIds.includes(id));
  const disabled = mb.filter(m => managerIds.includes(m.id) && m.enabled === false);

  if (missing.length === 0 && disabled.length === 0) {
    console.log('✅ Sidebar is healthy with all manager modules enabled.');
    process.exit(0);
  } else {
    if (missing.length > 0) console.log('⚠️ Missing modules:', missing.join(', '));
    if (disabled.length > 0) console.log('⚠️ Disabled modules:', disabled.map(m => m.id).join(', '));

    console.log('🔧 Self-healing in progress...');

    // Construct Golden State Module Bar
    const goldenMB = [
      { type: 'module', id: 'content', enabled: true },
      { type: 'module', id: 'users', enabled: true },
      { type: 'module', id: 'files', enabled: true },
      { type: 'module', id: 'insights', enabled: true },
      ...managerIds.map(id => ({ type: 'module', id, enabled: true })),
      { type: 'module', id: 'settings', enabled: true }
    ];

    db.run('UPDATE directus_settings SET module_bar = ? WHERE id = 1', [JSON.stringify(goldenMB)], function(err) {
      if (err) { console.error('❌ Repair failed:', err.message); process.exit(1); }
      console.log('✨ Sidebar repaired successfully!');
      process.exit(0);
    });
  }
});
EOF
@@ -1,67 +0,0 @@
|
||||
#!/bin/bash
# validate-extensions.sh — report which Directus extensions are present in
# each known extensions directory and flag legacy folders that the Directus
# scanner will not load.
set -euo pipefail

# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

TARGET_DIRS=(
  "$REPO_ROOT/packages/cms-infra/extensions"
  "$REPO_ROOT/directus/extensions"
)

echo "🛡️ Directus Extension Validator"
echo "================================="

for TARGET in "${TARGET_DIRS[@]}"; do
  echo ""
  echo "📂 Checking: $TARGET"

  if [ ! -d "$TARGET" ]; then
    echo "  ❌ Directory does not exist!"
    continue
  fi

  CATEGORIES=("endpoints" "hooks" "layouts" "modules" "operations" "panels" "displays" "interfaces")
  FOUND_ANY=false

  for CAT in "${CATEGORIES[@]}"; do
    CAT_PATH="$TARGET/$CAT"
    [ -d "$CAT_PATH" ] || continue

    # Collect entries with a glob instead of parsing `ls` output, which
    # breaks on names containing whitespace.
    ENTRIES=()
    for E in "$CAT_PATH"/*; do
      [ -e "$E" ] && ENTRIES+=("$E")
    done
    [ "${#ENTRIES[@]}" -gt 0 ] || continue

    FOUND_ANY=true
    echo "  📦 $CAT:"
    for EXT_PATH in "${ENTRIES[@]}"; do
      EXT=$(basename "$EXT_PATH")
      if [ -f "$EXT_PATH/package.json" ]; then
        # Read the version via argv instead of interpolating the path into JS
        VERSION=$(node -p "require(process.argv[1]).version" "$EXT_PATH/package.json")
        echo "    ✅ $EXT (v$VERSION)"
      else
        echo "    ⚠️ $EXT (MISSING package.json!)"
      fi
    done
  done

  if [ "$FOUND_ANY" = false ]; then
    echo "  📭 No extensions found in standard category folders."
  fi

  # Check for legacy/unresolved top-level folders (read find output line by
  # line instead of word-splitting it)
  while IFS= read -r L; do
    BN=$(basename "$L")
    IS_CAT=false
    for CAT in "${CATEGORIES[@]}"; do
      if [ "$BN" == "$CAT" ]; then IS_CAT=true; break; fi
    done

    if [ "$IS_CAT" = false ]; then
      echo "  🚨 LEGACY/UNRESOLVED FOLDER FOUND: $BN (Will NOT be loaded by Directus)"
    fi
  done < <(find "$TARGET" -maxdepth 1 -not -path "$TARGET" -not -name ".*" -type d)
done

echo ""
echo "✨ Validation complete."
@@ -1,100 +0,0 @@
|
||||
#!/bin/bash
# validate-sdk-imports.sh
# Validates that Directus extensions only use exports that exist in @directus/extensions-sdk.
# Prevents the "SyntaxError: doesn't provide an export named" runtime crash
# that silently breaks ALL extensions in the browser.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Valid exports from @directus/extensions-sdk in Directus 11.x
# If Directus is upgraded, update this list by running:
#   curl -s http://cms.localhost/admin/assets/@directus_extensions-sdk.*.entry.js | grep -oE 'export\{[^}]+\}'
VALID_EXPORTS=(
  "defineDisplay"
  "defineEndpoint"
  "defineHook"
  "defineInterface"
  "defineLayout"
  "defineModule"
  "defineOperationApi"
  "defineOperationApp"
  "definePanel"
  "defineTheme"
  "getFieldsFromTemplate"
  "getRelationType"
  "useApi"
  "useCollection"
  "useExtensions"
  "useFilterFields"
  "useItems"
  "useLayout"
  "useSdk"
  "useStores"
  "useSync"
)

ERRORS=0

echo "🔍 Validating @directus/extensions-sdk imports..."
echo ""

# Search all .ts and .vue files in extension directories
SEARCH_DIRS=(
  "$REPO_ROOT/packages/cms-infra/extensions"
  "$REPO_ROOT/packages/unified-dashboard"
  "$REPO_ROOT/packages/customer-manager"
  "$REPO_ROOT/packages/company-manager"
  "$REPO_ROOT/packages/people-manager"
  "$REPO_ROOT/packages/acquisition-manager"
  "$REPO_ROOT/packages/feedback-commander"
)

for DIR in "${SEARCH_DIRS[@]}"; do
  [ -d "$DIR" ] || continue

  # grep -n output is FILE:LINENUM:CONTENT. Let `read` split on ':' directly
  # instead of spawning three `cut` subshells per match; CONTENT keeps any
  # further colons because it is the last field.
  while IFS=: read -r FILE LINENUM CONTENT; do
    # Extract the named imports between braces. The trailing `|| true` guards
    # set -e: a default or namespace import has no '{…}', the grep exits
    # non-zero, and without the guard that status would silently abort the
    # whole script via the assignment.
    IMPORTS=$(echo "$CONTENT" | grep -oE '\{[^}]+\}' | tr -d '{}' | tr ',' '\n' | sed 's/^ *//;s/ *$//' | sed 's/ as .*//' || true)

    # Intentional word-splitting: IMPORTS is a newline list of identifiers.
    for IMPORT in $IMPORTS; do
      [ -z "$IMPORT" ] && continue
      FOUND=false
      for VALID in "${VALID_EXPORTS[@]}"; do
        if [ "$IMPORT" = "$VALID" ]; then
          FOUND=true
          break
        fi
      done

      if [ "$FOUND" = false ]; then
        echo "❌ INVALID IMPORT: '$IMPORT' in $FILE:$LINENUM"
        echo " '$IMPORT' is NOT exported by @directus/extensions-sdk in Directus 11.x"
        echo " This WILL crash ALL extensions at runtime!"
        echo ""
        ERRORS=$((ERRORS + 1))
      fi
    done
  done < <(grep -rn "from ['\"]@directus/extensions-sdk['\"]" "$DIR" --include="*.ts" --include="*.vue" 2>/dev/null || true)
done

if [ "$ERRORS" -gt 0 ]; then
  echo "💥 Found $ERRORS invalid import(s)!"
  echo ""
  echo "Valid exports from @directus/extensions-sdk:"
  printf " %s\n" "${VALID_EXPORTS[@]}"
  echo ""
  echo "Common fixes:"
  echo " useRouter → import { useRouter } from 'vue-router'"
  echo " useRoute → import { useRoute } from 'vue-router'"
  exit 1
else
  echo "✅ All @directus/extensions-sdk imports are valid."
fi
|
||||
@@ -1,46 +0,0 @@
|
||||
#!/bin/bash
# Verifies that every expected Directus extension made it into the public
# app-extension bundle served by a running CMS instance.

set -euo pipefail

# Target host; overridable via CMS_HOST for non-local environments.
HOST="${CMS_HOST:-http://cms.localhost}"
EXTENSIONS=("customer-manager" "people-manager" "company-manager" "feedback-commander" "unified-dashboard")

echo "🔍 Verifying extensions at $HOST..."

# 1. Check Main Manifest. `|| true` keeps set -e from killing the script
#    silently on a connection failure so the empty-response check below can
#    report a proper diagnostic instead.
MANIFEST=$(curl -s "$HOST/extensions/sources/index.js" || true)

if [ -z "$MANIFEST" ]; then
  echo "❌ Error: Manifest returned empty response."
  exit 1
fi

echo "✅ Manifest loaded (${#MANIFEST} bytes)."

# 2. Check for unexpected 404/500 (error pages come back as HTML, not JS).
#    Here-string instead of `echo | grep -q`: grep -q exits on first match,
#    which would SIGPIPE the writer and trip pipefail on large manifests.
if grep -q "<!DOCTYPE html>" <<<"$MANIFEST"; then
  echo "❌ Error: Manifest returned HTML (likely 404 or error page) instead of JS."
  exit 1
fi

# 3. Verify each extension is in the bundle
# Directus bundles strings usually, or imports them.
# We look for the ID or the unique module name from src (e.g. "Customer Manager")
# Or simply the path matching.
FAILURE=0
for EXT in "${EXTENSIONS[@]}"; do
  # -F --: match the extension name literally, never as a regex/option.
  if grep -qF -- "$EXT" <<<"$MANIFEST"; then
    echo "✅ Found '$EXT' in manifest."
  else
    echo "❌ MISSING '$EXT' in manifest!"
    FAILURE=1
  fi
done

if [ "$FAILURE" -eq 1 ]; then
  echo "🚨 VERIFICATION FAILED: One or more extensions are missing from the public bundle."
  exit 1
else
  echo "🎉 ALL EXTENSIONS VERIFIED."
  exit 0
fi
|
||||
Reference in New Issue
Block a user