diff --git a/next-env.d.ts b/next-env.d.ts
index 9edff1c..c4b7818 100644
--- a/next-env.d.ts
+++ b/next-env.d.ts
@@ -1,6 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
-import "./.next/types/routes.d.ts";
+import "./.next/dev/types/routes.d.ts";
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
diff --git a/package.json b/package.json
index c53ea8a..2f23095 100644
--- a/package.json
+++ b/package.json
@@ -11,13 +11,13 @@
"lint": "eslint app components lib scripts",
"test": "vitest",
"prepare": "husky",
- "directus:bootstrap": "DIRECTUS_URL=http://localhost:8055 npx tsx --env-file=.env scripts/setup-directus.ts",
- "directus:push:staging": "./scripts/sync-directus.sh push staging",
- "directus:pull:staging": "./scripts/sync-directus.sh pull staging",
- "directus:push:testing": "./scripts/sync-directus.sh push testing",
- "directus:pull:testing": "./scripts/sync-directus.sh pull testing",
- "directus:push:prod": "./scripts/sync-directus.sh push production",
- "directus:pull:prod": "./scripts/sync-directus.sh pull production",
+ "cms:bootstrap": "DIRECTUS_URL=http://localhost:8055 npx tsx --env-file=.env scripts/setup-directus.ts",
+ "cms:push:staging": "./scripts/sync-directus.sh push staging",
+ "cms:pull:staging": "./scripts/sync-directus.sh pull staging",
+ "cms:push:testing": "./scripts/sync-directus.sh push testing",
+ "cms:pull:testing": "./scripts/sync-directus.sh pull testing",
+ "cms:push:prod": "./scripts/sync-directus.sh push production",
+ "cms:pull:prod": "./scripts/sync-directus.sh pull production",
"pagespeed:test": "mintel pagespeed test"
},
"keywords": [],
diff --git a/scripts/setup-directus.ts b/scripts/setup-directus.ts
index bb03dce..4ec3f76 100644
--- a/scripts/setup-directus.ts
+++ b/scripts/setup-directus.ts
@@ -7,26 +7,36 @@ import { createCollection, createField, updateSettings } from "@directus/sdk";
const client = createMintelDirectusClient();
async function setupBranding() {
- const prjName = process.env.PROJECT_NAME || "Mintel Project";
+ const prjName = process.env.PROJECT_NAME || "MB Grid Solutions";
const prjColor = process.env.PROJECT_COLOR || "#82ed20";
- console.log(`🎨 Setup Directus Branding for ${prjName}...`);
+ console.log(`🎨 Refining Directus Branding for ${prjName}...`);
await ensureDirectusAuthenticated(client);
const cssInjection = `
-
-
MINTEL INFRASTRUCTURE ENGINE
-
${prjName.toUpperCase()} RELIABILITY.
+
+
Mintel Infrastructure Engine
+
${prjName.toUpperCase()} SYNC.
`;
@@ -36,25 +46,23 @@ async function setupBranding() {
project_name: prjName,
project_color: prjColor,
public_note: cssInjection,
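+ // module bar background at the settings level; the same value is mirrored in theme_light_overrides below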
+ module_bar_background: "#00081a",
theme_light_overrides: {
primary: prjColor,
- borderRadius: "16px",
+ borderRadius: "12px",
navigationBackground: "#000c24",
navigationForeground: "#ffffff",
+ moduleBarBackground: "#00081a",
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any),
);
console.log("✨ Branding applied!");
- try {
- await createCollectionAndFields();
- console.log("🏗️ Schema alignment complete!");
- } catch (error) {
- console.error("❌ Error aligning schema:", error);
- }
+ await createCollectionAndFields();
+ console.log("🏗️ Schema alignment complete!");
} catch (error) {
- console.error("❌ Error setting up branding:", error);
+ console.error("❌ Error during bootstrap:", error);
}
}
@@ -69,6 +77,9 @@ async function createCollectionAndFields() {
meta: {
icon: "contact_mail",
display_template: "{{name}} <{{email}}>",
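+ // sidebar placement: top-level collection (no group), default sort, expanded by default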
+ group: null,
+ sort: null,
+ collapse: "open",
},
}),
);
@@ -84,9 +95,7 @@ async function createCollectionAndFields() {
);
console.log(`✅ Collection ${collectionName} created.`);
} catch {
- console.log(
- `ℹ️ Collection ${collectionName} already exists or error occured.`,
- );
+ console.log(`ℹ️ Collection ${collectionName} exists.`);
}
const safeAddField = async (
@@ -102,13 +111,32 @@ async function createCollectionAndFields() {
}
};
- await safeAddField("name", "string", { interface: "input" });
- await safeAddField("email", "string", { interface: "input" });
- await safeAddField("company", "string", { interface: "input" });
- await safeAddField("message", "text", { interface: "textarea" });
+ await safeAddField("name", "string", {
+ interface: "input",
+ display: "raw",
+ width: "half",
+ });
+ await safeAddField("email", "string", {
+ interface: "input",
+ display: "raw",
+ width: "half",
+ });
+ await safeAddField("company", "string", {
+ interface: "input",
+ display: "raw",
+ width: "half",
+ });
+ await safeAddField("message", "text", {
+ interface: "textarea",
+ display: "raw",
+ width: "full",
+ });
await safeAddField("date_created", "timestamp", {
interface: "datetime",
special: ["date-created"],
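+ // display as a relative timestamp (e.g. "5 minutes ago") in the admin app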
+ display: "datetime",
+ display_options: { relative: true },
+ width: "half",
});
}
diff --git a/scripts/sync-directus.sh b/scripts/sync-directus.sh
index c4ea1bc..2ee4d26 100755
--- a/scripts/sync-directus.sh
+++ b/scripts/sync-directus.sh
@@ -1,15 +1,13 @@
#!/bin/bash
-# Mintel Directus Sync Engine
-# Synchronizes Directus Data (Postgres + Uploads) between Local and Remote
-
+# Configuration
REMOTE_HOST="${SSH_HOST:-root@alpha.mintel.me}"
ACTION=$1
ENV=$2
# Help
if [ -z "$ACTION" ] || [ -z "$ENV" ]; then
- echo "Usage: mintel-sync [push|pull] [testing|staging|production]"
+ echo "Usage: ./scripts/sync-directus.sh [push|pull] [testing|staging|production]"
echo ""
echo "Commands:"
echo " push Sync LOCAL data -> REMOTE"
@@ -20,7 +18,10 @@ if [ -z "$ACTION" ] || [ -z "$ENV" ]; then
exit 1
fi
-PRJ_ID=$(jq -r .name package.json | sed 's/@mintel\///')
+# Project Configuration (extracted from package.json and aligned with deploy.yml)
+PRJ_ID=$(jq -r .name package.json | sed 's/@mintel\///' | sed 's/\.com$//')
+REMOTE_DIR="/home/deploy/sites/${PRJ_ID}.com"
+
case $ENV in
testing) PROJECT_NAME="${PRJ_ID}-testing"; ENV_FILE=".env.testing" ;;
staging) PROJECT_NAME="${PRJ_ID}-staging"; ENV_FILE=".env.staging" ;;
@@ -28,41 +29,92 @@ case $ENV in
*) echo "❌ Invalid environment: $ENV"; exit 1 ;;
esac
-REMOTE_DIR="/home/deploy/sites/${PRJ_ID}.com"
-
-# DB Details
+# DB Details (matching docker-compose defaults)
DB_USER="directus"
DB_NAME="directus"
echo "🔍 Detecting local database..."
LOCAL_DB_CONTAINER=$(docker compose ps -q directus-db)
if [ -z "$LOCAL_DB_CONTAINER" ]; then
- echo "❌ Local directus-db container not found. Running?"
+ echo "❌ Local directus-db container not found. Is it running? (npm run dev)"
exit 1
fi
if [ "$ACTION" == "push" ]; then
echo "🚀 Pushing LOCAL -> $ENV ($PROJECT_NAME)..."
+
+ # 1. DB Dump
+ echo "📦 Dumping local database..."
docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$DB_USER" --clean --if-exists --no-owner --no-privileges "$DB_NAME" > dump.sql
+
+ # 2. Upload Dump
+ echo "📤 Uploading dump to remote server..."
scp dump.sql "$REMOTE_HOST:$REMOTE_DIR/dump.sql"
+ # 3. Restore on Remote
+ echo "🔄 Restoring dump on $ENV..."
REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-db")
+
+ if [ -z "$REMOTE_DB_CONTAINER" ]; then
+ echo "❌ Remote $ENV-db container not found!"
+ exit 1
+ fi
+
+ # Wipe remote DB clean before restore to avoid constraint errors
+ echo "🧹 Wiping remote database schema..."
+ ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'"
+
+ echo "⚡ Restoring database..."
ssh "$REMOTE_HOST" "docker exec -i $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME < $REMOTE_DIR/dump.sql"
+ # 4. Sync Uploads
+ echo "🔄 Syncing uploads (Local -> $ENV)..."
rsync -avz --progress ./directus/uploads/ "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/"
+
+ # Clean up
rm dump.sql
ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"
- echo "✨ Push complete!"
+
+ # 5. Restart Directus to trigger migrations and refresh schema cache
+ echo "🔄 Restarting remote Directus to apply migrations..."
+ ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME restart directus"
+
+ echo "✨ Push to $ENV complete!"
elif [ "$ACTION" == "pull" ]; then
- echo "📥 Pulling $ENV -> LOCAL..."
+ echo "📥 Pulling $ENV Data -> LOCAL..."
+
+ # 1. DB Dump on Remote
+ echo "📦 Dumping remote database ($ENV)..."
REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-db")
+
+ if [ -z "$REMOTE_DB_CONTAINER" ]; then
+ echo "❌ Remote $ENV-db container not found!"
+ exit 1
+ fi
+
ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $DB_USER --clean --if-exists --no-owner --no-privileges $DB_NAME > $REMOTE_DIR/dump.sql"
+
+ # 2. Download Dump
+ echo "📥 Downloading dump..."
scp "$REMOTE_HOST:$REMOTE_DIR/dump.sql" dump.sql
+ # 3. Restore Locally
+ # Wipe local DB clean before restore to avoid constraint errors
+ echo "🧹 Wiping local database schema..."
+ docker exec "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'
+
+ echo "⚡ Restoring database locally..."
docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" < dump.sql
+
+ # 4. Sync Uploads
+ echo "🔄 Syncing uploads ($ENV -> Local)..."
rsync -avz --progress "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/" ./directus/uploads/
+
+ # Clean up
rm dump.sql
ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"
- echo "✨ Pull complete!"
+
+ echo "✨ Pull to Local complete!"
fi
+