feat: payload cms
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 8s
Build & Deploy / 🧪 QA (push) Failing after 1m13s
Build & Deploy / 🏗️ Build (push) Failing after 5m53s
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🧪 Smoke Test (push) Has been skipped
Build & Deploy / ⚡ Lighthouse (push) Has been skipped
Build & Deploy / ♿ WCAG (push) Has been skipped
Build & Deploy / 🛡️ Quality Gates (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 4s

This commit is contained in:
2026-02-24 02:28:48 +01:00
parent 41cfe19cbf
commit a5d77fc69b
89 changed files with 25282 additions and 1903 deletions

View File

@@ -1,65 +0,0 @@
#!/bin/bash
# cms-apply.sh — apply the Directus schema snapshot
# (./directus/schema/snapshot.yaml) to a target environment's Directus container.
#
# Usage: ./scripts/cms-apply.sh [local|testing|staging|production]
#   local                      - exec into the local docker compose stack
#   testing|staging|production - scp the snapshot to the remote host, then apply over ssh
ENV=$1
REMOTE_HOST="root@alpha.mintel.me"
REMOTE_DIR="/home/deploy/sites/klz-cables.com"
if [ -z "$ENV" ]; then
echo "Usage: ./scripts/cms-apply.sh [local|testing|staging|production]"
exit 1
fi
# Derive the compose project prefix from package.json "name", stripping the npm
# scope and common suffixes (e.g. "@mintel/klz-cables.com" -> "klz").
PRJ_ID=$(jq -r .name package.json | sed 's/@mintel\///' | sed 's/\.com$//' | sed 's/-nextjs$//' | sed 's/-cables$//')
case $ENV in
local)
# Local: the Directus service is named "klz-cms" in docker-compose.
CONTAINER=$(docker compose ps -q klz-cms)
if [ -z "$CONTAINER" ]; then
echo "❌ Local directus container not found."
exit 1
fi
echo "🚀 Applying schema locally..."
docker exec "$CONTAINER" npx directus schema apply /directus/schema/snapshot.yaml --yes
;;
testing|staging|production)
# Remote: the compose -p project name is derived per environment.
case $ENV in
testing) PROJECT_NAME="${PRJ_ID}-testing" ;;
staging) PROJECT_NAME="${PRJ_ID}-staging" ;;
production)
PROJECT_NAME="${PRJ_ID}-production"
OLD_PROJECT_NAME="${PRJ_ID}-prod" # Fallback for previous convention
;;
esac
echo "📤 Uploading snapshot to $ENV..."
scp ./directus/schema/snapshot.yaml "$REMOTE_HOST:$REMOTE_DIR/directus/schema/snapshot.yaml"
echo "🔍 Detecting remote container..."
REMOTE_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus")
# Production only: retry with the legacy "-prod" project name
# ($OLD_PROJECT_NAME is unset for testing/staging, so this branch is skipped).
if [ -z "$REMOTE_CONTAINER" ] && [ -n "$OLD_PROJECT_NAME" ]; then
echo "⚠️ $PROJECT_NAME not found, trying fallback $OLD_PROJECT_NAME..."
REMOTE_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $OLD_PROJECT_NAME ps -q directus")
if [ -n "$REMOTE_CONTAINER" ]; then
PROJECT_NAME=$OLD_PROJECT_NAME
fi
fi
if [ -z "$REMOTE_CONTAINER" ]; then
echo "❌ Remote container for $ENV not found (checked $PROJECT_NAME)."
exit 1
fi
echo "🚀 Applying schema to $ENV..."
ssh "$REMOTE_HOST" "docker exec $REMOTE_CONTAINER npx directus schema apply /directus/schema/snapshot.yaml --yes"
# Restart so Directus drops its cached schema metadata and serves the new schema.
echo "🔄 Restarting Directus to clear cache..."
ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME restart directus"
;;
*)
echo "❌ Invalid environment."
exit 1
;;
esac
echo "✨ Schema apply complete!"

View File

@@ -1,15 +0,0 @@
#!/bin/bash
# Create a Directus schema snapshot from the running local container.
# The output lands on the mounted volume, so it is also visible on the host
# at ./directus/schema/snapshot.yaml.

# Locate the running local Directus container; bail out if the stack is down.
CONTAINER_ID=$(docker compose ps -q directus)
if [ -z "$CONTAINER_ID" ]; then
echo "❌ Local directus container not found. Is it running?"
exit 1
fi

echo "📸 Creating schema snapshot..."
# Snapshot path is the in-container mount point of ./directus/schema.
docker exec "$CONTAINER_ID" npx directus schema snapshot /directus/schema/snapshot.yaml
echo "✅ Snapshot saved to ./directus/schema/snapshot.yaml"

140
scripts/migrate-mdx.ts Normal file
View File

@@ -0,0 +1,140 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import { parseMarkdownToLexical } from '../src/payload/utils/lexicalParser';
/**
 * Resolve a legacy image path (e.g. "/images/foo.jpg") to a Payload Media id.
 *
 * Lookup order:
 *  1. Exact filename match in the `media` collection.
 *  2. If not found, auto-ingest the file from the legacy `public/` directory.
 *
 * @param payload - Initialized Payload instance (typed `any`: one-off script).
 * @param imagePath - Image path from MDX front matter; may be empty.
 * @returns The media document id, or null when the image cannot be resolved.
 */
async function mapImageToMediaId(payload: any, imagePath: string): Promise<string | null> {
  if (!imagePath) return null;
  const filename = path.basename(imagePath);
  // Exact filename match so "cable-black.jpg" never matches "cable.jpg".
  const media = await payload.find({
    collection: 'media',
    where: {
      filename: {
        equals: filename,
      },
    },
    limit: 1,
  });
  if (media.docs.length > 0) {
    return media.docs[0].id;
  }
  // Auto-ingest missing images from legacy public/ directory
  const cleanPath = imagePath.startsWith('/') ? imagePath.slice(1) : imagePath;
  const fullPath = path.join(process.cwd(), 'public', cleanPath);
  if (fs.existsSync(fullPath)) {
    try {
      // FIX: interpolation was broken ("$(unknown)" printed literally) — log the filename.
      console.log(`[Blog Migration] 📤 Ingesting missing Media into Payload: ${filename}`);
      const newMedia = await payload.create({
        collection: 'media',
        data: {
          // Derive a human-readable alt text from the filename.
          alt: filename.replace(/[-_]/g, ' ').replace(/\.[^/.]+$/, ''),
        },
        filePath: fullPath,
      });
      return newMedia.id;
    } catch (err: any) {
      console.error(`[Blog Migration] ❌ Failed to ingest ${filename}:`, err);
    }
  } else {
    console.warn(`[Blog Migration] ⚠️ Missing image entirely from disk: ${fullPath}`);
  }
  return null;
}
/**
 * Migrate MDX blog posts from `data/blog/<locale>/` into the Payload `posts`
 * collection. Front matter is parsed with gray-matter, the markdown body is
 * converted to a Lexical AST, and each post is upserted (matched by slug + locale).
 * Per-file failures are logged and skipped; the process exits when done.
 */
async function migrateBlogPosts() {
  const payload = await getPayload({ config: configPromise });
  const locales = ['en', 'de'];
  for (const locale of locales) {
    const postsDir = path.join(process.cwd(), 'data', 'blog', locale);
    if (!fs.existsSync(postsDir)) continue;
    const files = fs.readdirSync(postsDir);
    for (const file of files) {
      if (!file.endsWith('.mdx')) continue;
      const slug = file.replace(/\.mdx$/, '');
      const filePath = path.join(postsDir, file);
      const fileContent = fs.readFileSync(filePath, 'utf8');
      const { data, content } = matter(fileContent);
      console.log(`Migrating ${locale}/${slug}...`);
      // Wrap the parsed blocks in the Lexical root node shape Payload expects.
      const lexicalBlocks = parseMarkdownToLexical(content);
      const lexicalAST = {
        root: {
          type: 'root',
          format: '',
          indent: 0,
          version: 1,
          children: lexicalBlocks,
          direction: 'ltr',
        },
      };
      // Fall back to "now" when the front matter carries no date.
      const publishDate = data.date ? new Date(data.date).toISOString() : new Date().toISOString();
      // Only an explicit `public: false` yields a draft; absence means published.
      const status = data.public === false ? 'draft' : 'published';
      let featuredImageId: string | null = null;
      if (data.featuredImage || data.image) {
        featuredImageId = await mapImageToMediaId(payload, data.featuredImage || data.image);
      }
      try {
        // Find existing post (upsert key: slug + locale).
        const existing = await payload.find({
          collection: 'posts',
          where: { slug: { equals: slug }, locale: { equals: locale } as any },
        });
        if (existing.docs.length > 0) {
          // Existing post: re-inject only content, status and featured image.
          await payload.update({
            collection: 'posts',
            id: existing.docs[0].id,
            data: {
              content: lexicalAST as any,
              _status: status as any,
              ...(featuredImageId ? { featuredImage: featuredImageId } : {}),
            },
          });
          console.log(`✅ AST Components & Image RE-INJECTED for ${slug}`);
        } else {
          await payload.create({
            collection: 'posts',
            data: {
              title: data.title,
              slug: slug,
              locale: locale,
              date: publishDate,
              category: data.category || '',
              excerpt: data.excerpt || '',
              content: lexicalAST as any,
              _status: status as any,
              ...(featuredImageId ? { featuredImage: featuredImageId } : {}),
            },
          });
          console.log(`✅ Created ${slug}`);
        }
      } catch (err: any) {
        console.error(`❌ Failed ${slug}`, err.message);
      }
    }
  }
  console.log('Migration completed.');
  process.exit(0);
}
migrateBlogPosts().catch(console.error);

135
scripts/migrate-pages.ts Normal file
View File

@@ -0,0 +1,135 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import { parseMarkdownToLexical } from '../src/payload/utils/lexicalParser';
/**
 * Resolve a legacy image path (e.g. "/images/foo.jpg") to a Payload Media id.
 *
 * Lookup order:
 *  1. Exact filename match in the `media` collection.
 *  2. If not found, auto-ingest the file from the legacy `public/` directory.
 *
 * @param payload - Initialized Payload instance (typed `any`: one-off script).
 * @param imagePath - Image path from MDX front matter; may be empty.
 * @returns The media document id, or null when the image cannot be resolved.
 */
async function mapImageToMediaId(payload: any, imagePath: string): Promise<string | null> {
  if (!imagePath) return null;
  const filename = path.basename(imagePath);
  // Exact filename match so "cable-black.jpg" never matches "cable.jpg".
  const media = await payload.find({
    collection: 'media',
    where: {
      filename: {
        equals: filename,
      },
    },
    limit: 1,
  });
  if (media.docs.length > 0) {
    return media.docs[0].id;
  }
  // Auto-ingest missing images from legacy public/ directory
  const cleanPath = imagePath.startsWith('/') ? imagePath.slice(1) : imagePath;
  const fullPath = path.join(process.cwd(), 'public', cleanPath);
  if (fs.existsSync(fullPath)) {
    try {
      // FIX: interpolation was broken ("$(unknown)") and the prefix said "Blog"
      // although this is the pages migration — copy-paste from migrate-mdx.ts.
      console.log(`[Pages Migration] 📤 Ingesting missing Media into Payload: ${filename}`);
      const newMedia = await payload.create({
        collection: 'media',
        data: {
          // Derive a human-readable alt text from the filename.
          alt: filename.replace(/[-_]/g, ' ').replace(/\.[^/.]+$/, ''),
        },
        filePath: fullPath,
      });
      return newMedia.id;
    } catch (err: any) {
      console.error(`[Pages Migration] ❌ Failed to ingest ${filename}:`, err);
    }
  } else {
    console.warn(`[Pages Migration] ⚠️ Missing image entirely from disk: ${fullPath}`);
  }
  return null;
}
/**
 * Migrate MDX pages from `data/pages/<locale>/` into the Payload `pages`
 * collection. Front matter is parsed with gray-matter, the markdown body is
 * converted to a Lexical AST, and each page is upserted (matched by slug + locale).
 * Per-file failures are logged and skipped; the process exits when done.
 *
 * NOTE(review): unlike migrate-mdx.ts, this script never writes `_status`, so
 * the `public: false` front-matter flag is ignored for pages — confirm intended.
 */
async function migrateBlogPages() {
  const payload = await getPayload({ config: configPromise });
  const locales = ['en', 'de'];
  for (const locale of locales) {
    const pagesDir = path.join(process.cwd(), 'data', 'pages', locale);
    if (!fs.existsSync(pagesDir)) continue;
    const files = fs.readdirSync(pagesDir);
    for (const file of files) {
      if (!file.endsWith('.mdx')) continue;
      const slug = file.replace(/\.mdx$/, '');
      const filePath = path.join(pagesDir, file);
      const fileContent = fs.readFileSync(filePath, 'utf8');
      const { data, content } = matter(fileContent);
      console.log(`Migrating ${locale}/${slug}...`);
      // Wrap the parsed blocks in the Lexical root node shape Payload expects.
      const lexicalBlocks = parseMarkdownToLexical(content);
      const lexicalAST = {
        root: {
          type: 'root',
          format: '',
          indent: 0,
          version: 1,
          children: lexicalBlocks,
          direction: 'ltr',
        },
      };
      let featuredImageId: string | null = null;
      if (data.featuredImage || data.image) {
        featuredImageId = await mapImageToMediaId(payload, data.featuredImage || data.image);
      }
      try {
        // Find existing page (upsert key: slug + locale).
        const existing = await payload.find({
          collection: 'pages',
          where: { slug: { equals: slug }, locale: { equals: locale } as any },
        });
        if (existing.docs.length > 0) {
          // Existing page: re-inject only content and featured image.
          await payload.update({
            collection: 'pages',
            id: existing.docs[0].id,
            data: {
              content: lexicalAST as any,
              ...(featuredImageId ? { featuredImage: featuredImageId } : {}),
            },
          });
          console.log(`✅ AST Components & Image RE-INJECTED for ${slug}`);
        } else {
          await payload.create({
            collection: 'pages',
            data: {
              title: data.title,
              slug: slug,
              locale: locale,
              excerpt: data.excerpt || '',
              content: lexicalAST as any,
              ...(featuredImageId ? { featuredImage: featuredImageId } : {}),
            },
          });
          console.log(`✅ Created ${slug}`);
        }
      } catch (err: any) {
        console.error(`❌ Failed ${slug}`, err.message);
      }
    }
  }
  console.log('Migration completed.');
  process.exit(0);
}
migrateBlogPages().catch(console.error);

153
scripts/migrate-products.ts Normal file
View File

@@ -0,0 +1,153 @@
import { getPayload } from 'payload';
import configPromise from '../payload.config';
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import { parseMarkdownToLexical } from '../src/payload/utils/lexicalParser';
/**
 * Resolve a legacy image path (e.g. "/images/foo.jpg") to a Payload Media id.
 *
 * Lookup order:
 *  1. Exact filename match in the `media` collection.
 *  2. If not found, auto-ingest the file from the legacy `public/` directory.
 *
 * @param payload - Initialized Payload instance (typed `any`: one-off script).
 * @param imagePath - Image path from MDX front matter; may be empty.
 * @returns The media document id, or null when the image cannot be resolved.
 */
async function mapImageToMediaId(payload: any, imagePath: string): Promise<string | null> {
  if (!imagePath) return null;
  const filename = path.basename(imagePath);
  // Exact match instead of substring to avoid matching "cable-black.jpg" with "cable.jpg"
  const media = await payload.find({
    collection: 'media',
    where: {
      filename: {
        equals: filename,
      },
    },
    limit: 1,
  });
  if (media.docs.length > 0) {
    return media.docs[0].id;
  }
  // Auto-ingest missing images from the legacy public/ directory.
  const cleanPath = imagePath.startsWith('/') ? imagePath.slice(1) : imagePath;
  const fullPath = path.join(process.cwd(), 'public', cleanPath);
  if (fs.existsSync(fullPath)) {
    try {
      // FIX: interpolation was broken ("$(unknown)" printed literally) — log the filename.
      console.log(`[Products Migration] 📤 Ingesting missing Media into Payload: ${filename}`);
      const newMedia = await payload.create({
        collection: 'media',
        data: {
          // Derive a human-readable alt text from the filename.
          alt: filename.replace(/[-_]/g, ' ').replace(/\.[^/.]+$/, ''),
        },
        filePath: fullPath,
      });
      return newMedia.id;
    } catch (err: any) {
      console.error(`[Products Migration] ❌ Failed to ingest ${filename}:`, err);
    }
  } else {
    console.warn(`[Products Migration] ⚠️ Missing image entirely from disk: ${fullPath}`);
  }
  return null;
}
/**
 * Migrate MDX product files from `data/products/<locale>/` (searched recursively,
 * since products may be nested in category folders) into the Payload `products`
 * collection. Each product is upserted, matched by slug + locale; the slug and
 * the SKU fallback are both derived from the filename. Exits the process when done.
 */
export async function migrateProducts() {
  const payload = await getPayload({ config: configPromise });
  const productLocales = ['en', 'de'];
  for (const locale of productLocales) {
    const productsDir = path.join(process.cwd(), 'data', 'products', locale);
    if (!fs.existsSync(productsDir)) continue;
    // Recursively collect every .mdx file under the locale directory.
    const mdFiles: string[] = [];
    const walk = (dir: string) => {
      const files = fs.readdirSync(dir);
      for (const file of files) {
        const fullPath = path.join(dir, file);
        const stat = fs.statSync(fullPath);
        if (stat.isDirectory()) {
          walk(fullPath);
        } else if (file.endsWith('.mdx')) {
          mdFiles.push(fullPath);
        }
      }
    };
    walk(productsDir);
    for (const filePath of mdFiles) {
      const fileContent = fs.readFileSync(filePath, 'utf8');
      const { data, content } = matter(fileContent);
      console.log(`Processing Product: [${locale.toUpperCase()}] ${data.title}`);
      // 1. Resolve front-matter image paths to Media ids (first one becomes featured).
      const mediaIds: string[] = [];
      if (data.images && Array.isArray(data.images)) {
        for (const imgPath of data.images) {
          const id = await mapImageToMediaId(payload, imgPath);
          if (id) mediaIds.push(id);
        }
      }
      // 2. Map Lexical AST for deeply nested components (like ProductTabs + Technical data)
      const lexicalContent = parseMarkdownToLexical(content);
      // Wrap parsed blocks in the Lexical root node shape Payload expects.
      const wrapLexical = (blocks: any[]) => ({
        root: {
          type: 'root',
          format: '',
          indent: 0,
          version: 1,
          children: blocks,
          direction: 'ltr',
        },
      });
      // Payload expects category objects via the 'category' key
      const formattedCategories = Array.isArray(data.categories)
        ? data.categories.map((c: string) => ({ category: c }))
        : [];
      const productData = {
        title: data.title,
        sku: data.sku || path.basename(filePath, '.mdx'),
        slug: path.basename(filePath, '.mdx'),
        locale: locale as 'en' | 'de',
        categories: formattedCategories,
        description: data.description || '',
        featuredImage: mediaIds.length > 0 ? mediaIds[0] : undefined,
        images: mediaIds.length > 0 ? mediaIds : undefined,
        content: wrapLexical(lexicalContent) as any,
        application: data.application
          ? (wrapLexical(parseMarkdownToLexical(data.application)) as any)
          : undefined,
        _status: 'published' as any,
      };
      // Upsert check: match by slug + locale (slug is the filename, which may
      // differ per language, so each locale keeps its own document).
      const existing = await payload.find({
        collection: 'products',
        where: {
          and: [{ slug: { equals: productData.slug } }, { locale: { equals: locale } }],
        },
      });
      if (existing.docs.length > 0) {
        console.log(`Updating existing product ${productData.slug} (${locale})`);
        await payload.update({
          collection: 'products',
          id: existing.docs[0].id,
          data: productData,
        });
      } else {
        console.log(`Creating new product ${productData.slug} (${locale})`);
        await payload.create({
          collection: 'products',
          data: productData,
        });
      }
    }
  }
  console.log(`\n✅ Products Migration Complete!`);
  process.exit(0);
}
migrateProducts().catch(console.error);

View File

@@ -1,217 +0,0 @@
import client, { ensureAuthenticated } from '../lib/directus';
import { updateSettings, uploadFiles } from '@directus/sdk';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Helper for ESM __dirname: import.meta.url -> absolute path of this module,
// since __filename/__dirname are not provided in ES modules.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
/**
 * Apply custom branding to the Directus admin app: uploads logos, favicon and a
 * generated login-background SVG, then updates the global Directus settings
 * (project info, custom CSS, light/dark theme overrides).
 *
 * Side effects: uploads files to Directus, writes and deletes a temporary SVG
 * in ./public, and overwrites existing Directus settings.
 * Errors are logged and swallowed (best-effort script), not rethrown.
 */
async function setupBranding() {
console.log('🎨 Refining Directus Branding for Premium Website Look...');
// 1. Authenticate
await ensureAuthenticated();
try {
// 2. Upload Assets (MIME FIXED)
console.log('📤 Re-uploading assets for clean IDs...');
// Map a file extension to its MIME type so Directus stores/serves the asset
// correctly (SVG in particular breaks without an explicit type).
const getMimeType = (filePath: string) => {
const ext = path.extname(filePath).toLowerCase();
switch (ext) {
case '.svg':
return 'image/svg+xml';
case '.png':
return 'image/png';
case '.jpg':
case '.jpeg':
return 'image/jpeg';
case '.ico':
return 'image/x-icon';
default:
return 'application/octet-stream';
}
};
// Upload one local file to the Directus files endpoint; returns the new file
// id, or null when the local file is missing (logged as a warning).
const uploadAsset = async (filePath: string, title: string) => {
if (!fs.existsSync(filePath)) {
console.warn(`⚠️ File not found: ${filePath}`);
return null;
}
const mimeType = getMimeType(filePath);
const form = new FormData();
const fileBuffer = fs.readFileSync(filePath);
const blob = new Blob([fileBuffer], { type: mimeType });
form.append('file', blob, path.basename(filePath));
form.append('title', title);
const res = await client.request(uploadFiles(form));
return res.id;
};
const logoWhiteId = await uploadAsset(
path.resolve(__dirname, '../public/logo-white.svg'),
'Logo White',
);
const logoBlueId = await uploadAsset(
path.resolve(__dirname, '../public/logo-blue.svg'),
'Logo Blue',
);
const faviconId = await uploadAsset(
path.resolve(__dirname, '../public/favicon.ico'),
'Favicon',
);
// Smoother Background SVG — generated on the fly, uploaded, then deleted below.
const bgSvgPath = path.resolve(__dirname, '../public/login-bg.svg');
fs.writeFileSync(
bgSvgPath,
`<svg width="1920" height="1080" viewBox="0 0 1920 1080" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="1920" height="1080" fill="#001a4d"/>
<ellipse cx="960" cy="540" rx="800" ry="600" fill="url(#paint0_radial_premium)"/>
<defs>
<radialGradient id="paint0_radial_premium" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(960 540) rotate(90) scale(600 800)">
<stop stop-color="#003d82" stop-opacity="0.8"/>
<stop offset="1" stop-color="#001a4d" stop-opacity="0"/>
</radialGradient>
</defs>
</svg>`,
);
const backgroundId = await uploadAsset(bgSvgPath, 'Login Bg');
// Remove the temporary SVG now that Directus holds a copy.
if (fs.existsSync(bgSvgPath)) fs.unlinkSync(bgSvgPath);
// 3. Update Settings with "Premium Web" Theme
console.log('⚙️ Updating Directus settings...');
const COLOR_PRIMARY = '#001a4d'; // Deep Blue
const COLOR_ACCENT = '#82ed20'; // Sustainability Green
const COLOR_SECONDARY = '#003d82';
// Custom CSS injected into the admin app (login card styling + headline text
// injected via ::before/::after so public_note can stay empty).
const customCss = `
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap');
/* Global Login Styles */
body, .v-app {
font-family: 'Inter', sans-serif !important;
-webkit-font-smoothing: antialiased;
}
/* Glassmorphism Effect for Login Card */
.public-view .v-card {
background: rgba(255, 255, 255, 0.95) !important;
backdrop-filter: blur(20px);
border: 1px solid rgba(255, 255, 255, 0.3) !important;
border-radius: 32px !important;
box-shadow: 0 50px 100px -20px rgba(0, 0, 0, 0.4) !important;
padding: 40px !important;
}
.public-view .v-button {
border-radius: 9999px !important;
height: 56px !important;
font-weight: 600 !important;
letter-spacing: -0.01em !important;
transition: all 0.4s cubic-bezier(0.16, 1, 0.3, 1) !important;
}
.public-view .v-button:hover {
transform: translateY(-2px);
box-shadow: 0 15px 30px rgba(130, 237, 32, 0.2) !important;
}
.public-view .v-input {
--v-input-border-radius: 12px !important;
--v-input-background-color: #f8f9fa !important;
}
/* Inject Headline via CSS to avoid raw HTML display in public_note */
.public-view .form::before {
content: 'Sustainable Energy. Industrial Reliability.';
display: block;
text-align: center;
font-size: 18px;
font-weight: 700;
color: #ffffff;
margin-bottom: 8px;
}
.public-view .form::after {
content: 'KLZ INFRASTRUCTURE ENGINE';
display: block;
text-align: center;
font-size: 11px;
font-weight: 600;
letter-spacing: 0.1em;
color: rgba(255, 255, 255, 0.5);
margin-top: 24px;
}
`;
const publicNote = '';
await client.request(
updateSettings({
project_name: 'KLZ Cables',
project_url: 'https://klz-cables.com',
project_color: COLOR_ACCENT,
project_descriptor: 'Sustainable Energy Infrastructure',
// FIXED: Use WHITE logo for the Blue Sidebar
project_logo: logoWhiteId as any,
public_foreground: logoWhiteId as any,
public_background: backgroundId as any,
public_note: publicNote,
public_favicon: faviconId as any,
custom_css: customCss,
// DEEP PREMIUM THEME
theme_light_overrides: {
// Brands
primary: COLOR_ACCENT, // Buttons/Actions are GREEN like the website
secondary: COLOR_SECONDARY,
// Content Area
background: '#f1f3f7',
backgroundNormal: '#ffffff',
backgroundAccent: '#eef2ff',
// Sidebar Branding
navigationBackground: COLOR_PRIMARY,
navigationForeground: '#ffffff',
navigationBackgroundHover: 'rgba(255,255,255,0.05)',
navigationForegroundHover: '#ffffff',
navigationBackgroundActive: 'rgba(130, 237, 32, 0.15)', // Subtle Green highlight
navigationForegroundActive: COLOR_ACCENT, // Active item is GREEN
// Module Bar (Thin far left)
moduleBarBackground: '#000d26',
moduleBarForeground: '#ffffff',
moduleBarForegroundActive: COLOR_ACCENT,
// UI Standards
borderRadius: '16px', // Larger radius for modern feel
borderWidth: '1px',
borderColor: '#e2e8f0',
formFieldHeight: '48px', // Touch-target height
} as any,
theme_dark_overrides: {
primary: COLOR_ACCENT,
background: '#0a0a0a',
navigationBackground: '#000000',
moduleBarBackground: COLOR_PRIMARY,
borderRadius: '16px',
formFieldHeight: '48px',
} as any,
}),
);
console.log('✨ Premium Theme applied successfully!');
} catch (error: any) {
console.error('❌ Error:', JSON.stringify(error, null, 2));
}
}
// Fire-and-forget entry point; errors are handled (logged) inside setupBranding.
// NOTE(review): logoBlueId is uploaded but never referenced in the settings —
// confirm whether the blue logo upload is still needed.
setupBranding();

View File

@@ -1,132 +0,0 @@
#!/bin/bash
# sync-directus.sh — copy the Directus database and uploads between the local
# dev stack and a remote environment, in either direction.
#
# Usage: ./scripts/sync-directus.sh [push|pull] [testing|staging|production]

# Configuration
REMOTE_HOST="root@alpha.mintel.me"
REMOTE_DIR="/home/deploy/sites/klz-cables.com"
# DB Details (matching docker-compose defaults)
LOCAL_DB_USER="klz_db_user"
REMOTE_DB_USER="directus"
DB_NAME="directus"
ACTION=$1
ENV=$2
# Help
if [ -z "$ACTION" ] || [ -z "$ENV" ]; then
echo "Usage: ./scripts/sync-directus.sh [push|pull] [testing|staging|production]"
echo ""
echo "Commands:"
echo " push Sync LOCAL data -> REMOTE"
echo " pull Sync REMOTE data -> LOCAL"
echo ""
echo "Environments:"
echo " testing, staging, production"
exit 1
fi
# Map Environment to Project Name
# NOTE(review): all three environments currently map to the SAME compose project
# name, so they share one database — confirm this is intentional.
case $ENV in
production|staging|testing)
PROJECT_NAME="klz-cablescom"
ENV_FILE=".env"
;;
*)
echo "❌ Invalid environment: $ENV. Use testing, staging, or production."
exit 1
;;
esac
# Detect local container
echo "🔍 Detecting local database..."
LOCAL_DB_CONTAINER=$(docker compose ps -q klz-db)
if [ -z "$LOCAL_DB_CONTAINER" ]; then
echo "❌ Local klz-db container not found. Is it running? (npm run dev)"
exit 1
fi
if [ "$ACTION" == "push" ]; then
echo "🚀 Pushing Local Data to $ENV ($PROJECT_NAME)..."
# 1. DB Dump
echo "📦 Dumping local database..."
# --no-owner --no-privileges ensures the restore works on remote with a different user setup.
# FIX: was -U "$DB_USER", which is undefined in this script; the local role is $LOCAL_DB_USER.
docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$LOCAL_DB_USER" --clean --if-exists --no-owner --no-privileges "$DB_NAME" > dump.sql
# 2. Upload Dump
echo "📤 Uploading dump to remote server..."
scp dump.sql "$REMOTE_HOST:$REMOTE_DIR/dump.sql"
# 3. Restore on Remote
echo "🔄 Restoring dump on $ENV..."
# NOTE(review): push looks for service "directus-db" while pull uses "klz-db";
# one of these is likely stale — confirm against the remote compose file.
REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-db")
if [ -z "$REMOTE_DB_CONTAINER" ]; then
echo "❌ Remote $ENV-db container not found!"
exit 1
fi
# Wipe remote DB clean before restore to avoid constraint errors
echo "🧹 Wiping remote database schema..."
ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER psql -U $REMOTE_DB_USER $DB_NAME -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'"
echo "⚡ Restoring database..."
ssh "$REMOTE_HOST" "docker exec -i $REMOTE_DB_CONTAINER psql -U $REMOTE_DB_USER $DB_NAME < $REMOTE_DIR/dump.sql"
# 4. Sync Uploads
echo "📁 Syncing uploads (Local -> $ENV)..."
# Environments currently share the same host uploads directory, so this rsync
# updates the shared folder for all of them.
rsync -avz --progress ./directus/uploads/ "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/"
# Clean up
rm dump.sql
ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"
# 5. Restart Directus to trigger migrations and refresh schema cache
echo "🔄 Restarting remote Directus to apply migrations..."
ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME restart directus"
echo "✨ Push to $ENV complete!"
elif [ "$ACTION" == "pull" ]; then
echo "📥 Pulling $ENV Data to Local..."
# 1. Dump remote DB (the remote service name is 'klz-db' per docker compose config)
REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q klz-db")
if [ -z "$REMOTE_DB_CONTAINER" ]; then
echo "❌ Remote $ENV-db container not found!"
exit 1
fi
ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $REMOTE_DB_USER --clean --if-exists --no-owner --no-privileges $DB_NAME > $REMOTE_DIR/dump.sql"
# 2. Download Dump
echo "📥 Downloading dump..."
scp "$REMOTE_HOST:$REMOTE_DIR/dump.sql" dump.sql
# 3. Wipe local DB clean before restore to avoid constraint errors
echo "🧹 Wiping local database schema..."
docker exec "$LOCAL_DB_CONTAINER" psql -U "$LOCAL_DB_USER" "$DB_NAME" -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'
echo "⚡ Restoring database locally..."
docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$LOCAL_DB_USER" "$DB_NAME" < dump.sql
# 4. Sync Uploads
echo "📁 Syncing uploads ($ENV -> Local)..."
rsync -avz --progress "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/" ./directus/uploads/
# Clean up
rm dump.sql
ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"
echo "✨ Pull to Local complete!"
else
echo "Invalid action: $ACTION. Use push or pull."
exit 1
fi