feat: migration von directus zu payloadcms
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 1m19s
Build & Deploy / 🧪 QA (push) Failing after 3m32s
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🏗️ Build (push) Failing after 7m51s
Build & Deploy / ⚡ Lighthouse (push) Has been skipped
Build & Deploy / 🧪 Smoke Test (push) Has been skipped
Build & Deploy / ♿ WCAG (push) Has been skipped
Build & Deploy / 🛡️ Quality Gates (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 10s

This commit is contained in:
2026-02-24 19:25:43 +01:00
parent 2bac8d6e8a
commit f7aa880d9f
91 changed files with 1010 additions and 1028 deletions

View File

@@ -15,19 +15,19 @@ export NEXT_PUBLIC_CI=true
export CI=true
docker network create infra 2>/dev/null || true
docker volume create klz-cablescom_directus-db-data 2>/dev/null || true
docker volume create klz_db_data 2>/dev/null || true
# 2. Start infra services (DB, CMS, Gatekeeper)
echo "📦 Starting infrastructure services..."
# Using --remove-orphans to ensure a clean state
docker-compose up -d --remove-orphans klz-db klz-cms klz-gatekeeper
docker-compose up -d --remove-orphans klz-db klz-gatekeeper
# 3. Build and Start klz-app and klz-imgproxy in Production Mode
# 3. Build and Start klz-app in Production Mode
echo "🏗️ Building and starting klz-app (Production)..."
# We bypass the dev override by explicitly using the base compose file
NEXT_PUBLIC_BASE_URL=$NEXT_URL \
NEXT_PUBLIC_CI=true \
docker-compose -f docker-compose.yml up -d --build klz-app klz-imgproxy
docker-compose -f docker-compose.yml up -d --build klz-app
# 4. Wait for application to be ready
echo "⏳ Waiting for application to be healthy..."

47
scripts/backup-db.sh Executable file
View File

@@ -0,0 +1,47 @@
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# Payload CMS Database Backup
# Creates a timestamped pg_dump of the Payload Postgres database.
# Usage: npm run backup:db
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail
# Load environment variables (set -a exports them so docker/pg tools see them)
if [ -f .env ]; then
set -a; source .env; set +a
fi
DB_NAME="${PAYLOAD_DB_NAME:-payload}"
DB_USER="${PAYLOAD_DB_USER:-payload}"
DB_CONTAINER="klz-2026-klz-db-1"
BACKUP_DIR="./backups"
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
BACKUP_FILE="${BACKUP_DIR}/payload_${TIMESTAMP}.sql.gz"
# Ensure backup directory exists
mkdir -p "$BACKUP_DIR"
# Check if container is running.
# -x forces a whole-line match so a similarly named container
# (e.g. "klz-2026-klz-db-10") is not mistaken for ours.
if ! docker ps --format '{{.Names}}' | grep -qx "$DB_CONTAINER"; then
echo "❌ Database container '$DB_CONTAINER' is not running."
echo " Start it with: docker compose up -d klz-db"
exit 1
fi
echo "📦 Backing up Payload database..."
echo " Container: $DB_CONTAINER"
echo " Database: $DB_NAME"
echo " Output: $BACKUP_FILE"
# Run pg_dump inside the container and compress.
# pipefail guarantees a pg_dump failure fails the script even though gzip succeeds.
docker exec "$DB_CONTAINER" pg_dump -U "$DB_USER" -d "$DB_NAME" --clean --if-exists | gzip > "$BACKUP_FILE"
# Show result
SIZE=$(du -h "$BACKUP_FILE" | cut -f1)
echo ""
echo "✅ Backup complete: $BACKUP_FILE ($SIZE)"
echo ""
# Show existing backups.
# The trailing `|| true` is required: under `set -e -o pipefail`, `ls` exits
# non-zero when the glob matches nothing, which would otherwise make a fully
# successful backup run exit with an error status.
echo "📋 Available backups:"
ls -lh "$BACKUP_DIR"/*.sql.gz 2>/dev/null | awk '{print " " $NF " (" $5 ")"}' || true

View File

@@ -0,0 +1,180 @@
import puppeteer, { HTTPResponse } from 'puppeteer';
import axios from 'axios';
import * as cheerio from 'cheerio';
// Base URL to scan: CLI argument > NEXT_PUBLIC_BASE_URL env var > local dev default.
const targetUrl = process.argv[2] || process.env.NEXT_PUBLIC_BASE_URL || 'http://localhost:3000';
// Cookie value used to bypass the gatekeeper auth screen (dev fallback baked in).
const gatekeeperPassword = process.env.GATEKEEPER_PASSWORD || 'klz2026';
/**
 * Strict asset integrity check.
 *
 * Crawls every URL listed in the target site's sitemap with headless Chrome
 * and fails the process (exit code 1) when any page produces:
 *  - a broken asset: HTTP status >= 400 on an image/script/stylesheet/fetch/xhr, or
 *  - a console error/warning or an uncaught page exception.
 *
 * Flow: fetch sitemap → launch Puppeteer → register listeners → visit each
 * page (scrolling to the bottom to trigger lazy loads) → report and exit.
 */
async function main() {
  console.log(`\n🚀 Starting Strict Asset Integrity Check for: ${targetUrl}`);
  // 1. Fetch Sitemap to discover all routes
  const sitemapUrl = `${targetUrl.replace(/\/$/, '')}/sitemap.xml`;
  let urls: string[] = [];
  try {
    console.log(`📥 Fetching sitemap from ${sitemapUrl}...`);
    const response = await axios.get(sitemapUrl, {
      headers: { Cookie: `klz_gatekeeper_session=${gatekeeperPassword}` },
    });
    const $ = cheerio.load(response.data, { xmlMode: true });
    urls = $('url loc')
      .map((i, el) => $(el).text())
      .get();
    // Rewrite every URL onto the target origin FIRST, then dedupe.
    // Deduping before normalization would let the same path appear twice
    // when the sitemap mixes origins (e.g. canonical prod URL vs. CI URL).
    const urlPattern = /https?:\/\/[^\/]+/;
    urls = [
      ...new Set(
        urls
          .filter((u) => u.startsWith('http'))
          .map((u) => u.replace(urlPattern, targetUrl.replace(/\/$/, ''))),
      ),
    ].sort();
    console.log(`✅ Found ${urls.length} target URLs.`);
  } catch (err) {
    // Narrow the unknown catch variable instead of typing it `any`.
    const message = err instanceof Error ? err.message : String(err);
    console.error(`❌ Failed to fetch sitemap: ${message}`);
    process.exit(1);
  }
  // 2. Launch Headless Browser
  console.log(`\n🕷 Launching Puppeteer Headless Engine...`);
  const browser = await puppeteer.launch({
    headless: true,
    args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-dev-shm-usage'],
  });
  const page = await browser.newPage();
  // Inject Gatekeeper session bypassing auth screens
  await page.setCookie({
    name: 'klz_gatekeeper_session',
    value: gatekeeperPassword,
    domain: new URL(targetUrl).hostname,
    path: '/',
    httpOnly: true,
    secure: targetUrl.startsWith('https://'),
  });
  let hasBrokenAssets = false;
  let hasConsoleErrors = false;
  const brokenAssetsList: Array<{ url: string; status: number; page: string }> = [];
  const consoleErrorsList: Array<{ type: string; error: string; page: string }> = [];
  // Listen for unhandled exceptions natively in the page
  page.on('pageerror', (err) => {
    consoleErrorsList.push({
      type: 'PAGE_ERROR',
      error: err.message,
      page: page.url(),
    });
    hasConsoleErrors = true;
  });
  // Listen for console.error and console.warn messages (like Next.js Image warnings, hydration errors, CSP blocks)
  page.on('console', (msg) => {
    const type = msg.type();
    if (type === 'error' || type === 'warning') {
      const text = msg.text();
      // Exclude common browser extension noise or third party tracker warnings
      if (
        text.includes('google-analytics') ||
        text.includes('googletagmanager') ||
        text.includes('SES Removing unpermitted intrinsics') ||
        text.includes('Third-party cookie will be blocked') ||
        text.includes('Fast Refresh')
      )
        return;
      consoleErrorsList.push({
        type: type.toUpperCase(),
        error: text,
        page: page.url(),
      });
      hasConsoleErrors = true;
    }
  });
  // Listen to ALL network responses
  page.on('response', (response: HTTPResponse) => {
    const status = response.status();
    // Catch classic 404s and 500s on ANY fetch/image/script (999 = LinkedIn-style bot block, ignored)
    if (
      status >= 400 &&
      status !== 999 &&
      !response.url().includes('google-analytics') &&
      !response.url().includes('googletagmanager')
    ) {
      const type = response.request().resourceType();
      // We explicitly care about images, stylesheets, scripts, and fetch requests (API) getting 404/500s.
      if (['image', 'script', 'stylesheet', 'fetch', 'xhr'].includes(type)) {
        brokenAssetsList.push({
          url: response.url(),
          status: status,
          page: page.url(),
        });
        hasBrokenAssets = true;
      }
    }
  });
  // 3. Scan each page. The finally-block guarantees Chrome is released even
  // if an unexpected error escapes the loop (otherwise the process leaks a
  // headless browser in CI).
  try {
    for (let i = 0; i < urls.length; i++) {
      const u = urls[i];
      console.log(`[${i + 1}/${urls.length}] Scanning: ${u}`);
      try {
        // Wait until network is idle to ensure all Next.js hydration and image lazy-loads trigger
        await page.goto(u, { waitUntil: 'networkidle0', timeout: 30000 });
        // Force scroll to bottom to trigger any IntersectionObserver lazy-loaded images
        await page.evaluate(async () => {
          await new Promise<void>((resolve) => {
            let totalHeight = 0;
            const distance = 100;
            const timer = setInterval(() => {
              const scrollHeight = document.body.scrollHeight;
              window.scrollBy(0, distance);
              totalHeight += distance;
              if (totalHeight >= scrollHeight) {
                clearInterval(timer);
                resolve();
              }
            }, 100);
          });
        });
        // Wait a tiny bit more for final lazy loads
        await new Promise((r) => setTimeout(r, 1000));
      } catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        console.error(`⚠️ Timeout or navigation error on ${u}: ${message}`);
        // Don't fail the whole script just because one page timed out, but flag it.
        // NOTE: this flags hasBrokenAssets without a list entry, so the table
        // below may be empty even though the run still exits 1.
        hasBrokenAssets = true;
      }
    }
  } finally {
    await browser.close();
  }
  // 4. Report Results
  if (hasBrokenAssets && brokenAssetsList.length > 0) {
    console.error(`\n❌ FATAL: Broken assets (404/500) detected heavily on the site!`);
    console.table(brokenAssetsList);
  }
  if (hasConsoleErrors && consoleErrorsList.length > 0) {
    console.error(`\n❌ FATAL: Console Errors/Warnings detected on the site!`);
    console.table(consoleErrorsList);
  }
  if (hasBrokenAssets || hasConsoleErrors) {
    console.error(`\n🚨 The CI build will now fail to prevent bad code from reaching production.`);
    process.exit(1);
  } else {
    console.log(
      `\n🎉 SUCCESS: All ${urls.length} pages rendered perfectly with 0 broken images or console errors!`,
    );
    process.exit(0);
  }
}
main();

View File

@@ -1,5 +1,8 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
import * as dotenv from 'dotenv';
dotenv.config();
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
@@ -48,7 +51,16 @@ async function mapImageToMediaId(payload: any, imagePath: string): Promise<strin
}
async function migrateBlogPosts() {
const payload = await getPayload({ config: configPromise });
console.log('[Blog Migration] 🔍 Using POSTGRES_URI:', process.env.POSTGRES_URI || 'NOT SET');
console.log('[Blog Migration] 🔍 Using DATABASE_URI:', process.env.DATABASE_URI || 'NOT SET');
let payload;
try {
payload = await getPayload({ config: configPromise });
} catch (err: any) {
console.error('[Blog Migration] ❌ Failed to initialize Payload:', err);
process.exit(1);
}
const locales = ['en', 'de'];
for (const locale of locales) {

View File

@@ -1,5 +1,8 @@
import { getPayload } from 'payload';
import configPromise from '../payload.config';
import * as dotenv from 'dotenv';
dotenv.config();
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';

54
scripts/restore-db.sh Executable file
View File

@@ -0,0 +1,54 @@
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# Payload CMS Database Restore
# Restores a backup created by backup-db.sh
# Usage: npm run restore:db -- backups/payload_20260224_191900.sql.gz
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail
# Load environment variables (set -a exports them so docker/pg tools see them)
if [ -f .env ]; then
set -a; source .env; set +a
fi
DB_NAME="${PAYLOAD_DB_NAME:-payload}"
DB_USER="${PAYLOAD_DB_USER:-payload}"
DB_CONTAINER="klz-2026-klz-db-1"
BACKUP_FILE="${1:-}"
# Require an explicit backup file argument; list candidates otherwise.
if [ -z "$BACKUP_FILE" ]; then
echo "❌ Usage: npm run restore:db -- <backup-file>"
echo ""
echo "📋 Available backups:"
ls -lh backups/*.sql.gz 2>/dev/null | awk '{print " " $NF " (" $5 ")"}' || echo " No backups found."
exit 1
fi
if [ ! -f "$BACKUP_FILE" ]; then
echo "❌ Backup file not found: $BACKUP_FILE"
exit 1
fi
# Check if container is running.
# -x forces a whole-line match so a similarly named container
# (e.g. "klz-2026-klz-db-10") is not mistaken for ours.
if ! docker ps --format '{{.Names}}' | grep -qx "$DB_CONTAINER"; then
echo "❌ Database container '$DB_CONTAINER' is not running."
echo " Start it with: docker compose up -d klz-db"
exit 1
fi
# Destructive operation — require explicit interactive confirmation.
echo "⚠️ WARNING: This will REPLACE ALL DATA in the '$DB_NAME' database!"
echo " Backup file: $BACKUP_FILE"
echo ""
read -p "Are you sure? (y/N) " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
echo "Cancelled."
exit 0
fi
echo "🔄 Restoring database from $BACKUP_FILE..."
# ON_ERROR_STOP makes psql exit non-zero on the first SQL error. Without it
# psql returns 0 even when statements fail, and the script would report a
# false "restored successfully" despite `set -euo pipefail`.
gunzip -c "$BACKUP_FILE" | docker exec -i "$DB_CONTAINER" psql -U "$DB_USER" -d "$DB_NAME" --quiet -v ON_ERROR_STOP=1
echo "✅ Database restored successfully!"

View File

@@ -3,7 +3,7 @@ import fs from 'node:fs';
import path from 'node:path';
import matter from 'gray-matter';
const TARGET_DIRS = ['./data/blog', './data/products', './data/pages'];
const TARGET_DIRS = ['./data/blog', './data/products'];
function getAllFiles(dirPath, arrayOfFiles) {
if (!fs.existsSync(dirPath)) return arrayOfFiles || [];