feat: payload cms

This commit is contained in:
2026-02-26 01:32:22 +01:00
parent 1963a93123
commit 7d65237ee9
67 changed files with 3179 additions and 760 deletions

View File

@@ -40,6 +40,7 @@ async function main() {
console.log(`\n🕷 Launching Puppeteer Headless Engine...`);
const browser = await puppeteer.launch({
headless: true,
executablePath: process.env.PUPPETEER_EXECUTABLE_PATH || process.env.CHROME_PATH || undefined,
args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-dev-shm-usage'],
});
@@ -61,7 +62,7 @@ async function main() {
const consoleErrorsList: Array<{ type: string; error: string; page: string }> = [];
// Listen for unhandled exceptions natively in the page
page.on('pageerror', (err) => {
page.on('pageerror', (err: any) => {
consoleErrorsList.push({
type: 'PAGE_ERROR',
error: err.message,
@@ -73,7 +74,7 @@ async function main() {
// Listen for console.error and console.warn messages (like Next.js Image warnings, hydration errors, CSP blocks)
page.on('console', (msg) => {
const type = msg.type();
if (type === 'error' || type === 'warning') {
if (type === 'error' || type === 'warn') {
const text = msg.text();
// Exclude common browser extension noise or third party tracker warnings

14
scripts/check-pages.ts Normal file
View File

@@ -0,0 +1,14 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
/**
 * Debug helper: prints the slug and a truncated dump of the content tree
 * of the first document in the `pages` collection.
 */
async function run() {
  const payload = await getPayload({ config: configPromise });
  const result = await payload.find({ collection: 'pages', limit: 1 });
  const doc = result.docs[0] as any;
  // Guard: an empty collection would otherwise crash on `doc.slug`.
  if (!doc) {
    console.error('No pages found.');
    process.exit(1);
  }
  console.log('Sample page:', doc.slug);
  console.log('Content structure (first 2 levels):');
  console.log(JSON.stringify(doc.content, null, 2).slice(0, 3000));
  process.exit(0);
}

// Surface initialization/query failures instead of leaving an unhandled
// rejection (which would otherwise exit 0 on some runtimes).
run().catch((err) => {
  console.error(err);
  process.exit(1);
});

14
scripts/check-start.ts Normal file
View File

@@ -0,0 +1,14 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
/**
 * Debug helper: prints the first 1500 characters of the DE 'start' page's
 * Lexical content tree.
 */
async function run() {
  const payload = await getPayload({ config: configPromise });
  const existing = await payload.find({
    collection: 'pages',
    where: { slug: { equals: 'start' }, locale: { equals: 'de' } },
    limit: 1,
  });
  const doc = existing.docs[0] as any;
  // Guard: the original crashed with a TypeError when no matching page existed.
  if (!doc) {
    console.error("No 'start' page found for locale 'de'.");
    process.exit(1);
  }
  console.log(JSON.stringify(doc.content, null, 2).slice(0, 1500));
  process.exit(0);
}

run().catch((err) => {
  console.error(err);
  process.exit(1);
});

14
scripts/check-team.ts Normal file
View File

@@ -0,0 +1,14 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
/**
 * Debug helper: prints the first 500 characters of the DE 'team' page's
 * Lexical content tree.
 */
async function run() {
  const payload = await getPayload({ config: configPromise });
  const existing = await payload.find({
    collection: 'pages',
    where: { slug: { equals: 'team' }, locale: { equals: 'de' } },
    limit: 1,
  });
  const doc = existing.docs[0] as any;
  // Guard: the original crashed with a TypeError when no matching page existed.
  if (!doc) {
    console.error("No 'team' page found for locale 'de'.");
    process.exit(1);
  }
  console.log(JSON.stringify(doc.content, null, 2).slice(0, 500));
  process.exit(0);
}

run().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -11,6 +11,28 @@
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail
# Sync-state sentinels for the EXIT trap below. SYNC_SUCCESS is flipped to
# "true" only at the very end of do_push/do_pull; the backup-file variables
# are filled in by backup_local_db / backup_remote_db before any overwrite.
SYNC_SUCCESS="false"
LOCAL_BACKUP_FILE=""
REMOTE_BACKUP_FILE=""

# EXIT trap handler: if the script exits non-zero before a sync completed,
# restore whichever database was being overwritten from its safety backup.
cleanup_on_exit() {
  local exit_code=$?
  # Only roll back on a genuine mid-sync failure — not after success.
  if [ "$SYNC_SUCCESS" != "true" ] && [ $exit_code -ne 0 ]; then
    echo ""
    echo "❌ Sync aborted or failed! (Exit code: $exit_code)"
    # push → the remote DB was being written; restore it over SSH.
    # (:- defaults keep `set -u` happy if the trap fires before these are set)
    if [ "${DIRECTION:-}" = "push" ] && [ -n "${REMOTE_BACKUP_FILE:-}" ]; then
      echo "🔄 Rolling back $TARGET database..."
      ssh "$SSH_HOST" "gunzip -c $REMOTE_BACKUP_FILE | docker exec -i $REMOTE_DB_CONTAINER psql -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --quiet" || echo "⚠️ Rollback failed"
      echo "✅ Rollback complete."
    # pull → the local DB was being written; restore it locally.
    elif [ "${DIRECTION:-}" = "pull" ] && [ -n "${LOCAL_BACKUP_FILE:-}" ]; then
      echo "🔄 Rolling back local database..."
      gunzip -c "$LOCAL_BACKUP_FILE" | docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --quiet || echo "⚠️ Rollback failed"
      echo "✅ Rollback complete."
    fi
  fi
}
trap 'cleanup_on_exit' EXIT
# Load environment variables
if [ -f .env ]; then
set -a; source .env; set +a
@@ -48,6 +70,13 @@ resolve_target() {
REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-testing_klz_media_data/_data"
REMOTE_SITE_DIR="/home/deploy/sites/testing.klz-cables.com"
;;
staging)
REMOTE_PROJECT="klz-staging"
REMOTE_DB_CONTAINER="klz-staging-klz-db-1"
REMOTE_APP_CONTAINER="klz-staging-klz-app-1"
REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-staging_klz_media_data/_data"
REMOTE_SITE_DIR="/home/deploy/sites/staging.klz-cables.com"
;;
prod|production)
REMOTE_PROJECT="klz-cablescom"
REMOTE_DB_CONTAINER="klz-cablescom-klz-db-1"
@@ -57,7 +86,7 @@ resolve_target() {
;;
*)
echo "❌ Unknown target: $TARGET"
echo " Valid targets: testing, prod"
echo " Valid targets: testing, staging, prod"
exit 1
;;
esac
@@ -118,6 +147,7 @@ backup_local_db() {
echo "📦 Creating safety backup of local DB → $file"
docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --clean --if-exists | gzip > "$file"
echo "✅ Backup: $file ($(du -h "$file" | cut -f1))"
LOCAL_BACKUP_FILE="$file"
}
backup_remote_db() {
@@ -125,6 +155,7 @@ backup_remote_db() {
echo "📦 Creating safety backup of $TARGET DB → $SSH_HOST:$file"
ssh "$SSH_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --clean --if-exists | gzip > $file"
echo "✅ Remote backup: $file"
REMOTE_BACKUP_FILE="$file"
}
# ── PUSH: local → remote ──────────────────────────────────────────────────
@@ -177,6 +208,7 @@ do_push() {
rm -f "$dump"
ssh "$SSH_HOST" "rm -f /tmp/payload_push.sql.gz"
SYNC_SUCCESS="true"
echo ""
echo "✅ Push to $TARGET complete!"
}
@@ -214,12 +246,13 @@ do_pull() {
# 4. Sync media
echo "🖼️ Syncing media files..."
mkdir -p "$LOCAL_MEDIA_DIR"
rsync -az --delete --info=progress2 "$SSH_HOST:$REMOTE_MEDIA_VOLUME/" "$LOCAL_MEDIA_DIR/"
rsync -az --delete --progress "$SSH_HOST:$REMOTE_MEDIA_VOLUME/" "$LOCAL_MEDIA_DIR/"
# Cleanup
rm -f "/tmp/payload_pull.sql.gz"
ssh "$SSH_HOST" "rm -f /tmp/payload_pull.sql.gz"
SYNC_SUCCESS="true"
echo ""
echo "✅ Pull from $TARGET complete! Restart dev server to see changes."
}
@@ -230,8 +263,10 @@ if [ -z "$DIRECTION" ] || [ -z "$TARGET" ]; then
echo ""
echo "Usage:"
echo " pnpm cms:push:testing Push local DB + media → testing"
echo " pnpm cms:push:staging Push local DB + media → staging"
echo " pnpm cms:push:prod Push local DB + media → production"
echo " pnpm cms:pull:testing Pull testing DB + media → local"
echo " pnpm cms:pull:staging Pull staging DB + media → local"
echo " pnpm cms:pull:prod Pull production DB + media → local"
echo ""
echo "Safety: A backup is always created before overwriting."

View File

@@ -0,0 +1,33 @@
const fs = require('fs');
const blocks = [
'HomeHero',
'HomeProductCategories',
'HomeWhatWeDo',
'HomeRecentPosts',
'HomeExperience',
'HomeWhyChooseUs',
'HomeMeetTheTeam',
'HomeVideo',
];
blocks.forEach(name => {
const content = `import { Block } from 'payload';
export const ${name}: Block = {
slug: '${name.charAt(0).toLowerCase() + name.slice(1)}',
interfaceName: '${name}Block',
fields: [
{
name: 'note',
type: 'text',
admin: {
description: 'This is a dedicated layout block for the homepage wrapper. Content is managed via translation files.',
},
},
],
};
`;
fs.writeFileSync(\`src/payload/blocks/\${name}.ts\`, content);
console.log(\`Created \${name}.ts\`);
});

View File

@@ -0,0 +1,260 @@
/**
* merge-locale-duplicates.ts
*
* Merges duplicate DE/EN documents into single Payload localized documents.
*
* Problem: Before native localization, DE and EN were stored as separate rows.
* Now each should be one document with locale-specific data in the *_locales tables.
*
* Strategy:
* 1. Products: Match by slug → Keep DE row as canonical, copy EN data, delete EN row
* 2. Posts: Match by slug → Same strategy
* 3. Pages: Match by slug map (impressum↔legal-notice, blog↔blog, etc.) → Same strategy
*/
import pg from 'pg';
const { Pool } = pg;
// Connection-string resolution order: explicit env vars first, then a
// local-dev fallback pointing at the dockerized Postgres on 54322.
// NOTE(review): the fallback embeds a credential in source — acceptable for a
// throwaway local container, but confirm it is not a real secret.
const DB_URL =
  process.env.DATABASE_URI ||
  process.env.POSTGRES_URI ||
  `postgresql://payload:120in09oenaoinsd9iaidon@127.0.0.1:54322/payload`;

const pool = new Pool({ connectionString: DB_URL });
/**
 * Thin helper around pool.query: runs a parameterized statement and returns
 * just the result rows, cast to the caller-supplied row type.
 */
async function q<T = any>(query: string, values: unknown[] = []): Promise<T[]> {
  const { rows } = await pool.query(query, values);
  return rows as T[];
}
/**
 * Merge duplicate DE/EN product rows into single localized documents.
 *
 * For every (DE, EN) pair sharing a localized slug: upsert the EN locale data
 * onto the canonical DE row, move categories/rels/featured image over when
 * the DE row lacks them, then delete the redundant EN row and its versions.
 */
async function mergeProducts() {
  console.log('\n── PRODUCTS ───────────────────────────────────────');
  // Pair DE and EN base rows via a shared slug; rows already merged
  // (same id on both sides) are excluded by the WHERE clause.
  const pairs = await q<{ de_id: number; en_id: number; slug: string }>(`
    SELECT
      de.id as de_id,
      en.id as en_id,
      de_loc.slug as slug
    FROM products de
    JOIN products_locales de_loc ON de_loc._parent_id = de.id AND de_loc._locale = 'de'
    JOIN products_locales en_loc ON en_loc.slug = de_loc.slug AND en_loc._locale = 'en'
    JOIN products en ON en.id = en_loc._parent_id
    WHERE de.id != en.id
  `);
  console.log(`Found ${pairs.length} DE/EN product pairs to merge`);
  for (const { de_id, en_id, slug } of pairs) {
    console.log(`  Merging: ${slug} (DE id=${de_id} ← EN id=${en_id})`);
    const [enData] = await q(`
      SELECT * FROM products_locales WHERE _parent_id = $1 AND _locale = 'en'
    `, [en_id]);
    if (enData) {
      // Attach the EN locale row to the canonical DE document (upsert keyed
      // on the (_locale, _parent_id) unique constraint).
      await q(`
        INSERT INTO products_locales (title, description, application, content, _locale, _parent_id)
        VALUES ($1, $2, $3, $4, 'en', $5)
        ON CONFLICT (_locale, _parent_id) DO UPDATE
        SET title = EXCLUDED.title,
            description = EXCLUDED.description,
            application = EXCLUDED.application,
            content = EXCLUDED.content
      `, [enData.title, enData.description, enData.application, enData.content, de_id]);
    }
    // Move categories from EN to DE if DE has none
    await q(`
      UPDATE products_categories SET _parent_id = $1
      WHERE _parent_id = $2
        AND NOT EXISTS (SELECT 1 FROM products_categories WHERE _parent_id = $1)
    `, [de_id, en_id]);
    // Move images (rels) from EN to DE if DE has none
    await q(`
      UPDATE products_rels SET parent = $1
      WHERE parent = $2
        AND NOT EXISTS (SELECT 1 FROM products_rels WHERE parent = $1)
    `, [de_id, en_id]);
    // Copy featuredImage if DE is missing one
    await q(`
      UPDATE products SET featured_image_id = (
        SELECT featured_image_id FROM products WHERE id = $2
      )
      WHERE id = $1 AND featured_image_id IS NULL
    `, [de_id, en_id]);
    // Delete EN locale entry and EN product row (versions table first so the
    // base-row delete does not leave dangling version parents).
    await q(`DELETE FROM products_locales WHERE _parent_id = $1`, [en_id]);
    await q(`DELETE FROM _products_v WHERE parent = $1`, [en_id]);
    await q(`DELETE FROM products WHERE id = $1`, [en_id]);
    console.log(`${slug}`);
  }
  const [{ count }] = await q(`SELECT count(*) FROM products`);
  console.log(`Products remaining: ${count}`);
}
/**
 * Merge duplicate DE/EN post rows (paired by shared slug) into single
 * localized documents, mirroring the product merge strategy.
 */
async function mergePosts() {
  console.log('\n── POSTS ──────────────────────────────────────────');
  const pairs = await q<{ de_id: number; en_id: number; slug: string }>(`
    SELECT
      de.id as de_id,
      en.id as en_id,
      de_loc.slug as slug
    FROM posts de
    JOIN posts_locales de_loc ON de_loc._parent_id = de.id AND de_loc._locale = 'de'
    JOIN posts_locales en_loc ON en_loc.slug = de_loc.slug AND en_loc._locale = 'en'
    JOIN posts en ON en.id = en_loc._parent_id
    WHERE de.id != en.id
  `);
  console.log(`Found ${pairs.length} DE/EN post pairs to merge`);
  for (const { de_id, en_id, slug } of pairs) {
    console.log(`  Merging post: ${slug} (DE id=${de_id} ← EN id=${en_id})`);
    const [enData] = await q(`
      SELECT * FROM posts_locales WHERE _parent_id = $1 AND _locale = 'en'
    `, [en_id]);
    if (enData) {
      // Upsert the EN locale data onto the canonical DE document.
      await q(`
        INSERT INTO posts_locales (title, slug, excerpt, category, content, _locale, _parent_id)
        VALUES ($1, $2, $3, $4, $5, 'en', $6)
        ON CONFLICT (_locale, _parent_id) DO UPDATE
        SET title = EXCLUDED.title, slug = EXCLUDED.slug,
            excerpt = EXCLUDED.excerpt, category = EXCLUDED.category,
            content = EXCLUDED.content
      `, [enData.title, enData.slug, enData.excerpt, enData.category, enData.content, de_id]);
    }
    // Copy featuredImage/date from EN if DE is missing
    await q(`
      UPDATE posts SET
        featured_image_id = COALESCE(featured_image_id, (SELECT featured_image_id FROM posts WHERE id = $2)),
        date = COALESCE(date, (SELECT date FROM posts WHERE id = $2))
      WHERE id = $1
    `, [de_id, en_id]);
    // Drop the now-redundant EN document: locale rows, versions, base row.
    await q(`DELETE FROM posts_locales WHERE _parent_id = $1`, [en_id]);
    await q(`DELETE FROM _posts_v WHERE parent = $1`, [en_id]);
    await q(`DELETE FROM posts WHERE id = $1`, [en_id]);
    console.log(`${slug}`);
  }
  const [{ count }] = await q(`SELECT count(*) FROM posts`);
  console.log(`Posts remaining: ${count}`);
}
// DE slug → EN slug mapping for pages.
// Pages (unlike posts/products) use translated slugs, so DE/EN duplicates
// cannot be paired automatically by a shared slug — this explicit map drives
// mergePages() below.
const PAGE_SLUG_MAP: Record<string, string> = {
  impressum: 'legal-notice',
  datenschutz: 'privacy-policy',
  agbs: 'terms',
  kontakt: 'contact',
  produkte: 'products',
  blog: 'blog',
  team: 'team',
  start: 'start',
  danke: 'thanks',
};
/**
 * Merge DE/EN page duplicates using the explicit PAGE_SLUG_MAP. Keeps the DE
 * row as canonical, folds the EN locale data into it, and deletes the EN row.
 * Pages missing on either side, or already merged, are reported and skipped.
 */
async function mergePages() {
  console.log('\n── PAGES ──────────────────────────────────────────');
  for (const [deSlug, enSlug] of Object.entries(PAGE_SLUG_MAP)) {
    const [dePage] = await q<{ id: number }>(`
      SELECT p.id FROM pages p
      JOIN pages_locales pl ON pl._parent_id = p.id AND pl._locale = 'de' AND pl.slug = $1
      LIMIT 1
    `, [deSlug]);
    const [enPage] = await q<{ id: number }>(`
      SELECT p.id FROM pages p
      JOIN pages_locales pl ON pl._parent_id = p.id AND pl._locale = 'en' AND pl.slug = $1
      LIMIT 1
    `, [enSlug]);
    // Skip cases where there is nothing to merge.
    if (!dePage && !enPage) {
      console.log(`  ⚠ No page found for ${deSlug}/${enSlug} — skipping`);
      continue;
    }
    if (!dePage) {
      console.log(`  ⚠ No DE page for "${deSlug}" — EN-only page id=${enPage!.id} kept`);
      continue;
    }
    if (!enPage) {
      console.log(`  ⚠ No EN page for "${enSlug}" — DE-only page id=${dePage.id} kept`);
      continue;
    }
    if (dePage.id === enPage.id) {
      console.log(`${deSlug}/${enSlug} already merged (id=${dePage.id})`);
      continue;
    }
    console.log(`  Merging: ${deSlug}${enSlug} (DE id=${dePage.id} ← EN id=${enPage.id})`);
    const [enData] = await q(`
      SELECT * FROM pages_locales WHERE _parent_id = $1 AND _locale = 'en'
    `, [enPage.id]);
    if (enData) {
      // Upsert the EN locale data onto the canonical DE document.
      await q(`
        INSERT INTO pages_locales (title, slug, excerpt, content, _locale, _parent_id)
        VALUES ($1, $2, $3, $4, 'en', $5)
        ON CONFLICT (_locale, _parent_id) DO UPDATE
        SET title = EXCLUDED.title, slug = EXCLUDED.slug,
            excerpt = EXCLUDED.excerpt, content = EXCLUDED.content
      `, [enData.title, enData.slug, enData.excerpt, enData.content, dePage.id]);
    }
    // Copy featuredImage/layout from EN if DE is missing
    await q(`
      UPDATE pages SET
        featured_image_id = COALESCE(featured_image_id, (SELECT featured_image_id FROM pages WHERE id = $2)),
        layout = COALESCE(layout, (SELECT layout FROM pages WHERE id = $2))
      WHERE id = $1
    `, [dePage.id, enPage.id]);
    // Remove the redundant EN document: locale rows, versions, base row.
    await q(`DELETE FROM pages_locales WHERE _parent_id = $1`, [enPage.id]);
    await q(`DELETE FROM _pages_v WHERE parent = $1`, [enPage.id]);
    await q(`DELETE FROM pages WHERE id = $1`, [enPage.id]);
    console.log(`${deSlug}/${enSlug}`);
  }
  const [{ count }] = await q(`SELECT count(*) FROM pages`);
  console.log(`Pages remaining: ${count}`);
}
/**
 * Entry point: merge products, posts, then pages (sequentially — each step
 * assumes the previous one finished), print the resulting pages table, and
 * always release the connection pool.
 */
async function main() {
  console.log('🔀 Merging duplicate locale documents into native Payload localization...');
  try {
    await mergeProducts();
    await mergePosts();
    await mergePages();
    console.log('\n── Final pages state ──────────────────────────────');
    const rows = await q(`
    SELECT p.id, pl._locale, pl.slug, pl.title FROM pages p
    JOIN pages_locales pl ON pl._parent_id = p.id
    ORDER BY p.id, pl._locale
  `);
    for (const r of rows) {
      console.log(` [id=${r.id}] ${r._locale}: ${r.slug}${r.title}`);
    }
    console.log('\n✅ Done!');
  } finally {
    // Release pg connections even when a merge step throws.
    await pool.end();
  }
}

main().catch((err) => {
  console.error('Fatal error:', err);
  process.exit(1);
});

View File

@@ -1,152 +0,0 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';
import * as dotenv from 'dotenv';
dotenv.config();
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import { parseMarkdownToLexical } from '../src/payload/utils/lexicalParser';
/**
 * Resolve a legacy image path to a Payload `media` document id.
 *
 * Looks the file up by exact filename; when not found, attempts to ingest it
 * from the legacy public/ directory. Returns null for an empty path or when
 * the file cannot be resolved/ingested.
 */
async function mapImageToMediaId(payload: any, imagePath: string): Promise<string | null> {
  if (!imagePath) return null;
  const filename = path.basename(imagePath);
  const media = await payload.find({
    collection: 'media',
    where: {
      filename: {
        equals: filename,
      },
    },
    limit: 1,
  });
  if (media.docs.length > 0) {
    return media.docs[0].id;
  }
  // Auto-ingest missing images from legacy public/ directory
  const cleanPath = imagePath.startsWith('/') ? imagePath.slice(1) : imagePath;
  const fullPath = path.join(process.cwd(), 'public', cleanPath);
  if (fs.existsSync(fullPath)) {
    try {
      // Fix: the log messages rendered a broken placeholder instead of the
      // path — interpolate the actual file being ingested.
      console.log(`[Blog Migration] 📤 Ingesting missing Media into Payload: ${fullPath}`);
      const newMedia = await payload.create({
        collection: 'media',
        data: {
          alt: filename.replace(/[-_]/g, ' ').replace(/\.[^/.]+$/, ''), // create a human readable alt text
        },
        filePath: fullPath,
      });
      return newMedia.id;
    } catch (err: any) {
      console.error(`[Blog Migration] ❌ Failed to ingest ${fullPath}:`, err);
    }
  } else {
    console.warn(`[Blog Migration] ⚠️ Missing image entirely from disk: ${fullPath}`);
  }
  return null;
}
/**
 * Migrate legacy MDX blog posts (data/blog/<locale>/*.mdx) into the Payload
 * `posts` collection: parses front matter, converts the markdown body to a
 * Lexical AST, resolves the featured image, and upserts by slug + locale.
 */
async function migrateBlogPosts() {
  console.log('[Blog Migration] 🔍 Using POSTGRES_URI:', process.env.POSTGRES_URI || 'NOT SET');
  console.log('[Blog Migration] 🔍 Using DATABASE_URI:', process.env.DATABASE_URI || 'NOT SET');
  let payload;
  try {
    payload = await getPayload({ config: configPromise });
  } catch (err: any) {
    console.error('[Blog Migration] ❌ Failed to initialize Payload:', err);
    process.exit(1);
  }
  const locales = ['en', 'de'];
  for (const locale of locales) {
    const postsDir = path.join(process.cwd(), 'data', 'blog', locale);
    if (!fs.existsSync(postsDir)) continue;
    const files = fs.readdirSync(postsDir);
    for (const file of files) {
      if (!file.endsWith('.mdx')) continue;
      const slug = file.replace(/\.mdx$/, '');
      const filePath = path.join(postsDir, file);
      const fileContent = fs.readFileSync(filePath, 'utf8');
      // gray-matter splits YAML front matter (data) from the markdown body.
      const { data, content } = matter(fileContent);
      console.log(`Migrating ${locale}/${slug}...`);
      const lexicalBlocks = parseMarkdownToLexical(content);
      // Wrap the parsed blocks in the root envelope Lexical expects.
      const lexicalAST = {
        root: {
          type: 'root',
          format: '',
          indent: 0,
          version: 1,
          children: lexicalBlocks,
          direction: 'ltr',
        },
      };
      const publishDate = data.date ? new Date(data.date).toISOString() : new Date().toISOString();
      // Front matter `public: false` maps to draft; everything else publishes.
      const status = data.public === false ? 'draft' : 'published';
      let featuredImageId = null;
      if (data.featuredImage || data.image) {
        featuredImageId = await mapImageToMediaId(payload, data.featuredImage || data.image);
      }
      try {
        // Find existing post
        const existing = await payload.find({
          collection: 'posts',
          where: { slug: { equals: slug }, locale: { equals: locale } as any },
        });
        // Debug dump of one known post's AST — presumably a leftover trace;
        // TODO(review): confirm this is still wanted.
        if (slug.includes('welcome-to-the-future')) {
          console.log(`\n--- AST for ${slug} ---`);
          console.log(JSON.stringify(lexicalAST, null, 2));
          console.log(`-----------------------\n`);
        }
        if (existing.docs.length > 0) {
          // Re-running the migration only refreshes content/status/image.
          await payload.update({
            collection: 'posts',
            id: existing.docs[0].id,
            data: {
              content: lexicalAST as any,
              _status: status as any,
              ...(featuredImageId ? { featuredImage: featuredImageId } : {}),
            },
          });
          console.log(`✅ AST Components & Image RE-INJECTED for ${slug}`);
        } else {
          await payload.create({
            collection: 'posts',
            data: {
              title: data.title,
              slug: slug,
              locale: locale,
              date: publishDate,
              category: data.category || '',
              excerpt: data.excerpt || '',
              content: lexicalAST as any,
              _status: status as any,
              ...(featuredImageId ? { featuredImage: featuredImageId } : {}),
            },
          });
          console.log(`✅ Created ${slug}`);
        }
      } catch (err: any) {
        // A single bad post should not abort the whole migration.
        console.error(`❌ Failed ${slug}`, err.message);
      }
    }
  }
  console.log('Migration completed.');
  process.exit(0);
}

migrateBlogPosts().catch(console.error);

View File

@@ -1,156 +0,0 @@
import { getPayload } from 'payload';
import configPromise from '../payload.config';
import * as dotenv from 'dotenv';
dotenv.config();
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import { parseMarkdownToLexical } from '../src/payload/utils/lexicalParser';
/**
 * Resolve a legacy product image path to a Payload `media` document id,
 * auto-ingesting the file from public/ when it is not yet in the collection.
 * Returns null for an empty path or an unresolvable file.
 */
async function mapImageToMediaId(payload: any, imagePath: string): Promise<string | null> {
  if (!imagePath) return null;
  const filename = path.basename(imagePath);
  // Exact match instead of substring to avoid matching "cable-black.jpg" with "cable.jpg"
  const media = await payload.find({
    collection: 'media',
    where: {
      filename: {
        equals: filename,
      },
    },
    limit: 1,
  });
  if (media.docs.length > 0) {
    return media.docs[0].id;
  }
  const cleanPath = imagePath.startsWith('/') ? imagePath.slice(1) : imagePath;
  const fullPath = path.join(process.cwd(), 'public', cleanPath);
  if (fs.existsSync(fullPath)) {
    try {
      // Fix: the log messages rendered a broken placeholder instead of the
      // path — interpolate the actual file being ingested.
      console.log(`[Products Migration] 📤 Ingesting missing Media into Payload: ${fullPath}`);
      const newMedia = await payload.create({
        collection: 'media',
        data: {
          alt: filename.replace(/[-_]/g, ' ').replace(/\.[^/.]+$/, ''),
        },
        filePath: fullPath,
      });
      return newMedia.id;
    } catch (err: any) {
      console.error(`[Products Migration] ❌ Failed to ingest ${fullPath}:`, err);
    }
  } else {
    console.warn(`[Products Migration] ⚠️ Missing image entirely from disk: ${fullPath}`);
  }
  return null;
}
/**
 * Migrate legacy MDX product files (data/products/<locale>/**/*.mdx) into the
 * Payload `products` collection: resolves front-matter images to media ids,
 * converts markdown to Lexical, and upserts by slug + locale.
 */
export async function migrateProducts() {
  const payload = await getPayload({ config: configPromise });
  const productLocales = ['en', 'de'];
  for (const locale of productLocales) {
    const productsDir = path.join(process.cwd(), 'data', 'products', locale);
    if (!fs.existsSync(productsDir)) continue;
    // Recursive file finder
    const mdFiles: string[] = [];
    const walk = (dir: string) => {
      const files = fs.readdirSync(dir);
      for (const file of files) {
        const fullPath = path.join(dir, file);
        const stat = fs.statSync(fullPath);
        if (stat.isDirectory()) {
          walk(fullPath);
        } else if (file.endsWith('.mdx')) {
          mdFiles.push(fullPath);
        }
      }
    };
    walk(productsDir);
    for (const filePath of mdFiles) {
      const fileContent = fs.readFileSync(filePath, 'utf8');
      // gray-matter splits YAML front matter (data) from the markdown body.
      const { data, content } = matter(fileContent);
      console.log(`Processing Product: [${locale.toUpperCase()}] ${data.title}`);
      // 1. Process Images
      const mediaIds = [];
      if (data.images && Array.isArray(data.images)) {
        for (const imgPath of data.images) {
          const id = await mapImageToMediaId(payload, imgPath);
          if (id) mediaIds.push(id);
        }
      }
      // 2. Map Lexical AST for deeply nested components (like ProductTabs + Technical data)
      const lexicalContent = parseMarkdownToLexical(content);
      const wrapLexical = (blocks: any[]) => ({
        root: {
          type: 'root',
          format: '',
          indent: 0,
          version: 1,
          children: blocks,
          direction: 'ltr',
        },
      });
      // Payload expects category objects via the 'category' key
      const formattedCategories = Array.isArray(data.categories)
        ? data.categories.map((c: string) => ({ category: c }))
        : [];
      // Slug/sku default to the file name; the first resolved image doubles
      // as the featured image.
      const productData = {
        title: data.title,
        sku: data.sku || path.basename(filePath, '.mdx'),
        slug: path.basename(filePath, '.mdx'),
        locale: locale as 'en' | 'de',
        categories: formattedCategories,
        description: data.description || '',
        featuredImage: mediaIds.length > 0 ? mediaIds[0] : undefined,
        images: mediaIds.length > 0 ? mediaIds : undefined,
        content: wrapLexical(lexicalContent) as any,
        application: data.application
          ? (wrapLexical(parseMarkdownToLexical(data.application)) as any)
          : undefined,
        _status: 'published' as any,
      };
      // Check if product exists (by sku combined with locale, since slug may differ by language)
      const existing = await payload.find({
        collection: 'products',
        where: {
          and: [{ slug: { equals: productData.slug } }, { locale: { equals: locale } }],
        },
      });
      if (existing.docs.length > 0) {
        console.log(`Updating existing product ${productData.slug} (${locale})`);
        await payload.update({
          collection: 'products',
          id: existing.docs[0].id,
          data: productData,
        });
      } else {
        console.log(`Creating new product ${productData.slug} (${locale})`);
        await payload.create({
          collection: 'products',
          data: productData,
        });
      }
    }
  }
  console.log(`\n✅ Products Migration Complete!`);
  process.exit(0);
}

migrateProducts().catch(console.error);

View File

@@ -86,7 +86,7 @@ async function main() {
// Using a more robust way to execute and capture output
// We remove 'npx lhci upload' to keep everything local and avoid Google-hosted reports
const lhciCommand = `npx lhci collect ${urlArgs} ${chromePathArg} --config=config/lighthouserc.json --collect.settings.extraHeaders='${extraHeaders}' && npx lhci assert --config=config/lighthouserc.json`;
const lhciCommand = `npx lhci collect ${urlArgs} ${chromePathArg} --config=config/lighthouserc.json --collect.settings.extraHeaders='${extraHeaders}' --collect.settings.chromeFlags="--no-sandbox --disable-setuid-sandbox --disable-dev-shm-usage" && npx lhci assert --config=config/lighthouserc.json`;
console.log(`💻 Executing LHCI...`);

131
scripts/seed-home.ts Normal file
View File

@@ -0,0 +1,131 @@
/**
* Migration: Seed homepage ('start') as Lexical block content into Payload CMS.
*
* Usage:
* pnpm tsx scripts/seed-home.ts
*/
import { getPayload } from 'payload';
import configPromise from '@payload-config';
/**
 * Wrap a Payload layout-block reference in the Lexical `block` node
 * envelope. Extra fields are merged in next to the blockType discriminator.
 */
function lexicalBlock(blockType: string, fields: Record<string, any> = {}) {
  const blockFields = { blockType, ...fields };
  return { type: 'block', version: 2, fields: blockFields };
}
/** Wrap a list of block nodes in a minimal Lexical root document. */
function lexicalDoc(blocks: any[]) {
  const root = {
    type: 'root',
    format: '',
    indent: 0,
    version: 1,
    children: blocks,
    direction: 'ltr',
  };
  return { root };
}
// Homepage documents to upsert — one entry per locale. `content` is a Lexical
// doc whose children are layout-block references rendered by the frontend;
// the `note` fields are editor-facing hints only, not displayed content.
const PAGES = [
  // ─── Homepage (DE) ─────────────────────────────────────────
  {
    title: 'Startseite',
    slug: 'start',
    locale: 'de',
    layout: 'fullBleed',
    excerpt: 'Ihr Experte für hochwertige Stromkabel, Mittelspannungslösungen und Solarkabel. Zuverlässige Infrastruktur für eine grüne Energiezukunft.',
    _status: 'published',
    content: lexicalDoc([
      lexicalBlock('homeHero', { note: 'Hero section with primary video and CTA.' }),
      lexicalBlock('homeProductCategories', { note: 'Product categories overview based on CMS data.' }),
      lexicalBlock('homeWhatWeDo', { note: 'What we do / capabilities overview.' }),
      lexicalBlock('homeRecentPosts', { note: 'Latest 3 blog articles snippet.' }),
      lexicalBlock('homeExperience', { note: 'Experience and history timeline snippet.' }),
      lexicalBlock('homeWhyChooseUs', { note: 'Why choose KLZ Cables metrics and selling points.' }),
      lexicalBlock('homeMeetTheTeam', { note: 'High-level Meet the Team teaser.' }),
      lexicalBlock('homeGallery', { note: 'Image gallery from our facilities.' }),
      lexicalBlock('homeVideo', { note: 'Secondary informative background video.' }),
      lexicalBlock('homeCTA', { note: 'Bottom call to action linking to contact.' }),
    ]),
  },
  // ─── Homepage (EN) ─────────────────────────────────────────
  {
    title: 'Homepage',
    slug: 'start',
    locale: 'en',
    layout: 'fullBleed',
    excerpt: 'Your expert for high-quality power cables, medium voltage solutions, and solar cables. Reliable infrastructure for a green energy future.',
    _status: 'published',
    content: lexicalDoc([
      lexicalBlock('homeHero', { note: 'Hero section with primary video and CTA.' }),
      lexicalBlock('homeProductCategories', { note: 'Product categories overview based on CMS data.' }),
      lexicalBlock('homeWhatWeDo', { note: 'What we do / capabilities overview.' }),
      lexicalBlock('homeRecentPosts', { note: 'Latest 3 blog articles snippet.' }),
      lexicalBlock('homeExperience', { note: 'Experience and history timeline snippet.' }),
      lexicalBlock('homeWhyChooseUs', { note: 'Why choose KLZ Cables metrics and selling points.' }),
      lexicalBlock('homeMeetTheTeam', { note: 'High-level Meet the Team teaser.' }),
      lexicalBlock('homeGallery', { note: 'Image gallery from our facilities.' }),
      lexicalBlock('homeVideo', { note: 'Secondary informative background video.' }),
      lexicalBlock('homeCTA', { note: 'Bottom call to action linking to contact.' }),
    ]),
  },
];
/**
 * Upsert each homepage document in PAGES, matching by slug + locale.
 * Existing pages are updated in place; missing ones are created.
 */
async function seedHome() {
  const payload = await getPayload({ config: configPromise });
  for (const page of PAGES) {
    const existing = await payload.find({
      collection: 'pages',
      where: {
        slug: { equals: page.slug },
        locale: { equals: page.locale },
      },
      limit: 1,
    });
    const docs = existing.docs as any[];
    if (docs.length > 0) {
      await payload.update({
        collection: 'pages',
        id: docs[0].id,
        // Fix: scope the write to the page's locale (the sibling
        // seed-pages script already does this); without it localized
        // fields land in the default locale for every entry.
        locale: page.locale as any,
        data: {
          title: page.title,
          layout: page.layout as any,
          excerpt: page.excerpt,
          _status: page._status as any,
          content: page.content as any,
        },
      });
      console.log(`✅ Updated: ${page.slug} (${page.locale})`);
    } else {
      await payload.create({
        collection: 'pages',
        locale: page.locale as any,
        data: {
          title: page.title,
          slug: page.slug,
          locale: page.locale,
          layout: page.layout as any,
          excerpt: page.excerpt,
          _status: page._status as any,
          content: page.content as any,
        },
      });
      console.log(`✅ Created: ${page.slug} (${page.locale})`);
    }
  }
  console.log('\n🎉 Homepage seeded successfully!');
  process.exit(0);
}

seedHome().catch((err) => {
  console.error('❌ Seed failed:', err);
  process.exit(1);
});

242
scripts/seed-pages.ts Normal file
View File

@@ -0,0 +1,242 @@
/**
* Migration: Seed team, contact, and other missing pages as Lexical block content into Payload CMS.
*
* Usage:
* pnpm tsx scripts/seed-pages.ts
*/
import { getPayload } from 'payload';
import configPromise from '@payload-config';
/**
 * Build a Lexical `block` node referencing a Payload layout block; the
 * block's own fields are spread next to the `blockType` discriminator.
 */
function lexicalBlock(blockType: string, fields: Record<string, any>) {
  const merged = { blockType, ...fields };
  return { type: 'block', version: 2, fields: merged };
}
/** Wrap a list of block nodes in a minimal Lexical root document. */
function lexicalDoc(blocks: any[]) {
  return {
    root: {
      type: 'root',
      format: '',
      indent: 0,
      version: 1,
      children: blocks,
      direction: 'ltr',
    } as const,
  };
}
// Page documents to upsert — team and contact, each in DE and EN. Content is
// a Lexical doc of layout blocks (hero, team profiles, stats, values grid,
// contact section) whose fields are the actual displayed copy.
const PAGES = [
  // ─── Team (DE) ────────────────────────────────────────────
  {
    title: 'Team',
    slug: 'team',
    locale: 'de',
    layout: 'fullBleed',
    excerpt: '',
    _status: 'published',
    content: lexicalDoc([
      lexicalBlock('heroSection', {
        badge: 'Das Team',
        title: 'Tradition trifft Moderne',
        subtitle: 'Zwei Generationen, eine Vision: Deutschlands zuverlässigster Partner für Kabel & Leitungen.',
        alignment: 'center',
      }),
      lexicalBlock('teamProfile', {
        name: 'Michael Bodemer',
        role: 'Geschäftsführer',
        quote: 'Innovation entsteht dort, wo Erfahrung auf frische Ideen trifft.',
        description: 'Als Geschäftsführer verbindet Michael jahrzehntelange Branchenexpertise mit einem klaren Blick für die Zukunft. Sein Fokus liegt auf nachhaltigen Lösungen und modernster Technologie.',
        linkedinUrl: 'https://www.linkedin.com/in/michael-bodemer-33b493122/',
        layout: 'imageRight',
        colorScheme: 'dark',
      }),
      lexicalBlock('stats', {
        stats: [
          { value: '30+', label: 'Jahre Expertise' },
          { value: 'Global', label: 'Netzwerk' },
        ],
      }),
      lexicalBlock('teamProfile', {
        name: 'Klaus Mintel',
        role: 'Gründer & Berater',
        quote: 'Qualität ist kein Zufall sie ist das Ergebnis von Engagement und Erfahrung.',
        description: 'Klaus gründete KLZ Cables und hat das Unternehmen zu einem der zuverlässigsten Partner der Kabelindustrie aufgebaut. Er bringt Jahrzehnte an Expertise ein.',
        linkedinUrl: 'https://www.linkedin.com/in/klaus-mintel-b80a8b193/',
        layout: 'imageLeft',
        colorScheme: 'light',
      }),
      lexicalBlock('manifestoGrid', {
        title: 'Unsere Werte',
        subtitle: 'Was uns antreibt',
        tagline: 'Seit der Gründung leiten uns klare Prinzipien, die wir jeden Tag leben.',
        items: [
          { title: 'Qualität', description: 'Wir liefern nur Produkte, die höchsten Standards entsprechen.' },
          { title: 'Zuverlässigkeit', description: 'Termingerechte Lieferung ist für uns selbstverständlich.' },
          { title: 'Partnerschaft', description: 'Langfristige Beziehungen sind die Grundlage unseres Erfolgs.' },
          { title: 'Innovation', description: 'Wir investieren in neue Technologien und nachhaltige Lösungen.' },
          { title: 'Transparenz', description: 'Offene Kommunikation und faire Preise zeichnen uns aus.' },
          { title: 'Nachhaltigkeit', description: 'Verantwortung für Umwelt und Gesellschaft ist Teil unserer DNA.' },
        ],
      }),
      // Removed the imageGallery since it requires at least 1 image and we don't have media upload IDs yet.
    ]),
  },
  // ─── Team (EN) ────────────────────────────────────────────
  {
    title: 'Team',
    slug: 'team',
    locale: 'en',
    layout: 'fullBleed',
    excerpt: '',
    _status: 'published',
    content: lexicalDoc([
      lexicalBlock('heroSection', {
        badge: 'The Team',
        title: 'Tradition Meets Innovation',
        subtitle: 'Two generations, one vision: Germany\'s most reliable partner for cables & wiring.',
        alignment: 'center',
      }),
      lexicalBlock('teamProfile', {
        name: 'Michael Bodemer',
        role: 'Managing Director',
        quote: 'Innovation happens where experience meets fresh ideas.',
        description: 'As Managing Director, Michael combines decades of industry expertise with a clear vision for the future. His focus is on sustainable solutions and cutting-edge technology.',
        linkedinUrl: 'https://www.linkedin.com/in/michael-bodemer-33b493122/',
        layout: 'imageRight',
        colorScheme: 'dark',
      }),
      lexicalBlock('stats', {
        stats: [
          { value: '30+', label: 'Years of Expertise' },
          { value: 'Global', label: 'Network' },
        ],
      }),
      lexicalBlock('teamProfile', {
        name: 'Klaus Mintel',
        role: 'Founder & Advisor',
        quote: 'Quality is no accident it is the result of commitment and experience.',
        description: 'Klaus founded KLZ Cables and built the company into one of the most reliable partners in the cable industry. He brings decades of expertise.',
        linkedinUrl: 'https://www.linkedin.com/in/klaus-mintel-b80a8b193/',
        layout: 'imageLeft',
        colorScheme: 'light',
      }),
      lexicalBlock('manifestoGrid', {
        title: 'Our Values',
        subtitle: 'What drives us',
        tagline: 'Since our founding, clear principles have guided us every day.',
        items: [
          { title: 'Quality', description: 'We only deliver products that meet the highest standards.' },
          { title: 'Reliability', description: 'On-time delivery is our standard.' },
          { title: 'Partnership', description: 'Long-term relationships are the foundation of our success.' },
          { title: 'Innovation', description: 'We invest in new technologies and sustainable solutions.' },
          { title: 'Transparency', description: 'Open communication and fair pricing define us.' },
          { title: 'Sustainability', description: 'Responsibility for the environment and society is part of our DNA.' },
        ],
      }),
    ]),
  },
  // ─── Contact (DE) ─────────────────────────────────────────
  {
    title: 'Kontakt',
    slug: 'kontakt',
    locale: 'de',
    layout: 'fullBleed',
    excerpt: '',
    _status: 'published',
    content: lexicalDoc([
      lexicalBlock('heroSection', {
        badge: 'Kontakt',
        title: 'Sprechen Sie mit uns',
        subtitle: 'Wir sind für Sie da. Kontaktieren Sie uns für Beratung, Angebote oder technische Fragen.',
        alignment: 'left',
      }),
      lexicalBlock('contactSection', {
        showForm: true,
        showMap: true,
        showHours: true,
      }),
    ]),
  },
  // ─── Contact (EN) ─────────────────────────────────────────
  {
    title: 'Contact',
    slug: 'contact',
    locale: 'en',
    layout: 'fullBleed',
    excerpt: '',
    _status: 'published',
    content: lexicalDoc([
      lexicalBlock('heroSection', {
        badge: 'Contact',
        title: 'Talk to us',
        subtitle: 'We are here for you. Contact us for consulting, quotes, or technical questions.',
        alignment: 'left',
      }),
      lexicalBlock('contactSection', {
        showForm: true,
        showMap: true,
        showHours: true,
      }),
    ]),
  },
];
/**
 * Upserts every entry in PAGES into the Payload `pages` collection.
 *
 * For each page we look up an existing document by slug + locale: if one
 * exists it is updated in place, otherwise a new document is created.
 * Pages are processed sequentially on purpose — this is a one-off seed
 * script and ordered log output is easier to follow than interleaved
 * parallel writes.
 *
 * Exits the process with code 0 when every page has been written.
 */
async function seedPages() {
  const payload = await getPayload({ config: configPromise });
  for (const page of PAGES) {
    // NOTE(review): filtering on `locale` inside `where` only matches if the
    // collection stores locale as a real field; Payload's Local API normally
    // takes `locale` as a top-level find option — confirm against the schema.
    const existing = await payload.find({
      collection: 'pages',
      where: {
        slug: { equals: page.slug },
        locale: { equals: page.locale },
      },
      limit: 1,
    });
    const docs = existing.docs as any[];
    if (docs.length > 0) {
      await payload.update({
        collection: 'pages',
        id: docs[0].id,
        locale: page.locale,
        data: {
          title: page.title,
          layout: page.layout as any,
          // Keep the update payload in sync with the create branch below so
          // re-running the seed also converges excerpt changes (previously
          // excerpt was only written on create, never on update).
          excerpt: page.excerpt,
          _status: page._status as any,
          content: page.content as any,
        },
      });
      console.log(`✅ Updated: ${page.slug} (${page.locale})`);
    } else {
      await payload.create({
        collection: 'pages',
        locale: page.locale,
        data: {
          title: page.title,
          slug: page.slug,
          layout: page.layout as any,
          excerpt: page.excerpt,
          _status: page._status as any,
          content: page.content as any,
        },
      });
      console.log(`✅ Created: ${page.slug} (${page.locale})`);
    }
  }
  console.log('\n🎉 All pages seeded successfully!');
  process.exit(0);
}
// Entry point: run the seed and fail the process loudly on any error so CI
// and shell callers see a non-zero exit code instead of a silent rejection.
seedPages().catch((error) => {
  console.error('❌ Seed failed:', error);
  process.exit(1);
});

View File

@@ -0,0 +1,18 @@
-- Drop leftover flattened `version_*` columns from the Payload versions
-- tables (`_products_v`, `_posts_v`, `_pages_v`). IF EXISTS keeps each
-- statement safe to re-run on databases where the column is already gone.
-- NOTE(review): presumably these fields moved into locale/child tables when
-- localization was enabled — confirm against the current Payload schema.

-- Products version table
ALTER TABLE _products_v DROP COLUMN IF EXISTS version_title;
ALTER TABLE _products_v DROP COLUMN IF EXISTS version_description;
ALTER TABLE _products_v DROP COLUMN IF EXISTS version_locale;
ALTER TABLE _products_v DROP COLUMN IF EXISTS version_application;
ALTER TABLE _products_v DROP COLUMN IF EXISTS version_content;

-- Posts version table
ALTER TABLE _posts_v DROP COLUMN IF EXISTS version_title;
ALTER TABLE _posts_v DROP COLUMN IF EXISTS version_slug;
ALTER TABLE _posts_v DROP COLUMN IF EXISTS version_excerpt;
ALTER TABLE _posts_v DROP COLUMN IF EXISTS version_locale;
ALTER TABLE _posts_v DROP COLUMN IF EXISTS version_category;
ALTER TABLE _posts_v DROP COLUMN IF EXISTS version_content;

-- Pages version table
ALTER TABLE _pages_v DROP COLUMN IF EXISTS version_title;
ALTER TABLE _pages_v DROP COLUMN IF EXISTS version_slug;
ALTER TABLE _pages_v DROP COLUMN IF EXISTS version_locale;
ALTER TABLE _pages_v DROP COLUMN IF EXISTS version_excerpt;
ALTER TABLE _pages_v DROP COLUMN IF EXISTS version_content;

14
scripts/test-rich-text.js Normal file
View File

@@ -0,0 +1,14 @@
import { getPayload } from 'payload';
import configPromise from '@payload-config';

/**
 * Smoke check: loads the page with slug 'start' and prints how many
 * top-level Lexical children its rich-text `content` contains.
 *
 * Exits 0 on success, 1 when the page is missing or the query fails.
 */
async function run() {
  const payload = await getPayload({ config: configPromise });
  const existing = await payload.find({
    collection: 'pages',
    where: { slug: { equals: 'start' } },
    limit: 1,
  });

  const homepage = existing.docs[0];
  if (!homepage) {
    // Guard: the original crashed with a TypeError on `docs[0].content`
    // when the 'start' page had not been seeded yet.
    console.error('No page with slug "start" found — run the seed script first.');
    process.exit(1);
  }

  console.log('Homepage blocks found:', homepage.content?.root?.children?.length);
  process.exit(0);
}

// Surface failures instead of leaving an unhandled promise rejection.
run().catch((err) => {
  console.error('❌ Check failed:', err);
  process.exit(1);
});