chore: remove legacy mdx artifacts and dependencies
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 7s
Build & Deploy / 🧪 QA (push) Failing after 55s
Build & Deploy / 🏗️ Build (push) Has been skipped
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🧪 Post-Deploy Verification (push) Has been skipped
Build & Deploy / ⚡ Performance & Accessibility (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 2s

This commit is contained in:
2026-02-26 01:47:30 +01:00
parent 7d65237ee9
commit 3de13b4fb3
132 changed files with 521 additions and 223914 deletions

View File

@@ -49,6 +49,6 @@ echo "🚀 Starting Deep Link Assessment (Lychee)..."
--exclude "fonts\." \
--base-url "http://127.0.0.1" \
--accept 200,204,308,401,403,999 \
"./data/**/*.mdx" "./data/**/*.md" "./app/**/*.tsx" "./components/**/*.tsx"
"./app/**/*.tsx" "./components/**/*.tsx"
echo "✅ All project source links are alive and healthy!"

View File

@@ -6,14 +6,15 @@
*
* Source of truth:
* - All technical data + cross-section tables: Excel files in `data/excel/`
* - Product description text: MDX files in `data/products/{en,de}/*.mdx`
* - Product description text: Fetched dynamically from Payload CMS
*/
import * as fs from 'fs';
import * as path from 'path';
import * as XLSX from 'xlsx';
const matter = require('gray-matter') as (src: string) => { data: unknown; content: string };
import { getPayload } from 'payload';
import configPromise from '@payload-config';
import type { ProductData } from './pdf/model/types';
import { generateDatasheetPdfBuffer } from './pdf/react-pdf/generate-datasheet-pdf';
@@ -26,13 +27,19 @@ const CONFIG = {
const EXCEL_FILES = [
{ path: path.join(process.cwd(), 'data/excel/high-voltage.xlsx'), voltageType: 'high-voltage' },
{ path: path.join(process.cwd(), 'data/excel/medium-voltage-KM.xlsx'), voltageType: 'medium-voltage' },
{ path: path.join(process.cwd(), 'data/excel/medium-voltage-KM 170126.xlsx'), voltageType: 'medium-voltage' },
{
path: path.join(process.cwd(), 'data/excel/medium-voltage-KM.xlsx'),
voltageType: 'medium-voltage',
},
{
path: path.join(process.cwd(), 'data/excel/medium-voltage-KM 170126.xlsx'),
voltageType: 'medium-voltage',
},
{ path: path.join(process.cwd(), 'data/excel/low-voltage-KM.xlsx'), voltageType: 'low-voltage' },
{ path: path.join(process.cwd(), 'data/excel/solar-cables.xlsx'), voltageType: 'solar' },
] as const;
type MdxProduct = {
type CmsProduct = {
slug: string;
title: string;
sku: string;
@@ -42,7 +49,7 @@ type MdxProduct = {
applicationHtml: string;
};
type MdxIndex = Map<string, MdxProduct>; // key: normalized designation/title
type CmsIndex = Map<string, CmsProduct>; // key: normalized designation/title
function ensureOutputDir(): void {
if (!fs.existsSync(CONFIG.outputDir)) {
@@ -57,39 +64,51 @@ function normalizeExcelKey(value: string): string {
.replace(/[^A-Z0-9]+/g, '');
}
function extractDescriptionFromMdxFrontmatter(data: any): string {
const description = normalizeValue(String(data?.description || ''));
return description;
}
async function buildCmsIndex(locale: 'en' | 'de'): Promise<CmsIndex> {
const idx: CmsIndex = new Map();
try {
const payload = await getPayload({ config: configPromise });
const isDev = process.env.NODE_ENV === 'development';
const result = await payload.find({
collection: 'products',
where: {
...(!isDev ? { _status: { equals: 'published' } } : {}),
},
locale: locale as any,
pagination: false,
});
function buildMdxIndex(locale: 'en' | 'de'): MdxIndex {
const dir = path.join(process.cwd(), 'data/products', locale);
const idx: MdxIndex = new Map();
if (!fs.existsSync(dir)) return idx;
for (const doc of result.docs) {
if (!doc.title) continue;
const files = fs
.readdirSync(dir)
.filter(f => f.endsWith('.mdx'))
.sort();
const title = normalizeValue(String(doc.title));
const sku = normalizeValue(String(doc.sku || ''));
const categories = Array.isArray(doc.categories)
? doc.categories.map((c: any) => normalizeValue(String(c.category || c))).filter(Boolean)
: [];
for (const file of files) {
const filePath = path.join(dir, file);
const raw = fs.readFileSync(filePath, 'utf8');
const parsed = matter(raw);
const data = (parsed.data || {}) as any;
const images = Array.isArray(doc.images)
? doc.images
.map((i: any) => normalizeValue(String(typeof i === 'string' ? i : i.url)))
.filter(Boolean)
: [];
const title = normalizeValue(String(data.title || ''));
if (!title) continue;
const descriptionHtml = normalizeValue(String(doc.description || ''));
const applicationHtml = ''; // Application usually part of description in Payload now
const sku = normalizeValue(String(data.sku || ''));
const categories = Array.isArray(data.categories) ? data.categories.map((c: any) => normalizeValue(String(c))).filter(Boolean) : [];
const images = Array.isArray(data.images) ? data.images.map((i: any) => normalizeValue(String(i))).filter(Boolean) : [];
const descriptionHtml = extractDescriptionFromMdxFrontmatter(data);
const applicationHtml = normalizeValue(String(data?.application || ''));
const slug = path.basename(file, '.mdx');
idx.set(normalizeExcelKey(title), { slug, title, sku, categories, images, descriptionHtml, applicationHtml });
const slug = doc.slug || '';
idx.set(normalizeExcelKey(title), {
slug,
title,
sku,
categories,
images,
descriptionHtml,
applicationHtml,
});
}
} catch (error) {
console.error(`[Payload] Failed to fetch products for CMS index (${locale}):`, error);
}
return idx;
@@ -157,46 +176,46 @@ function loadAllExcelDesignations(): Map<string, { designation: string; voltageT
return out;
}
async function loadProductsFromExcelAndMdx(locale: 'en' | 'de'): Promise<ProductData[]> {
const mdxIndex = buildMdxIndex(locale);
async function loadProductsFromExcelAndCms(locale: 'en' | 'de'): Promise<ProductData[]> {
const cmsIndex = await buildCmsIndex(locale);
const excelDesignations = loadAllExcelDesignations();
const products: ProductData[] = [];
let id = 1;
Array.from(excelDesignations.entries()).forEach(([key, data]) => {
const mdx = mdxIndex.get(key) || null;
const cmsItem = cmsIndex.get(key) || null;
const title = mdx?.title || data.designation;
const title = cmsItem?.title || data.designation;
const slug =
mdx?.slug ||
cmsItem?.slug ||
title
.toLowerCase()
.replace(/[^a-z0-9]+/g, '-')
.replace(/-+/g, '-')
.replace(/^-|-$/g, '');
// Only the product description comes from MDX. Everything else is Excel-driven
// Only the product description comes from CMS. Everything else is Excel-driven
// during model building (technicalItems + voltage tables).
const descriptionHtml = mdx?.descriptionHtml || '';
const descriptionHtml = cmsItem?.descriptionHtml || '';
products.push({
id: id++,
name: title,
shortDescriptionHtml: '',
descriptionHtml,
applicationHtml: mdx?.applicationHtml || '',
images: mdx?.images || [],
featuredImage: (mdx?.images && mdx.images[0]) || null,
sku: mdx?.sku || title,
applicationHtml: cmsItem?.applicationHtml || '',
images: cmsItem?.images || [],
featuredImage: (cmsItem?.images && cmsItem.images[0]) || null,
sku: cmsItem?.sku || title,
slug,
translationKey: slug,
locale,
categories: (mdx?.categories || []).map(name => ({ name })),
categories: (cmsItem?.categories || []).map((name) => ({ name })),
attributes: [],
voltageType: (() => {
const cats = (mdx?.categories || []).map(c => String(c));
const isMV = cats.some(c => /medium[-\s]?voltage|mittelspannung/i.test(c));
const cats = (cmsItem?.categories || []).map((c) => String(c));
const isMV = cats.some((c) => /medium[-\s]?voltage|mittelspannung/i.test(c));
if (isMV && data.voltageType === 'high-voltage') return 'medium-voltage';
return data.voltageType;
})(),
@@ -204,33 +223,41 @@ async function loadProductsFromExcelAndMdx(locale: 'en' | 'de'): Promise<Product
});
// Deterministic order: by slug, then name.
products.sort((a, b) => (a.slug || '').localeCompare(b.slug || '') || a.name.localeCompare(b.name));
products.sort(
(a, b) => (a.slug || '').localeCompare(b.slug || '') || a.name.localeCompare(b.name),
);
// Drop products that have no readable name.
return products.filter(p => stripHtml(p.name));
return products.filter((p) => stripHtml(p.name));
}
async function processChunk(products: ProductData[], chunkIndex: number, totalChunks: number): Promise<void> {
console.log(`\nProcessing chunk ${chunkIndex + 1}/${totalChunks} (${products.length} products)...`);
async function processChunk(
products: ProductData[],
chunkIndex: number,
totalChunks: number,
): Promise<void> {
console.log(
`\nProcessing chunk ${chunkIndex + 1}/${totalChunks} (${products.length} products)...`,
);
for (const product of products) {
try {
const locale = (product.locale || 'en') as 'en' | 'de';
const buffer = await generateDatasheetPdfBuffer({ product, locale });
const fileName = generateFileName(product, locale);
// Determine subfolder based on voltage type
const voltageType = (product as any).voltageType || 'other';
const subfolder = path.join(CONFIG.outputDir, voltageType);
// Create subfolder if it doesn't exist
if (!fs.existsSync(subfolder)) {
fs.mkdirSync(subfolder, { recursive: true });
}
fs.writeFileSync(path.join(subfolder, fileName), buffer);
console.log(`${locale.toUpperCase()}: ${voltageType}/${fileName}`);
await new Promise(resolve => setTimeout(resolve, 25));
await new Promise((resolve) => setTimeout(resolve, 25));
} catch (error) {
console.error(`✗ Failed to process product ${product.id}:`, error);
}
@@ -242,11 +269,12 @@ async function processProductsInChunks(): Promise<void> {
ensureOutputDir();
const onlyLocale = normalizeValue(String(process.env.PDF_LOCALE || '')).toLowerCase();
const locales: Array<'en' | 'de'> = onlyLocale === 'de' || onlyLocale === 'en' ? [onlyLocale] : ['en', 'de'];
const locales: Array<'en' | 'de'> =
onlyLocale === 'de' || onlyLocale === 'en' ? [onlyLocale] : ['en', 'de'];
const allProducts: ProductData[] = [];
for (const locale of locales) {
const products = await loadProductsFromExcelAndMdx(locale);
const products = await loadProductsFromExcelAndCms(locale);
allProducts.push(...products);
}
@@ -261,11 +289,8 @@ async function processProductsInChunks(): Promise<void> {
const match = normalizeValue(String(process.env.PDF_MATCH || '')).toLowerCase();
if (match) {
products = products.filter(p => {
const hay = [p.slug, p.translationKey, p.sku, p.name]
.filter(Boolean)
.join(' ')
.toLowerCase();
products = products.filter((p) => {
const hay = [p.slug, p.translationKey, p.sku, p.name].filter(Boolean).join(' ').toLowerCase();
return hay.includes(match);
});
}
@@ -273,8 +298,8 @@ async function processProductsInChunks(): Promise<void> {
const limit = Number(process.env.PDF_LIMIT || '0');
products = Number.isFinite(limit) && limit > 0 ? products.slice(0, limit) : products;
const enProducts = products.filter(p => (p.locale || 'en') === 'en');
const deProducts = products.filter(p => (p.locale || 'en') === 'de');
const enProducts = products.filter((p) => (p.locale || 'en') === 'en');
const deProducts = products.filter((p) => (p.locale || 'en') === 'de');
console.log(`Found ${enProducts.length} EN + ${deProducts.length} DE products`);
const totalChunks = Math.ceil(products.length / CONFIG.chunkSize);

View File

@@ -1,55 +0,0 @@
import { compile } from '@mdx-js/mdx';
import fs from 'node:fs';
import path from 'node:path';
import matter from 'gray-matter';
const TARGET_DIRS = ['./data/blog', './data/products'];
/**
 * Recursively gather every file path beneath `dirPath`.
 *
 * @param {string} dirPath - Directory to walk; tolerated if it does not exist.
 * @param {string[]} [arrayOfFiles] - Accumulator reused across recursive calls.
 * @returns {string[]} The accumulator (or a fresh array) with all file paths appended.
 */
function getAllFiles(dirPath, arrayOfFiles) {
  const collected = arrayOfFiles || [];
  // Missing content roots are not an error — just contribute nothing.
  if (!fs.existsSync(dirPath)) return collected;
  for (const entry of fs.readdirSync(dirPath)) {
    const entryPath = path.join(dirPath, entry);
    // statSync (not lstat) follows symlinks, matching the original traversal.
    if (fs.statSync(entryPath).isDirectory()) {
      getAllFiles(entryPath, collected);
    } else {
      collected.push(entryPath);
    }
  }
  return collected;
}
// Discover every .mdx file under the configured content roots.
const allMdxFiles = TARGET_DIRS.flatMap(dir => getAllFiles(dir)).filter(file => file.endsWith('.mdx'));
console.log(`Found ${allMdxFiles.length} MDX files to validate...`);
// Count of files that failed MDX compilation; drives the exit code below.
let errorCount = 0;
for (const file of allMdxFiles) {
const fileContent = fs.readFileSync(file, 'utf8');
// gray-matter strips the YAML frontmatter; only the MDX body is compiled.
const { content } = matter(fileContent);
try {
// Attempt to compile MDX content
await compile(content);
} catch (err) {
// \x1b[31m .. \x1b[0m wraps the filename in red ANSI color for terminal output.
console.error(`\x1b[31mError in ${file}:\x1b[0m`);
console.error(err.message);
// MDX compile errors usually carry a position; surface it when present.
if (err.line && err.column) {
console.error(`At line ${err.line}, column ${err.column}`);
}
errorCount++;
}
}
// Exit non-zero on any failure so CI marks the validation step as failed.
if (errorCount > 0) {
console.error(`\n\x1b[31mValidation failed: ${errorCount} errors found.\x1b[0m`);
process.exit(1);
} else {
console.log('\n\x1b[32mAll MDX files are valid.\x1b[0m');
}