fix(blog): optimize component share logic, typography, and modal layouts
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 5s
Build & Deploy / 🏗️ Build (push) Failing after 14s
Build & Deploy / 🧪 QA (push) Failing after 1m48s
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🩺 Health Check (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 2s
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 5s
Build & Deploy / 🏗️ Build (push) Failing after 14s
Build & Deploy / 🧪 QA (push) Failing after 1m48s
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🩺 Health Check (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 2s
This commit is contained in:
@@ -208,7 +208,7 @@ async function main() {
|
||||
if (usage.prompt > 0) {
|
||||
console.log("--------------------------------------------------");
|
||||
console.log("📊 ACCUMULATED API USAGE (SUM OF 6 PASSES)");
|
||||
console.log(` Model: google/gemini-3-flash-preview`);
|
||||
console.log(` Model: google/gemini-2.5-flash`);
|
||||
console.log(` Total Prompt: ${usage.prompt.toLocaleString()}`);
|
||||
console.log(` Total Completion: ${usage.completion.toLocaleString()}`);
|
||||
console.log(
|
||||
@@ -244,7 +244,7 @@ Return ONLY the bullet points. No intro/outro.
|
||||
const resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: systemPrompt },
|
||||
{
|
||||
@@ -440,17 +440,16 @@ Focus 100% on the BRIEFING text provided by the user. Use the DISTILLED_CRAWL on
|
||||
- API Integrations: 800 € / stk
|
||||
- Inhalts-Verwaltung (CMS-Modul): 1.500 € (optional)
|
||||
|
||||
${
|
||||
budget
|
||||
? `### BUDGET LOGIC (ULTRA-STRICT):
|
||||
${budget
|
||||
? `### BUDGET LOGIC (ULTRA-STRICT):
|
||||
1. **Mental Calculation**: Start with 7.000 €. Add items based on the reference above.
|
||||
2. **Hard Ceiling**: If total > ${budget}, you MUST discard lower priority items.
|
||||
3. **Priority**: High-End Design and Core Pages > Features.
|
||||
4. **Restriction**: For ${budget}, do NOT exceed 2 features and 4 extra pages.
|
||||
5. THE TOTAL COST CALCULATED BY THESE RULES MUST BE <= ${budget}.
|
||||
6. Do NOT mention the budget in any string fields.`
|
||||
: ""
|
||||
}
|
||||
: ""
|
||||
}
|
||||
|
||||
- ** features **: Items from the FEATURE_REFERENCE.
|
||||
- ** ABSOLUTE CONSERVATIVE RULE **: Only use features if the briefing implies *dynamic complexity* (CMS, filtering, search, database).
|
||||
@@ -500,7 +499,7 @@ ${
|
||||
const p1Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass1SystemPrompt },
|
||||
{ role: "user", content: pass1UserPrompt },
|
||||
@@ -547,7 +546,7 @@ Return only the corrected 'features' and 'otherPages' arrays.
|
||||
const p15Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass15SystemPrompt },
|
||||
{
|
||||
@@ -603,7 +602,7 @@ ${JSON.stringify(facts, null, 2)}
|
||||
const p2Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass2SystemPrompt },
|
||||
{ role: "user", content: briefing },
|
||||
@@ -658,7 +657,7 @@ ${tone}
|
||||
const p3Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass3SystemPrompt },
|
||||
{
|
||||
@@ -712,7 +711,7 @@ ${JSON.stringify({ facts, strategy }, null, 2)}
|
||||
const p4Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass4SystemPrompt },
|
||||
{
|
||||
@@ -808,7 +807,7 @@ ${JSON.stringify({ facts, details, strategy, ia }, null, 2)}
|
||||
const p5Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass5SystemPrompt },
|
||||
{ role: "user", content: briefing },
|
||||
@@ -862,7 +861,7 @@ ${JSON.stringify({ facts, strategy, ia, positionsData }, null, 2)}
|
||||
const p6Resp = await axios.post(
|
||||
"https://openrouter.ai/api/v1/chat/completions",
|
||||
{
|
||||
model: "google/gemini-3-flash-preview",
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{ role: "system", content: pass6SystemPrompt },
|
||||
{ role: "user", content: `BRIEFING_TRUTH: \n${briefing} ` },
|
||||
@@ -1004,8 +1003,8 @@ ${JSON.stringify({ facts, strategy, ia, positionsData }, null, 2)}
|
||||
const normalizedValue =
|
||||
typeof value === "object"
|
||||
? (value as any).beschreibung ||
|
||||
(value as any).description ||
|
||||
JSON.stringify(value)
|
||||
(value as any).description ||
|
||||
JSON.stringify(value)
|
||||
: value;
|
||||
normalized[key] = normalizedValue as string;
|
||||
});
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
function cleanMermaidFormatting(content: string): string {
|
||||
// Fix: The repair script reformatted <Mermaid> tags badly with extra blank lines
|
||||
// Pattern: <Mermaid\n \n graph={`...`}\n \n />
|
||||
// Should be: <Mermaid\n graph={`...`}\n id="..."\n .../>
|
||||
|
||||
// Remove empty lines between <Mermaid and graph=
|
||||
let result = content.replace(/<Mermaid\s*\n\s*\n\s*graph=/g, '<Mermaid\n graph=');
|
||||
|
||||
// Remove trailing empty space before />
|
||||
result = result.replace(/`\}\s*\n\s*\n\s*\/>/g, '`}\n />');
|
||||
|
||||
// Fix: Remove trailing whitespace-only lines before id= or title= or showShare=
|
||||
result = result.replace(/`\}\s*\n\s*\n\s*(id=)/g, '`}\n $1');
|
||||
result = result.replace(/`\}\s*\n\s*\n\s*(title=)/g, '`}\n $1');
|
||||
result = result.replace(/`\}\s*\n\s*\n\s*(showShare=)/g, '`}\n $1');
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const cleaned = cleanMermaidFormatting(content);
|
||||
|
||||
if (content !== cleaned) {
|
||||
fs.writeFileSync(filePath, cleaned);
|
||||
fixCount++;
|
||||
console.log(`✅ Cleaned formatting in ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} OK`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal cleaned: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,365 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Convert <Mermaid graph={`...`} id="..." ... /> to <Mermaid id="..." ...>{`...`}</Mermaid>
|
||||
* This fixes the RSC serialization issue where template literal props are stripped.
|
||||
*/
|
||||
function convertMermaidToChildren(content: string): string {
|
||||
// Match <Mermaid ... graph={`...`} ... /> (self-closing)
|
||||
// We need a multi-pass approach since the graph prop can appear anywhere in the tag
|
||||
|
||||
const mermaidBlockRegex = /<Mermaid\s+([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(mermaidBlockRegex, (match) => {
|
||||
// Extract the graph prop value (template literal)
|
||||
const graphMatch = match.match(/graph=\{`([\s\S]*?)`\}/);
|
||||
if (!graphMatch) return match; // No graph prop, skip
|
||||
|
||||
const graphContent = graphMatch[1];
|
||||
|
||||
// Remove the graph prop from the tag
|
||||
let cleanedTag = match.replace(/\s*graph=\{`[\s\S]*?`\}\s*/g, ' ');
|
||||
|
||||
// Remove the self-closing /> and add children
|
||||
cleanedTag = cleanedTag.replace(/\s*\/>$/, '');
|
||||
|
||||
// Clean up extra whitespace
|
||||
cleanedTag = cleanedTag.replace(/\s+/g, ' ').trim();
|
||||
|
||||
return `${cleanedTag}>\n{\`${graphContent}\`}\n</Mermaid>`;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert <DiagramPie data={[...]} ... /> to <Mermaid ...>{`pie\n "label": value\n...`}</Mermaid>
|
||||
*/
|
||||
function convertDiagramPie(content: string): string {
|
||||
const pieRegex = /<DiagramPie\s+([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(pieRegex, (match) => {
|
||||
// Extract data array
|
||||
const dataMatch = match.match(/data=\{\[([\s\S]*?)\]\}/);
|
||||
if (!dataMatch) return match;
|
||||
|
||||
const dataStr = dataMatch[1];
|
||||
|
||||
// Parse the array items: { label: "...", value: ... }
|
||||
const items: { label: string; value: number }[] = [];
|
||||
const itemRegex = /\{\s*label:\s*"([^"]+)"\s*,\s*value:\s*(\d+)\s*\}/g;
|
||||
let itemMatch;
|
||||
while ((itemMatch = itemRegex.exec(dataStr)) !== null) {
|
||||
items.push({ label: itemMatch[1], value: parseInt(itemMatch[2]) });
|
||||
}
|
||||
|
||||
if (items.length === 0) return match;
|
||||
|
||||
// Build pie chart Mermaid syntax
|
||||
const pieLines = items.map(item => ` "${item.label}" : ${item.value}`).join('\n');
|
||||
const pieGraph = `pie\n${pieLines}`;
|
||||
|
||||
// Extract other props
|
||||
const titleMatch = match.match(/title="([^"]+)"/);
|
||||
const captionMatch = match.match(/caption="([^"]+)"/);
|
||||
const idMatch = match.match(/id="([^"]+)"/);
|
||||
const showShareMatch = match.match(/showShare=\{(true|false)\}/);
|
||||
|
||||
const title = titleMatch ? titleMatch[1] : '';
|
||||
const caption = captionMatch ? captionMatch[1] : '';
|
||||
const id = idMatch ? idMatch[1] : '';
|
||||
const showShare = showShareMatch ? showShareMatch[1] : 'true';
|
||||
|
||||
// Build replacement with Mermaid component wrapped in div
|
||||
let result = `<div className="my-12">\n <Mermaid`;
|
||||
if (id) result += ` id="${id}"`;
|
||||
if (title) result += ` title="${title}"`;
|
||||
result += ` showShare={${showShare}}>`;
|
||||
result += `\n{\`${pieGraph}\`}\n</Mermaid>`;
|
||||
if (caption) {
|
||||
result += `\n <div className="text-center text-xs text-slate-400 mt-4 italic">\n ${caption}\n </div>`;
|
||||
}
|
||||
result += `\n</div>`;
|
||||
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert <DiagramGantt tasks={[...]} ... /> to <Mermaid ...>{`gantt\n...`}</Mermaid>
|
||||
*/
|
||||
function convertDiagramGantt(content: string): string {
|
||||
const ganttRegex = /<DiagramGantt\s+([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(ganttRegex, (match) => {
|
||||
// Extract tasks array
|
||||
const tasksMatch = match.match(/tasks=\{\[([\s\S]*?)\]\}/);
|
||||
if (!tasksMatch) return match;
|
||||
|
||||
const tasksStr = tasksMatch[1];
|
||||
|
||||
// Parse the task items
|
||||
const taskRegex = /\{\s*id:\s*"([^"]+)"\s*,\s*name:\s*"([^"]+)"\s*,\s*start:\s*"([^"]+)"\s*,\s*duration:\s*"([^"]+)"\s*(?:,\s*dependencies:\s*\[([^\]]*)\])?\s*\}/g;
|
||||
const tasks: { id: string; name: string; start: string; duration: string; deps?: string }[] = [];
|
||||
let taskMatch;
|
||||
while ((taskMatch = taskRegex.exec(tasksStr)) !== null) {
|
||||
tasks.push({
|
||||
id: taskMatch[1],
|
||||
name: taskMatch[2],
|
||||
start: taskMatch[3],
|
||||
duration: taskMatch[4],
|
||||
deps: taskMatch[5] || '',
|
||||
});
|
||||
}
|
||||
|
||||
if (tasks.length === 0) return match;
|
||||
|
||||
// Build gantt chart Mermaid syntax
|
||||
const ganttLines = tasks.map(task => {
|
||||
const deps = task.deps ? `, after ${task.deps.replace(/"/g, '').trim()}` : '';
|
||||
return ` ${task.name} :${task.id}, ${task.start}, ${task.duration}${deps}`;
|
||||
}).join('\n');
|
||||
const ganttGraph = `gantt\n dateFormat YYYY-MM-DD\n${ganttLines}`;
|
||||
|
||||
// Extract other props
|
||||
const titleMatch = match.match(/title="([^"]+)"/);
|
||||
const captionMatch = match.match(/caption="([^"]+)"/);
|
||||
const idMatch = match.match(/id="([^"]+)"/);
|
||||
const showShareMatch = match.match(/showShare=\{(true|false)\}/);
|
||||
|
||||
const title = titleMatch ? titleMatch[1] : '';
|
||||
const caption = captionMatch ? captionMatch[1] : '';
|
||||
const id = idMatch ? idMatch[1] : '';
|
||||
const showShare = showShareMatch ? showShareMatch[1] : 'true';
|
||||
|
||||
let result = `<div className="my-12">\n <Mermaid`;
|
||||
if (id) result += ` id="${id}"`;
|
||||
if (title) result += ` title="${title}"`;
|
||||
result += ` showShare={${showShare}}>`;
|
||||
result += `\n{\`${ganttGraph}\`}\n</Mermaid>`;
|
||||
if (caption) {
|
||||
result += `\n <div className="text-center text-xs text-slate-400 mt-4 italic">\n ${caption}\n </div>`;
|
||||
}
|
||||
result += `\n</div>`;
|
||||
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert <DiagramSequence participants={[...]} messages={[...]} ... /> to <Mermaid ...>{`sequenceDiagram\n...`}</Mermaid>
|
||||
*/
|
||||
function convertDiagramSequence(content: string): string {
|
||||
const seqRegex = /<DiagramSequence\s+([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(seqRegex, (match) => {
|
||||
// Extract participants array
|
||||
const participantsMatch = match.match(/participants=\{\[([\s\S]*?)\]\}/);
|
||||
if (!participantsMatch) return match;
|
||||
|
||||
// Extract messages array
|
||||
const messagesMatch = match.match(/messages=\{\[([\s\S]*?)\]\}/);
|
||||
if (!messagesMatch) return match;
|
||||
|
||||
const participantsStr = participantsMatch[1];
|
||||
const messagesStr = messagesMatch[1];
|
||||
|
||||
// Parse participants
|
||||
const participants = participantsStr.match(/"([^"]+)"/g)?.map(s => s.replace(/"/g, '')) || [];
|
||||
|
||||
// Parse messages
|
||||
const msgRegex = /\{\s*from:\s*"([^"]+)"\s*,\s*to:\s*"([^"]+)"\s*,\s*message:\s*"([^"]+)"(?:\s*,\s*type:\s*"([^"]+)")?\s*\}/g;
|
||||
const messages: { from: string; to: string; message: string; type?: string }[] = [];
|
||||
let msgMatch;
|
||||
while ((msgMatch = msgRegex.exec(messagesStr)) !== null) {
|
||||
messages.push({
|
||||
from: msgMatch[1],
|
||||
to: msgMatch[2],
|
||||
message: msgMatch[3],
|
||||
type: msgMatch[4],
|
||||
});
|
||||
}
|
||||
|
||||
if (participants.length === 0 || messages.length === 0) return match;
|
||||
|
||||
// Build sequence diagram Mermaid syntax
|
||||
const getArrow = (type?: string) => {
|
||||
switch (type) {
|
||||
case 'dotted': return '-->>';
|
||||
case 'async': return '->>';
|
||||
default: return '->>';
|
||||
}
|
||||
};
|
||||
|
||||
const participantLines = participants.map(p => ` participant ${p}`).join('\n');
|
||||
const messageLines = messages.map(m => ` ${m.from}${getArrow(m.type)}${m.to}: ${m.message}`).join('\n');
|
||||
const seqGraph = `sequenceDiagram\n${participantLines}\n${messageLines}`;
|
||||
|
||||
// Extract other props
|
||||
const titleMatch = match.match(/title="([^"]+)"/);
|
||||
const captionMatch = match.match(/caption="([^"]+)"/);
|
||||
const idMatch = match.match(/id="([^"]+)"/);
|
||||
const showShareMatch = match.match(/showShare=\{(true|false)\}/);
|
||||
|
||||
const title = titleMatch ? titleMatch[1] : '';
|
||||
const caption = captionMatch ? captionMatch[1] : '';
|
||||
const id = idMatch ? idMatch[1] : '';
|
||||
const showShare = showShareMatch ? showShareMatch[1] : 'true';
|
||||
|
||||
let result = `<div className="my-12">\n <Mermaid`;
|
||||
if (id) result += ` id="${id}"`;
|
||||
if (title) result += ` title="${title}"`;
|
||||
result += ` showShare={${showShare}}>`;
|
||||
result += `\n{\`${seqGraph}\`}\n</Mermaid>`;
|
||||
if (caption) {
|
||||
result += `\n <div className="text-center text-xs text-slate-400 mt-4 italic">\n ${caption}\n </div>`;
|
||||
}
|
||||
result += `\n</div>`;
|
||||
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert <DiagramTimeline events={[...]} ... /> to <Mermaid ...>{`timeline\n...`}</Mermaid>
|
||||
*/
|
||||
function convertDiagramTimeline(content: string): string {
|
||||
const timelineRegex = /<DiagramTimeline\s+([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(timelineRegex, (match) => {
|
||||
const eventsMatch = match.match(/events=\{\[([\s\S]*?)\]\}/);
|
||||
if (!eventsMatch) return match;
|
||||
|
||||
const eventsStr = eventsMatch[1];
|
||||
|
||||
const eventRegex = /\{\s*year:\s*"([^"]+)"\s*,\s*title:\s*"([^"]+)"\s*\}/g;
|
||||
const events: { year: string; title: string }[] = [];
|
||||
let eventMatch;
|
||||
while ((eventMatch = eventRegex.exec(eventsStr)) !== null) {
|
||||
events.push({ year: eventMatch[1], title: eventMatch[2] });
|
||||
}
|
||||
|
||||
if (events.length === 0) return match;
|
||||
|
||||
const titleMatch = match.match(/title="([^"]+)"/);
|
||||
const captionMatch = match.match(/caption="([^"]+)"/);
|
||||
const idMatch = match.match(/id="([^"]+)"/);
|
||||
const showShareMatch = match.match(/showShare=\{(true|false)\}/);
|
||||
|
||||
const title = titleMatch ? titleMatch[1] : '';
|
||||
const caption = captionMatch ? captionMatch[1] : '';
|
||||
const id = idMatch ? idMatch[1] : '';
|
||||
const showShare = showShareMatch ? showShareMatch[1] : 'true';
|
||||
|
||||
const eventLines = events.map(e => ` ${e.year} : ${e.title}`).join('\n');
|
||||
const timelineGraph = `timeline\n title ${title || 'Timeline'}\n${eventLines}`;
|
||||
|
||||
let result = `<div className="my-12">\n <Mermaid`;
|
||||
if (id) result += ` id="${id}"`;
|
||||
if (title) result += ` title="${title}"`;
|
||||
result += ` showShare={${showShare}}>`;
|
||||
result += `\n{\`${timelineGraph}\`}\n</Mermaid>`;
|
||||
if (caption) {
|
||||
result += `\n <div className="text-center text-xs text-slate-400 mt-4 italic">\n ${caption}\n </div>`;
|
||||
}
|
||||
result += `\n</div>`;
|
||||
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert <DiagramState states={[...]} transitions={[...]} ... /> to <Mermaid ...>{`stateDiagram-v2\n...`}</Mermaid>
|
||||
*/
|
||||
function convertDiagramState(content: string): string {
|
||||
const stateRegex = /<DiagramState\s+([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(stateRegex, (match) => {
|
||||
// Extract transitions
|
||||
const transitionsMatch = match.match(/transitions=\{\[([\s\S]*?)\]\}/);
|
||||
if (!transitionsMatch) return match;
|
||||
|
||||
const transitionsStr = transitionsMatch[1];
|
||||
|
||||
const transRegex = /\{\s*from:\s*"([^"]+)"\s*,\s*to:\s*"([^"]+)"(?:\s*,\s*label:\s*"([^"]+)")?\s*\}/g;
|
||||
const transitions: { from: string; to: string; label?: string }[] = [];
|
||||
let transMatch;
|
||||
while ((transMatch = transRegex.exec(transitionsStr)) !== null) {
|
||||
transitions.push({
|
||||
from: transMatch[1],
|
||||
to: transMatch[2],
|
||||
label: transMatch[3],
|
||||
});
|
||||
}
|
||||
|
||||
if (transitions.length === 0) return match;
|
||||
|
||||
// Extract initialState
|
||||
const initialStateMatch = match.match(/initialState="([^"]+)"/);
|
||||
const initialState = initialStateMatch ? initialStateMatch[1] : '';
|
||||
|
||||
// Extract finalStates
|
||||
const finalStatesMatch = match.match(/finalStates=\{\[([^\]]*)\]\}/);
|
||||
const finalStatesStr = finalStatesMatch ? finalStatesMatch[1] : '';
|
||||
const finalStates = finalStatesStr.match(/"([^"]+)"/g)?.map(s => s.replace(/"/g, '')) || [];
|
||||
|
||||
const titleMatch = match.match(/title="([^"]+)"/);
|
||||
const captionMatch = match.match(/caption="([^"]+)"/);
|
||||
const idMatch = match.match(/id="([^"]+)"/);
|
||||
const showShareMatch = match.match(/showShare=\{(true|false)\}/);
|
||||
|
||||
const title = titleMatch ? titleMatch[1] : '';
|
||||
const caption = captionMatch ? captionMatch[1] : '';
|
||||
const id = idMatch ? idMatch[1] : '';
|
||||
const showShare = showShareMatch ? showShareMatch[1] : 'true';
|
||||
|
||||
let stateLines = 'stateDiagram-v2';
|
||||
if (initialState) {
|
||||
stateLines += `\n [*] --> ${initialState}`;
|
||||
}
|
||||
stateLines += '\n' + transitions.map(t => {
|
||||
const label = t.label ? ` : ${t.label}` : '';
|
||||
return ` ${t.from} --> ${t.to}${label}`;
|
||||
}).join('\n');
|
||||
stateLines += '\n' + finalStates.map(s => ` ${s} --> [*]`).join('\n');
|
||||
|
||||
let result = `<div className="my-12">\n <Mermaid`;
|
||||
if (id) result += ` id="${id}"`;
|
||||
if (title) result += ` title="${title}"`;
|
||||
result += ` showShare={${showShare}}>`;
|
||||
result += `\n{\`${stateLines}\`}\n</Mermaid>`;
|
||||
if (caption) {
|
||||
result += `\n <div className="text-center text-xs text-slate-400 mt-4 italic">\n ${caption}\n </div>`;
|
||||
}
|
||||
result += `\n</div>`;
|
||||
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
let content = fs.readFileSync(filePath, 'utf8');
|
||||
const original = content;
|
||||
|
||||
content = convertMermaidToChildren(content);
|
||||
content = convertDiagramPie(content);
|
||||
content = convertDiagramGantt(content);
|
||||
content = convertDiagramSequence(content);
|
||||
content = convertDiagramTimeline(content);
|
||||
content = convertDiagramState(content);
|
||||
|
||||
if (content !== original) {
|
||||
fs.writeFileSync(filePath, content);
|
||||
console.log(`✅ Converted diagrams in ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no changes needed)`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,56 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Convert ugly single-line graph="..." props to clean multi-line children.
|
||||
*
|
||||
* FROM:
|
||||
* <Mermaid graph="graph TD\n A-->B\n B-->C" id="..." title="..." />
|
||||
*
|
||||
* TO:
|
||||
* <Mermaid id="..." title="...">
|
||||
* graph TD
|
||||
* A-->B
|
||||
* B-->C
|
||||
* </Mermaid>
|
||||
*/
|
||||
function convertToChildren(content: string): string {
|
||||
// Match <Mermaid graph="..." ... />
|
||||
const mermaidRegex = /<Mermaid\s+graph="([^"]*)"([^>]*?)\/>/g;
|
||||
|
||||
return content.replace(mermaidRegex, (match, graphValue, otherProps) => {
|
||||
// Unescape \n to real newlines
|
||||
const cleanGraph = graphValue.replace(/\\n/g, '\n');
|
||||
|
||||
// Clean up other props
|
||||
const cleanProps = otherProps.trim();
|
||||
|
||||
// Build the new format with children
|
||||
return `<Mermaid${cleanProps ? ' ' + cleanProps : ''}>\n${cleanGraph}\n</Mermaid>`;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = convertToChildren(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Converted to children: ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no changes needed)`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal converted: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,46 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Convert graph={"..."} to graph="..." (plain string prop without JSX expression wrapper).
|
||||
* MDXRemote RSC strips JSX expression props but keeps plain string props.
|
||||
*
|
||||
* But we also need the escape sequences to go through.
|
||||
* The plain string prop `graph="graph TD\nA-->B"` will have the \n treated as
|
||||
* literal characters by MDX's parser, not as a newline. The Mermaid component
|
||||
* then unescapes them.
|
||||
*/
|
||||
function convertToPlainStringProps(content: string): string {
|
||||
// Match graph={" ... "} and convert to graph="..."
|
||||
// The content inside should already have escaped newlines and quotes
|
||||
const pattern = /graph=\{"((?:[^"\\]|\\.)*)"\}/g;
|
||||
|
||||
return content.replace(pattern, (match, graphContent) => {
|
||||
return `graph="${graphContent}"`;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = convertToPlainStringProps(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Converted: ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no changes needed)`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal converted: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,60 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Convert all Mermaid children syntax back to graph prop,
|
||||
* BUT use a regular double-quoted string with escaped newlines instead of template literals.
|
||||
*
|
||||
* MDXRemote RSC strips template literals!
|
||||
*
|
||||
* Convert:
|
||||
* <Mermaid id="..." title="..." showShare={true}>
|
||||
* {`graph TD
|
||||
* A --> B`}
|
||||
* </Mermaid>
|
||||
*
|
||||
* To:
|
||||
* <Mermaid graph={"graph TD\n A --> B"} id="..." title="..." showShare={true} />
|
||||
*/
|
||||
function convertToPlainStringProp(content: string): string {
|
||||
// Match <Mermaid ...>{\`...\`}</Mermaid>
|
||||
const mermaidChildrenRegex = /<Mermaid\s+([^>]*?)>\s*\{`([\s\S]*?)`\}\s*<\/Mermaid>/g;
|
||||
|
||||
return content.replace(mermaidChildrenRegex, (match, propsStr, graphContent) => {
|
||||
// Escape double quotes in the graph content
|
||||
const escapedGraph = graphContent
|
||||
.replace(/\\/g, '\\\\') // escape backslashes first
|
||||
.replace(/"/g, '\\"') // escape double quotes
|
||||
.replace(/\n/g, '\\n'); // escape newlines
|
||||
|
||||
// Clean up props string
|
||||
const cleanProps = propsStr.trim();
|
||||
|
||||
return `<Mermaid graph={"${escapedGraph}"} ${cleanProps} />`;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = convertToPlainStringProp(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Converted to plain string: ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no Mermaid children found)`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal converted: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,47 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* FINAL ATTEMPT: Standardize EVERYTHING in Mermaid blocks to double quotes.
|
||||
*
|
||||
* 1. Find all text inside <Mermaid>...</Mermaid>.
|
||||
* 2. Replace any ['Label'] or ['Label's'] or ["Label"] patterns.
|
||||
* 3. Enforce ["Label"] for all labels.
|
||||
* 4. Remove any internal single quotes that break parsing.
|
||||
*/
|
||||
function finalMermaidFix(content: string): string {
|
||||
const mermaidRegex = /(<Mermaid[^>]*>)([\s\S]*?)(<\/Mermaid>)/g;
|
||||
|
||||
return content.replace(mermaidRegex, (match, open, body, close) => {
|
||||
let fixedBody = body;
|
||||
|
||||
// Convert common label syntax to clean double quotes
|
||||
// Match: [followed by optional space and any quote, capture content, end with optional quote and space]
|
||||
fixedBody = fixedBody.replace(/\[\s*['"]?([^\]'"]+?)['"]?\s*\]/g, (m, label) => {
|
||||
// Clean the label: remove any internal quotes that could cause issues
|
||||
const cleanLabel = label.replace(/['"]/g, "").trim();
|
||||
return `["${cleanLabel}"]`;
|
||||
});
|
||||
|
||||
// Also handle Pie charts which use 'Label' : value
|
||||
fixedBody = fixedBody.replace(/^\s*'([^']+)'\s*:/gm, (m, label) => {
|
||||
const cleanLabel = label.replace(/['"]/g, "").trim();
|
||||
return ` "${cleanLabel}" :`;
|
||||
});
|
||||
|
||||
return open + fixedBody + close;
|
||||
});
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
for (const file of files) {
|
||||
const fp = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(fp, 'utf8');
|
||||
const fixed = finalMermaidFix(content);
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(fp, fixed);
|
||||
console.log(`✅ Fixed potentially problematic syntax in: ${file}`);
|
||||
}
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
const TARGET_FILE = process.argv[2] ? path.resolve(process.argv[2]) : null;
|
||||
|
||||
function fixFencedMermaid(content: string): string {
|
||||
// Regex to find fenced mermaid blocks
|
||||
// ```mermaid
|
||||
// graph TD...
|
||||
// ```
|
||||
const fencedRegex = /```mermaid\n([\s\S]*?)\n```/g;
|
||||
|
||||
return content.replace(fencedRegex, (match, code) => {
|
||||
// Generate a random ID or use a placeholder
|
||||
const id = `diagram-${Math.random().toString(36).substr(2, 9)}`;
|
||||
return `<div className="my-12">
|
||||
<Mermaid id="${id}" title="Generated Diagram" showShare={true}>
|
||||
${code.trim()}
|
||||
</Mermaid>
|
||||
</div>`;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
if (TARGET_FILE) {
|
||||
console.log(`Processing single file: ${TARGET_FILE}`);
|
||||
if (fs.existsSync(TARGET_FILE)) {
|
||||
const content = fs.readFileSync(TARGET_FILE, 'utf8');
|
||||
const fixed = fixFencedMermaid(content);
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(TARGET_FILE, fixed);
|
||||
console.log(`✅ Fixed fenced mermaid in: ${TARGET_FILE}`);
|
||||
} else {
|
||||
console.log(`- No changes needed.`);
|
||||
}
|
||||
} else {
|
||||
console.error(`File not found: ${TARGET_FILE}`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = fixFencedMermaid(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Fixed fenced mermaid in: ${file}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal fixed: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,86 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Fix escaped double quotes in Mermaid graph props.
|
||||
*
|
||||
* The graph prop contains \" which is invalid in MDX attribute syntax.
|
||||
* Replace \" with ' (single quote) - Mermaid supports both.
|
||||
*
|
||||
* Also fix \\n to just \n (single backslash) - the MDX parser
|
||||
* will treat \n as literal characters, and the Mermaid component
|
||||
* will unescape them.
|
||||
*/
|
||||
function fixGraphQuotes(content: string): string {
|
||||
// Match graph="..." prop (the entire value including escaped content)
|
||||
// This is tricky because the value can contain escaped quotes
|
||||
// We need to match from graph=" to the closing " that ends the prop value
|
||||
|
||||
// Strategy: Find graph=" then scan forward, tracking escape sequences
|
||||
const graphPropStart = 'graph="';
|
||||
let result = '';
|
||||
let i = 0;
|
||||
|
||||
while (i < content.length) {
|
||||
const idx = content.indexOf(graphPropStart, i);
|
||||
if (idx === -1) {
|
||||
result += content.slice(i);
|
||||
break;
|
||||
}
|
||||
|
||||
// Copy everything up to and including graph="
|
||||
result += content.slice(i, idx + graphPropStart.length);
|
||||
|
||||
// Now scan the value, replacing \" with '
|
||||
let j = idx + graphPropStart.length;
|
||||
let graphValue = '';
|
||||
|
||||
while (j < content.length) {
|
||||
if (content[j] === '\\' && content[j + 1] === '"') {
|
||||
// Replace \" with '
|
||||
graphValue += "'";
|
||||
j += 2;
|
||||
} else if (content[j] === '\\' && content[j + 1] === '\\') {
|
||||
// Keep \\ as is
|
||||
graphValue += '\\\\';
|
||||
j += 2;
|
||||
} else if (content[j] === '"') {
|
||||
// End of attribute value
|
||||
break;
|
||||
} else {
|
||||
graphValue += content[j];
|
||||
j++;
|
||||
}
|
||||
}
|
||||
|
||||
result += graphValue;
|
||||
i = j; // Continue from the closing quote (will be added in next iteration)
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = fixGraphQuotes(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Fixed quotes: ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no changes needed)`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal fixed: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,123 +0,0 @@
|
||||
#!/usr/bin/env tsx
|
||||
|
||||
/**
|
||||
* Fix missing whitespace in MDX files by comparing with TSX originals
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
// Mapping of TSX component names to MDX slugs
|
||||
const TSX_TO_MDX_MAP: Record<string, string> = {
|
||||
'PageSpeedFails': 'why-pagespeed-fails',
|
||||
'SlowLoadingDebt': 'slow-loading-costs-customers',
|
||||
'AgencySlowdown': 'why-agencies-are-slow',
|
||||
'WordPressPlugins': 'hidden-costs-of-wordpress-plugins',
|
||||
'WebsiteStability': 'why-websites-break-after-updates',
|
||||
'CookieFreeDesign': 'website-without-cookie-banners',
|
||||
'LocalCloud': 'no-us-cloud-platforms',
|
||||
'GDPRSystem': 'gdpr-conformity-system-approach',
|
||||
'VendorLockIn': 'builder-systems-threaten-independence',
|
||||
'PrivacyAnalytics': 'analytics-without-tracking',
|
||||
'GreenIT': 'green-it-sustainable-web',
|
||||
'FixedPrice': 'fixed-price-digital-projects',
|
||||
'BuildFirst': 'build-first-digital-architecture',
|
||||
'MaintenanceNoCMS': 'maintenance-for-headless-systems',
|
||||
'Longevity': 'digital-longevity-architecture',
|
||||
'CleanCode': 'clean-code-for-business-value',
|
||||
'ResponsiveDesign': 'responsive-design-high-fidelity',
|
||||
'HostingOps': 'professional-hosting-operations',
|
||||
'NoTemplates': 'why-no-templates-matter',
|
||||
'CRMSync': 'crm-synchronization-headless',
|
||||
};
|
||||
|
||||
const TSX_BASE = path.join(process.cwd(), 'src/components/blog/posts');
|
||||
const MDX_BASE = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
function findTsxFile(componentName: string): string | null {
|
||||
for (const group of ['Group1', 'Group2', 'Group3', 'Group4']) {
|
||||
const tsxPath = path.join(TSX_BASE, group, `${componentName}.tsx`);
|
||||
if (fs.existsSync(tsxPath)) {
|
||||
return tsxPath;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Restores `{" "}` whitespace markers that were lost when the TSX post
 * components were converted to MDX. For each mapped post it counts markers
 * in the TSX original, then inserts missing markers into the MDX wherever
 * the text preceding a marker can be located.
 *
 * NOTE(review): the matching is heuristic — it checks indexOf positions
 * against the *pre-replacement* MDX string inside the replace callback, so
 * repeated patterns may be over- or under-counted; verify output manually.
 */
function fixWhitespace() {
  let totalFixed = 0;

  for (const [tsxName, mdxSlug] of Object.entries(TSX_TO_MDX_MAP)) {
    const tsxPath = findTsxFile(tsxName);
    const mdxPath = path.join(MDX_BASE, `${mdxSlug}.mdx`);

    // Skip pairs where either side is missing on disk.
    if (!tsxPath || !fs.existsSync(mdxPath)) {
      console.log(`⚠️ Skipping ${tsxName}: files not found`);
      continue;
    }

    const tsxContent = fs.readFileSync(tsxPath, 'utf-8');
    let mdxContent = fs.readFileSync(mdxPath, 'utf-8');

    // Count occurrences of {" "} in both files
    const tsxSpaces = (tsxContent.match(/\{" "\}/g) || []).length;
    const mdxSpacesBefore = (mdxContent.match(/\{" "\}/g) || []).length;

    // Nothing to restore if the TSX never used the marker.
    if (tsxSpaces === 0) {
      console.log(`✓ ${mdxSlug}: No whitespace needed`);
      continue;
    }

    // Extract all lines with {" "} from TSX
    const tsxLines = tsxContent.split('\n');
    const spacedLines: Array<{ lineNum: number; content: string }> = [];

    tsxLines.forEach((line, idx) => {
      if (line.includes('{" "}')) {
        spacedLines.push({ lineNum: idx, content: line.trim() });
      }
    });

    // For each spaced line in TSX, find similar content in MDX and add {" "}
    let fixCount = 0;
    for (const { content } of spacedLines) {
      // Extract the text pattern before {" "}
      const match = content.match(/(.+?)\{" "\}/);
      if (!match) continue;

      const textBefore = match[1].trim();

      // Find this pattern in MDX without the space. The literal text is
      // regex-escaped; the negative lookahead skips already-marked spots.
      const searchPattern = new RegExp(
        textBefore.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + '(?!\\{" "\\})',
        'g'
      );

      const newMdxContent = mdxContent.replace(searchPattern, (matched) => {
        // Only add {" "} if it's not already there and if it's followed by a tag
        if (mdxContent.indexOf(matched + '{" "}') === -1 &&
            mdxContent.indexOf(matched) < mdxContent.indexOf('<', mdxContent.indexOf(matched))) {
          fixCount++;
          return matched + '{" "}';
        }
        return matched;
      });

      mdxContent = newMdxContent;
    }

    const mdxSpacesAfter = (mdxContent.match(/\{" "\}/g) || []).length;

    // Only write back when at least one marker was inserted.
    if (fixCount > 0) {
      fs.writeFileSync(mdxPath, mdxContent, 'utf-8');
      console.log(`✓ ${mdxSlug}: Fixed ${fixCount} whitespace issues (${mdxSpacesBefore} → ${mdxSpacesAfter})`);
      totalFixed += fixCount;
    } else {
      console.log(`✓ ${mdxSlug}: Already correct (${mdxSpacesBefore} spaces)`);
    }
  }

  console.log(`\n✅ Total whitespace fixes: ${totalFixed}`);
}

fixWhitespace();
|
||||
@@ -1,61 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Fix apostrophes in Mermaid labels by removing them.
|
||||
*
|
||||
* Mermaid parser treats ' as a quote delimiter even inside ["..."].
|
||||
* Replace ' with nothing or use HTML entity ' (but simpler to just remove).
|
||||
*/
|
||||
function fixMermaidApostrophes(content: string): string {
|
||||
// Find all Mermaid blocks
|
||||
const mermaidBlockRegex = /(<Mermaid[^>]*>)([\s\S]*?)(<\/Mermaid>)/g;
|
||||
|
||||
return content.replace(mermaidBlockRegex, (match, openTag, mermaidContent, closeTag) => {
|
||||
// Within Mermaid content, find all ["..."] labels and remove apostrophes
|
||||
const fixed = mermaidContent.replace(/\["([^"]*)"\]/g, (m: string, label: string) => {
|
||||
// Remove all apostrophes from the label
|
||||
const cleanLabel = label.replace(/'/g, '');
|
||||
return `["${cleanLabel}"]`;
|
||||
});
|
||||
|
||||
return openTag + fixed + closeTag;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
let totalApostrophes = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
|
||||
// Count apostrophes in Mermaid blocks before fixing
|
||||
const mermaidBlocks = content.match(/<Mermaid[^>]*>[\s\S]*?<\/Mermaid>/g) || [];
|
||||
for (const block of mermaidBlocks) {
|
||||
const apostrophes = (block.match(/\["[^"]*'[^"]*"\]/g) || []).length;
|
||||
if (apostrophes > 0) {
|
||||
totalApostrophes += apostrophes;
|
||||
}
|
||||
}
|
||||
|
||||
const fixed = fixMermaidApostrophes(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Fixed apostrophes: ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no apostrophes found)`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal files fixed: ${fixCount}`);
|
||||
console.log(`Total apostrophes removed: ${totalApostrophes}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,69 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* STRICTLY fixes Mermaid node labels.
|
||||
*
|
||||
* Goal:
|
||||
* 1. Find all content inside [...] in Mermaid blocks.
|
||||
* 2. Strip ALL outer quotes (single or double).
|
||||
* 3. Sanitize the inner text:
|
||||
* - Remove/Replace internal double quotes
|
||||
* - Remove/Replace internal single quotes (to avoid any ambiguity)
|
||||
* 4. Wrap strictly in ["..."].
|
||||
*/
|
||||
function fixMermaidLabels(content: string): string {
|
||||
// Find Mermaid blocks
|
||||
return content.replace(/(<Mermaid[^>]*>)([\s\S]*?)(<\/Mermaid>)/g, (match, open, body, close) => {
|
||||
|
||||
// Process the body line by line to be safe, or just regex the labels.
|
||||
// Regex for labels: \[ followed by anything until \]
|
||||
// Note: We assume labels don't contain nested brackets for now (Mermaid usually doesn't).
|
||||
const fixedBody = body.replace(/\[([^\]]+)\]/g, (labelMatch, innerContent) => {
|
||||
let text = innerContent.trim();
|
||||
|
||||
// Check if it looks like a quoted label
|
||||
const hasOuterQuotes = /^['"]|['"]$/.test(text);
|
||||
if (hasOuterQuotes) {
|
||||
// Remove ALL starting/ending quotes (handling multiple if messed up)
|
||||
text = text.replace(/^['"]+|['"]+$/g, '');
|
||||
}
|
||||
|
||||
// Sanitize internal text
|
||||
// Replace " with ' to avoid breaking the outer double quotes
|
||||
text = text.replace(/"/g, "'");
|
||||
|
||||
// Verify parsing safety:
|
||||
// Replace ' with space or nothing if we want to be super safe,
|
||||
// but "Text with 'quotes'" SHOULD be valid in Mermaid.
|
||||
// However, the previous error might have been due to MDX interference.
|
||||
// Let's keep single quotes inside, but ensure outer are double.
|
||||
|
||||
// WAIT: The specific error was `B['Server ... 'inner' ...']`.
|
||||
// If we convert to `B["Server ... 'inner' ..."]`, it should work.
|
||||
|
||||
return `["${text}"]`;
|
||||
});
|
||||
|
||||
return open + fixedBody + close;
|
||||
});
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixedCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = fixMermaidLabels(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
console.log(`✅ Fixed labels in: ${file}`);
|
||||
fixedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nFixed ${fixedCount} files.`);
|
||||
@@ -1,49 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
/**
|
||||
* Fix ALL quote variations in Mermaid labels to use consistent double quotes.
|
||||
*
|
||||
* Handles:
|
||||
* - ['Label'] → ["Label"]
|
||||
* - ["Label'] → ["Label"]
|
||||
* - ['Label"] → ["Label"]
|
||||
* - ["Label"] → ["Label"] (already correct)
|
||||
*/
|
||||
function fixMermaidQuotes(content: string): string {
|
||||
// Find all Mermaid blocks (between <Mermaid> and </Mermaid>)
|
||||
const mermaidBlockRegex = /(<Mermaid[^>]*>)([\s\S]*?)(<\/Mermaid>)/g;
|
||||
|
||||
return content.replace(mermaidBlockRegex, (match, openTag, mermaidContent, closeTag) => {
|
||||
// Replace all variations: [' or [" at start, '] or "] at end
|
||||
// Match pattern: [ followed by ' or ", then content, then ' or ", then ]
|
||||
const fixed = mermaidContent.replace(/\[['"]([^'"]*)['"]\]/g, '["$1"]');
|
||||
|
||||
return openTag + fixed + closeTag;
|
||||
});
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
let fixCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
const fixed = fixMermaidQuotes(content);
|
||||
|
||||
if (content !== fixed) {
|
||||
fs.writeFileSync(filePath, fixed);
|
||||
fixCount++;
|
||||
console.log(`✅ Fixed Mermaid quotes: ${file}`);
|
||||
} else {
|
||||
console.log(`- ${file} (no changes needed)`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nTotal fixed: ${fixCount}`);
|
||||
}
|
||||
|
||||
processFiles();
|
||||
45
apps/web/scripts/generate-thumbnail.ts
Normal file
45
apps/web/scripts/generate-thumbnail.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { ThumbnailGenerator } from '@mintel/thumbnail-generator';
|
||||
import * as path from 'node:path';
|
||||
import * as fs from 'node:fs/promises';
|
||||
|
||||
async function run() {
|
||||
const apiKey = process.env.REPLICATE_API_KEY;
|
||||
if (!apiKey) {
|
||||
console.error("❌ Missing REPLICATE_API_KEY in environment.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const targetFile = process.argv[2];
|
||||
if (!targetFile) {
|
||||
console.error("❌ Usage: npx tsx scripts/generate-thumbnail.ts <file-or-topic>");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let topic = targetFile;
|
||||
let filename = "thumbnail.png";
|
||||
|
||||
// Try to parse the topic from the MDX frontmatter if a file is provided
|
||||
if (targetFile.endsWith('.mdx')) {
|
||||
try {
|
||||
const content = await fs.readFile(targetFile, 'utf8');
|
||||
const titleMatch = content.match(/title:\s*"?([^"\n]+)"?/);
|
||||
topic = titleMatch ? titleMatch[1] : path.basename(targetFile, '.mdx');
|
||||
filename = `${path.basename(targetFile, '.mdx')}-thumb.png`;
|
||||
} catch (e) {
|
||||
console.warn(`⚠️ Could not read ${targetFile} as a file. Using literal argument as topic.`);
|
||||
topic = targetFile;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Generating abstract thumbnail for topic: "${topic}"`);
|
||||
|
||||
const generator = new ThumbnailGenerator({ replicateApiKey: apiKey });
|
||||
const outputPath = path.join(process.cwd(), 'public', 'blog', filename);
|
||||
|
||||
await generator.generateImage(topic, outputPath);
|
||||
}
|
||||
|
||||
run().catch((e) => {
|
||||
console.error("❌ Thumbnail generation failed:", e);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,107 +1,31 @@
|
||||
|
||||
import { ContentGenerator, OptimizationOptions } from "@mintel/content-engine";
|
||||
import * as fs from "node:fs/promises";
|
||||
import * as path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
import { AiBlogPostOrchestrator } from "@mintel/content-engine";
|
||||
import { config } from "../content-engine.config.js";
|
||||
|
||||
async function main() {
|
||||
const OPENROUTER_KEY = process.env.OPENROUTER_KEY;
|
||||
const OPENROUTER_KEY = process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
|
||||
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
|
||||
|
||||
if (!OPENROUTER_KEY) {
|
||||
console.error("❌ Error: OPENROUTER_KEY not found in environment.");
|
||||
console.error("❌ Error: OPENROUTER_KEY or OPENROUTER_API_KEY not found in environment.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
let targetFile = args[0];
|
||||
|
||||
const targetFile = process.argv[2];
|
||||
if (!targetFile) {
|
||||
console.error("❌ Usage: npx tsx scripts/optimize-blog-post.ts <path/to/post.mdx>");
|
||||
console.error("❌ Usage: npx tsx scripts/optimize-blog-post.ts <file>");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Resolve absolute path
|
||||
if (!path.isAbsolute(targetFile)) {
|
||||
targetFile = path.resolve(process.cwd(), targetFile);
|
||||
}
|
||||
const orchestrator = new AiBlogPostOrchestrator({
|
||||
apiKey: OPENROUTER_KEY,
|
||||
replicateApiKey: REPLICATE_KEY,
|
||||
model: 'google/gemini-3-flash-preview'
|
||||
});
|
||||
|
||||
console.log(`📄 Reading file: ${targetFile}`);
|
||||
let content = "";
|
||||
try {
|
||||
content = await fs.readFile(targetFile, "utf8");
|
||||
} catch (err: any) {
|
||||
console.error(`❌ Could not read file: ${err.message}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Backup original
|
||||
const backupPath = `${targetFile}.bak`;
|
||||
await fs.writeFile(backupPath, content);
|
||||
console.log(`💾 Backup created at: ${backupPath}`);
|
||||
|
||||
// Instantiate Generator
|
||||
const generator = new ContentGenerator(OPENROUTER_KEY);
|
||||
|
||||
const context = `
|
||||
Project: Mintel.me
|
||||
Style: Industrial, Technical, High-Performance, "No-BS".
|
||||
Author: Marc Mintel (Digital Architect).
|
||||
Focus: Web Architecture, PageSpeed, Core Web Vitals, Data-Driven Design.
|
||||
`;
|
||||
|
||||
// Define Optimization Options based on user request ("astrein verbessert mit daten gestützt, links zu quellen usw... mermaid, memes")
|
||||
const options: OptimizationOptions = {
|
||||
enhanceFacts: true,
|
||||
addMemes: true,
|
||||
addDiagrams: true,
|
||||
projectContext: context,
|
||||
availableComponents: [
|
||||
{
|
||||
name: "StatsDisplay",
|
||||
description: "A row of 3 clear statistic cards with values and labels.",
|
||||
usageExample: `<StatsDisplay
|
||||
items={[
|
||||
{ value: "-20%", label: "Conversion", description: "Source: Google" },
|
||||
{ value: "53%", label: "Bounce Rate", description: "Mobile users > 3s" },
|
||||
{ value: "0.1s", label: "Latency", description: "Edge Network" }
|
||||
]}
|
||||
/>`
|
||||
},
|
||||
{
|
||||
name: "ComparisonRow",
|
||||
description: "A comparison component showing a negative 'Standard' vs a positive 'Mintel' approach.",
|
||||
usageExample: `<ComparisonRow
|
||||
description="Architecture Comparison"
|
||||
negativeLabel="Legacy CMS"
|
||||
negativeText="Slow database queries, vulnerable plugins."
|
||||
positiveLabel="Mintel Stack"
|
||||
positiveText="Static generation, perfect security."
|
||||
/>`
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
// 1. Separate Frontmatter from Body
|
||||
const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
|
||||
const frontmatter = fmMatch ? fmMatch[0] : "";
|
||||
const body = fmMatch ? content.replace(frontmatter, "").trim() : content;
|
||||
|
||||
console.log("🚀 Starting Optimization via ContentEngine...");
|
||||
const result = await generator.optimizePost(body, options);
|
||||
|
||||
console.log("✅ Optimization Complete!");
|
||||
console.log(` - Added ${result.research.length} facts`);
|
||||
console.log(` - Added ${result.memes.length} meme concepts`);
|
||||
console.log(` - Generated ${result.diagrams.length} diagrams`);
|
||||
|
||||
// We write the content back (re-attaching frontmatter if it was there)
|
||||
const finalContent = frontmatter ? `${frontmatter}\n\n${result.content}` : result.content;
|
||||
|
||||
await fs.writeFile(targetFile, finalContent);
|
||||
|
||||
console.log(`💾 Saved optimized content to: ${targetFile}`);
|
||||
await orchestrator.optimizeFile(targetFile, {
|
||||
contextDir: config.contextDir,
|
||||
availableComponents: config.components
|
||||
});
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const MDX_DIR = path.join(process.cwd(), 'content/blog');
|
||||
|
||||
function repairMermaidSyntax(content: string): string {
|
||||
// 1. Convert <Mermaid graph={`...`} /> to <Mermaid graph={`...`}>...</Mermaid> style or just fix the graph prop
|
||||
// Actually, let's keep the graph prop but make sure the content is VERY safe.
|
||||
|
||||
const mermaidRegex = /<Mermaid\s+([\s\S]*?)graph=\{(`[\s\S]*?`)\}([\s\S]*?)\/>/g;
|
||||
|
||||
return content.replace(mermaidRegex, (match, before, graphLiteral, after) => {
|
||||
let graphContent = graphLiteral.slice(1, -1);
|
||||
|
||||
// Replace all {Label} with ["Label"]
|
||||
graphContent = graphContent.replace(/\{([^{}]+)\}/g, '["$1"]');
|
||||
|
||||
// Sometimes people use double {{Label}}
|
||||
graphContent = graphContent.replace(/\{\{([^{}]+)\}\}/g, '["$1"]');
|
||||
|
||||
// Remove any trailing backticks inside that might have been accidentally added
|
||||
graphContent = graphContent.trim();
|
||||
|
||||
return `<Mermaid\n ${before.trim()}\n graph={\`${graphContent}\`}\n ${after.trim()}\n />`;
|
||||
});
|
||||
}
|
||||
|
||||
// Additional fix for other diagram components that might have similar issues with props
|
||||
function repairOtherDiagrams(content: string): string {
|
||||
// For DiagramSequence, DiagramTimeline etc., we often pass arrays of objects.
|
||||
// MDX handles these better, but let's make sure there are no weird backticks.
|
||||
return content;
|
||||
}
|
||||
|
||||
function processFiles() {
|
||||
const files = fs.readdirSync(MDX_DIR).filter(f => f.endsWith('.mdx'));
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(MDX_DIR, file);
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
let repaired = repairMermaidSyntax(content);
|
||||
repaired = repairOtherDiagrams(repaired);
|
||||
|
||||
if (content !== repaired) {
|
||||
fs.writeFileSync(filePath, repaired);
|
||||
console.log(`✅ Repaired Mermaid syntax in ${file}`);
|
||||
} else {
|
||||
console.log(`- Checked ${file}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
processFiles();
|
||||
@@ -1,220 +0,0 @@
|
||||
#!/usr/bin/env tsx
|
||||
|
||||
/**
|
||||
* Updated link test for the Next.js blog with App Router
|
||||
* Tests: All references are valid, files exist
|
||||
*/
|
||||
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
||||
console.log("🔗 Checking links and references (Next.js App Router)...\n");
|
||||
|
||||
let passed = 0;
|
||||
let failed = 0;
|
||||
|
||||
function test(name: string, fn: () => void): void {
|
||||
try {
|
||||
fn();
|
||||
console.log(`✅ ${name}`);
|
||||
passed++;
|
||||
} catch (error) {
|
||||
console.log(`❌ ${name}`);
|
||||
if (error instanceof Error) {
|
||||
console.log(` Error: ${error.message}`);
|
||||
}
|
||||
failed++;
|
||||
}
|
||||
}
|
||||
|
||||
// ---- Checks ----
// Each check throws on failure; the `test` harness above records results.
// All paths are resolved relative to process.cwd(), i.e. the app root.

// Test 1: Check that blog posts reference valid data
test("Blog posts reference valid data", () => {
  const blogPostsPath = path.join(process.cwd(), "src/data/blogPosts.ts");
  const content = fs.readFileSync(blogPostsPath, "utf8");

  // Extract all slugs (source-level regex scan, not a TS parse)
  const slugMatches = content.match(/slug:\s*['"]([^'"]+)['"]/g) || [];
  const slugs = slugMatches.map((m) => m.match(/['"]([^'"]+)['"]/)?.[1]);

  if (slugs.length === 0) {
    throw new Error("No slugs found in blogPosts.ts");
  }

  // Verify dynamic route page exists
  const slugPagePath = path.join(process.cwd(), "app/blog/[slug]/page.tsx");
  if (!fs.existsSync(slugPagePath)) {
    throw new Error(
      "Dynamic slug page app/blog/[slug]/page.tsx does not exist",
    );
  }
});

// Test 2: Verify tag references are valid
test("Tag references are valid", () => {
  const blogPostsPath = path.join(process.cwd(), "src/data/blogPosts.ts");
  const content = fs.readFileSync(blogPostsPath, "utf8");

  // Extract all tags
  const tagMatches = content.match(/tags:\s*\[([^\]]+)\]/g) || [];

  if (tagMatches.length === 0) {
    throw new Error("No tags found in blogPosts.ts");
  }

  // Verify tag page exists
  const tagPagePath = path.join(process.cwd(), "app/tags/[tag]/page.tsx");
  if (!fs.existsSync(tagPagePath)) {
    throw new Error("Tag page app/tags/[tag]/page.tsx does not exist");
  }
});

// Test 3: Verify all component imports are valid
test("All component imports are valid", () => {
  // Components the blog pages import; existence-only check.
  const components = [
    "src/components/MediumCard.tsx",
    "src/components/SearchBar.tsx",
    "src/components/ArticleBlockquote.tsx",
    "src/components/ArticleHeading.tsx",
    "src/components/ArticleParagraph.tsx",
    "src/components/ArticleList.tsx",
    "src/components/Footer.tsx",
    "src/components/Hero.tsx",
    "src/components/Tag.tsx",
    "src/components/FileExample.tsx",
    "src/components/FileExamplesList.tsx",
  ];

  for (const component of components) {
    const componentPath = path.join(process.cwd(), component);
    if (!fs.existsSync(componentPath)) {
      throw new Error(`Component missing: ${component}`);
    }
  }
});

// Test 4: Verify all required pages exist
test("All required pages exist", () => {
  const requiredPages = [
    "app/page.tsx",
    "app/blog/[slug]/page.tsx",
    "app/tags/[tag]/page.tsx",
    "app/api/download-zip/route.ts",
  ];

  for (const page of requiredPages) {
    const pagePath = path.join(process.cwd(), page);
    if (!fs.existsSync(pagePath)) {
      throw new Error(`Required page missing: ${page}`);
    }
  }
});

// Test 5: Verify layout files are valid
test("Layout files are valid", () => {
  const layoutPath = path.join(process.cwd(), "app/layout.tsx");

  if (!fs.existsSync(layoutPath)) {
    throw new Error("Layout missing: app/layout.tsx");
  }

  const content = fs.readFileSync(layoutPath, "utf8");

  // Substring checks only — does not parse JSX.
  if (!content.includes("<html") || !content.includes("</html>")) {
    throw new Error("RootLayout does not contain proper HTML structure");
  }

  if (!content.includes("<body") || !content.includes("</body>")) {
    throw new Error("RootLayout missing body section");
  }
});

// Test 6: Verify global styles are properly imported
test("Global styles are properly imported", () => {
  const stylesPath = path.join(process.cwd(), "app/globals.css");

  if (!fs.existsSync(stylesPath)) {
    throw new Error("Global styles file missing: app/globals.css");
  }

  const content = fs.readFileSync(stylesPath, "utf8");

  // Check for Tailwind imports
  // NOTE(review): assumes the Tailwind v3 @tailwind directive style.
  if (
    !content.includes("@tailwind base") ||
    !content.includes("@tailwind components") ||
    !content.includes("@tailwind utilities")
  ) {
    throw new Error("Global styles missing Tailwind imports");
  }

  // Check for required classes (Next.js version uses different ones or we check the ones we found)
  const requiredClasses = [".container", ".post-card", ".highlighter-tag"];
  for (const className of requiredClasses) {
    if (!content.includes(className)) {
      throw new Error(`Global styles missing required class: ${className}`);
    }
  }
});

// Test 7: Verify file examples data structure
test("File examples data structure is valid", () => {
  const fileExamplesPath = path.join(process.cwd(), "src/data/fileExamples.ts");

  if (!fs.existsSync(fileExamplesPath)) {
    throw new Error("File examples data file missing");
  }

  const content = fs.readFileSync(fileExamplesPath, "utf8");

  // Either an interface or a type alias declaration is accepted.
  if (
    !content.includes("export interface FileExample") &&
    !content.includes("type FileExample")
  ) {
    throw new Error("FileExample interface/type not found");
  }

  if (!content.includes("export const sampleFileExamples")) {
    throw new Error("sampleFileExamples not exported");
  }
});

// Test 8: Verify API endpoint structure
test("API endpoint structure is valid", () => {
  const apiPath = path.join(process.cwd(), "app/api/download-zip/route.ts");

  if (!fs.existsSync(apiPath)) {
    throw new Error("API route missing");
  }

  const content = fs.readFileSync(apiPath, "utf8");

  // Both handlers are required for the download endpoint.
  if (!content.includes("export async function POST")) {
    throw new Error("API missing POST handler");
  }

  if (!content.includes("export async function GET")) {
    throw new Error("API missing GET handler");
  }
});

// Summary — exit code reflects the overall result for CI.
console.log("\n" + "=".repeat(50));
console.log(`Tests passed: ${passed}`);
console.log(`Tests failed: ${failed}`);
console.log("=".repeat(50));

if (failed === 0) {
  console.log("\n🎉 All link checks passed! All references are valid.");
  console.log("\nVerified:");
  console.log("  ✅ Blog posts data and routing (Next.js)");
  console.log("  ✅ Tag filtering system");
  console.log("  ✅ All components exist");
  console.log("  ✅ All pages exist");
  console.log("  ✅ Layout structure (App Router)");
  console.log("  ✅ File examples functionality");
  console.log("  ✅ API routes");
  process.exit(0);
} else {
  console.log("\n❌ Some checks failed. Please fix the errors above.");
  process.exit(1);
}
|
||||
@@ -1,50 +0,0 @@
|
||||
#!/usr/bin/env tsx
|
||||
/**
|
||||
* Verify components can be imported and used (Next.js Version)
|
||||
*/
|
||||
|
||||
import { join } from "path";
|
||||
import fs from "fs";
|
||||
|
||||
// Existence-only verification of the embed components and their data files;
// logs results to the console and never throws or sets an exit code.
console.log("🔍 Verifying Embed Components (Next.js)...\n");

// Test 1: Check if components exist
const components = ["YouTubeEmbed.tsx", "TwitterEmbed.tsx", "GenericEmbed.tsx"];

for (const component of components) {
  const componentPath = join(process.cwd(), "src", "components", component);
  if (fs.existsSync(componentPath)) {
    console.log(`✅ ${component} exists`);
  } else {
    console.log(`❌ Component missing: ${component}`);
  }
}

// Test 2: Check demo post accessibility
try {
  const demoPath = join(process.cwd(), "src", "data", "embedDemoPost.ts");

  if (fs.existsSync(demoPath)) {
    console.log("✅ embedDemoPost.ts data file exists");
  } else {
    console.log("❌ embedDemoPost.ts missing");
  }
} catch (error) {
  // existsSync does not normally throw; kept for defensive symmetry.
  console.log("❌ Demo post check error:", error);
}

// Test 3: Check blogPosts array
try {
  const blogPostsPath = join(process.cwd(), "src", "data", "blogPosts.ts");
  if (fs.existsSync(blogPostsPath)) {
    // Check if embed-demo needs to be added (actually it's blog-embed-demo or similar usually)
    console.log("✅ Checking blogPosts array integration...");
  }
} catch (error) {
  console.log("❌ blogPosts check error:", error);
}

console.log("\n" + "=".repeat(60));
console.log("📋 SUMMARY:");
console.log("• Components are verified for Next.js");
console.log("• Data structure is verified");
|
||||
61
apps/web/scripts/verify-embeds.ts
Normal file
61
apps/web/scripts/verify-embeds.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
(async () => {
|
||||
try {
|
||||
console.log("Starting Chrome...");
|
||||
const browser = await puppeteer.launch({
|
||||
headless: 'new',
|
||||
args: ['--no-sandbox', '--disable-setuid-sandbox']
|
||||
});
|
||||
|
||||
const page = await browser.newPage();
|
||||
|
||||
// Pass any console logs from the browser to our terminal
|
||||
page.on('console', msg => console.log('BROWSER LOG:', msg.text()));
|
||||
|
||||
console.log("Navigating to http://localhost:3000/blog/why-pagespeed-fails ...");
|
||||
await page.goto('http://localhost:3000/blog/why-pagespeed-fails', { waitUntil: 'networkidle0' });
|
||||
|
||||
// Wait a bit just in case
|
||||
await new Promise(r => setTimeout(r, 2000));
|
||||
|
||||
console.log("--- Inspecting Mermaid ---");
|
||||
const mermaidLabels = await page.evaluate(() => {
|
||||
const labels = Array.from(document.querySelectorAll('.mermaid svg text, .mermaid svg .nodeLabel'));
|
||||
return labels.map(l => l.textContent).filter(Boolean);
|
||||
});
|
||||
console.log(`Found ${mermaidLabels.length} mermaid labels.`);
|
||||
if (mermaidLabels.length > 0) {
|
||||
console.log("Sample labels:", mermaidLabels.slice(0, 5));
|
||||
} else {
|
||||
console.log("FAIL: No SVG labels found inside Mermaid containers!");
|
||||
}
|
||||
|
||||
console.log("\n--- Inspecting Twitter Embed ---");
|
||||
const tweets = await page.evaluate(() => {
|
||||
const tweetContainers = Array.from(document.querySelectorAll('.react-tweet-theme'));
|
||||
return tweetContainers.map(container => ({
|
||||
html: container.outerHTML.substring(0, 150) + "..."
|
||||
}));
|
||||
});
|
||||
|
||||
console.log(`Found ${tweets.length} Tweet containers.`);
|
||||
if (tweets.length > 0) {
|
||||
console.log("Success! Tweet container snippet:", tweets[0].html);
|
||||
|
||||
// Further inspection of react-tweet - it sometimes renders an error div if not found
|
||||
const tweetErrors = await page.evaluate(() => {
|
||||
return Array.from(document.querySelectorAll('.react-tweet-theme [data-testid="tweet-not-found"]')).length;
|
||||
});
|
||||
if (tweetErrors > 0) {
|
||||
console.log(`FAIL: Found ${tweetErrors} 'Tweet not found' error states inside the container.`);
|
||||
}
|
||||
} else {
|
||||
console.log("FAIL: No react-tweet containers found on page. It might be completely crashing or skipped.");
|
||||
}
|
||||
|
||||
await browser.close();
|
||||
} catch (e) {
|
||||
console.error("Script failed:", e);
|
||||
}
|
||||
})();
|
||||
Reference in New Issue
Block a user