feat(mcps): add kabelfachmann MCP with Kabelhandbuch integration and remove legacy PM2 orchestration
Some checks failed
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 2s
Monorepo Pipeline / 🧪 Test (push) Successful in 1m6s
Monorepo Pipeline / 🏗️ Build (push) Successful in 2m52s
Monorepo Pipeline / 🧹 Lint (push) Successful in 3m1s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
🏥 Server Maintenance / 🧹 Prune & Clean (push) Failing after 4s

This commit is contained in:
2026-03-08 01:01:43 +01:00
parent 048fafa3db
commit 541f1c17b7
16 changed files with 866 additions and 254 deletions

View File

@@ -0,0 +1,11 @@
# Runtime image for the Kabelfachmann MCP server.
# Expects `npm run build` to have produced ./dist and the handbook data to
# exist under ./data before the image is built.
FROM node:20-slim
WORKDIR /app
COPY package.json ./
# Install prod dependencies
# NOTE(review): no lockfile is copied, so installs are not reproducible —
# consider copying package-lock.json and switching to `npm ci`.
RUN npm install --omit=dev --legacy-peer-deps
COPY ./dist ./dist
COPY ./data ./data
ENTRYPOINT ["node", "dist/index.js"]

View File

@@ -0,0 +1,31 @@
{
"name": "@mintel/kabelfachmann-mcp",
"version": "1.0.0",
"description": "Kabelfachmann MCP server",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"dev": "tsx watch src/index.ts",
"ingest": "tsx src/ingest.ts"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.5.0",
"@qdrant/js-client-rest": "^1.12.0",
"@xenova/transformers": "^2.17.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"node-fetch": "^3.3.2",
"onnxruntime-node": "^1.14.0",
"pdf-parse": "^1.1.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"@types/pdf-parse": "^1.1.4",
"tsx": "^4.19.1",
"typescript": "^5.5.3"
}
}

View File

@@ -0,0 +1,142 @@
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import express from "express";
import { z } from "zod";
import { QdrantMemoryService } from "./qdrant.js";
import { askOpenRouter } from "./llm.js";
/**
 * Boots the Kabelfachmann MCP server.
 *
 * Registers the single `ask_kabelfachmann` tool (RAG over the KLZ handbook
 * stored in Qdrant, answered via OpenRouter) and then starts either a stdio
 * transport (when launched with `--stdio`) or an Express-hosted SSE
 * transport on KABELFACHMANN_MCP_PORT (default 3007).
 */
async function main() {
  const server = new McpServer({
    name: "@mintel/kabelfachmann-mcp",
    version: "1.0.0",
  });
  const qdrantService = new QdrantMemoryService();

  server.tool(
    "ask_kabelfachmann",
    "Ask the KLZ Kabelfachmann a question about cables based on the KLZ technical handbook. This consultant knows everything about cable specifications, geometries, weights, ampacity (Strombelastbarkeit), and materials.",
    {
      query: z
        .string()
        .describe(
          "The user's question regarding cables or a specific cable type.",
        ),
    },
    async (args) => {
      console.error(`Kabelfachmann received query: ${args.query}`);

      // Retrieve relevant chunks from the handbook
      const results = await qdrantService.retrieveMemory(args.query, 10);
      const contextText = results
        .map(
          (r) =>
            `--- Excerpt (Relevance: ${r.score.toFixed(2)}) ---\n${r.content}`,
        )
        .join("\n\n");

      if (!contextText) {
        return {
          content: [
            {
              type: "text",
              text: "Der Kabelfachmann konnte keine relevanten Informationen im Handbuch finden.",
            },
          ],
        };
      }

      const systemPrompt = `Du bist der "KLZ Kabelfachmann" (KLZ Cable Expert), ein professioneller beratender KI-Experte.
Du arbeitest für die Kabeltechnik-Firma "KLZ".
Beantworte die folgende Frage des Nutzers fachlich absolut korrekt und **nur** basierend auf den bereitgestellten Auszügen aus dem KLZ Kabelhandbuch.
Wenn die Information nicht im Kontext enthalten ist, sage höflich, dass dir dazu keine KLZ-Daten vorliegen. Erfinde niemals Spezifikationen oder Daten.
Halte dich relativ knapp und präzise, aber professionell (Siezen).
Hier ist der Kontext aus dem Handbuch:
${contextText}`;

      try {
        const answer = await askOpenRouter(systemPrompt, args.query);
        return {
          content: [{ type: "text", text: answer }],
        };
      } catch (error) {
        console.error("Error querying OpenRouter:", error);
        // Fix: narrow the (unknown) catch value instead of typing it `any`.
        const message = error instanceof Error ? error.message : String(error);
        return {
          content: [
            {
              type: "text",
              text: `Ein Fehler ist bei der KI-Anfrage aufgetreten: ${message}`,
            },
          ],
          isError: true,
        };
      }
    },
  );

  const isStdio = process.argv.includes("--stdio");
  if (isStdio) {
    // stdio mode: used when the server is spawned directly by an MCP host.
    const { StdioServerTransport } =
      await import("@modelcontextprotocol/sdk/server/stdio.js");
    const transport = new StdioServerTransport();
    await server.connect(transport);
    console.error("Kabelfachmann MCP server is running on stdio");
    // Initialization is best-effort: the server stays up even if Qdrant or
    // the embedding model is unavailable at boot.
    try {
      await qdrantService.initialize();
    } catch (e) {
      console.error("Failed to initialize local dependencies:", e);
    }
  } else {
    const app = express();
    // NOTE(review): a single shared transport means only ONE SSE client can
    // be connected at a time — a new /sse connection closes the previous
    // one. Fine for a single-consumer deployment; confirm that is intended.
    let transport: SSEServerTransport | null = null;

    app.get("/sse", async (req, res) => {
      console.error("New SSE connection established");
      if (transport) {
        try {
          await transport.close();
        } catch (e) {
          console.error("Error closing previous transport:", e);
        }
      }
      transport = new SSEServerTransport("/message", res);
      try {
        await server.connect(transport);
      } catch (e) {
        console.error("Failed to connect new transport:", e);
      }
    });

    app.post("/message", async (req, res) => {
      if (!transport) {
        res.status(400).send("No active SSE connection");
        return;
      }
      await transport.handlePostMessage(req, res);
    });

    // Fix: env vars are strings — parse to a number instead of lying with
    // `PORT as number`. Falls back to 3007 when unset or not numeric.
    const PORT = Number(process.env.KABELFACHMANN_MCP_PORT) || 3007;
    const HOST = process.env.HOST || "0.0.0.0";
    app.listen(PORT, HOST, async () => {
      console.error(
        `Kabelfachmann MCP server running on http://${HOST}:${PORT}/sse`,
      );
      try {
        await qdrantService.initialize();
      } catch (e) {
        console.error("Failed to initialize local dependencies:", e);
      }
    });
  }
}
// Top-level entry point: any unhandled startup error is fatal.
main().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});

View File

@@ -0,0 +1,76 @@
import fs from "fs";
import path from "path";
import pdf from "pdf-parse";
import { QdrantMemoryService } from "./qdrant.js";
/**
 * Splits raw handbook text into chunks of at most ~1500 characters.
 *
 * Text is first split into paragraphs (blank-line separated); paragraphs of
 * 50 characters or fewer are dropped as noise. Paragraphs are then greedily
 * packed into chunks, joined with blank lines. A single paragraph longer
 * than the limit becomes its own (oversized) chunk.
 *
 * @param text - full extracted PDF text
 * @returns non-empty chunk strings, in document order
 */
function chunkText(text: string): string[] {
  const MAX_CHUNK_LENGTH = 1500;
  const paragraphs = text
    .split(/\n\s*\n/)
    .map((p) => p.trim())
    .filter((p) => p.length > 50);
  const chunks: string[] = [];
  let currentChunk = "";
  for (const p of paragraphs) {
    if (currentChunk.length + p.length > MAX_CHUNK_LENGTH) {
      // Fix: previously an EMPTY chunk was pushed when the very first
      // paragraph already exceeded the limit.
      if (currentChunk.length > 0) {
        chunks.push(currentChunk);
      }
      currentChunk = p;
    } else {
      currentChunk += (currentChunk.length ? "\n\n" : "") + p;
    }
  }
  if (currentChunk.length > 0) {
    chunks.push(currentChunk);
  }
  return chunks;
}

/**
 * One-shot ingestion job: reads data/pdf/kabelhandbuch.pdf, chunks its text
 * and upserts every chunk into the Qdrant "kabelfachmann" collection.
 * Exits 1 when the PDF is missing, 0 after ingestion completes.
 */
async function start() {
  const qdrant = new QdrantMemoryService(
    process.env.QDRANT_URL || "http://localhost:6333",
  );
  await qdrant.initialize();

  const pdfPath = path.join(process.cwd(), "data", "pdf", "kabelhandbuch.pdf");
  console.error(`Reading PDF from ${pdfPath}...`);
  let dataBuffer;
  try {
    dataBuffer = fs.readFileSync(pdfPath);
  } catch (e) {
    // Fix: also log the underlying error (it was previously swallowed).
    console.error(
      "PDF file not found. Ensure it exists at data/pdf/kabelhandbuch.pdf",
      e,
    );
    process.exit(1);
  }

  const data = await pdf(dataBuffer);
  const chunks = chunkText(data.text);
  console.error(
    `Split PDF into ${chunks.length} chunks. Ingesting to Qdrant...`,
  );

  let successCount = 0;
  for (let i = 0; i < chunks.length; i++) {
    const success = await qdrant.storeMemory(`Handbuch Teil ${i + 1}`, chunks[i]);
    if (success) {
      successCount++;
    }
    // Progress log every 10 chunks.
    if ((i + 1) % 10 === 0) {
      console.error(`Ingested ${i + 1}/${chunks.length} chunks...`);
    }
  }
  console.error(
    `Ingestion complete! Successfully stored ${successCount}/${chunks.length} chunks.`,
  );
  process.exit(0);
}
// Kick off ingestion; any unhandled failure aborts with a non-zero exit.
start().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -0,0 +1,41 @@
import fetch from "node-fetch";
/**
 * Sends a system + user prompt pair to OpenRouter's chat-completions API
 * and returns the assistant's reply text.
 *
 * @param systemPrompt - system instruction for the model
 * @param userPrompt - the end user's question
 * @returns the assistant message content
 * @throws when OPENROUTER_API_KEY is unset, the HTTP request fails, or the
 *   response carries no message content
 */
export async function askOpenRouter(
  systemPrompt: string,
  userPrompt: string,
): Promise<string> {
  const apiKey = process.env.OPENROUTER_API_KEY;
  if (!apiKey) {
    throw new Error("OPENROUTER_API_KEY is not set");
  }
  const response = await fetch(
    "https://openrouter.ai/api/v1/chat/completions",
    {
      method: "POST",
      headers: {
        Authorization: `Bearer ${apiKey}`,
        // Attribution headers used by OpenRouter for request accounting.
        "HTTP-Referer": "https://mintel.me",
        "X-Title": "Mintel MCP",
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        model: "google/gemini-3-flash-preview",
        messages: [
          { role: "system", content: systemPrompt },
          { role: "user", content: userPrompt },
        ],
      }),
    },
  );
  if (!response.ok) {
    const text = await response.text();
    throw new Error(
      `OpenRouter API error: ${response.status} ${response.statusText} - ${text}`,
    );
  }
  const data = (await response.json()) as any;
  // Fix: guard the payload shape — a malformed response previously caused
  // an opaque TypeError on `data.choices[0].message.content`.
  const content = data?.choices?.[0]?.message?.content;
  if (typeof content !== "string") {
    throw new Error("OpenRouter API returned no message content");
  }
  return content;
}

View File

@@ -0,0 +1,104 @@
import { pipeline, env } from "@xenova/transformers";
import { QdrantClient } from "@qdrant/js-client-rest";
import crypto from "crypto";
// Permit downloading the embedding model from the remote hub when it is not
// already cached, and resolve local model files from ./models.
env.allowRemoteModels = true;
env.localModelPath = "./models";
/**
 * Thin wrapper around a Qdrant collection used as a semantic memory store
 * for the KLZ handbook.
 *
 * Embeddings are computed locally via the Xenova/all-MiniLM-L6-v2 pipeline
 * (384-dimensional vectors, cosine distance). `initialize()` must be called
 * before storing or retrieving anything.
 */
export class QdrantMemoryService {
  private client: QdrantClient;
  // Qdrant collection that holds the handbook chunks.
  private collectionName = "kabelfachmann";
  // Lazily loaded feature-extraction pipeline; typed `any` because the
  // transformers package ships no precise pipeline types.
  private embedder: any = null;

  constructor(
    url: string = process.env.QDRANT_URL || "http://qdrant-mcp:6333",
  ) {
    this.client = new QdrantClient({ url });
  }

  /**
   * Loads the embedding model and ensures the target collection exists,
   * creating it (size 384, cosine) when missing. Rethrows after logging if
   * Qdrant is unreachable or collection creation fails.
   */
  async initialize() {
    console.error("Loading embedding model...");
    this.embedder = await pipeline(
      "feature-extraction",
      "Xenova/all-MiniLM-L6-v2",
    );
    console.error(`Checking for collection: ${this.collectionName}`);
    try {
      const { collections } = await this.client.getCollections();
      const alreadyExists = collections.some(
        (col) => col.name === this.collectionName,
      );
      if (!alreadyExists) {
        console.error(`Creating collection: ${this.collectionName}`);
        await this.client.createCollection(this.collectionName, {
          vectors: {
            size: 384,
            distance: "Cosine",
          },
        });
        console.error("Collection created successfully.");
      }
    } catch (e) {
      console.error("Failed to initialize Qdrant collection:", e);
      throw e;
    }
  }

  // Embeds `text` into a plain number[] (mean pooling, normalized output).
  private async getEmbedding(text: string): Promise<number[]> {
    if (!this.embedder) {
      throw new Error("Embedder not initialized. Call initialize() first.");
    }
    const result = await this.embedder(text, {
      pooling: "mean",
      normalize: true,
    });
    return Array.from(result.data);
  }

  /**
   * Embeds one chunk of content and upserts it under a random UUID.
   * @returns true on success, false (after logging) on any failure.
   */
  async storeMemory(label: string, content: string): Promise<boolean> {
    try {
      const point = {
        id: crypto.randomUUID(),
        vector: await this.getEmbedding(content),
        payload: { label, content, timestamp: new Date().toISOString() },
      };
      await this.client.upsert(this.collectionName, {
        wait: true,
        points: [point],
      });
      return true;
    } catch (e) {
      console.error("Failed to store memory:", e);
      return false;
    }
  }

  /**
   * Vector-searches the collection for the `limit` chunks most similar to
   * `query`. Returns an empty array (after logging) on any failure.
   */
  async retrieveMemory(
    query: string,
    limit: number = 5,
  ): Promise<Array<{ label: string; content: string; score: number }>> {
    try {
      const hits = await this.client.search(this.collectionName, {
        vector: await this.getEmbedding(query),
        limit,
        with_payload: true,
      });
      return hits.map((hit) => ({
        label: String(hit.payload?.label || ""),
        content: String(hit.payload?.content || ""),
        score: hit.score,
      }));
    } catch (e) {
      console.error("Failed to retrieve memory:", e);
      return [];
    }
  }
}

View File

@@ -0,0 +1,16 @@
import { config } from "dotenv";
import { resolve } from "path";
import { fileURLToPath } from "url";

// ESM has no __dirname; derive it from this module's URL.
const __dirname = fileURLToPath(new URL(".", import.meta.url));

// Try to load .env.local first (contains credentials usually)
config({ quiet: true, path: resolve(__dirname, "../../../.env.local") });
// Fallback to .env (contains defaults)
// NOTE(review): this relies on dotenv not overriding already-set variables,
// so values from .env.local win — confirm against the dotenv version used.
config({ quiet: true, path: resolve(__dirname, "../../../.env") });

// Now boot the compiled MCP index
// Dynamic import so process.env is populated before index.js executes.
import("./index.js").catch((err) => {
  console.error("Failed to start MCP Server:", err);
  process.exit(1);
});

View File

@@ -0,0 +1,38 @@
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
/**
 * Manual smoke test for the Kabelfachmann MCP server.
 *
 * Connects over SSE to localhost:3007, lists the advertised tools, fires a
 * single `ask_kabelfachmann` query and prints the raw result.
 */
async function main() {
  console.log("Connecting to Kabelfachmann MCP on localhost:3007/sse...");
  const transport = new SSEClientTransport(
    new URL("http://localhost:3007/sse"),
  );
  const client = new Client(
    { name: "test-client", version: "1.0.0" },
    { capabilities: {} },
  );
  await client.connect(transport);
  console.log("Connected! Requesting tools...");

  const tools = await client.listTools();
  console.log(
    "Available tools:",
    tools.tools.map((t) => t.name),
  );

  console.log("Calling ask_kabelfachmann...");
  const result = await client.callTool({
    name: "ask_kabelfachmann",
    arguments: {
      query:
        "Was ist der Mindestbiegeradius von einem NYY-J 5x1,5 Kabel laut Handbuch?",
    },
  });
  console.log("\n--- RESULT ---");
  console.log(JSON.stringify(result, null, 2));
  process.exit(0);
}

// Fix: `main().catch(console.error)` previously swallowed failures and let
// the process exit 0, so scripts/CI could not detect a broken server.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true
},
"include": ["src/**/*"]
}

View File

@@ -4,126 +4,157 @@ import * as path from "node:path";
import * as os from "node:os";
async function getOrchestrator() {
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) {
throw new Error(
"Missing OPENROUTER_API_KEY in .env (Required for AI generation)",
);
}
const importDynamic = new Function("modulePath", "return import(modulePath)");
const { AiBlogPostOrchestrator } = await importDynamic(
"@mintel/content-engine",
if (!OPENROUTER_KEY) {
throw new Error(
"Missing OPENROUTER_API_KEY in .env (Required for AI generation)",
);
}
return new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
replicateApiKey: REPLICATE_KEY,
model: "google/gemini-3-flash-preview",
});
const importDynamic = new Function("modulePath", "return import(modulePath)");
const { AiBlogPostOrchestrator } = await importDynamic(
"@mintel/content-engine",
);
return new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
replicateApiKey: REPLICATE_KEY,
model: "google/gemini-3-flash-preview",
});
}
export const generateSlugEndpoint = async (req: PayloadRequest) => {
try {
let body: any = {};
try {
const { title, draftContent, oldSlug, instructions } = (await req.json?.() || {}) as any;
const orchestrator = await getOrchestrator();
const newSlug = await orchestrator.generateSlug(
draftContent,
title,
instructions,
);
if (oldSlug && oldSlug !== newSlug) {
await req.payload.create({
collection: "redirects" as any,
data: {
from: oldSlug,
to: newSlug,
},
});
}
return Response.json({ success: true, slug: newSlug });
} catch (e: any) {
return Response.json({ success: false, error: e.message }, { status: 500 });
if (req.body) body = (await req.json?.()) || {};
} catch {
/* ignore */
}
}
const { title, draftContent, oldSlug, instructions } = body;
const orchestrator = await getOrchestrator();
const newSlug = await orchestrator.generateSlug(
draftContent,
title,
instructions,
);
if (oldSlug && oldSlug !== newSlug) {
await req.payload.create({
collection: "redirects" as any,
data: {
from: oldSlug,
to: newSlug,
},
});
}
return Response.json({ success: true, slug: newSlug });
} catch (e: any) {
return Response.json({ success: false, error: e.message }, { status: 500 });
}
};
export const generateThumbnailEndpoint = async (req: PayloadRequest) => {
try {
let body: any = {};
try {
const { draftContent, title, instructions } = (await req.json?.() || {}) as any;
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY in .env");
if (!REPLICATE_KEY) throw new Error("Missing REPLICATE_API_KEY in .env");
const importDynamic = new Function("modulePath", "return import(modulePath)");
const { AiBlogPostOrchestrator } = await importDynamic("@mintel/content-engine");
const { ThumbnailGenerator } = await importDynamic("@mintel/thumbnail-generator");
const orchestrator = new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
replicateApiKey: REPLICATE_KEY,
model: "google/gemini-3-flash-preview",
});
const tg = new ThumbnailGenerator({ replicateApiKey: REPLICATE_KEY });
const prompt = await orchestrator.generateVisualPrompt(
draftContent || title || "Technology",
instructions,
);
const tmpPath = path.join(os.tmpdir(), `mintel-thumb-${Date.now()}.png`);
await tg.generateImage(prompt, tmpPath);
const fileData = await fs.readFile(tmpPath);
const stat = await fs.stat(tmpPath);
const fileName = path.basename(tmpPath);
const newMedia = await req.payload.create({
collection: "media" as any,
data: {
alt: title ? `Thumbnail for ${title}` : "AI Generated Thumbnail",
},
file: {
data: fileData,
name: fileName,
mimetype: "image/png",
size: stat.size,
},
});
await fs.unlink(tmpPath).catch(() => { });
return Response.json({ success: true, mediaId: newMedia.id });
} catch (e: any) {
return Response.json({ success: false, error: e.message }, { status: 500 });
if (req.body) body = (await req.json?.()) || {};
} catch {
/* ignore */
}
}
const { draftContent, title, instructions } = body;
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY in .env");
if (!REPLICATE_KEY) throw new Error("Missing REPLICATE_API_KEY in .env");
const importDynamic = new Function(
"modulePath",
"return import(modulePath)",
);
const { AiBlogPostOrchestrator } = await importDynamic(
"@mintel/content-engine",
);
const { ThumbnailGenerator } = await importDynamic(
"@mintel/thumbnail-generator",
);
const orchestrator = new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
replicateApiKey: REPLICATE_KEY,
model: "google/gemini-3-flash-preview",
});
const tg = new ThumbnailGenerator({ replicateApiKey: REPLICATE_KEY });
const prompt = await orchestrator.generateVisualPrompt(
draftContent || title || "Technology",
instructions,
);
const tmpPath = path.join(os.tmpdir(), `mintel-thumb-${Date.now()}.png`);
await tg.generateImage(prompt, tmpPath);
const fileData = await fs.readFile(tmpPath);
const stat = await fs.stat(tmpPath);
const fileName = path.basename(tmpPath);
const newMedia = await req.payload.create({
collection: "media" as any,
data: {
alt: title ? `Thumbnail for ${title}` : "AI Generated Thumbnail",
},
file: {
data: fileData,
name: fileName,
mimetype: "image/png",
size: stat.size,
},
});
await fs.unlink(tmpPath).catch(() => {});
return Response.json({ success: true, mediaId: newMedia.id });
} catch (e: any) {
return Response.json({ success: false, error: e.message }, { status: 500 });
}
};
export const generateSingleFieldEndpoint = async (req: PayloadRequest) => {
try {
let body: any = {};
try {
const { documentTitle, documentContent, fieldName, fieldDescription, instructions } = (await req.json?.() || {}) as any;
if (req.body) body = (await req.json?.()) || {};
} catch {
/* ignore */
}
const {
documentTitle,
documentContent,
fieldName,
fieldDescription,
instructions,
} = body;
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY");
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY");
const contextDocsData = await req.payload.find({
collection: "context-files" as any,
limit: 100,
});
const projectContext = contextDocsData.docs
.map((doc: any) => `--- ${doc.filename} ---\n${doc.content}`)
.join("\n\n");
const contextDocsData = await req.payload.find({
collection: "context-files" as any,
limit: 100,
});
const projectContext = contextDocsData.docs
.map((doc: any) => `--- ${doc.filename} ---\n${doc.content}`)
.join("\n\n");
const prompt = `You are an expert AI assistant perfectly trained for generating exact data values for CMS components.
const prompt = `You are an expert AI assistant perfectly trained for generating exact data values for CMS components.
PROJECT STRATEGY & CONTEXT:
${projectContext}
@@ -138,21 +169,21 @@ CRITICAL RULES:
3. If the field implies a diagram or flow, output RAW Mermaid.js code.
4. If it's standard text, write professional B2B German. No quotes, no conversational filler.`;
const res = await fetch("https://openrouter.ai/api/v1/chat/completions", {
method: "POST",
headers: {
Authorization: `Bearer ${OPENROUTER_KEY}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
model: "google/gemini-3-flash-preview",
messages: [{ role: "user", content: prompt }],
}),
});
const data = await res.json();
const text = data.choices?.[0]?.message?.content?.trim() || "";
return Response.json({ success: true, text });
} catch (e: any) {
return Response.json({ success: false, error: e.message }, { status: 500 });
}
}
const res = await fetch("https://openrouter.ai/api/v1/chat/completions", {
method: "POST",
headers: {
Authorization: `Bearer ${OPENROUTER_KEY}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
model: "google/gemini-3-flash-preview",
messages: [{ role: "user", content: prompt }],
}),
});
const data = await res.json();
const text = data.choices?.[0]?.message?.content?.trim() || "";
return Response.json({ success: true, text });
} catch (e: any) {
return Response.json({ success: false, error: e.message }, { status: 500 });
}
};

View File

@@ -1,75 +1,108 @@
import { PayloadRequest } from 'payload'
import { PayloadRequest } from "payload";
import { parseMarkdownToLexical } from "../utils/lexicalParser.js";
export const optimizePostEndpoint = async (req: PayloadRequest) => {
try {
let body: any = {};
try {
const { draftContent, instructions } = (await req.json?.() || {}) as { draftContent: string; instructions?: string };
if (!draftContent) {
return Response.json({ error: 'Missing draftContent' }, { status: 400 })
}
const globalAiSettings = (await req.payload.findGlobal({ slug: "ai-settings" })) as any;
const customSources =
globalAiSettings?.customSources?.map((s: any) => s.sourceName) || [];
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) {
return Response.json({ error: "OPENROUTER_KEY not found in environment." }, { status: 500 })
}
// Dynamically import to avoid bundling it into client components that might accidentally import this file
const importDynamic = new Function("modulePath", "return import(modulePath)");
const { AiBlogPostOrchestrator } = await importDynamic("@mintel/content-engine");
const orchestrator = new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
replicateApiKey: REPLICATE_KEY,
model: "google/gemini-3-flash-preview",
});
const contextDocsData = await req.payload.find({
collection: "context-files" as any,
limit: 100,
});
const projectContext = contextDocsData.docs.map((doc: any) => doc.content);
const optimizedMarkdown = await orchestrator.optimizeDocument({
content: draftContent,
projectContext,
availableComponents: [],
instructions,
internalLinks: [],
customSources,
});
if (!optimizedMarkdown || typeof optimizedMarkdown !== "string") {
return Response.json({ error: "AI returned invalid markup." }, { status: 500 })
}
const blocks = parseMarkdownToLexical(optimizedMarkdown);
return Response.json({
success: true,
lexicalAST: {
root: {
type: "root",
format: "",
indent: 0,
version: 1,
children: blocks,
direction: "ltr",
},
},
})
} catch (error: any) {
console.error("Failed to optimize post in endpoint:", error);
return Response.json({
success: false,
error: error.message || "An unknown error occurred during optimization.",
}, { status: 500 })
if (req.body) {
// req.json() acts as a method in Next.js/Payload req wrapper
body = (await req.json?.()) || {};
}
} catch (e) {
// Ignore JSON parse error, body remains empty
}
}
const { draftContent, instructions } = body as {
draftContent?: string;
instructions?: string;
};
if (!draftContent) {
return Response.json(
{ success: false, error: "Missing draftContent" },
{ status: 400 },
);
}
const globalAiSettings = (await req.payload.findGlobal({
slug: "ai-settings",
})) as any;
const customSources =
globalAiSettings?.customSources?.map((s: any) => s.sourceName) || [];
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) {
return Response.json(
{ error: "OPENROUTER_KEY not found in environment." },
{ status: 500 },
);
}
// Dynamically import to avoid bundling it into client components that might accidentally import this file
const importDynamic = new Function(
"modulePath",
"return import(modulePath)",
);
const { AiBlogPostOrchestrator } = await importDynamic(
"@mintel/content-engine",
);
const orchestrator = new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
replicateApiKey: REPLICATE_KEY,
model: "google/gemini-3-flash-preview",
});
const contextDocsData = await req.payload.find({
collection: "context-files" as any,
limit: 100,
});
const projectContext = contextDocsData.docs.map((doc: any) => doc.content);
const optimizedMarkdown = await orchestrator.optimizeDocument({
content: draftContent,
projectContext,
availableComponents: [],
instructions,
internalLinks: [],
customSources,
});
if (!optimizedMarkdown || typeof optimizedMarkdown !== "string") {
return Response.json(
{ error: "AI returned invalid markup." },
{ status: 500 },
);
}
const blocks = parseMarkdownToLexical(optimizedMarkdown);
return Response.json({
success: true,
lexicalAST: {
root: {
type: "root",
format: "",
indent: 0,
version: 1,
children: blocks,
direction: "ltr",
},
},
});
} catch (error: any) {
console.error("Failed to optimize post in endpoint:", error);
return Response.json(
{
success: false,
error:
error.message || "An unknown error occurred during optimization.",
},
{ status: 500 },
);
}
};