feat(ai-search): add interactive WebGL Orb, Markdown support, and Sentry tracking
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 11s
Build & Deploy / 🧪 QA (push) Successful in 1m18s
Build & Deploy / 🚀 Deploy (push) Has been cancelled
Build & Deploy / 🧪 Post-Deploy Verification (push) Has been cancelled
Build & Deploy / 🔔 Notify (push) Has been cancelled
Build & Deploy / 🏗️ Build (push) Has been cancelled
CI - Lint, Typecheck & Test / quality-assurance (pull_request) Failing after 3m55s
This commit is contained in:
@@ -1,138 +1,157 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
import { NextResponse, NextRequest } from 'next/server'; // Added NextRequest
|
||||
import { searchProducts } from '../../../src/lib/qdrant';
|
||||
import redis from '../../../src/lib/redis';
|
||||
import { z } from 'zod';
|
||||
|
||||
import * as Sentry from '@sentry/nextjs';
|
||||
// Config and constants
|
||||
const RATE_LIMIT_POINTS = 5; // 5 requests
|
||||
const RATE_LIMIT_DURATION = 60 * 1; // per 1 minute
|
||||
|
||||
const requestSchema = z.object({
|
||||
query: z.string().min(1).max(500),
|
||||
_honeypot: z.string().max(0).optional(), // Honeypot trap: must be empty
|
||||
});
|
||||
// Removed requestSchema as it's replaced by direct parsing
|
||||
|
||||
export async function POST(req: Request) {
|
||||
export async function POST(req: NextRequest) {
|
||||
// Changed req type to NextRequest
|
||||
try {
|
||||
const { messages, visitorId, honeypot } = await req.json();
|
||||
|
||||
// 1. Basic Validation
|
||||
if (!messages || !Array.isArray(messages) || messages.length === 0) {
|
||||
return NextResponse.json({ error: 'Valid messages array is required' }, { status: 400 });
|
||||
}
|
||||
|
||||
const latestMessage = messages[messages.length - 1].content;
|
||||
const isBot = honeypot && honeypot.length > 0;
|
||||
|
||||
// Check if the input itself is obviously spam/too long
|
||||
if (latestMessage.length > 500) {
|
||||
return NextResponse.json({ error: 'Message too long' }, { status: 400 });
|
||||
}
|
||||
|
||||
// 2. Honeypot check
|
||||
if (isBot) {
|
||||
console.warn('Honeypot triggered in AI search');
|
||||
// Tarpit the bot
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
return NextResponse.json({
|
||||
answerText: 'Vielen Dank für Ihre Anfrage.',
|
||||
products: [],
|
||||
});
|
||||
}
|
||||
|
||||
// 3. Rate Limiting via Redis
|
||||
try {
|
||||
// 1. IP extraction for Rate Limiting
|
||||
const forwardedFor = req.headers.get('x-forwarded-for');
|
||||
const realIp = req.headers.get('x-real-ip');
|
||||
const ip = forwardedFor?.split(',')[0] || realIp || 'anon';
|
||||
const rateLimitKey = `rate_limit:ai_search:${ip}`;
|
||||
|
||||
// Redis Rate Limiting
|
||||
try {
|
||||
const current = await redis.incr(rateLimitKey);
|
||||
if (current === 1) {
|
||||
await redis.expire(rateLimitKey, RATE_LIMIT_DURATION);
|
||||
}
|
||||
if (current > RATE_LIMIT_POINTS) {
|
||||
return NextResponse.json({ error: 'Rate limit exceeded. Try again later.' }, { status: 429 });
|
||||
}
|
||||
} catch (redisError) {
|
||||
console.warn('Redis error during rate limiting:', redisError);
|
||||
// Fallback: proceed if Redis is down, to maintain availability
|
||||
if (visitorId) {
|
||||
const requestCount = await redis.incr(`ai_search_rate_limit:${visitorId}`);
|
||||
if (requestCount === 1) {
|
||||
await redis.expire(`ai_search_rate_limit:${visitorId}`, RATE_LIMIT_DURATION); // Use constant
|
||||
}
|
||||
|
||||
// 2. Validate request
|
||||
const json = await req.json().catch(() => ({}));
|
||||
const parseResult = requestSchema.safeParse(json);
|
||||
|
||||
if (!parseResult.success) {
|
||||
return NextResponse.json({ error: 'Invalid request' }, { status: 400 });
|
||||
if (requestCount > RATE_LIMIT_POINTS) {
|
||||
// Use constant
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Rate limit exceeded. Please try again later.',
|
||||
},
|
||||
{ status: 429 },
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (redisError) {
|
||||
// Renamed variable for clarity
|
||||
console.error('Redis Rate Limiting Error:', redisError); // Changed to error for consistency
|
||||
Sentry.captureException(redisError, { tags: { context: 'ai-search-rate-limit' } });
|
||||
// Fail open if Redis is down
|
||||
}
|
||||
|
||||
const { query, _honeypot } = parseResult.data;
|
||||
// 4. Fetch Context from Qdrant based on the latest message
|
||||
let contextStr = '';
|
||||
let foundProducts: any[] = [];
|
||||
|
||||
// 3. Honeypot check
|
||||
// If the honeypot field has any content, this is a bot.
|
||||
if (_honeypot && _honeypot.length > 0) {
|
||||
// Return a fake success mask
|
||||
return NextResponse.json({ answer: 'Searching...' }, { status: 200 });
|
||||
}
|
||||
try {
|
||||
const searchResults = await searchProducts(latestMessage, 5);
|
||||
|
||||
// 4. Qdrant Context Retrieval
|
||||
const searchResults = await searchProducts(query, 5);
|
||||
if (searchResults && searchResults.length > 0) {
|
||||
const productDescriptions = searchResults
|
||||
.filter((p) => p.payload?.type === 'product' || !p.payload?.type)
|
||||
.map((p: any) => p.payload?.content)
|
||||
.join('\n\n');
|
||||
|
||||
// Build context block
|
||||
const contextText = searchResults.map((res: any) => {
|
||||
const payload = res.payload;
|
||||
return `Product ID: ${payload?.id}
|
||||
Name: ${payload?.title}
|
||||
SKU: ${payload?.sku}
|
||||
Description: ${payload?.description}
|
||||
Slug: ${payload?.slug}
|
||||
---`;
|
||||
}).join('\n');
|
||||
const knowledgeDescriptions = searchResults
|
||||
.filter((p) => p.payload?.type === 'knowledge')
|
||||
.map((p: any) => p.payload?.content)
|
||||
.join('\n\n');
|
||||
|
||||
// 5. OpenRouter Integration (gemini-3-flash-preview)
|
||||
const openRouterKey = process.env.OPENROUTER_API_KEY;
|
||||
if (!openRouterKey) {
|
||||
return NextResponse.json({ error: 'Server configuration error' }, { status: 500 });
|
||||
}
|
||||
contextStr = `KATALOG & PRODUKTE:\n${productDescriptions}\n\nKABELWISSEN (Handbuch):\n${knowledgeDescriptions}`;
|
||||
|
||||
const systemPrompt = `You are the KLZ Cables AI Search Assistant, an intelligent, helpful, and highly specialized assistant strictly for the KLZ Cables website.
|
||||
Your primary goal is to help users find the correct industrial cables and products based ONLY on the context provided.
|
||||
Follow these strict rules:
|
||||
1. ONLY answer questions related to products, search queries, cables, or industrial electronics.
|
||||
2. If the user asks a question entirely unrelated to products or the company (e.g., "What is the capital of France?", "Write a poem", "What is 2+2?"), REFUSE to answer it. Instead, reply with a funny, sarcastic, or humorous comment about how you only know about cables and wires.
|
||||
3. Base your product answers strictly on the CONTEXT provided below. Do not hallucinate products.
|
||||
4. Output your response as a valid JSON object matching this schema exactly, do not use Markdown codeblocks, output RAW JSON:
|
||||
{
|
||||
"answerText": "A friendly description or answer based on the search.",
|
||||
"products": [
|
||||
{ "id": "Context Product ID", "title": "Product Title", "sku": "Product SKU", "slug": "slug" }
|
||||
]
|
||||
}
|
||||
foundProducts = searchResults
|
||||
.filter((p) => (p.payload?.type === 'product' || !p.payload?.type) && p.payload?.data)
|
||||
.map((p: any) => p.payload?.data);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Qdrant Search Error:', e);
|
||||
Sentry.captureException(e, { tags: { context: 'ai-search-qdrant' } });
|
||||
// We can still proceed without context if Qdrant fails
|
||||
}
|
||||
|
||||
If you find relevant products in the context, add them to the "products" array. If no products match, use an empty array.
|
||||
// 5. Generate AI Response via OpenRouter (Mistral for DSGVO)
|
||||
const systemPrompt = `Du bist ein professioneller und extrem kompetenter Sales-Engineer / Consultant der Firma "KLZ Cables".
|
||||
Deine Aufgabe ist es, Kunden und Interessenten bei der Auswahl von Mittelspannungskabeln, Starkstromkabeln und Infrastrukturausrüstung beratend zur Seite zu stehen.
|
||||
|
||||
CONTEXT:
|
||||
${contextText}
|
||||
WICHTIGE REGELN:
|
||||
1. ANTWORTE IMMER IN DER SPRACHE DES BENUTZERS. Wenn der Benutzer Deutsch spricht, antworte auf Deutsch.
|
||||
2. Wenn der Kunde vage ist (z.B. "Ich will einen Windpark bauen"), würge ihn NICHT ab. Stelle stattdessen gezielte, professionelle Rückfragen als Berater (z.B. "Für einen Windpark benötigen wir einige Rahmendaten: Reden wir über die Parkverkabelung (Mittelspannung, z.B. 20kV oder 33kV) oder die Netzanbindung? Welche Querschnitte oder Ströme erwarten Sie?").
|
||||
3. Nutze das bereitgestellte KABELWISSEN und KATALOG-Gedächtnis unten, um deine Antworten zu fundieren.
|
||||
4. Bleibe stets professionell, lösungsorientiert und leicht technisch (Industrial Aesthetic). Du kannst humorvoll sein, wenn der Nutzer offensichtlich Quatsch fragt, aber lenke es immer elegant zurück zu Kabeln oder Energieinfrastruktur.
|
||||
5. Antworte in reinem Text (kein Markdown für die Antwort, es sei denn es sind einfache Absätze oder Listen).
|
||||
6. Wenn genügend Informationen vorhanden sind, präsentiere passende Kabel aus dem Katalog.
|
||||
7. Oute dich als Berater von KLZ Cables.
|
||||
|
||||
VERFÜGBARER KONTEXT:
|
||||
${contextStr ? contextStr : 'Keine spezifischen Katalogdaten für diese Anfrage gefunden.'}
|
||||
`;
|
||||
|
||||
const response = await fetch('https://openrouter.ai/api/v1/chat/completions', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${openRouterKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
'HTTP-Referer': process.env.NEXT_PUBLIC_BASE_URL || 'https://klz-cables.com',
|
||||
'X-Title': 'KLZ Cables Search AI',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: 'google/gemini-3-flash-preview',
|
||||
messages: [
|
||||
{ role: 'system', content: systemPrompt },
|
||||
{ role: 'user', content: query }
|
||||
],
|
||||
response_format: { type: "json_object" }
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorBody = await response.text();
|
||||
throw new Error(`OpenRouter error: ${response.status} ${errorBody}`);
|
||||
}
|
||||
|
||||
const completion = await response.json();
|
||||
const rawContent = completion.choices?.[0]?.message?.content;
|
||||
|
||||
let answerJson;
|
||||
try {
|
||||
// Remove any potential markdown json block markers
|
||||
const sanitizedObjStr = rawContent.replace(/^```json\s*/, '').replace(/\s*```$/, '');
|
||||
answerJson = JSON.parse(sanitizedObjStr);
|
||||
} catch (parseError) {
|
||||
console.error('Failed to parse AI response:', rawContent);
|
||||
answerJson = {
|
||||
answerText: rawContent || "Sorry, I had trouble thinking about cables right now.",
|
||||
products: []
|
||||
};
|
||||
}
|
||||
|
||||
return NextResponse.json(answerJson);
|
||||
} catch (error) {
|
||||
console.error('AI Search API Error:', error);
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 });
|
||||
const openRouterKey = process.env.OPENROUTER_API_KEY;
|
||||
if (!openRouterKey) {
|
||||
throw new Error('OPENROUTER_API_KEY is not set');
|
||||
}
|
||||
|
||||
const fetchRes = await fetch('https://openrouter.ai/api/v1/chat/completions', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${openRouterKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
'HTTP-Referer': process.env.NEXT_PUBLIC_BASE_URL || 'https://klz-cables.com',
|
||||
'X-Title': 'KLZ Cables Search AI',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: 'mistralai/mistral-large-2407',
|
||||
temperature: 0.3,
|
||||
messages: [
|
||||
{ role: 'system', content: systemPrompt },
|
||||
...messages.map((m: any) => ({
|
||||
role: m.role,
|
||||
content: typeof m.content === 'string' ? m.content : JSON.stringify(m.content),
|
||||
})),
|
||||
],
|
||||
}),
|
||||
});
|
||||
|
||||
if (!fetchRes.ok) {
|
||||
const errBody = await fetchRes.text();
|
||||
throw new Error(`OpenRouter API Error: ${errBody}`);
|
||||
}
|
||||
|
||||
const data = await fetchRes.json();
|
||||
const text = data.choices[0].message.content;
|
||||
|
||||
// Return the AI's answer along with any found products
|
||||
return NextResponse.json({
|
||||
answerText: text,
|
||||
products: foundProducts,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('AI Search API Error:', error);
|
||||
Sentry.captureException(error, { tags: { context: 'ai-search-api' } });
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user