Compare commits
33 Commits
a9adb2eff7
...
v1.8.21
| Author | SHA1 | Date | |
|---|---|---|---|
| efd1341762 | |||
| 36a952db56 | |||
| 8c637f0220 | |||
| 6dd97e7a6b | |||
| 9f426470bb | |||
| 960914ebb8 | |||
| a55a5bb834 | |||
| 0aaf858f5b | |||
| ec562c1b2c | |||
| 02e15c3f4a | |||
| cd4c2193ce | |||
| df7a464e03 | |||
| e2e0653de6 | |||
| 590ae6f69b | |||
| 2a169f1dfc | |||
| 1bbe89c879 | |||
| 554ca81c9b | |||
| aac0fe81b9 | |||
| ada1e9c717 | |||
| 4d295d10d1 | |||
| c00f4e5ea5 | |||
| 5f7a254fcb | |||
| 21c0c778f9 | |||
| 4f6d62a85c | |||
| 7d9604a65a | |||
| b3d089ac6d | |||
| baecc9c83c | |||
| d5632b009a | |||
| 90a9e34c7e | |||
| 99f040cfb0 | |||
| 02bffbc67f | |||
| f4507ef121 | |||
| 3a1a88db89 |
@@ -1,14 +1,26 @@
|
||||
node_modules
|
||||
**/node_modules
|
||||
.next
|
||||
**/.next
|
||||
.git
|
||||
# .npmrc is allowed as it contains the registry template
|
||||
dist
|
||||
**/dist
|
||||
build
|
||||
**/build
|
||||
out
|
||||
**/out
|
||||
coverage
|
||||
**/coverage
|
||||
.vercel
|
||||
**/.vercel
|
||||
.turbo
|
||||
**/.turbo
|
||||
*.log
|
||||
**/*.log
|
||||
.DS_Store
|
||||
**/.DS_Store
|
||||
.pnpm-store
|
||||
**/.pnpm-store
|
||||
.gitea
|
||||
**/.gitea
|
||||
|
||||
2
.env
2
.env
@@ -1,5 +1,5 @@
|
||||
# Project
|
||||
IMAGE_TAG=v1.8.10
|
||||
IMAGE_TAG=v1.8.19
|
||||
PROJECT_NAME=at-mintel
|
||||
PROJECT_COLOR=#82ed20
|
||||
GITEA_TOKEN=ccce002e30fe16a31a6c9d5a414740af2f72a582
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# Project
|
||||
IMAGE_TAG=v1.8.10
|
||||
IMAGE_TAG=v1.8.21
|
||||
PROJECT_NAME=sample-website
|
||||
PROJECT_COLOR=#82ed20
|
||||
|
||||
|
||||
@@ -192,6 +192,9 @@ jobs:
|
||||
- image: directus
|
||||
file: packages/infra/docker/Dockerfile.directus
|
||||
name: Directus (Base)
|
||||
- image: image-processor
|
||||
file: apps/image-service/Dockerfile
|
||||
name: Image Processor
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
@@ -211,7 +214,7 @@ jobs:
|
||||
with:
|
||||
context: .
|
||||
file: ${{ matrix.file }}
|
||||
platforms: linux/arm64
|
||||
platforms: linux/amd64
|
||||
pull: true
|
||||
provenance: false
|
||||
push: true
|
||||
|
||||
@@ -35,8 +35,9 @@ do
|
||||
# Push the updated tag directly (using --no-verify to avoid recursion)
|
||||
git push origin "$TAG" --force --no-verify
|
||||
|
||||
echo "✨ All done! Hook integrated the sync and pushed for you."
|
||||
exit 1 # Still exit 1 to abort the original (now outdated) push attempt
|
||||
echo "✨ Success! The hook synchronized the versions and pushed the updated tag for you."
|
||||
echo "ℹ️ Note: The original push command was aborted in favor of the auto-push. This is normal."
|
||||
exit 0 # Change to exit 0 to not show as an 'error' in vscode/terminal, though original push will still be technically 'failed' by git
|
||||
else
|
||||
echo "✨ Versions already in sync for $TAG."
|
||||
fi
|
||||
|
||||
@@ -81,3 +81,4 @@ Client websites scaffolded via the CLI use a **tag-based deployment** strategy:
|
||||
|
||||
See the [`@mintel/infra`](packages/infra/README.md) package for detailed template documentation.
|
||||
|
||||
Trigger rebuilding for x86 architecture.
|
||||
|
||||
40
apps/image-service/Dockerfile
Normal file
40
apps/image-service/Dockerfile
Normal file
@@ -0,0 +1,40 @@
|
||||
FROM node:20.18-bookworm-slim AS base
|
||||
ENV PNPM_HOME="/pnpm"
|
||||
ENV PATH="$PNPM_HOME:$PATH"
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
python3 \
|
||||
libcairo2-dev \
|
||||
libpango1.0-dev \
|
||||
libjpeg-dev \
|
||||
libgif-dev \
|
||||
librsvg2-dev \
|
||||
libexpat1 \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
RUN npm install -g pnpm@10.30.1
|
||||
|
||||
FROM base AS build
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
# We only need standard pnpm install now, no C++ tools needed for basic Sharp
|
||||
RUN pnpm install --frozen-lockfile
|
||||
RUN pnpm --filter @mintel/image-processor build
|
||||
RUN pnpm --filter image-service build
|
||||
|
||||
FROM base
|
||||
WORKDIR /app
|
||||
# Instead of copying node_modules which contains native C++ bindings for canvas and tfjs-node,
|
||||
# we copy the package.json files and install natively in the final stage so the bindings are correct.
|
||||
COPY package.json pnpm-workspace.yaml pnpm-lock.yaml ./
|
||||
COPY apps/image-service/package.json ./apps/image-service/package.json
|
||||
COPY packages/image-processor/package.json ./packages/image-processor/package.json
|
||||
|
||||
RUN pnpm install --frozen-lockfile --filter image-service...
|
||||
|
||||
COPY --from=build /app/apps/image-service/dist ./apps/image-service/dist
|
||||
COPY --from=build /app/packages/image-processor/dist ./packages/image-processor/dist
|
||||
COPY --from=build /app/packages/image-processor/models ./packages/image-processor/models
|
||||
|
||||
EXPOSE 8080
|
||||
WORKDIR /app/apps/image-service
|
||||
CMD ["npm", "run", "start"]
|
||||
23
apps/image-service/package.json
Normal file
23
apps/image-service/package.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "image-service",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js",
|
||||
"lint": "eslint src"
|
||||
},
|
||||
"dependencies": {
|
||||
"@mintel/image-processor": "workspace:*",
|
||||
"fastify": "^4.26.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mintel/eslint-config": "workspace:*",
|
||||
"@mintel/tsconfig": "workspace:*",
|
||||
"@types/node": "^20.0.0",
|
||||
"tsx": "^4.7.1",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
109
apps/image-service/src/index.ts
Normal file
109
apps/image-service/src/index.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import Fastify from "fastify";
|
||||
import {
|
||||
processImageWithSmartCrop,
|
||||
parseImgproxyOptions,
|
||||
mapUrl,
|
||||
} from "@mintel/image-processor";
|
||||
|
||||
const fastify = Fastify({
|
||||
logger: true,
|
||||
});
|
||||
|
||||
fastify.get("/unsafe/:options/:urlSafeB64", async (request, reply) => {
|
||||
const { options, urlSafeB64 } = request.params as {
|
||||
options: string;
|
||||
urlSafeB64: string;
|
||||
};
|
||||
|
||||
// urlSafeB64 might be "plain/http://..." or a Base64 string
|
||||
let url = "";
|
||||
if (urlSafeB64.startsWith("plain/")) {
|
||||
url = urlSafeB64.substring(6);
|
||||
} else {
|
||||
try {
|
||||
url = Buffer.from(urlSafeB64, "base64").toString("utf-8");
|
||||
} catch (e) {
|
||||
return reply.status(400).send({ error: "Invalid Base64 URL" });
|
||||
}
|
||||
}
|
||||
|
||||
const parsedOptions = parseImgproxyOptions(options);
|
||||
const mappedUrl = mapUrl(url, process.env.IMGPROXY_URL_MAPPING);
|
||||
|
||||
return handleProcessing(mappedUrl, parsedOptions, reply);
|
||||
});
|
||||
|
||||
// Helper to avoid duplication
|
||||
async function handleProcessing(url: string, options: any, reply: any) {
|
||||
const width = options.width || 800;
|
||||
const height = options.height || 600;
|
||||
const quality = options.quality || 80;
|
||||
const format = options.format || "webp";
|
||||
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) {
|
||||
return reply.status(response.status).send({
|
||||
error: `Failed to fetch source image: ${response.statusText}`,
|
||||
});
|
||||
}
|
||||
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const processedBuffer = await processImageWithSmartCrop(buffer, {
|
||||
width,
|
||||
height,
|
||||
format,
|
||||
quality,
|
||||
});
|
||||
|
||||
reply.header("Content-Type", `image/${format}`);
|
||||
reply.header("Cache-Control", "public, max-age=31536000, immutable");
|
||||
return reply.send(processedBuffer);
|
||||
} catch (err) {
|
||||
fastify.log.error(err);
|
||||
return reply
|
||||
.status(500)
|
||||
.send({ error: "Internal Server Error processing image" });
|
||||
}
|
||||
}
|
||||
|
||||
fastify.get("/process", async (request, reply) => {
|
||||
const query = request.query as {
|
||||
url?: string;
|
||||
w?: string;
|
||||
h?: string;
|
||||
q?: string;
|
||||
format?: string;
|
||||
};
|
||||
|
||||
const { url } = query;
|
||||
const width = parseInt(query.w || "800", 10);
|
||||
const height = parseInt(query.h || "600", 10);
|
||||
const quality = parseInt(query.q || "80", 10);
|
||||
const format = (query.format || "webp") as any;
|
||||
|
||||
if (!url) {
|
||||
return reply.status(400).send({ error: 'Parameter "url" is required' });
|
||||
}
|
||||
|
||||
const mappedUrl = mapUrl(url, process.env.IMGPROXY_URL_MAPPING);
|
||||
return handleProcessing(mappedUrl, { width, height, quality, format }, reply);
|
||||
});
|
||||
|
||||
fastify.get("/health", async () => {
|
||||
return { status: "ok" };
|
||||
});
|
||||
|
||||
const start = async () => {
|
||||
try {
|
||||
await fastify.listen({ port: 8080, host: "0.0.0.0" });
|
||||
console.log(`Server listening on 8080`);
|
||||
} catch (err) {
|
||||
fastify.log.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
start();
|
||||
11
apps/image-service/tsconfig.json
Normal file
11
apps/image-service/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"extends": "@mintel/tsconfig/base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"noEmit": false
|
||||
},
|
||||
"include": ["src/**/*"]
|
||||
}
|
||||
@@ -1,6 +1,13 @@
|
||||
import mintelNextConfig from "@mintel/next-config";
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {};
|
||||
const nextConfig = {
|
||||
serverExternalPackages: [
|
||||
"@mintel/image-processor",
|
||||
"@tensorflow/tfjs-node",
|
||||
"sharp",
|
||||
"canvas",
|
||||
],
|
||||
};
|
||||
|
||||
export default mintelNextConfig(nextConfig);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "sample-website",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -18,6 +18,7 @@
|
||||
"@mintel/next-utils": "workspace:*",
|
||||
"@mintel/observability": "workspace:*",
|
||||
"@mintel/next-observability": "workspace:*",
|
||||
"@mintel/image-processor": "workspace:*",
|
||||
"@sentry/nextjs": "10.38.0",
|
||||
"next": "16.1.6",
|
||||
"next-intl": "^4.8.2",
|
||||
|
||||
60
apps/sample-website/src/app/api/image/route.ts
Normal file
60
apps/sample-website/src/app/api/image/route.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
export const runtime = "nodejs";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const { searchParams } = new URL(request.url);
|
||||
const url = searchParams.get("url");
|
||||
const width = parseInt(searchParams.get("w") || "800");
|
||||
const height = parseInt(searchParams.get("h") || "600");
|
||||
const q = parseInt(searchParams.get("q") || "80");
|
||||
|
||||
if (!url) {
|
||||
return NextResponse.json(
|
||||
{ error: "Missing url parameter" },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
// 1. Fetch image from original URL
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: "Failed to fetch original image" },
|
||||
{ status: response.status },
|
||||
);
|
||||
}
|
||||
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
// Dynamically import to prevent Next.js from trying to bundle tfjs-node/sharp locally at build time
|
||||
const { processImageWithSmartCrop } =
|
||||
await import("@mintel/image-processor");
|
||||
|
||||
// 2. Process image with Face-API and Sharp
|
||||
const processedBuffer = await processImageWithSmartCrop(buffer, {
|
||||
width,
|
||||
height,
|
||||
format: "webp",
|
||||
quality: q,
|
||||
});
|
||||
|
||||
// 3. Return the processed image
|
||||
return new NextResponse(new Uint8Array(processedBuffer), {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "image/webp",
|
||||
"Cache-Control": "public, max-age=31536000, immutable",
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Image Processing Error:", error);
|
||||
return NextResponse.json(
|
||||
{ error: "Failed to process image" },
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
}
|
||||
1
models/tiny_face_detector_model-shard1
Normal file
1
models/tiny_face_detector_model-shard1
Normal file
@@ -0,0 +1 @@
|
||||
404: Not Found
|
||||
30
models/tiny_face_detector_model-weights_manifest.json
Normal file
30
models/tiny_face_detector_model-weights_manifest.json
Normal file
@@ -0,0 +1,30 @@
|
||||
[
|
||||
{
|
||||
"weights":
|
||||
[
|
||||
{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},
|
||||
{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},
|
||||
{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},
|
||||
{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},
|
||||
{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},
|
||||
{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},
|
||||
{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},
|
||||
{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},
|
||||
{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},
|
||||
{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},
|
||||
{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},
|
||||
{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},
|
||||
{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},
|
||||
{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},
|
||||
{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},
|
||||
{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},
|
||||
{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},
|
||||
{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},
|
||||
{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}
|
||||
],
|
||||
"paths":
|
||||
[
|
||||
"tiny_face_detector_model.bin"
|
||||
]
|
||||
}
|
||||
]
|
||||
14
optimize-images.sh
Normal file
14
optimize-images.sh
Normal file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Ghost Image Optimizer
|
||||
# Target directory for Ghost content
|
||||
TARGET_DIR="/home/deploy/sites/marisas.world/content/images"
|
||||
|
||||
echo "Starting image optimization for $TARGET_DIR..."
|
||||
|
||||
# Find all original images, excluding the 'size/' directory where Ghost stores thumbnails
|
||||
# Resize images larger than 2500px down to 2500px width
|
||||
# Compress JPEG/PNG to 80% quality
|
||||
find "$TARGET_DIR" -type d -name "size" -prune -o \( -iname "*.jpg" -o -iname "*.jpeg" -o -iname "*.png" \) -type f -exec mogrify -resize '2500x>' -quality 80 {} +
|
||||
|
||||
echo "Optimization complete."
|
||||
16
package.json
16
package.json
@@ -57,11 +57,23 @@
|
||||
"pino-pretty": "^13.1.3",
|
||||
"require-in-the-middle": "^8.0.1"
|
||||
},
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
"@parcel/watcher",
|
||||
"@sentry/cli",
|
||||
"@swc/core",
|
||||
"@tensorflow/tfjs-node",
|
||||
"canvas",
|
||||
"core-js",
|
||||
"esbuild",
|
||||
"sharp",
|
||||
"unrs-resolver",
|
||||
"vue-demi"
|
||||
],
|
||||
"overrides": {
|
||||
"next": "16.1.6",
|
||||
"@sentry/nextjs": "10.38.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "acquisition-manager",
|
||||
"description": "Custom High-Fidelity Management for Directus",
|
||||
"icon": "extension",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"keywords": [
|
||||
"directus",
|
||||
@@ -27,4 +27,4 @@
|
||||
"@directus/extensions-sdk": "11.0.2",
|
||||
"vue": "^3.4.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "acquisition",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"directus:extension": {
|
||||
"type": "endpoint",
|
||||
@@ -24,4 +24,4 @@
|
||||
"react": "^19.2.4",
|
||||
"react-dom": "^19.2.4"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/cli",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/cloner",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.js",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/cms-infra",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -15,4 +15,4 @@
|
||||
"sync:push": "../../scripts/sync-directus.sh push infra",
|
||||
"sync:pull": "../../scripts/sync-directus.sh pull infra"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "company-manager",
|
||||
"description": "Custom High-Fidelity Management for Directus",
|
||||
"icon": "extension",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"keywords": [
|
||||
"directus",
|
||||
@@ -27,4 +27,4 @@
|
||||
"@directus/extensions-sdk": "11.0.2",
|
||||
"vue": "^3.4.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
38
packages/concept-engine/package.json
Normal file
38
packages/concept-engine/package.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"name": "@mintel/concept-engine",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"description": "AI-powered web project concept generation and analysis",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"bin": {
|
||||
"concept": "./dist/cli.js"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup",
|
||||
"dev": "tsup --watch",
|
||||
"test": "vitest",
|
||||
"clean": "rm -rf dist",
|
||||
"lint": "eslint src --ext .ts",
|
||||
"concept": "tsx src/cli.ts run"
|
||||
},
|
||||
"dependencies": {
|
||||
"@crawlee/cheerio": "^3.11.2",
|
||||
"@mintel/journaling": "workspace:*",
|
||||
"@mintel/page-audit": "workspace:*",
|
||||
"axios": "^1.7.9",
|
||||
"cheerio": "1.0.0-rc.12",
|
||||
"commander": "^13.1.0",
|
||||
"dotenv": "^16.4.7",
|
||||
"zod": "^3.24.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.17.17",
|
||||
"tsup": "^8.3.6",
|
||||
"tsx": "^4.19.2",
|
||||
"typescript": "^5.7.3",
|
||||
"vitest": "^3.0.5"
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@mintel/content-engine",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"version": "1.8.21",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
@@ -20,6 +20,7 @@
|
||||
"dependencies": {
|
||||
"@mintel/journaling": "workspace:*",
|
||||
"@mintel/meme-generator": "workspace:*",
|
||||
"@mintel/thumbnail-generator": "workspace:*",
|
||||
"dotenv": "^17.3.1",
|
||||
"openai": "^4.82.0"
|
||||
},
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import OpenAI from "openai";
|
||||
import { ResearchAgent, Fact, SocialPost } from "@mintel/journaling";
|
||||
import { ResearchAgent, type Fact, type SocialPost } from "@mintel/journaling";
|
||||
import { MemeGenerator, MemeSuggestion } from "@mintel/meme-generator";
|
||||
import * as fs from "node:fs/promises";
|
||||
import * as path from "node:path";
|
||||
@@ -237,11 +237,21 @@ REGELN:
|
||||
console.log(` → ${factInsertions.length} fact enrichments planned`);
|
||||
}
|
||||
|
||||
// ----- STEP 1.5: Social Media Search -----
|
||||
console.log("📱 Identifying real social media posts...");
|
||||
const socialPosts = await this.researchAgent.findSocialPosts(
|
||||
content.substring(0, 200),
|
||||
);
|
||||
// ----- STEP 1.5: Social Media Extraction (no LLM — regex only) -----
|
||||
console.log("📱 Extracting existing social media embeds...");
|
||||
const socialPosts = this.researchAgent.extractSocialPosts(content);
|
||||
|
||||
// If none exist, fetch real ones via Serper API
|
||||
if (socialPosts.length === 0) {
|
||||
console.log(
|
||||
" → None found. Fetching real social posts via Serper API...",
|
||||
);
|
||||
const newPosts = await this.researchAgent.fetchRealSocialPosts(
|
||||
content.slice(0, 500),
|
||||
);
|
||||
socialPosts.push(...newPosts);
|
||||
}
|
||||
|
||||
if (socialPosts.length > 0) {
|
||||
console.log(
|
||||
`📝 Planning placement for ${socialPosts.length} social media posts...`,
|
||||
@@ -593,7 +603,7 @@ RULES:
|
||||
- youtube -> <YouTubeEmbed videoId="ID" />
|
||||
- twitter -> <TwitterEmbed tweetId="ID" theme="light" />
|
||||
- linkedin -> <LinkedInEmbed urn="ID" />
|
||||
- Add a 1-sentence intro paragraph above the embed to contextualize it.
|
||||
- Add a 1-sentence intro paragraph above the embed to contextualize it naturally in the flow of the text (e.g. "Wie Experte XY im folgenden Video detailliert erklärt:"). This context is MANDATORY. Do not just drop the Component without text reference.
|
||||
|
||||
CONTEXT:
|
||||
${context.slice(0, 3000)}
|
||||
@@ -842,6 +852,11 @@ Tone: ${tone}.
|
||||
Facts: ${factsContext}
|
||||
${componentsContext}
|
||||
|
||||
BLOG POST BEST PRACTICES (MANDATORY):
|
||||
- DEVIL'S ADVOCATE: Füge zwingend eine kurze kritische Sektion ein (z.B. mit \`<ComparisonRow>\` oder \`<IconList>\`), in der du offen die Nachteile/Kosten/Haken deiner eigenen Lösung ansprichst ("Der Haken an der Sache...").
|
||||
- FAQ GENERATOR: Am absoluten Ende des Artikels erstellst du zwingend eine Markdown-Liste mit den 3 wichtigsten Fragen (FAQ) und Antworten (jeweils 2 Sätze) für Google Rich Snippets.
|
||||
- Nutze wo passend die obigen React-Komponenten für ein hochwertiges Layout.
|
||||
|
||||
Format as Markdown. Start with # H1.
|
||||
For places where a diagram would help, insert: <!-- DIAGRAM_PLACEHOLDER: Concept Name -->
|
||||
Return ONLY raw content.`,
|
||||
@@ -891,6 +906,7 @@ RULES:
|
||||
- CRITICAL: Generate ONLY ONE single connected graph. Do NOT generate multiple independent graphs or isolated subgraphs in the same Mermaid block.
|
||||
- No nested subgraphs. Keep instructions short.
|
||||
- Use double-quoted labels for nodes: A["Label"]
|
||||
- VERY CRITICAL: DO NOT use curly braces '{}' or brackets '[]' inside labels unless they are wrapped in double quotes (e.g. A["Text {with braces}"]).
|
||||
- VERY CRITICAL: DO NOT use any HTML tags (no <br>, no <br/>, no <b>, etc).
|
||||
- VERY CRITICAL: DO NOT use special characters like '&', '<', '>', or double-quotes inside the label strings. They break the mermaid parser in our environment.
|
||||
- Return ONLY the raw mermaid code. No markdown blocks, no backticks.
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
import OpenAI from "openai";
|
||||
import { ResearchAgent, Fact, SocialPost } from "@mintel/journaling";
|
||||
import { ResearchAgent, type Fact, type SocialPost } from "@mintel/journaling";
|
||||
import { ThumbnailGenerator } from "@mintel/thumbnail-generator";
|
||||
import { ComponentDefinition } from "./generator";
|
||||
import * as fs from "node:fs/promises";
|
||||
import * as path from "node:path";
|
||||
|
||||
export interface OrchestratorConfig {
|
||||
apiKey: string;
|
||||
replicateApiKey?: string;
|
||||
model?: string;
|
||||
}
|
||||
|
||||
@@ -14,16 +16,20 @@ export interface OptimizationTask {
|
||||
projectContext: string;
|
||||
availableComponents?: ComponentDefinition[];
|
||||
instructions?: string;
|
||||
internalLinks?: { title: string; slug: string }[];
|
||||
customSources?: string[];
|
||||
}
|
||||
|
||||
export interface OptimizeFileOptions {
|
||||
contextDir: string;
|
||||
availableComponents?: ComponentDefinition[];
|
||||
shouldRename?: boolean;
|
||||
}
|
||||
|
||||
export class AiBlogPostOrchestrator {
|
||||
private openai: OpenAI;
|
||||
private researchAgent: ResearchAgent;
|
||||
private thumbnailGenerator?: ThumbnailGenerator;
|
||||
private model: string;
|
||||
|
||||
constructor(config: OrchestratorConfig) {
|
||||
@@ -37,6 +43,11 @@ export class AiBlogPostOrchestrator {
|
||||
},
|
||||
});
|
||||
this.researchAgent = new ResearchAgent(config.apiKey);
|
||||
if (config.replicateApiKey) {
|
||||
this.thumbnailGenerator = new ThumbnailGenerator({
|
||||
replicateApiKey: config.replicateApiKey,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -76,9 +87,15 @@ export class AiBlogPostOrchestrator {
|
||||
|
||||
const content = await fs.readFile(absPath, "utf8");
|
||||
|
||||
const fmMatch = content.match(/^---\s*\n([\s\S]*?)\n---/);
|
||||
const frontmatter = fmMatch ? fmMatch[0] : "";
|
||||
const body = fmMatch ? content.slice(frontmatter.length).trim() : content;
|
||||
// Idea 4: We no longer split frontmatter and body. We pass the whole file
|
||||
// to the LLM so it can optimize the SEO title and description.
|
||||
|
||||
// Idea 1: Build Internal Link Graph
|
||||
const blogDir = path.dirname(absPath);
|
||||
const internalLinks = await this.buildInternalLinkGraph(
|
||||
blogDir,
|
||||
path.basename(absPath),
|
||||
);
|
||||
|
||||
console.log(`📖 Loading context from: ${options.contextDir}`);
|
||||
const projectContext = await this.loadContext(options.contextDir);
|
||||
@@ -89,50 +106,243 @@ export class AiBlogPostOrchestrator {
|
||||
}
|
||||
|
||||
const optimizedContent = await this.optimizeDocument({
|
||||
content: body,
|
||||
content: content,
|
||||
projectContext,
|
||||
availableComponents: options.availableComponents,
|
||||
internalLinks: internalLinks, // pass to orchestrator
|
||||
});
|
||||
|
||||
const finalOutput = frontmatter
|
||||
? `${frontmatter}\n\n${optimizedContent}`
|
||||
: optimizedContent;
|
||||
// Idea 4b: Extract the potentially updated title to rename the file (SEO Slug)
|
||||
const newFmMatch = optimizedContent.match(/^---\s*\n([\s\S]*?)\n---/);
|
||||
let finalPath = absPath;
|
||||
let finalSlug = path.basename(absPath, ".mdx");
|
||||
|
||||
await fs.writeFile(`${absPath}.bak`, content); // Keep simple backup
|
||||
await fs.writeFile(absPath, finalOutput);
|
||||
console.log(`✅ Saved optimized file to: ${absPath}`);
|
||||
if (options.shouldRename && newFmMatch && newFmMatch[1]) {
|
||||
const titleMatch = newFmMatch[1].match(/title:\s*["']([^"']+)["']/);
|
||||
if (titleMatch && titleMatch[1]) {
|
||||
const newTitle = titleMatch[1];
|
||||
// Generate SEO Slug
|
||||
finalSlug = newTitle
|
||||
.toLowerCase()
|
||||
.replace(/ä/g, "ae")
|
||||
.replace(/ö/g, "oe")
|
||||
.replace(/ü/g, "ue")
|
||||
.replace(/ß/g, "ss")
|
||||
.replace(/[^a-z0-9]+/g, "-")
|
||||
.replace(/^-+|-+$/g, "");
|
||||
|
||||
const newAbsPath = path.join(path.dirname(absPath), `${finalSlug}.mdx`);
|
||||
if (newAbsPath !== absPath) {
|
||||
console.log(
|
||||
`🔄 SEO Title changed! Renaming file to: ${finalSlug}.mdx`,
|
||||
);
|
||||
// Delete old file if the title changed significantly
|
||||
try {
|
||||
await fs.unlink(absPath);
|
||||
} catch (_err) {
|
||||
// ignore
|
||||
}
|
||||
finalPath = newAbsPath;
|
||||
}
|
||||
}
|
||||
} else if (newFmMatch && newFmMatch[1]) {
|
||||
console.log(
|
||||
`ℹ️ Rename skipped (permalink stability active). If you want to rename, use --rename.`,
|
||||
);
|
||||
}
|
||||
|
||||
// Idea 5: Automatic Thumbnails
|
||||
let finalContent = optimizedContent;
|
||||
|
||||
// Skip if thumbnail already exists in frontmatter
|
||||
const hasExistingThumbnail = /thumbnail:\s*["'][^"']+["']/.test(
|
||||
finalContent,
|
||||
);
|
||||
|
||||
if (this.thumbnailGenerator && !hasExistingThumbnail) {
|
||||
console.log("🎨 Phase 5: Generating/Linking visual thumbnail...");
|
||||
try {
|
||||
const webPublicDir = path.resolve(process.cwd(), "apps/web/public");
|
||||
const thumbnailRelPath = `/blog/${finalSlug}.png`;
|
||||
const thumbnailAbsPath = path.join(
|
||||
webPublicDir,
|
||||
"blog",
|
||||
`${finalSlug}.png`,
|
||||
);
|
||||
|
||||
// Check if the physical file already exists
|
||||
let physicalFileExists = false;
|
||||
try {
|
||||
await fs.access(thumbnailAbsPath);
|
||||
physicalFileExists = true;
|
||||
} catch (_err) {
|
||||
// File does not exist
|
||||
}
|
||||
|
||||
if (physicalFileExists) {
|
||||
console.log(
|
||||
`⏭️ Thumbnail already exists on disk, skipping generation: ${thumbnailAbsPath}`,
|
||||
);
|
||||
} else {
|
||||
const visualPrompt = await this.generateVisualPrompt(finalContent);
|
||||
await this.thumbnailGenerator.generateImage(
|
||||
visualPrompt,
|
||||
thumbnailAbsPath,
|
||||
);
|
||||
}
|
||||
|
||||
// Update frontmatter with thumbnail
|
||||
if (finalContent.includes("thumbnail:")) {
|
||||
finalContent = finalContent.replace(
|
||||
/thumbnail:\s*["'].*?["']/,
|
||||
`thumbnail: "${thumbnailRelPath}"`,
|
||||
);
|
||||
} else {
|
||||
finalContent = finalContent.replace(
|
||||
/(title:\s*["'].*?["'])/,
|
||||
`$1\nthumbnail: "${thumbnailRelPath}"`,
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn("⚠️ Thumbnail processing failed, skipping:", e);
|
||||
}
|
||||
}
|
||||
|
||||
await fs.writeFile(finalPath, finalContent);
|
||||
console.log(`✅ Saved optimized file to: ${finalPath}`);
|
||||
}
|
||||
|
||||
async generateSlug(content: string, title?: string, instructions?: string): Promise<string> {
|
||||
const response = await this.openai.chat.completions.create({
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: `You generate SEO-optimized URL slugs for B2B blog posts based on the provided content.
|
||||
Return ONLY a JSON object with a single string field "slug".
|
||||
Example: {"slug": "how-to-optimize-react-performance"}
|
||||
Rules: Use lowercase letters, numbers, and hyphens only. No special characters. Keep it concise (2-5 words).`,
|
||||
},
|
||||
{ role: "user", content: `Title: ${title || "Unknown"}\n\nContent:\n${content.slice(0, 3000)}...${instructions ? `\n\nEDITOR INSTRUCTIONS:\nPlease strictly follow these instructions from the editor when generating the slug:\n${instructions}` : ""}` },
|
||||
],
|
||||
response_format: { type: "json_object" },
|
||||
});
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(response.choices[0].message.content || '{"slug": ""}');
|
||||
let slug = parsed.slug || "new-post";
|
||||
return slug.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "");
|
||||
} catch {
|
||||
return "new-post";
|
||||
}
|
||||
}
|
||||
|
||||
public async generateVisualPrompt(content: string, instructions?: string): Promise<string> {
|
||||
const response = await this.openai.chat.completions.create({
|
||||
model: this.model,
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: `You are a Visual Discovery Agent for an architectural design system.
|
||||
Review the provided blog post and create a 1-sentence abstract visual description for an image generator (like Flux).
|
||||
|
||||
THEME: Technical blueprint / structural illustration.
|
||||
STYLE: Clean lines, geometric shapes, monochrome base with one highlighter accent color (green, pink, or yellow).
|
||||
NO TEXT. NO PEOPLE. NO REALISTIC PHOTOS.
|
||||
FOCUS: The core metaphor or technical concept of the article.
|
||||
|
||||
Example output: "A complex network of glowing fiber optic nodes forming a recursive pyramid structure, technical blue lineart style."`,
|
||||
},
|
||||
{ role: "user", content: `${content.slice(0, 5000)}${instructions ? `\n\nEDITOR INSTRUCTIONS:\nPlease strictly follow these instructions from the editor when generating the visual prompt:\n${instructions}` : ""}` },
|
||||
],
|
||||
max_tokens: 100,
|
||||
});
|
||||
return (
|
||||
response.choices[0].message.content ||
|
||||
"Technical architectural blueprint of a digital system"
|
||||
);
|
||||
}
|
||||
|
||||
private async buildInternalLinkGraph(
|
||||
blogDir: string,
|
||||
currentFile: string,
|
||||
): Promise<{ title: string; slug: string }[]> {
|
||||
try {
|
||||
const files = await fs.readdir(blogDir);
|
||||
const mdxFiles = files.filter(
|
||||
(f) => f.endsWith(".mdx") && f !== currentFile,
|
||||
);
|
||||
const graph: { title: string; slug: string }[] = [];
|
||||
|
||||
for (const file of mdxFiles) {
|
||||
const fileContent = await fs.readFile(path.join(blogDir, file), "utf8");
|
||||
const titleMatch = fileContent.match(/title:\s*["']([^"']+)["']/);
|
||||
if (titleMatch && titleMatch[1]) {
|
||||
graph.push({
|
||||
title: titleMatch[1],
|
||||
slug: `/blog/${file.replace(".mdx", "")}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
return graph;
|
||||
} catch (e) {
|
||||
console.warn("Could not build internal link graph", e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the 3-step optimization pipeline:
|
||||
* 1. Fakten recherchieren
|
||||
* 2. Social Posts recherchieren
|
||||
* 2. Bestehende Social Posts extrahieren (kein LLM — nur Regex)
|
||||
* 3. AI anweisen daraus Artikel zu erstellen
|
||||
*/
|
||||
async optimizeDocument(task: OptimizationTask): Promise<string> {
|
||||
console.log(`🚀 Starting AI Orchestration Pipeline (${this.model})...`);
|
||||
|
||||
// 1. Fakten recherchieren
|
||||
console.log("1️⃣ Recherchiere Fakten...");
|
||||
// 1. Fakten & Konkurrenz recherchieren
|
||||
console.log("1️⃣ Recherchiere Fakten und analysiere Konkurrenz...");
|
||||
const researchTopics = await this.identifyTopics(task.content);
|
||||
const facts: Fact[] = [];
|
||||
for (const topic of researchTopics) {
|
||||
const topicFacts = await this.researchAgent.researchTopic(topic);
|
||||
facts.push(...topicFacts);
|
||||
}
|
||||
const competitorInsights: string[] = [];
|
||||
|
||||
// 2. Social Posts recherchieren
|
||||
console.log(
|
||||
"2️⃣ Recherchiere Social Media Posts (YouTube, Twitter, LinkedIn)...",
|
||||
);
|
||||
// Use the first 2000 chars to find relevant social posts
|
||||
const socialPosts = await this.researchAgent.findSocialPosts(
|
||||
task.content.substring(0, 2000),
|
||||
// Paralellize competitor research and fact research
|
||||
await Promise.all(
|
||||
researchTopics.map(async (topic) => {
|
||||
const [topicFacts, insights] = await Promise.all([
|
||||
this.researchAgent.researchTopic(topic),
|
||||
this.researchAgent.researchCompetitors(topic),
|
||||
]);
|
||||
facts.push(...topicFacts);
|
||||
competitorInsights.push(...insights);
|
||||
}),
|
||||
);
|
||||
|
||||
// 2. Bestehende Social Posts aus dem Content extrahieren (deterministisch, kein LLM)
|
||||
console.log("2️⃣ Extrahiere bestehende Social Media Embeds aus Content...");
|
||||
const socialPosts = this.researchAgent.extractSocialPosts(task.content);
|
||||
|
||||
// Wenn keine vorhanden sind, besorge echte von der Serper API
|
||||
if (socialPosts.length === 0) {
|
||||
console.log(
|
||||
" → Keine bestehenden Posts gefunden. Suche neue über Serper API...",
|
||||
);
|
||||
const realPosts = await this.researchAgent.fetchRealSocialPosts(
|
||||
task.content.slice(0, 500),
|
||||
task.customSources
|
||||
);
|
||||
socialPosts.push(...realPosts);
|
||||
}
|
||||
|
||||
// 3. AI anweisen daraus Artikel zu erstellen
|
||||
console.log("3️⃣ Erstelle optimierten Artikel (Agentic Rewrite)...");
|
||||
return await this.compileArticle(task, facts, socialPosts);
|
||||
return await this.compileArticle(
|
||||
task,
|
||||
facts,
|
||||
competitorInsights,
|
||||
socialPosts,
|
||||
task.internalLinks || [],
|
||||
);
|
||||
}
|
||||
|
||||
private async identifyTopics(content: string): Promise<string[]> {
|
||||
@@ -170,24 +380,82 @@ Return ONLY the JSON.`,
|
||||
private async compileArticle(
|
||||
task: OptimizationTask,
|
||||
facts: Fact[],
|
||||
competitorInsights: string[],
|
||||
socialPosts: SocialPost[],
|
||||
internalLinks: { title: string; slug: string }[],
|
||||
retryCount = 0,
|
||||
): Promise<string> {
|
||||
const factsText = facts
|
||||
.map((f, i) => `${i + 1}. ${f.statement} [Source: ${f.source}]`)
|
||||
.join("\n");
|
||||
|
||||
const socialText = socialPosts
|
||||
.map(
|
||||
(p, i) =>
|
||||
`Platform: ${p.platform}, ID: ${p.embedId} (${p.description})`,
|
||||
)
|
||||
.join("\n");
|
||||
let socialText = `CRITICAL RULE: NO VERIFIED SOCIAL MEDIA POSTS FOUND. You MUST NOT use <YouTubeEmbed />, <TwitterEmbed />, or <LinkedInEmbed /> under ANY circumstances in this article. DO NOT hallucinate IDs.`;
|
||||
|
||||
if (socialPosts.length > 0) {
|
||||
const allowedTags: string[] = [];
|
||||
if (socialPosts.some((p) => p.platform === "youtube"))
|
||||
allowedTags.push('<YouTubeEmbed videoId="..." />');
|
||||
if (socialPosts.some((p) => p.platform === "twitter"))
|
||||
allowedTags.push('<TwitterEmbed tweetId="..." />');
|
||||
if (socialPosts.some((p) => p.platform === "linkedin"))
|
||||
allowedTags.push('<LinkedInEmbed url="..." />');
|
||||
|
||||
socialText = `Social Media Posts to embed (use ONLY these tags, do not use others: ${allowedTags.join(", ")}):\n${socialPosts.map((p) => `Platform: ${p.platform}, ID: ${p.embedId} (${p.description})`).join("\n")}\nCRITICAL: Do not invent any IDs that are not explicitly listed in the list above.`;
|
||||
}
|
||||
|
||||
const componentsText = (task.availableComponents || [])
|
||||
.map((c) => `<${c.name}>: ${c.description}\n Example: ${c.usageExample}`)
|
||||
.filter((c) => {
|
||||
if (
|
||||
c.name === "YouTubeEmbed" &&
|
||||
!socialPosts.some((p) => p.platform === "youtube")
|
||||
)
|
||||
return false;
|
||||
if (
|
||||
c.name === "TwitterEmbed" &&
|
||||
!socialPosts.some((p) => p.platform === "twitter")
|
||||
)
|
||||
return false;
|
||||
if (
|
||||
c.name === "LinkedInEmbed" &&
|
||||
!socialPosts.some((p) => p.platform === "linkedin")
|
||||
)
|
||||
return false;
|
||||
return true;
|
||||
})
|
||||
.map((c) => {
|
||||
// Ensure LinkedInEmbed usage example consistently uses 'url'
|
||||
if (c.name === "LinkedInEmbed") {
|
||||
return `<${c.name}>: ${c.description}\n Example: <LinkedInEmbed url="https://www.linkedin.com/posts/..." />`;
|
||||
}
|
||||
return `<${c.name}>: ${c.description}\n Example: ${c.usageExample}`;
|
||||
})
|
||||
.join("\n\n");
|
||||
|
||||
const memeTemplates = [
|
||||
"db", // Distracted Boyfriend
|
||||
"gb", // Galaxy Brain
|
||||
"fine", // This is Fine
|
||||
"ds", // Daily Struggle
|
||||
"gru", // Gru's Plan
|
||||
"cmm", // Change My Mind
|
||||
"astronaut", // Always Has Been (ahb)
|
||||
"disastergirl",
|
||||
"pigeon", // Is this a pigeon?
|
||||
"rollsafe",
|
||||
"slap", // Will Smith
|
||||
"exit", // Left Exit 12
|
||||
"mordor",
|
||||
"panik-kalm-panik",
|
||||
"woman-cat", // Woman yelling at cat
|
||||
"grumpycat",
|
||||
"sadfrog",
|
||||
"stonks",
|
||||
"same", // They're the same picture
|
||||
"spongebob",
|
||||
];
|
||||
const forcedMeme =
|
||||
memeTemplates[Math.floor(Math.random() * memeTemplates.length)];
|
||||
|
||||
const response = await this.openai.chat.completions.create({
|
||||
model: this.model,
|
||||
messages: [
|
||||
@@ -202,30 +470,49 @@ CONTEXT & RULES:
|
||||
Project Context / Tone:
|
||||
${task.projectContext}
|
||||
|
||||
Facts to weave in:
|
||||
${factsText || "None"}
|
||||
FACTS TO INTEGRATE:
|
||||
${factsText || "No new facts needed."}
|
||||
|
||||
Social Media Posts to embed (use <YouTubeEmbed videoId="..." />, <TwitterEmbed tweetId="..." />, or <LinkedInEmbed url="..." />):
|
||||
${socialText || "None"}
|
||||
COMPETITOR BENCHMARK (TOP RANKING ARTICLES):
|
||||
Here are snippets from the top 5 ranking Google articles for this topic. Read them carefully and ensure our article covers these topics but is fundamentally BETTER, deeper, and more authoritative:
|
||||
${competitorInsights.length > 0 ? competitorInsights.join("\n") : "No competitor insights found."}
|
||||
|
||||
Available MDX Components you can use contextually:
|
||||
${componentsText || "None"}
|
||||
AVAILABLE UI COMPONENTS:
|
||||
${componentsText}
|
||||
|
||||
SOCIAL MEDIA POSTS:
|
||||
${socialText}
|
||||
|
||||
INTERNAL LINKING GRAPH:
|
||||
Hier sind unsere existierenden Blog-Posts (Titel und URL-Slug). Finde 2-3 passende Stellen im Text, um organisch mit regulärem Markdown (\`[passender Text]([slug])\`) auf diese Posts zu verlinken. Nutze KEIN <ExternalLink> für B2B-interne Links.
|
||||
${internalLinks.length > 0 ? internalLinks.map((l) => `- "${l.title}" -> ${l.slug}`).join("\n") : "Keine internen Links verfügbar."}
|
||||
|
||||
Special Instructions from User:
|
||||
${task.instructions || "None"}
|
||||
|
||||
BLOG POST BEST PRACTICES (MANDATORY):
|
||||
- DEVIL'S ADVOCATE: Füge zwingend eine kurze kritische Sektion ein (z.B. mit \`<ComparisonRow>\` oder \`<IconList>\`), in der du offen die Nachteile/Kosten/Haken deiner eigenen Lösung ansprichst ("Der Haken an der Sache..."). Das baut Vertrauen bei B2B Entscheidenden auf.
|
||||
- FAQ GENERATOR: Am absoluten Ende des Artikels erstellst du zwingend eine Markdown-Liste mit den 3 wichtigsten Fragen (FAQ) und Antworten (jeweils 2 Sätze) für Google Rich Snippets. Nutze dazu das FAQSection Component oder normales Markdown.
|
||||
- SUBTLE CTAs: Webe 1-2 subtile CTAs für High-End Website Entwicklung ein. Nutze ZWINGEND die Komponente [LeadMagnet] für diese Zwecke anstelle von einfachen Buttons. [LeadMagnet] bietet mehr Kontext und Vertrauen. Beispiel: <LeadMagnet title="Performance-Check anfragen" description="Wir analysieren Ihre Core Web Vitals und decken Umsatzpotenziale auf." buttonText="Jetzt analysieren lassen" href="/contact" variant="performance" />. Die Texte im LeadMagnet müssen absolut überzeugend, hochprofessionell und B2B-fokussiert sein (KEIN Robotik-Marketing-Sprech).
|
||||
- MEME DIVERSITY: Du MUSST ZWINGEND für jedes Meme (sofern passend) abwechslungsreiche Templates nutzen. Um dies zu garantieren, wurde für diesen Artikel das folgende Template ausgewählt: '${forcedMeme}'. Du MUSST EXAKT DIESES TEMPLATE NUTZEN. Versuche nicht, es durch ein Standard-Template wie 'drake' zu ersetzen!
|
||||
- Zitat-Varianten: Wenn du Organisationen oder Studien zitierst, nutze ArticleQuote (mit isCompany=true für Firmen). Für Personen lass isCompany weg.
|
||||
- Füge zwingend ein prägnantes 'TL;DR' ganz am Anfang ein.
|
||||
- Füge ein sauberes '<TableOfContents />' ein.
|
||||
- Verwende unsere Komponenten stilvoll für Visualisierungen.
|
||||
- Agiere als hochprofessioneller Digital Architect und entferne alte MDX-Metadaten im Body.
|
||||
- Fazit: Schließe JEDEN Artikel ZWINGEND mit einem starken, klaren 'Fazit' ab (z.B. als <H2>Fazit: ...</H2> gefolgt von deinen Empfehlungen).
|
||||
- Fazit: Schließe JEDEN Artikel ZWINGEND mit einem starken, klaren 'Fazit' ab.
|
||||
- ORIGINAL LANGUAGE QUOTES: Übersetze NIEMALS Zitate (z.B. in ArticleQuote). Behalte das Original (z.B. Englisch), wenn du Studien von Deloitte, McKinsey oder Aussagen von CEOs zitierst. Das erhöht die Authentizität im B2B-Mittelstand.
|
||||
- CONTENT PRUNING: Wenn das dir übergebene MDX bereits interaktive Komponenten (z.B. \`<YouTubeEmbed>\`) enthält, die **nicht** oder **nicht mehr** zum inhaltlichen Fokus passen (z.B. irrelevante Videos oder platzhalter-ähnliche Snippets), MUSST du diese radikal **entfernen**. Behalte keine halluzinierten oder unpassenden Medien, nur weil sie schon da waren.
|
||||
|
||||
STRICT MDX OUTPUT RULES:
|
||||
1. ONLY use the exact components defined above.
|
||||
2. For Social Media Embeds, you MUST ONLY use the EXACT IDs provided in the list above. Do NOT invent IDs.
|
||||
3. If ANY verified social media posts are provided, you MUST integrate at least one naturally with a contextual sentence.
|
||||
4. Keep the original content blocks and headings as much as possible, just improve flow.
|
||||
5. FRONTMATTER SEO (Idea 4): Ich übergebe dir die KOMPLETTE Datei inklusive Markdown-Frontmatter (--- ... ---). Du MUSST das Frontmatter ebenfalls zurückgeben! Optimiere darin den \`title\` und die \`description\` maximal für B2B SEO. Lasse die anderen Keys im Frontmatter (date, tags) unangetastet.
|
||||
|
||||
CRITICAL GUIDELINES (NEVER BREAK THESE):
|
||||
1. ONLY return the content for the BODY of the MDX file.
|
||||
2. DO NOT INCLUDE FRONTMATTER (blocks starting and ending with ---). I ALREADY HAVE THE FRONTMATTER.
|
||||
3. DO NOT REPEAT METADATA IN THE BODY. Do not output lines like "title: ...", "description: ...", "date: ..." inside the text.
|
||||
4. DO NOT INCLUDE MARKDOWN WRAPPERS (do not wrap in \`\`\`mdx ... \`\`\`).
|
||||
1. THE OUTPUT MUST START WITH YAML FRONTMATTER AND END WITH THE MDX BODY.
|
||||
2. DO NOT INCLUDE MARKDOWN WRAPPERS (do not wrap in \`\`\`mdx ... \`\`\`).
|
||||
5. Be clean. Do NOT clump all components together. Provide 3-4 paragraphs of normal text between visual items.
|
||||
6. If you insert components, ensure their syntax is 100% valid JSX/MDX.
|
||||
7. CRITICAL MERMAID RULE: If you use <Mermaid>, the inner content MUST be 100% valid Mermaid.js syntax. NO HTML inside labels. NO quotes inside brackets without valid syntax.
|
||||
@@ -239,37 +526,55 @@ CRITICAL GUIDELINES (NEVER BREAK THESE):
|
||||
});
|
||||
|
||||
let rawContent = response.choices[0].message.content || task.content;
|
||||
rawContent = this.cleanResponse(rawContent);
|
||||
rawContent = this.cleanResponse(rawContent, socialPosts);
|
||||
|
||||
// Validation Layer: Check Mermaid syntax
|
||||
if (retryCount < 2 && rawContent.includes("<Mermaid>")) {
|
||||
// --- Autonomous Validation Layer ---
|
||||
let hasError = false;
|
||||
let errorFeedback = "";
|
||||
|
||||
// 1. Validate Meme Templates
|
||||
const memeRegex = /<ArticleMeme[^>]+template=["']([^"']+)["'][^>]*>/g;
|
||||
let memeMatch;
|
||||
const invalidMemes: string[] = [];
|
||||
while ((memeMatch = memeRegex.exec(rawContent)) !== null) {
|
||||
if (!memeTemplates.includes(memeMatch[1])) {
|
||||
invalidMemes.push(memeMatch[1]);
|
||||
}
|
||||
}
|
||||
if (invalidMemes.length > 0) {
|
||||
hasError = true;
|
||||
errorFeedback += `\n- You hallucinated invalid meme templates: ${invalidMemes.join(", ")}. You MUST ONLY use templates from this exact list: ${memeTemplates.join(", ")}. DO NOT INVENT TEMPLATES.\n`;
|
||||
}
|
||||
|
||||
// 2. Validate Mermaid Syntax
|
||||
if (rawContent.includes("<Mermaid>")) {
|
||||
console.log("🔍 Validating Mermaid syntax in AI response...");
|
||||
const mermaidBlocks = this.extractMermaidBlocks(rawContent);
|
||||
let hasError = false;
|
||||
let errorFeedback = "";
|
||||
|
||||
for (const block of mermaidBlocks) {
|
||||
const validationResult = await this.validateMermaidSyntax(block);
|
||||
if (!validationResult.valid) {
|
||||
hasError = true;
|
||||
errorFeedback += `\nInvalid Mermaid block:\n${block}\nError context: ${validationResult.error}\n\n`;
|
||||
errorFeedback += `\n- Invalid Mermaid block:\n${block}\nError context: ${validationResult.error}\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hasError) {
|
||||
console.log(
|
||||
`❌ Invalid Mermaid syntax detected. Retrying compilation (Attempt ${retryCount + 1}/2)...`,
|
||||
);
|
||||
return this.compileArticle(
|
||||
{
|
||||
...task,
|
||||
content: `The previous attempt failed because you generated invalid Mermaid.js syntax. Please rewrite the MDX and FIX the following Mermaid errors. \n\nErrors:\n${errorFeedback}\n\nOriginal Draft:\n${task.content}`,
|
||||
},
|
||||
facts,
|
||||
socialPosts,
|
||||
retryCount + 1,
|
||||
);
|
||||
}
|
||||
if (hasError && retryCount < 3) {
|
||||
console.log(
|
||||
`❌ Validation errors detected. Retrying compilation (Attempt ${retryCount + 1}/3)...`,
|
||||
);
|
||||
return this.compileArticle(
|
||||
{
|
||||
...task,
|
||||
content: `CRITICAL ERROR IN PREVIOUS ATTEMPT:\nYour generated MDX contained the following errors that MUST be fixed:\n${errorFeedback}\n\nPlease rewrite the MDX and FIX these errors. Pay strict attention to the rules.\n\nOriginal Draft:\n${task.content}`,
|
||||
},
|
||||
facts,
|
||||
competitorInsights,
|
||||
socialPosts,
|
||||
internalLinks,
|
||||
retryCount + 1,
|
||||
);
|
||||
}
|
||||
|
||||
return rawContent;
|
||||
@@ -320,11 +625,7 @@ CRITICAL GUIDELINES (NEVER BREAK THESE):
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Post-processing to ensure the AI didn't include "help" text,
|
||||
* duplicate frontmatter, or markdown wrappers.
|
||||
*/
|
||||
private cleanResponse(content: string): string {
|
||||
private cleanResponse(content: string, socialPosts: SocialPost[]): string {
|
||||
let cleaned = content.trim();
|
||||
|
||||
// 1. Strip Markdown Wrappers (e.g. ```mdx ... ```)
|
||||
@@ -334,16 +635,52 @@ CRITICAL GUIDELINES (NEVER BREAK THESE):
|
||||
.replace(/\n?```\s*$/, "");
|
||||
}
|
||||
|
||||
// 2. Strip redundant frontmatter (the AI sometimes helpfully repeats it)
|
||||
// Look for the --- delimiters and remove the block if it exists
|
||||
const fmRegex = /^---\s*\n([\s\S]*?)\n---\s*\n?/;
|
||||
const match = cleaned.match(fmRegex);
|
||||
if (match) {
|
||||
console.log(
|
||||
"♻️ Stripping redundant frontmatter detected in AI response...",
|
||||
);
|
||||
cleaned = cleaned.replace(fmRegex, "").trim();
|
||||
}
|
||||
// 2. We NO LONGER strip redundant frontmatter, because we requested the LLM to output it.
|
||||
// Ensure the output actually has frontmatter, if not, something went wrong, but we just pass it along.
|
||||
|
||||
// 3. Strip any social embeds the AI hallucinated (IDs not in our extracted set)
|
||||
const knownYtIds = new Set(
|
||||
socialPosts.filter((p) => p.platform === "youtube").map((p) => p.embedId),
|
||||
);
|
||||
const knownTwIds = new Set(
|
||||
socialPosts.filter((p) => p.platform === "twitter").map((p) => p.embedId),
|
||||
);
|
||||
const knownLiIds = new Set(
|
||||
socialPosts
|
||||
.filter((p) => p.platform === "linkedin")
|
||||
.map((p) => p.embedId),
|
||||
);
|
||||
|
||||
cleaned = cleaned.replace(
|
||||
/<YouTubeEmbed[^>]*videoId="([^"]+)"[^>]*\/>/gi,
|
||||
(tag, id) => {
|
||||
if (knownYtIds.has(id)) return tag;
|
||||
console.log(
|
||||
`🛑 Stripped hallucinated YouTubeEmbed with videoId="${id}"`,
|
||||
);
|
||||
return "";
|
||||
},
|
||||
);
|
||||
|
||||
cleaned = cleaned.replace(
|
||||
/<TwitterEmbed[^>]*tweetId="([^"]+)"[^>]*\/>/gi,
|
||||
(tag, id) => {
|
||||
if (knownTwIds.has(id)) return tag;
|
||||
console.log(
|
||||
`🛑 Stripped hallucinated TwitterEmbed with tweetId="${id}"`,
|
||||
);
|
||||
return "";
|
||||
},
|
||||
);
|
||||
|
||||
cleaned = cleaned.replace(
|
||||
/<LinkedInEmbed[^>]*(?:url|urn)="([^"]+)"[^>]*\/>/gi,
|
||||
(tag, id) => {
|
||||
if (knownLiIds.has(id)) return tag;
|
||||
console.log(`🛑 Stripped hallucinated LinkedInEmbed with id="${id}"`);
|
||||
return "";
|
||||
},
|
||||
);
|
||||
|
||||
return cleaned;
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "customer-manager",
|
||||
"description": "Custom High-Fidelity Management for Directus",
|
||||
"icon": "extension",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"keywords": [
|
||||
"directus",
|
||||
@@ -27,4 +27,4 @@
|
||||
"@directus/extensions-sdk": "11.0.2",
|
||||
"vue": "^3.4.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/directus-extension-toolkit",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"description": "Shared toolkit for Directus extensions in the Mintel ecosystem",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/eslint-config",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
46
packages/estimation-engine/package.json
Normal file
46
packages/estimation-engine/package.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"name": "@mintel/estimation-engine",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"bin": {
|
||||
"estimate": "./dist/cli.js"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup src/index.ts src/cli.ts --format esm --dts --clean",
|
||||
"dev": "tsup src/index.ts src/cli.ts --format esm --watch --dts",
|
||||
"lint": "eslint src",
|
||||
"estimate": "tsx src/cli.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@mintel/concept-engine": "workspace:*",
|
||||
"axios": "^1.6.0",
|
||||
"chalk": "^5.3.0",
|
||||
"commander": "^12.0.0",
|
||||
"dotenv": "^17.3.1",
|
||||
"ink": "^5.1.0",
|
||||
"ink-spinner": "^5.0.0",
|
||||
"ink-select-input": "^6.0.0",
|
||||
"ink-text-input": "^6.0.0",
|
||||
"react": "^18.2.0",
|
||||
"openai": "^4.82.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mintel/eslint-config": "workspace:*",
|
||||
"@mintel/tsconfig": "workspace:*",
|
||||
"@types/node": "^20.0.0",
|
||||
"@types/react": "^18.2.0",
|
||||
"tsup": "^8.3.5",
|
||||
"tsx": "^4.7.0",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "feedback-commander",
|
||||
"description": "Custom High-Fidelity Management for Directus",
|
||||
"icon": "extension",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"keywords": [
|
||||
"directus",
|
||||
@@ -24,4 +24,4 @@
|
||||
"@directus/extensions-sdk": "11.0.2",
|
||||
"vue": "^3.4.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/gatekeeper",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/husky-config",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
404: Not Found
|
||||
@@ -0,0 +1,30 @@
|
||||
[
|
||||
{
|
||||
"weights":
|
||||
[
|
||||
{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},
|
||||
{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},
|
||||
{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},
|
||||
{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},
|
||||
{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},
|
||||
{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},
|
||||
{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},
|
||||
{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},
|
||||
{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},
|
||||
{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},
|
||||
{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},
|
||||
{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},
|
||||
{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},
|
||||
{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},
|
||||
{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},
|
||||
{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},
|
||||
{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},
|
||||
{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},
|
||||
{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}
|
||||
],
|
||||
"paths":
|
||||
[
|
||||
"tiny_face_detector_model.bin"
|
||||
]
|
||||
}
|
||||
]
|
||||
33
packages/image-processor/package.json
Normal file
33
packages/image-processor/package.json
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"name": "@mintel/image-processor",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup",
|
||||
"dev": "tsup --watch",
|
||||
"lint": "eslint src"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tensorflow/tfjs": "^4.22.0",
|
||||
"@vladmandic/face-api": "^1.7.15",
|
||||
"canvas": "^3.2.1",
|
||||
"sharp": "^0.33.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mintel/eslint-config": "workspace:*",
|
||||
"@mintel/tsconfig": "workspace:*",
|
||||
"@types/node": "^20.0.0",
|
||||
"tsup": "^8.3.5",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
1
packages/image-processor/src/index.ts
Normal file
1
packages/image-processor/src/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export * from './processor.js';
|
||||
218
packages/image-processor/src/processor.ts
Normal file
218
packages/image-processor/src/processor.ts
Normal file
@@ -0,0 +1,218 @@
|
||||
import sharp from "sharp";
|
||||
import { Canvas, Image, ImageData } from "canvas";
|
||||
// Use the ESM no-bundle build to avoid the default Node entrypoint
|
||||
// which hardcodes require('@tensorflow/tfjs-node') and crashes in Docker.
|
||||
// This build uses pure @tensorflow/tfjs (JS-only, no native C++ bindings).
|
||||
// @ts-ignore - direct path import has no type declarations
|
||||
import * as faceapi from "@vladmandic/face-api/dist/face-api.esm-nobundle.js";
|
||||
import * as tf from "@tensorflow/tfjs";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
// Polyfill required by face-api for Node.js
|
||||
faceapi.env.monkeyPatch({ Canvas, Image, ImageData } as any);
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const MODEL_URL = path.join(__dirname, "../models");
|
||||
|
||||
// State flag to ensure we only load weights once
|
||||
let modelsLoaded = false;
|
||||
|
||||
async function loadModelsOnce() {
|
||||
if (modelsLoaded) return;
|
||||
// Initialize pure JS CPU backend (no native bindings needed)
|
||||
await tf.setBackend("cpu");
|
||||
await tf.ready();
|
||||
|
||||
// Load the microscopic TinyFaceDetector (~190KB)
|
||||
await faceapi.nets.tinyFaceDetector.loadFromDisk(MODEL_URL);
|
||||
modelsLoaded = true;
|
||||
}
|
||||
|
||||
export interface ProcessImageOptions {
|
||||
width: number;
|
||||
height: number;
|
||||
format?: "webp" | "jpeg" | "png" | "avif";
|
||||
quality?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps a URL based on the IMGPROXY_URL_MAPPING environment variable.
|
||||
* Format: "match1:replace1,match2:replace2"
|
||||
*/
|
||||
export function mapUrl(url: string, mappingString?: string): string {
|
||||
if (!mappingString) return url;
|
||||
|
||||
const mappings = mappingString.split(",").map((m) => {
|
||||
if (m.includes("|")) {
|
||||
return m.split("|");
|
||||
}
|
||||
|
||||
// Legacy support for simple "host:target" or cases where one side might have a protocol
|
||||
// We try to find the split point that isn't part of a protocol "://"
|
||||
const colonIndices = [];
|
||||
for (let i = 0; i < m.length; i++) {
|
||||
if (m[i] === ":") {
|
||||
// Check if this colon is part of "://"
|
||||
if (!(m[i + 1] === "/" && m[i + 2] === "/")) {
|
||||
colonIndices.push(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (colonIndices.length === 0) return [m];
|
||||
|
||||
// In legacy mode with colons, we take the LAST non-protocol colon as the separator
|
||||
// This handles "http://host:port" or "host:http://target" better
|
||||
const lastColon = colonIndices[colonIndices.length - 1];
|
||||
return [m.substring(0, lastColon), m.substring(lastColon + 1)];
|
||||
});
|
||||
|
||||
let mappedUrl = url;
|
||||
|
||||
for (const [match, replace] of mappings) {
|
||||
if (match && replace && url.includes(match)) {
|
||||
mappedUrl = url.replace(match, replace);
|
||||
}
|
||||
}
|
||||
|
||||
return mappedUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses legacy imgproxy options string.
|
||||
* Example: rs:fill:300:400/q:80
|
||||
*/
|
||||
export function parseImgproxyOptions(
|
||||
optionsStr: string,
|
||||
): Partial<ProcessImageOptions> {
|
||||
const parts = optionsStr.split("/");
|
||||
const options: Partial<ProcessImageOptions> = {};
|
||||
|
||||
for (const part of parts) {
|
||||
if (part.startsWith("rs:")) {
|
||||
const [, , w, h] = part.split(":");
|
||||
if (w) options.width = parseInt(w, 10);
|
||||
if (h) options.height = parseInt(h, 10);
|
||||
} else if (part.startsWith("q:")) {
|
||||
const q = part.split(":")[1];
|
||||
if (q) options.quality = parseInt(q, 10);
|
||||
} else if (part.startsWith("ext:")) {
|
||||
const ext = part.split(":")[1] as any;
|
||||
if (["webp", "jpeg", "png", "avif"].includes(ext)) {
|
||||
options.format = ext;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
 * Resizes an image to `options.width` x `options.height`, choosing the crop
 * window intelligently:
 *  - If faces are detected (via face-api's tiny detector), the crop is
 *    centered on the union bounding box of all detected faces.
 *  - Otherwise it falls back to sharp's "attention" crop strategy.
 *
 * Output is encoded as `options.format` (default "webp") at
 * `options.quality` (default 80).
 *
 * @param inputBuffer - Raw image bytes readable by sharp.
 * @param options - Target dimensions, output format and quality.
 * @returns The processed image as a Buffer.
 * @throws Error when sharp cannot read the image dimensions.
 */
export async function processImageWithSmartCrop(
  inputBuffer: Buffer,
  options: ProcessImageOptions,
): Promise<Buffer> {
  const sharpImage = sharp(inputBuffer);
  const metadata = await sharpImage.metadata();

  if (!metadata.width || !metadata.height) {
    throw new Error("Could not read image metadata");
  }

  // Load ML models (noop if already loaded)
  await loadModelsOnce();

  // Convert sharp image to a Node-compatible canvas Image for face-api
  const jpegBuffer = await sharpImage.jpeg().toBuffer();
  const img = new Image();
  img.src = jpegBuffer;
  const canvas = new Canvas(img.width, img.height);
  const ctx = canvas.getContext("2d");
  ctx.drawImage(img, 0, 0, img.width, img.height);

  // Detect faces locally using the tiny model
  // Requires explicit any cast since the types expect HTML elements in browser contexts
  const detections = await faceapi.detectAllFaces(
    canvas as any,
    new faceapi.TinyFaceDetectorOptions(),
  );

  let cropPosition: "center" | "attention" | number = "attention"; // Fallback to sharp's attention if no faces

  if (detections.length > 0) {
    // We have faces! Calculate the bounding box that contains all of them
    let minX = metadata.width;
    let minY = metadata.height;
    let maxX = 0;
    let maxY = 0;

    for (const det of detections) {
      const box = det.box;
      // Grow the union box while clamping each face box to the image bounds.
      if (box.x < minX) minX = Math.max(0, box.x);
      if (box.y < minY) minY = Math.max(0, box.y);
      if (box.x + box.width > maxX)
        maxX = Math.min(metadata.width, box.x + box.width);
      if (box.y + box.height > maxY)
        maxY = Math.min(metadata.height, box.y + box.height);
    }

    // Focal point: the center of the union of all face boxes.
    const centerX = Math.floor(minX + (maxX - minX) / 2);
    const centerY = Math.floor(minY + (maxY - minY) / 2);

    const targetRatio = options.width / options.height;
    const currentRatio = metadata.width / metadata.height;

    let cropWidth = metadata.width;
    let cropHeight = metadata.height;

    // Determine the maximal crop window that maintains aspect ratio
    if (currentRatio > targetRatio) {
      cropWidth = Math.floor(metadata.height * targetRatio);
    } else {
      cropHeight = Math.floor(metadata.width / targetRatio);
    }

    // Center the crop window over the center of the faces
    let cropX = Math.floor(centerX - cropWidth / 2);
    let cropY = Math.floor(centerY - cropHeight / 2);

    // Keep crop window inside image bounds
    if (cropX < 0) cropX = 0;
    if (cropY < 0) cropY = 0;
    if (cropX + cropWidth > metadata.width) cropX = metadata.width - cropWidth;
    if (cropY + cropHeight > metadata.height)
      cropY = metadata.height - cropHeight;

    // Pre-crop the image to isolate the faces before resizing
    // NOTE(review): the extract() result is not reassigned — this relies on
    // sharp mutating the pipeline in place; confirm against the sharp version in use.
    sharpImage.extract({
      left: cropX,
      top: cropY,
      width: cropWidth,
      height: cropHeight,
    });

    // As we manually calculated the exact focal box, we can now just center it
    cropPosition = "center";
  }

  let finalImage = sharpImage.resize(options.width, options.height, {
    fit: "cover",
    position: cropPosition,
  });

  // Encode with the requested (or default) format and quality.
  const format = options.format || "webp";
  const quality = options.quality || 80;

  if (format === "webp") {
    finalImage = finalImage.webp({ quality });
  } else if (format === "jpeg") {
    finalImage = finalImage.jpeg({ quality });
  } else if (format === "png") {
    finalImage = finalImage.png({ quality });
  } else if (format === "avif") {
    finalImage = finalImage.avif({ quality });
  }

  return finalImage.toBuffer();
}
|
||||
19
packages/image-processor/tsconfig.json
Normal file
19
packages/image-processor/tsconfig.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"extends": "@mintel/tsconfig/base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src",
|
||||
"allowJs": true,
|
||||
"esModuleInterop": true,
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext"
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
"dist",
|
||||
"**/*.test.ts"
|
||||
]
|
||||
}
|
||||
19
packages/image-processor/tsup.config.ts
Normal file
19
packages/image-processor/tsup.config.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { defineConfig } from "tsup";
|
||||
|
||||
export default defineConfig({
|
||||
entry: ["src/index.ts"],
|
||||
format: ["esm"],
|
||||
dts: true,
|
||||
clean: true,
|
||||
// Bundle face-api and tensorflow inline (they're pure JS).
|
||||
// Keep sharp and canvas external (they have native C++ bindings).
|
||||
noExternal: [
|
||||
"@vladmandic/face-api",
|
||||
"@tensorflow/tfjs",
|
||||
"@tensorflow/tfjs-backend-wasm"
|
||||
],
|
||||
external: [
|
||||
"sharp",
|
||||
"canvas"
|
||||
],
|
||||
});
|
||||
@@ -1,6 +1,6 @@
|
||||
# Step 1: Builder stage
|
||||
FROM node:20-alpine AS builder
|
||||
RUN apk add --no-cache libc6-compat curl
|
||||
RUN apk add --no-cache libc6-compat curl python3 make g++ pkgconfig pixman-dev cairo-dev pango-dev
|
||||
WORKDIR /app
|
||||
RUN corepack enable pnpm
|
||||
ENV CI=true
|
||||
@@ -25,7 +25,7 @@ RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
|
||||
--mount=type=secret,id=NPM_TOKEN \
|
||||
export NPM_TOKEN=$(cat /run/secrets/NPM_TOKEN) && \
|
||||
pnpm config set store-dir /pnpm/store && \
|
||||
pnpm i --frozen-lockfile
|
||||
pnpm i --no-frozen-lockfile
|
||||
|
||||
# Copy the rest of the source
|
||||
COPY . .
|
||||
|
||||
@@ -189,7 +189,7 @@ jobs:
|
||||
with:
|
||||
context: .
|
||||
file: packages/infra/docker/Dockerfile.nextjs
|
||||
platforms: linux/arm64
|
||||
platforms: linux/amd64
|
||||
pull: true
|
||||
provenance: false
|
||||
build-args: |
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/infra",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/journaling",
|
||||
"version": "1.0.0",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import OpenAI from "openai";
|
||||
import { DataCommonsClient } from "./clients/data-commons";
|
||||
import { TrendsClient } from "./clients/trends";
|
||||
import { SerperClient } from "./clients/serper";
|
||||
|
||||
export interface Fact {
|
||||
statement: string;
|
||||
@@ -20,6 +21,7 @@ export class ResearchAgent {
|
||||
private openai: OpenAI;
|
||||
private dcClient: DataCommonsClient;
|
||||
private trendsClient: TrendsClient;
|
||||
private serperClient: SerperClient;
|
||||
|
||||
constructor(apiKey: string) {
|
||||
this.openai = new OpenAI({
|
||||
@@ -31,7 +33,8 @@ export class ResearchAgent {
|
||||
},
|
||||
});
|
||||
this.dcClient = new DataCommonsClient();
|
||||
this.trendsClient = new TrendsClient();
|
||||
this.trendsClient = new TrendsClient(apiKey);
|
||||
this.serperClient = new SerperClient(process.env.SERPER_API_KEY);
|
||||
}
|
||||
|
||||
async researchTopic(topic: string): Promise<Fact[]> {
|
||||
@@ -51,7 +54,6 @@ export class ResearchAgent {
|
||||
if (data.length > 0) {
|
||||
// Analyze trend
|
||||
const latest = data[data.length - 1];
|
||||
const max = Math.max(...data.map((d) => d.value));
|
||||
facts.push({
|
||||
statement: `Interest in "${kw}" is currently at ${latest.value}% of peak popularity.`,
|
||||
source: "Google Trends",
|
||||
@@ -107,120 +109,211 @@ Return JSON: { "facts": [ { "statement": "...", "source": "Organization Name Onl
|
||||
return result.facts || [];
|
||||
}
|
||||
|
||||
async findSocialPosts(
|
||||
/**
 * Extracts existing social media embeds from MDX content via regex.
 * No LLM involved — purely deterministic parsing.
 * Only returns posts that are already present in the article.
 *
 * Duplicate embed IDs are collapsed across all platforms.
 *
 * @param content - Raw MDX article source.
 * @returns One SocialPost per unique embed found in the content.
 */
extractSocialPosts(content: string): SocialPost[] {
  const posts: SocialPost[] = [];

  // Shared collector for all platforms: capture group 1 of `regex` is the
  // embed ID; skip IDs already collected (dedupe across platforms).
  const collect = (
    regex: RegExp,
    platform: SocialPost["platform"],
    description: string,
  ): void => {
    for (const match of content.matchAll(regex)) {
      if (!posts.some((p) => p.embedId === match[1])) {
        posts.push({ platform, embedId: match[1], description });
      }
    }
  };

  // YouTube: <YouTubeEmbed videoId="..." />
  collect(
    /<YouTubeEmbed[^>]*videoId="([^"]+)"[^>]*\/>/gi,
    "youtube",
    "Existing YouTube embed",
  );

  // Twitter/X: <TwitterEmbed tweetId="..." />
  collect(
    /<TwitterEmbed[^>]*tweetId="([^"]+)"[^>]*\/>/gi,
    "twitter",
    "Existing Twitter/X embed",
  );

  // LinkedIn: <LinkedInEmbed url="..." /> or <LinkedInEmbed urn="..." />
  collect(
    /<LinkedInEmbed[^>]*(?:url|urn)="([^"]+)"[^>]*\/>/gi,
    "linkedin",
    "Existing LinkedIn embed",
  );

  if (posts.length > 0) {
    console.log(
      `📱 Extracted ${posts.length} existing social media embed(s) from content`,
    );
  } else {
    console.log(`📱 No existing social media embeds found in content`);
  }

  return posts;
}
|
||||
|
||||
/**
|
||||
* Fetches real, verified social media posts using the Serper API (Google Video Search).
|
||||
* This completely prevents hallucinations as it relies on actual search results.
|
||||
*/
|
||||
async fetchRealSocialPosts(
|
||||
topic: string,
|
||||
retries = 2,
|
||||
previousFailures: string[] = [],
|
||||
customSources?: string[],
|
||||
retries = 1,
|
||||
): Promise<SocialPost[]> {
|
||||
console.log(
|
||||
`📱 Searching for relevant Social Media Posts: "${topic}"${retries < 2 ? ` (Retry ${2 - retries}/2)` : ""}`,
|
||||
`🌐 [Serper] Fetching real social media posts for topic: "${topic}"...`,
|
||||
);
|
||||
|
||||
const failureContext =
|
||||
previousFailures.length > 0
|
||||
? `\nCRITICAL FAILURE WARNING: The following IDs you generated previously returned 404 Not Found and were Hallucinations: ${previousFailures.join(", ")}. You MUST provide REAL, verifiable IDs. If you cannot 100% guarantee an ID exists, return an empty array instead of guessing.`
|
||||
: "";
|
||||
|
||||
const response = await this.openai.chat.completions.create({
|
||||
model: "google/gemini-2.5-pro",
|
||||
// Step 1: Ask the LLM to generate a highly specific YouTube search query
|
||||
// We want tutorials, explanations, or deep dives.
|
||||
const queryGen = await this.openai.chat.completions.create({
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: `You are a social media researcher finding high-value, real expert posts and videos to embed in a B2B Tech Blog post about: "${topic}".
|
||||
|
||||
Your Goal: Identify 1-3 REAL, highly relevant social media posts (YouTube, Twitter/X, LinkedIn) that provide social proof, expert opinions, or deep dives.${failureContext}
|
||||
content: `You generate ultra-short, highly relevant YouTube search queries based on a given text context.
|
||||
|
||||
Constraint: You MUST provide the exact mathematical or alphanumeric ID for the embed.
|
||||
- YouTube: The 11-character video ID (e.g. "dQw4w9WgXcQ")
|
||||
- Twitter: The numerical tweet ID (e.g. "1753464161943834945")
|
||||
- LinkedIn: The activity URN (e.g. "urn:li:activity:7153664326573674496" or just the numerical 19-digit ID)
|
||||
RULES:
|
||||
1. Extract only the 2-4 most important technical or business keywords from the provided text.
|
||||
2. Ignore all markdown syntax, frontmatter (---), titles, and descriptions.
|
||||
3. Keep the query generic enough to find popular educational tech videos, BUT ensure it specifically targets the core technical subject. Append "tutorial" or "b2b explanation" if necessary to find high-quality content.
|
||||
4. DO NOT append specific channel names (e.g., "Fireship", "Vercel") to the query.
|
||||
5. DO NOT USE QUOTES IN THE QUERY.
|
||||
|
||||
Return JSON exactly as follows:
|
||||
{
|
||||
"posts": [
|
||||
{ "platform": "youtube", "embedId": "dQw4w9WgXcQ", "description": "Google Web Dev explaining Core Web Vitals" }
|
||||
]
|
||||
}
|
||||
Return ONLY the JSON.`,
|
||||
Return a JSON object with a single string field "query". Example: {"query": "core web vitals performance tutorial"}`,
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: `CONTEXT: ${topic}`,
|
||||
},
|
||||
],
|
||||
response_format: { type: "json_object" },
|
||||
});
|
||||
|
||||
if (
|
||||
!response.choices ||
|
||||
response.choices.length === 0 ||
|
||||
!response.choices[0].message
|
||||
) {
|
||||
console.warn(`⚠️ Social post search failed for concept: "${topic}"`);
|
||||
try {
|
||||
let queryStr = "";
|
||||
const parsed = JSON.parse(
|
||||
queryGen.choices[0].message.content || '{"query": ""}',
|
||||
);
|
||||
queryStr = parsed.query || `${topic} tutorial explanation`;
|
||||
|
||||
// Step 2: Search via Serper Video Search
|
||||
const videos = await this.serperClient.searchVideos(queryStr);
|
||||
|
||||
if (!videos || videos.length === 0) {
|
||||
console.warn(`⚠️ [Serper] No videos found for query: "${queryStr}"`);
|
||||
if (retries > 0) return this.fetchRealSocialPosts(topic, customSources, retries - 1);
|
||||
return [];
|
||||
}
|
||||
|
||||
// Filter for youtube results
|
||||
const ytVideos = videos
|
||||
.filter(
|
||||
(v) =>
|
||||
v.link &&
|
||||
v.link.includes("youtube.com/watch") &&
|
||||
v.title &&
|
||||
v.channel,
|
||||
)
|
||||
.slice(0, 5); // Take top 5 for evaluation
|
||||
|
||||
if (ytVideos.length === 0) {
|
||||
console.warn(`⚠️ [Serper] No YouTube videos in search results.`);
|
||||
if (retries > 0) return this.fetchRealSocialPosts(topic, customSources, retries - 1);
|
||||
return [];
|
||||
}
|
||||
|
||||
// Step 3: Ask the LLM to evaluate the relevance of the found videos
|
||||
|
||||
const sourceExamples = customSources && customSources.length > 0
|
||||
? `Specifically prioritize content from: ${customSources.join(", ")}.`
|
||||
: `(e.g., Google Developers, Vercel, Theo - t3.gg, Fireship, Syntax, ByteByteGo, IBM Technology, McKinsey, Gartner, Deloitte).`;
|
||||
|
||||
const evalPrompt = `You are a strict technical evaluator. You must select the MOST RELEVANT educational tech video from the list below based on this core article context: "${topic.slice(0, 800)}..."
|
||||
|
||||
Videos:
|
||||
${ytVideos.map((v, i) => `[ID: ${i}] Title: "${v.title}" | Channel: "${v.channel}" | Snippet: "${v.snippet || "none"}"`).join("\n")}
|
||||
|
||||
RULES:
|
||||
1. The video MUST be highly relevant to the EXACT technical topic of the context.
|
||||
2. The channel SHOULD be a high-quality tech, development, or professional B2B channel ${sourceExamples} AVOID gaming, generic vlogs, clickbait, off-topic podcasts, or unrelated topics.
|
||||
3. If none of the videos are strictly relevant to the core technical or business subject (e.g. they are just casually mentioning the word), YOU MUST RETURN -1. Be extremely critical. Do not just pick the "best of the worst".
|
||||
4. If one is highly relevant, return its ID number.
|
||||
|
||||
Return ONLY a JSON object: {"bestVideoId": number}`;
|
||||
|
||||
const evalResponse = await this.openai.chat.completions.create({
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [{ role: "system", content: evalPrompt }],
|
||||
response_format: { type: "json_object" },
|
||||
});
|
||||
|
||||
let bestIdx = -1;
|
||||
try {
|
||||
const evalParsed = JSON.parse(
|
||||
evalResponse.choices[0].message.content || '{"bestVideoId": -1}',
|
||||
);
|
||||
bestIdx = evalParsed.bestVideoId;
|
||||
} catch {
|
||||
console.warn("Failed to parse video evaluation response");
|
||||
}
|
||||
|
||||
if (bestIdx < 0 || bestIdx >= ytVideos.length) {
|
||||
console.warn(`⚠️ [Serper] LLM rejected all videos as irrelevant.`);
|
||||
if (retries > 0) return this.fetchRealSocialPosts(topic, customSources, retries - 1);
|
||||
return [];
|
||||
}
|
||||
|
||||
const bestVideo = ytVideos[bestIdx];
|
||||
console.log(
|
||||
`✅ [Serper] AI selected video: ${bestVideo.title} (Channel: ${bestVideo.channel})`,
|
||||
);
|
||||
|
||||
// Extract the 11-char video ID from the link (e.g., https://www.youtube.com/watch?v=dQw4w9WgXcQ)
|
||||
const urlObj = new URL(bestVideo.link);
|
||||
const videoId = urlObj.searchParams.get("v");
|
||||
|
||||
if (!videoId) {
|
||||
console.warn(
|
||||
`⚠️ [Serper] Could not extract video ID from: ${bestVideo.link}`,
|
||||
);
|
||||
return [];
|
||||
}
|
||||
|
||||
console.log(
|
||||
`✅ [Serper] Found valid YouTube Video: ${videoId} ("${bestVideo.title}")`,
|
||||
);
|
||||
|
||||
return [
|
||||
{
|
||||
platform: "youtube",
|
||||
embedId: videoId,
|
||||
description: bestVideo.title || "YouTube Video",
|
||||
},
|
||||
];
|
||||
} catch (e) {
|
||||
console.error("❌ Failed to fetch real social posts:", e);
|
||||
return [];
|
||||
}
|
||||
|
||||
const result = JSON.parse(response.choices[0].message.content || "{}");
|
||||
const rawPosts: SocialPost[] = result.posts || [];
|
||||
|
||||
// CRITICAL WORKFLOW FIX: Absolutely forbid hallucinations by verifying via oEmbed APIs
|
||||
const verifiedPosts: SocialPost[] = [];
|
||||
if (rawPosts.length > 0) {
|
||||
console.log(
|
||||
`🛡️ Verifying ${rawPosts.length} generated social ID(s) against network...`,
|
||||
);
|
||||
}
|
||||
|
||||
const failedIdsForThisRun: string[] = [];
|
||||
|
||||
for (const post of rawPosts) {
|
||||
let isValid = false;
|
||||
try {
|
||||
if (post.platform === "youtube") {
|
||||
const res = await fetch(
|
||||
`https://www.youtube.com/oembed?url=https://www.youtube.com/watch?v=${post.embedId}`,
|
||||
);
|
||||
isValid = res.ok;
|
||||
} else if (post.platform === "twitter") {
|
||||
const res = await fetch(
|
||||
`https://publish.twitter.com/oembed?url=https://twitter.com/x/status/${post.embedId}`,
|
||||
);
|
||||
isValid = res.ok;
|
||||
} else if (post.platform === "linkedin") {
|
||||
// LinkedIn doesn't have an unauthenticated oEmbed, so we use heuristic URL/URN format validation
|
||||
if (
|
||||
post.embedId.includes("urn:li:") ||
|
||||
post.embedId.includes("linkedin.com") ||
|
||||
/^\d{19}$/.test(post.embedId)
|
||||
) {
|
||||
isValid = true;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
isValid = false;
|
||||
}
|
||||
|
||||
if (isValid) {
|
||||
verifiedPosts.push(post);
|
||||
console.log(
|
||||
`✅ Verified real post ID: ${post.embedId} (${post.platform})`,
|
||||
);
|
||||
} else {
|
||||
failedIdsForThisRun.push(post.embedId);
|
||||
console.warn(
|
||||
`🛑 Dropped hallucinated or dead post ID: ${post.embedId} (${post.platform})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// AGENT SELF-HEALING: If all found posts were hallucinations and we have retries, challenge the LLM to try again
|
||||
if (verifiedPosts.length === 0 && rawPosts.length > 0 && retries > 0) {
|
||||
console.warn(
|
||||
`🔄 Self-Healing triggered: All IDs were hallucinations. Challenging agent to find real IDs...`,
|
||||
);
|
||||
return this.findSocialPosts(topic, retries - 1, [
|
||||
...previousFailures,
|
||||
...failedIdsForThisRun,
|
||||
]);
|
||||
}
|
||||
|
||||
return verifiedPosts;
|
||||
}
|
||||
|
||||
private async planResearch(
|
||||
@@ -255,7 +348,7 @@ CRITICAL: Do NOT provide more than 2 trendsKeywords. Keep it extremely focused.`
|
||||
try {
|
||||
let parsed = JSON.parse(
|
||||
response.choices[0].message.content ||
|
||||
'{"trendsKeywords": [], "dcVariables": []}',
|
||||
'{"trendsKeywords": [], "dcVariables": []}',
|
||||
);
|
||||
if (Array.isArray(parsed)) {
|
||||
parsed = parsed[0] || { trendsKeywords: [], dcVariables: [] };
|
||||
@@ -273,4 +366,60 @@ CRITICAL: Do NOT provide more than 2 trendsKeywords. Keep it extremely focused.`
|
||||
return { trendsKeywords: [], dcVariables: [] };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Researches the top-ranking competitors on Google for a given topic.
|
||||
* Extracts their titles and snippets to guide the LLM to write better content.
|
||||
*/
|
||||
async researchCompetitors(topic: string, retries = 1): Promise<string[]> {
|
||||
console.log(
|
||||
`🔍 [Competitor Research] Fetching top ranking web pages for topic: "${topic.slice(0, 50)}..."`,
|
||||
);
|
||||
|
||||
// Step 1: LLM generates the optimal Google Search query
|
||||
const queryGen = await this.openai.chat.completions.create({
|
||||
model: "google/gemini-2.5-flash",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: `Generate a Google Search query that a B2B decision maker would use to research the following topic: "${topic}".
|
||||
Focus on intent-driven keywords.
|
||||
Return a JSON object with a single string field "query". Example: {"query": "Next.js performance optimization agency"}.
|
||||
DO NOT USE QUOTES IN THE QUERY ITSELF.`,
|
||||
},
|
||||
],
|
||||
response_format: { type: "json_object" },
|
||||
});
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(
|
||||
queryGen.choices[0].message.content || '{"query": ""}',
|
||||
);
|
||||
const queryStr = parsed.query || topic;
|
||||
|
||||
// Step 2: Search via Serper Web Search
|
||||
const organicResults = await this.serperClient.searchWeb(queryStr, 5);
|
||||
|
||||
if (!organicResults || organicResults.length === 0) {
|
||||
console.warn(
|
||||
`⚠️ [Competitor Research] No web results found for query: "${queryStr}"`,
|
||||
);
|
||||
if (retries > 0) return this.researchCompetitors(topic, retries - 1);
|
||||
return [];
|
||||
}
|
||||
|
||||
// Map to structured insights string
|
||||
const insights = organicResults.map((result, i) => {
|
||||
return `[Rank #${i + 1}] Title: "${result.title}" | Snippet: "${result.snippet}"`;
|
||||
});
|
||||
|
||||
console.log(
|
||||
`✅ [Competitor Research] Analyzed top ${insights.length} competitor articles.`,
|
||||
);
|
||||
return insights;
|
||||
} catch (e) {
|
||||
console.error("❌ Failed to fetch competitor research:", e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
128
packages/journaling/src/clients/serper.ts
Normal file
128
packages/journaling/src/clients/serper.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
export interface SerperVideoResult {
|
||||
title: string;
|
||||
link: string;
|
||||
snippet?: string;
|
||||
date?: string;
|
||||
duration?: string;
|
||||
channel?: string;
|
||||
}
|
||||
|
||||
export interface SerperVideoResponse {
|
||||
searchParameters: any;
|
||||
videos: SerperVideoResult[];
|
||||
}
|
||||
|
||||
export interface SerperWebResult {
|
||||
title: string;
|
||||
link: string;
|
||||
snippet: string;
|
||||
date?: string;
|
||||
sitelinks?: any[];
|
||||
position: number;
|
||||
}
|
||||
|
||||
export interface SerperWebResponse {
|
||||
searchParameters: any;
|
||||
organic: SerperWebResult[];
|
||||
}
|
||||
|
||||
export class SerperClient {
|
||||
private apiKey: string;
|
||||
|
||||
constructor(apiKey?: string) {
|
||||
const key = apiKey || process.env.SERPER_API_KEY;
|
||||
if (!key) {
|
||||
console.warn("⚠️ SERPER_API_KEY is not defined. SerperClient will fail.");
|
||||
}
|
||||
this.apiKey = key || "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a video search via Serper (Google Video Search).
|
||||
* Great for finding relevant YouTube videos.
|
||||
*/
|
||||
async searchVideos(
|
||||
query: string,
|
||||
num: number = 5,
|
||||
): Promise<SerperVideoResult[]> {
|
||||
if (!this.apiKey) {
|
||||
console.error("❌ SERPER_API_KEY missing - cannot execute search.");
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
console.log(`🔍 [Serper] Searching videos for: "${query}"`);
|
||||
const response = await fetch("https://google.serper.dev/videos", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-KEY": this.apiKey,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
q: query,
|
||||
num: num,
|
||||
gl: "de", // Germany for localized results
|
||||
hl: "de", // German language
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.error(
|
||||
`❌ [Serper] API Error: ${response.status} ${response.statusText}`,
|
||||
);
|
||||
const text = await response.text();
|
||||
console.error(text);
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = (await response.json()) as SerperVideoResponse;
|
||||
return data.videos || [];
|
||||
} catch (e) {
|
||||
console.error("❌ [Serper] Request failed", e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a standard web search via Serper.
|
||||
* Crucial for B2B competitor analysis and context gathering.
|
||||
*/
|
||||
async searchWeb(query: string, num: number = 5): Promise<SerperWebResult[]> {
|
||||
if (!this.apiKey) {
|
||||
console.error("❌ SERPER_API_KEY missing - cannot execute web search.");
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
console.log(`🔍 [Serper] Web Search for Competitor Insights: "${query}"`);
|
||||
const response = await fetch("https://google.serper.dev/search", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-KEY": this.apiKey,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
q: query,
|
||||
num: num,
|
||||
gl: "de", // Germany for localized results
|
||||
hl: "de", // German language
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.error(
|
||||
`❌ [Serper] API Error: ${response.status} ${response.statusText}`,
|
||||
);
|
||||
const text = await response.text();
|
||||
console.error(text);
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = (await response.json()) as SerperWebResponse;
|
||||
return data.organic || [];
|
||||
} catch (e) {
|
||||
console.error("❌ [Serper] Web Request failed", e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
export * from "./clients/data-commons";
|
||||
export * from "./clients/trends";
|
||||
export * from "./clients/serper";
|
||||
export * from "./agent";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/mail",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"private": false,
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@mintel/meme-generator",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"version": "1.8.21",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
|
||||
@@ -123,7 +123,7 @@ IMPORTANT: Return ONLY the JSON object. No markdown wrappers.`,
|
||||
let result;
|
||||
try {
|
||||
result = JSON.parse(body);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
console.error("Failed to parse AI response", body);
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-config",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-feedback",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-observability",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-utils",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/observability",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
39
packages/page-audit/package.json
Normal file
39
packages/page-audit/package.json
Normal file
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"name": "@mintel/page-audit",
|
||||
"version": "1.8.21",
|
||||
"private": true,
|
||||
"description": "AI-powered website IST-analysis using DataForSEO and Gemini",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"bin": {
|
||||
"page-audit": "./dist/cli.js"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup src/index.ts src/cli.ts --format esm --dts --clean",
|
||||
"dev": "tsup src/index.ts src/cli.ts --format esm --watch --dts",
|
||||
"audit": "tsx src/cli.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"chalk": "^5.3.0",
|
||||
"cheerio": "^1.0.0",
|
||||
"commander": "^12.0.0",
|
||||
"dotenv": "^17.3.1",
|
||||
"openai": "^4.82.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mintel/eslint-config": "workspace:*",
|
||||
"@mintel/tsconfig": "workspace:*",
|
||||
"@types/node": "^20.0.0",
|
||||
"tsup": "^8.3.5",
|
||||
"tsx": "^4.7.0",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/pdf",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.js",
|
||||
|
||||
@@ -13,6 +13,8 @@ import {
|
||||
Footer,
|
||||
FoldingMarks,
|
||||
DocumentTitle,
|
||||
COLORS,
|
||||
FONT_SIZES,
|
||||
} from "./pdf/SharedUI.js";
|
||||
import { SimpleLayout } from "./pdf/SimpleLayout.js";
|
||||
|
||||
@@ -29,23 +31,23 @@ const localStyles = PDFStyleSheet.create({
|
||||
marginBottom: 6,
|
||||
},
|
||||
monoNumber: {
|
||||
fontSize: 7,
|
||||
fontSize: FONT_SIZES.TINY,
|
||||
fontWeight: "bold",
|
||||
color: "#94a3b8",
|
||||
color: COLORS.TEXT_LIGHT,
|
||||
letterSpacing: 2,
|
||||
width: 25,
|
||||
},
|
||||
sectionTitle: {
|
||||
fontSize: 9,
|
||||
fontSize: FONT_SIZES.LABEL,
|
||||
fontWeight: "bold",
|
||||
color: "#000000",
|
||||
color: COLORS.CHARCOAL,
|
||||
textTransform: "uppercase",
|
||||
letterSpacing: 0.5,
|
||||
},
|
||||
officialText: {
|
||||
fontSize: 8,
|
||||
fontSize: FONT_SIZES.BODY,
|
||||
lineHeight: 1.5,
|
||||
color: "#334155",
|
||||
color: COLORS.TEXT_MAIN,
|
||||
textAlign: "justify",
|
||||
paddingLeft: 25,
|
||||
},
|
||||
@@ -100,7 +102,7 @@ export const AgbsPDF = ({
|
||||
};
|
||||
|
||||
const content = (
|
||||
<>
|
||||
<PDFView>
|
||||
<DocumentTitle
|
||||
title="Allgemeine Geschäftsbedingungen"
|
||||
subLines={[`Stand: ${date}`]}
|
||||
@@ -142,7 +144,7 @@ export const AgbsPDF = ({
|
||||
|
||||
<AGBSection index="05" title="Abnahme">
|
||||
Die Leistung gilt als abgenommen, wenn der Auftraggeber sie produktiv
|
||||
nutzt oder innerhalb von 7 Tagen nach Bereitstellung keine
|
||||
nutzt oder innerhalb von 30 Tagen nach Bereitstellung keine
|
||||
wesentlichen Mängel angezeigt werden. Optische Abweichungen,
|
||||
Geschmacksfragen oder subjektive Einschätzungen stellen keine Mängel
|
||||
dar.
|
||||
@@ -206,7 +208,7 @@ export const AgbsPDF = ({
|
||||
bleibt die Wirksamkeit der übrigen Regelungen unberührt.
|
||||
</AGBSection>
|
||||
</PDFView>
|
||||
</>
|
||||
</PDFView>
|
||||
);
|
||||
|
||||
if (mode === "full") {
|
||||
@@ -214,9 +216,8 @@ export const AgbsPDF = ({
|
||||
<SimpleLayout
|
||||
companyData={companyData}
|
||||
bankData={bankData}
|
||||
headerIcon={headerIcon}
|
||||
footerLogo={footerLogo}
|
||||
icon={headerIcon}
|
||||
pageNumber="10"
|
||||
showPageNumber={false}
|
||||
>
|
||||
{content}
|
||||
@@ -232,7 +233,7 @@ export const AgbsPDF = ({
|
||||
<Footer
|
||||
logo={footerLogo}
|
||||
companyData={companyData}
|
||||
bankData={bankData}
|
||||
_bankData={bankData}
|
||||
showDetails={false}
|
||||
showPageNumber={false}
|
||||
/>
|
||||
|
||||
@@ -46,7 +46,7 @@ export const CombinedQuotePDF = ({
|
||||
|
||||
const layoutProps = {
|
||||
date,
|
||||
icon: estimationProps.headerIcon,
|
||||
headerIcon: estimationProps.headerIcon,
|
||||
footerLogo: estimationProps.footerLogo,
|
||||
companyData,
|
||||
bankData,
|
||||
@@ -71,7 +71,7 @@ export const CombinedQuotePDF = ({
|
||||
footerLogo={estimationProps.footerLogo}
|
||||
/>
|
||||
)}
|
||||
<SimpleLayout {...layoutProps} pageNumber="END" showPageNumber={false}>
|
||||
<SimpleLayout {...layoutProps} showPageNumber={false}>
|
||||
<ClosingModule />
|
||||
</SimpleLayout>
|
||||
</PDFDocument>
|
||||
|
||||
@@ -50,7 +50,7 @@ export const EstimationPDF = ({
|
||||
const commonProps = {
|
||||
state,
|
||||
date,
|
||||
icon: headerIcon,
|
||||
headerIcon,
|
||||
footerLogo,
|
||||
companyData,
|
||||
};
|
||||
@@ -64,17 +64,17 @@ export const EstimationPDF = ({
|
||||
<FrontPageModule state={state} headerIcon={headerIcon} date={date} />
|
||||
</PDFPage>
|
||||
|
||||
<SimpleLayout {...commonProps} pageNumber={getPageNum()}>
|
||||
<SimpleLayout {...commonProps}>
|
||||
<BriefingModule state={state} />
|
||||
</SimpleLayout>
|
||||
|
||||
{state.sitemap && state.sitemap.length > 0 && (
|
||||
<SimpleLayout {...commonProps} pageNumber={getPageNum()}>
|
||||
<SimpleLayout {...commonProps}>
|
||||
<SitemapModule state={state} />
|
||||
</SimpleLayout>
|
||||
)}
|
||||
|
||||
<SimpleLayout {...commonProps} pageNumber={getPageNum()}>
|
||||
<SimpleLayout {...commonProps}>
|
||||
<EstimationModule
|
||||
state={state}
|
||||
positions={positions}
|
||||
@@ -83,11 +83,11 @@ export const EstimationPDF = ({
|
||||
/>
|
||||
</SimpleLayout>
|
||||
|
||||
<SimpleLayout {...commonProps} pageNumber={getPageNum()}>
|
||||
<SimpleLayout {...commonProps}>
|
||||
<TransparenzModule pricing={pricing} />
|
||||
</SimpleLayout>
|
||||
|
||||
<SimpleLayout {...commonProps} pageNumber={getPageNum()}>
|
||||
<SimpleLayout {...commonProps}>
|
||||
<ClosingModule />
|
||||
</SimpleLayout>
|
||||
</PDFDocument>
|
||||
|
||||
@@ -8,58 +8,48 @@ const simpleStyles = StyleSheet.create({
|
||||
industrialPage: {
|
||||
padding: 30,
|
||||
paddingTop: 20,
|
||||
backgroundColor: '#ffffff',
|
||||
flexDirection: 'column',
|
||||
backgroundColor: '#FFFFFF',
|
||||
fontFamily: 'Outfit',
|
||||
},
|
||||
industrialNumber: {
|
||||
fontSize: 60,
|
||||
fontWeight: 'bold',
|
||||
color: '#f1f5f9',
|
||||
position: 'absolute',
|
||||
top: -10,
|
||||
right: 0,
|
||||
zIndex: -1,
|
||||
},
|
||||
industrialSection: {
|
||||
marginTop: 16,
|
||||
paddingTop: 12,
|
||||
flexDirection: 'row',
|
||||
position: 'relative',
|
||||
contentView: {
|
||||
flex: 1,
|
||||
marginTop: 20,
|
||||
},
|
||||
});
|
||||
|
||||
interface SimpleLayoutProps {
|
||||
children: React.ReactNode;
|
||||
pageNumber?: string;
|
||||
icon?: string;
|
||||
headerIcon?: string;
|
||||
footerLogo?: string;
|
||||
companyData: any;
|
||||
bankData?: any;
|
||||
showDetails?: boolean;
|
||||
showPageNumber?: boolean;
|
||||
children: React.ReactNode;
|
||||
}
|
||||
|
||||
export const SimpleLayout = ({
|
||||
children,
|
||||
pageNumber,
|
||||
icon,
|
||||
export const SimpleLayout: React.FC<SimpleLayoutProps> = ({
|
||||
headerIcon,
|
||||
footerLogo,
|
||||
companyData,
|
||||
bankData,
|
||||
showPageNumber = true
|
||||
}: SimpleLayoutProps) => {
|
||||
showDetails = false,
|
||||
showPageNumber = true,
|
||||
children,
|
||||
}) => {
|
||||
return (
|
||||
<PDFPage size="A4" style={[pdfStyles.page, simpleStyles.industrialPage]}>
|
||||
<Header icon={icon} showAddress={false} />
|
||||
{pageNumber && <PDFText style={simpleStyles.industrialNumber}>{pageNumber}</PDFText>}
|
||||
<PDFView style={simpleStyles.industrialSection}>
|
||||
<PDFView style={{ width: '100%' }}>
|
||||
{children}
|
||||
</PDFView>
|
||||
<PDFPage size="A4" style={simpleStyles.industrialPage}>
|
||||
<Header icon={headerIcon} sender={companyData.name} showAddress={false} />
|
||||
|
||||
<PDFView style={simpleStyles.contentView}>
|
||||
{children}
|
||||
</PDFView>
|
||||
|
||||
<Footer
|
||||
logo={footerLogo}
|
||||
companyData={companyData}
|
||||
bankData={bankData}
|
||||
showDetails={false}
|
||||
_bankData={bankData}
|
||||
showDetails={showDetails}
|
||||
showPageNumber={showPageNumber}
|
||||
/>
|
||||
</PDFPage>
|
||||
|
||||
53
packages/pdf-library/src/generate-agbs.ts
Normal file
53
packages/pdf-library/src/generate-agbs.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { renderToFile, Document as PDFDocument, Font } from "@react-pdf/renderer";
|
||||
import { createElement } from "react";
|
||||
import { AgbsPDF } from "./components/AgbsPDF.js";
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Standard Font Registration
|
||||
Font.register({
|
||||
family: 'Outfit',
|
||||
fonts: [
|
||||
{ src: 'Helvetica' },
|
||||
{ src: 'Helvetica-Bold', fontWeight: 'bold' },
|
||||
],
|
||||
});
|
||||
|
||||
Font.register({
|
||||
family: 'Helvetica',
|
||||
fonts: [
|
||||
{ src: 'Helvetica' },
|
||||
{ src: 'Helvetica-Bold', fontWeight: 'bold' },
|
||||
],
|
||||
});
|
||||
|
||||
async function generate() {
|
||||
const outDir = path.join(__dirname, "../../../out");
|
||||
if (!fs.existsSync(outDir)) {
|
||||
fs.mkdirSync(outDir, { recursive: true });
|
||||
}
|
||||
|
||||
const outputPath = path.resolve(outDir, "AGB_Mintel.pdf");
|
||||
|
||||
console.log("Generating High-Fidelity AGB PDF...");
|
||||
|
||||
const headerIcon = "/Users/marcmintel/Projects/mintel.me/apps/web/src/assets/logo/Icon-White-Transparent.png";
|
||||
const footerLogo = "/Users/marcmintel/Projects/mintel.me/apps/web/src/assets/logo/Logo-Black-Transparent.png";
|
||||
|
||||
// WRAP IN DOCUMENT - MANDATORY FOR standalone rendering
|
||||
const document = createElement(PDFDocument, {
|
||||
title: "Allgemeine Geschäftsbedingungen - Marc Mintel",
|
||||
author: "Marc Mintel",
|
||||
},
|
||||
createElement(AgbsPDF, { mode: "full", headerIcon, footerLogo })
|
||||
);
|
||||
|
||||
await renderToFile(document, outputPath);
|
||||
console.log(`Generated: ${outputPath}`);
|
||||
}
|
||||
|
||||
generate().catch(console.error);
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "people-manager",
|
||||
"description": "Custom High-Fidelity Management for Directus",
|
||||
"icon": "extension",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"keywords": [
|
||||
"directus",
|
||||
@@ -27,4 +27,4 @@
|
||||
"@directus/extensions-sdk": "11.0.2",
|
||||
"vue": "^3.4.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
30
packages/thumbnail-generator/package.json
Normal file
30
packages/thumbnail-generator/package.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "@mintel/thumbnail-generator",
|
||||
"version": "1.8.21",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup src/index.ts --format esm --dts --clean",
|
||||
"dev": "tsup src/index.ts --format esm --watch --dts",
|
||||
"lint": "eslint src"
|
||||
},
|
||||
"dependencies": {
|
||||
"replicate": "^1.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mintel/eslint-config": "workspace:*",
|
||||
"@mintel/tsconfig": "workspace:*",
|
||||
"@types/node": "^20.0.0",
|
||||
"tsup": "^8.3.5",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
112
packages/thumbnail-generator/src/generator.ts
Normal file
112
packages/thumbnail-generator/src/generator.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import Replicate from "replicate";
|
||||
import * as fs from "node:fs/promises";
|
||||
import * as path from "node:path";
|
||||
|
||||
export interface ThumbnailGeneratorConfig {
|
||||
replicateApiKey: string;
|
||||
}
|
||||
|
||||
export interface ThumbnailGenerateOptions {
|
||||
model?: string;
|
||||
systemPrompt?: string;
|
||||
imagePrompt?: string; // Path to local reference image
|
||||
}
|
||||
|
||||
export class ThumbnailGenerator {
|
||||
private replicate: Replicate;
|
||||
|
||||
constructor(config: ThumbnailGeneratorConfig) {
|
||||
this.replicate = new Replicate({
|
||||
auth: config.replicateApiKey,
|
||||
});
|
||||
}
|
||||
|
||||
public async generateImage(
|
||||
topic: string,
|
||||
outputPath: string,
|
||||
options?: ThumbnailGenerateOptions,
|
||||
): Promise<string> {
|
||||
const defaultSystemPrompt = `A highly polished, ultra-minimalist conceptual illustration. Style: high-end tech agency, clean modern 3D or flat vector art, extensive use of negative space, elegant monochrome palette (whites, light grays) with a single vibrant accent color (neon green or electric blue). Extremely clean and precise geometry. Absolutely no text, no photorealism, no chaotic lines, no messy sketches, no people.`;
|
||||
|
||||
const systemPrompt = options?.systemPrompt || defaultSystemPrompt;
|
||||
const prompt = `${systemPrompt}\n\nTopic to illustrate abstractly: ${topic}`;
|
||||
|
||||
console.log(`🎨 Generating thumbnail for topic: "${topic}"...`);
|
||||
|
||||
const inputPayload: any = {
|
||||
prompt,
|
||||
aspect_ratio: "16:9",
|
||||
output_format: "png",
|
||||
output_quality: 90,
|
||||
prompt_upsampling: false,
|
||||
};
|
||||
|
||||
if (options?.imagePrompt) {
|
||||
console.log(`🖼️ Using image style reference: ${options.imagePrompt}`);
|
||||
try {
|
||||
const absImgPath = path.isAbsolute(options.imagePrompt)
|
||||
? options.imagePrompt
|
||||
: path.resolve(process.cwd(), options.imagePrompt);
|
||||
const imgBuffer = await fs.readFile(absImgPath);
|
||||
const base64 = imgBuffer.toString("base64");
|
||||
// Replicate models usually expect a data URI for image_prompt
|
||||
inputPayload.image_prompt = `data:image/png;base64,${base64}`;
|
||||
} catch (err) {
|
||||
console.warn(`⚠️ Could not load image prompt: ${err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Default to the requested flux-1.1-pro model unless explicitly provided
|
||||
const model = options?.model || "black-forest-labs/flux-1.1-pro";
|
||||
|
||||
const output = await this.replicate.run(model as `${string}/${string}`, {
|
||||
input: inputPayload,
|
||||
});
|
||||
|
||||
// Replicate returns a ReadableStream for the output of flux-1.1-pro in newer Node SDKs
|
||||
// Or a string URL in older ones. We handle both.
|
||||
let buffer: Buffer;
|
||||
|
||||
if (output instanceof ReadableStream) {
|
||||
console.log(`⬇️ Downloading generated stream from Replicate...`);
|
||||
const chunks: Uint8Array[] = [];
|
||||
const reader = output.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
if (value) chunks.push(value);
|
||||
}
|
||||
buffer = Buffer.concat(chunks);
|
||||
} else if (
|
||||
typeof output === "string" ||
|
||||
(Array.isArray(output) && typeof output[0] === "string")
|
||||
) {
|
||||
const imageUrl = Array.isArray(output) ? output[0] : output;
|
||||
console.log(
|
||||
`⬇️ Downloading generated image from URL: ${imageUrl.substring(0, 50)}...`,
|
||||
);
|
||||
const response = await fetch(imageUrl);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to download image: ${response.statusText}`);
|
||||
}
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
buffer = Buffer.from(arrayBuffer);
|
||||
} else if (Buffer.isBuffer(output)) {
|
||||
buffer = output;
|
||||
} else if (typeof output === "object") {
|
||||
console.log("Raw output object:", output);
|
||||
throw new Error("Unexpected output format from Replicate.");
|
||||
} else {
|
||||
throw new Error("Unknown output format from Replicate.");
|
||||
}
|
||||
|
||||
const absPath = path.isAbsolute(outputPath)
|
||||
? outputPath
|
||||
: path.resolve(process.cwd(), outputPath);
|
||||
await fs.mkdir(path.dirname(absPath), { recursive: true });
|
||||
await fs.writeFile(absPath, buffer);
|
||||
|
||||
console.log(`✅ Saved thumbnail to: ${absPath}`);
|
||||
return absPath;
|
||||
}
|
||||
}
|
||||
1
packages/thumbnail-generator/src/index.ts
Normal file
1
packages/thumbnail-generator/src/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export * from "./generator";
|
||||
8
packages/thumbnail-generator/tsconfig.json
Normal file
8
packages/thumbnail-generator/tsconfig.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": "@mintel/tsconfig/base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/tsconfig",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://npm.infra.mintel.me"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "unified-dashboard",
|
||||
"description": "Custom High-Fidelity Management for Directus",
|
||||
"icon": "extension",
|
||||
"version": "1.8.10",
|
||||
"version": "1.8.21",
|
||||
"type": "module",
|
||||
"keywords": [
|
||||
"directus",
|
||||
@@ -24,4 +24,4 @@
|
||||
"@directus/extensions-sdk": "11.0.2",
|
||||
"vue": "^3.4.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
14
plan_free.hbs
Normal file
14
plan_free.hbs
Normal file
@@ -0,0 +1,14 @@
|
||||
<div class="membership-card free">
|
||||
<div class="membership-card-content">
|
||||
<h2 class="membership-card-title">{{t "Free"}}</h2>
|
||||
<h3 class="membership-card-price"><sup>$</sup>0</h3>
|
||||
<div class="membership-card-options">
|
||||
<ul>
|
||||
<li>Full access to posts for subscribers</li>
|
||||
<li>Weekly email newsletter</li>
|
||||
<li>No advertising</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<a href="{{@site.url}}/signup/" class="global-button">{{t "Subscribe now"}}</a>
|
||||
</div>
|
||||
16
plan_monthly.hbs
Normal file
16
plan_monthly.hbs
Normal file
@@ -0,0 +1,16 @@
|
||||
<div class="membership-card monthly">
|
||||
<div class="membership-card-content">
|
||||
<h2 class="membership-card-title">{{t "Monthly"}}</h2>
|
||||
<h3 class="membership-card-price">{{price monthly_price currency=currency}}</h3>
|
||||
<div class="membership-card-options">
|
||||
<ul>
|
||||
<li>Full access to all premium posts</li>
|
||||
<li>Weekly email newsletter</li>
|
||||
<li>Support independent publishing</li>
|
||||
<li>Simple, secure card payment</li>
|
||||
<li>No advertising</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" class="global-button" data-members-plan="Monthly">{{t "Subscribe now"}}</a>
|
||||
</div>
|
||||
17
plan_yearly.hbs
Normal file
17
plan_yearly.hbs
Normal file
@@ -0,0 +1,17 @@
|
||||
<div class="membership-card yearly">
|
||||
<div class="membership-card-content">
|
||||
<h2 class="membership-card-title">{{t "Yearly"}}</h2>
|
||||
<h3 class="membership-card-price">{{price yearly_price currency=currency}}</h3>
|
||||
<div class="membership-card-options">
|
||||
<ul>
|
||||
<li>Full access to all premium posts</li>
|
||||
<li>Weekly email newsletter</li>
|
||||
<li>Support independent publishing</li>
|
||||
<li>Simple, secure card payment</li>
|
||||
<li>One easy payment instead of 12!</li>
|
||||
<li>No advertising</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" class="global-button" data-members-plan="Yearly">{{t "Subscribe now"}}</a>
|
||||
</div>
|
||||
1066
pnpm-lock.yaml
generated
1066
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -116,7 +116,6 @@ if (fs.existsSync(appsDir)) {
|
||||
}
|
||||
|
||||
// Update .env files
|
||||
updateEnv(".env");
|
||||
updateEnv(".env.example");
|
||||
|
||||
console.log("✨ All versions synced!");
|
||||
|
||||
Reference in New Issue
Block a user