Compare commits
38 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a6ca876823 | |||
| f615565323 | |||
| fcbf388ef8 | |||
| cbed10052b | |||
| 560213680c | |||
| 7e2542bf1f | |||
| df6bef7345 | |||
| aa57e8c48b | |||
| 822e8a9d0f | |||
| f0d1fb6647 | |||
| 751ffd59a0 | |||
| d0a17a8a31 | |||
| daa2750f89 | |||
| 29423123b3 | |||
| 5c10eb0009 | |||
| dca35a9900 | |||
| 4430d473cb | |||
| 0c27e3b5d8 | |||
| 616d8a039b | |||
| ee3d7714c2 | |||
| ddf896e3f9 | |||
| b9d0199115 | |||
| 1670b8e5ef | |||
| 1c43d12e4d | |||
| 5cf9922822 | |||
| 9a4a95feea | |||
| d3902c4c77 | |||
| 21ec8a33ae | |||
| 79d221de5e | |||
| 24fde20030 | |||
| 4a4409ca85 | |||
| d96d6a4b13 | |||
| 8f6b12d827 | |||
| a11714d07d | |||
| 52f7e68f25 | |||
| 217ac33675 | |||
| f2b8b136af | |||
| 2e07b213d1 |
@@ -1,5 +1,5 @@
|
||||
# Project
|
||||
IMAGE_TAG=v1.9.6
|
||||
IMAGE_TAG=v1.9.17
|
||||
PROJECT_NAME=sample-website
|
||||
PROJECT_COLOR=#82ed20
|
||||
|
||||
|
||||
@@ -199,12 +199,31 @@ jobs:
|
||||
- name: 🐳 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: 🔐 Registry Login
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: registry.infra.mintel.me
|
||||
username: ${{ secrets.REGISTRY_USER }}
|
||||
password: ${{ secrets.REGISTRY_PASS }}
|
||||
- name: 🔐 Discover Valid Registry Token
|
||||
id: discover_token
|
||||
run: |
|
||||
echo "Testing available secrets against git.infra.mintel.me Docker registry..."
|
||||
TOKENS="${{ secrets.GITEA_PAT }} ${{ secrets.MINTEL_PRIVATE_TOKEN }} ${{ secrets.NPM_TOKEN }}"
|
||||
USERS="${{ github.repository_owner }} ${{ github.actor }} marcmintel mintel mmintel"
|
||||
|
||||
for TOKEN in $TOKENS; do
|
||||
if [ -n "$TOKEN" ]; then
|
||||
for U in $USERS; do
|
||||
if [ -n "$U" ]; then
|
||||
echo "Attempting docker login for a token with user $U..."
|
||||
if echo "$TOKEN" | docker login git.infra.mintel.me -u "$U" --password-stdin > /dev/null 2>&1; then
|
||||
echo "✅ Successfully authenticated with a token."
|
||||
echo "::add-mask::$TOKEN"
|
||||
echo "token=$TOKEN" >> $GITHUB_OUTPUT
|
||||
echo "user=$U" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
done
|
||||
echo "❌ All available tokens failed to authenticate!"
|
||||
exit 1
|
||||
|
||||
- name: 🏗️ Build & Push ${{ matrix.name }}
|
||||
uses: docker/build-push-action@v5
|
||||
@@ -216,8 +235,8 @@ jobs:
|
||||
provenance: false
|
||||
push: true
|
||||
secrets: |
|
||||
NPM_TOKEN=${{ secrets.NPM_TOKEN }}
|
||||
NPM_TOKEN=${{ steps.discover_token.outputs.token }}
|
||||
tags: |
|
||||
registry.infra.mintel.me/mintel/${{ matrix.image }}:${{ github.ref_name }}
|
||||
registry.infra.mintel.me/mintel/${{ matrix.image }}:latest
|
||||
git.infra.mintel.me/mmintel/${{ matrix.image }}:${{ github.ref_name }}
|
||||
git.infra.mintel.me/mmintel/${{ matrix.image }}:latest
|
||||
|
||||
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -46,4 +46,8 @@ directus/uploads/directus-health-file
|
||||
# Estimation Engine Data
|
||||
data/crawls/
|
||||
packages/estimation-engine/out/
|
||||
apps/web/out/estimations/
|
||||
apps/web/out/estimations/
|
||||
|
||||
# Memory MCP
|
||||
data/qdrant/
|
||||
packages/memory-mcp/models/
|
||||
@@ -1,5 +1,5 @@
|
||||
# Stage 1: Builder
|
||||
FROM registry.infra.mintel.me/mintel/nextjs:latest AS builder
|
||||
FROM git.infra.mintel.me/mmintel/nextjs:latest AS builder
|
||||
WORKDIR /app
|
||||
|
||||
# Clean the workspace in case the base image is dirty
|
||||
@@ -37,7 +37,7 @@ COPY . .
|
||||
RUN pnpm build
|
||||
|
||||
# Stage 2: Runner
|
||||
FROM registry.infra.mintel.me/mintel/runtime:latest AS runner
|
||||
FROM git.infra.mintel.me/mmintel/runtime:latest AS runner
|
||||
WORKDIR /app
|
||||
|
||||
ENV HOSTNAME="0.0.0.0"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "sample-website",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
|
||||
16
docker-compose.mcps.yml
Normal file
16
docker-compose.mcps.yml
Normal file
@@ -0,0 +1,16 @@
|
||||
services:
|
||||
qdrant:
|
||||
image: qdrant/qdrant:latest
|
||||
container_name: qdrant-mcp
|
||||
ports:
|
||||
- "6333:6333"
|
||||
- "6334:6334"
|
||||
volumes:
|
||||
- ./data/qdrant:/qdrant/storage
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- mcp-network
|
||||
|
||||
networks:
|
||||
mcp-network:
|
||||
driver: bridge
|
||||
48
ecosystem.mcps.config.cjs
Normal file
48
ecosystem.mcps.config.cjs
Normal file
@@ -0,0 +1,48 @@
|
||||
module.exports = {
|
||||
apps: [
|
||||
{
|
||||
name: 'gitea-mcp',
|
||||
script: 'node',
|
||||
args: 'dist/start.js',
|
||||
cwd: './packages/gitea-mcp',
|
||||
watch: false,
|
||||
},
|
||||
{
|
||||
name: 'memory-mcp',
|
||||
script: 'node',
|
||||
args: 'dist/start.js',
|
||||
cwd: './packages/memory-mcp',
|
||||
watch: false,
|
||||
},
|
||||
{
|
||||
name: 'umami-mcp',
|
||||
script: 'node',
|
||||
args: 'dist/start.js',
|
||||
cwd: './packages/umami-mcp',
|
||||
watch: false,
|
||||
},
|
||||
{
|
||||
name: 'serpbear-mcp',
|
||||
script: 'node',
|
||||
args: 'dist/start.js',
|
||||
cwd: './packages/serpbear-mcp',
|
||||
watch: false,
|
||||
},
|
||||
{
|
||||
name: 'glitchtip-mcp',
|
||||
script: 'node',
|
||||
args: 'dist/start.js',
|
||||
cwd: './packages/glitchtip-mcp',
|
||||
watch: false,
|
||||
},
|
||||
{
|
||||
name: 'klz-payload-mcp',
|
||||
script: 'node',
|
||||
args: 'dist/start.js',
|
||||
cwd: './packages/klz-payload-mcp',
|
||||
watch: false,
|
||||
},
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
12
fix-private.mjs
Normal file
12
fix-private.mjs
Normal file
@@ -0,0 +1,12 @@
|
||||
import fs from 'fs';
|
||||
import glob from 'glob';
|
||||
|
||||
const files = glob.sync('/Users/marcmintel/Projects/at-mintel/packages/*/package.json');
|
||||
files.forEach(f => {
|
||||
const content = fs.readFileSync(f, 'utf8');
|
||||
if (content.includes('"private": true,')) {
|
||||
console.log(`Fixing ${f}`);
|
||||
const newContent = content.replace(/\s*"private": true,?\n/g, '\n');
|
||||
fs.writeFileSync(f, newContent);
|
||||
}
|
||||
});
|
||||
@@ -6,6 +6,12 @@
|
||||
"build": "pnpm -r build",
|
||||
"dev": "pnpm -r dev",
|
||||
"dev:gatekeeper": "bash -c 'trap \"COMPOSE_PROJECT_NAME=gatekeeper docker-compose -f docker-compose.gatekeeper.yml down\" EXIT INT TERM; docker network create infra 2>/dev/null || true && COMPOSE_PROJECT_NAME=gatekeeper docker-compose -f docker-compose.gatekeeper.yml down && COMPOSE_PROJECT_NAME=gatekeeper docker-compose -f docker-compose.gatekeeper.yml up --build --remove-orphans'",
|
||||
"dev:mcps:up": "docker-compose -f docker-compose.mcps.yml up -d",
|
||||
"dev:mcps:down": "docker-compose -f docker-compose.mcps.yml down && pm2 delete ecosystem.mcps.config.cjs || true",
|
||||
"dev:mcps:watch": "pnpm -r --filter=\"./packages/*-mcp\" exec tsc -w",
|
||||
"dev:mcps": "npm run dev:mcps:up && pm2 start ecosystem.mcps.config.cjs --watch && npm run dev:mcps:watch",
|
||||
"start:mcps:run": "pm2 start ecosystem.mcps.config.cjs",
|
||||
"start:mcps": "npm run dev:mcps:up && npm run start:mcps:run",
|
||||
"lint": "pnpm -r --filter='./packages/**' --filter='./apps/**' lint",
|
||||
"test": "pnpm -r test",
|
||||
"changeset": "changeset",
|
||||
@@ -36,6 +42,7 @@
|
||||
"husky": "^9.1.7",
|
||||
"jsdom": "^27.4.0",
|
||||
"lint-staged": "^16.2.7",
|
||||
"pm2": "^6.0.14",
|
||||
"prettier": "^3.8.1",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.0.0",
|
||||
@@ -49,7 +56,7 @@
|
||||
"pino-pretty": "^13.1.3",
|
||||
"require-in-the-middle": "^8.0.1"
|
||||
},
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
"@parcel/watcher",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/cli",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/cloner",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.js",
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/concept-engine",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"description": "AI-powered web project concept generation and analysis",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/content-engine",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/eslint-config",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/estimation-engine",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/gatekeeper",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
@@ -12,14 +11,11 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@mintel/next-utils": "workspace:*",
|
||||
"@react-three/drei": "^10.7.7",
|
||||
"@react-three/fiber": "^9.5.0",
|
||||
"framer-motion": "^11.18.2",
|
||||
"lucide-react": "^0.474.0",
|
||||
"next": "16.1.6",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"three": "^0.183.1"
|
||||
"react-dom": "^19.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@mintel/eslint-config": "workspace:*",
|
||||
@@ -29,7 +25,6 @@
|
||||
"@types/node": "^20.0.0",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"@types/three": "^0.183.1",
|
||||
"autoprefixer": "^10.4.20",
|
||||
"postcss": "^8.4.49",
|
||||
"tailwindcss": "^3.4.17",
|
||||
|
||||
@@ -1,20 +1,23 @@
|
||||
{
|
||||
"name": "@mintel/gitea-mcp",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"description": "Native Gitea MCP server for 100% Antigravity compatibility",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js"
|
||||
"start": "node dist/start.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.5.0",
|
||||
"zod": "^3.23.8",
|
||||
"axios": "^1.7.2"
|
||||
"axios": "^1.7.2",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5.5.3",
|
||||
"@types/node": "^20.14.10"
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^20.14.10",
|
||||
"typescript": "^5.5.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
|
||||
import express from 'express';
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
@@ -14,11 +15,10 @@ import { z } from "zod";
|
||||
import axios from "axios";
|
||||
|
||||
const GITEA_HOST = process.env.GITEA_HOST || "https://git.infra.mintel.me";
|
||||
const GITEA_ACCESS_TOKEN = process.env.GITEA_ACCESS_TOKEN;
|
||||
const GITEA_ACCESS_TOKEN = process.env.GITEA_ACCESS_TOKEN || process.env.GITEA_TOKEN;
|
||||
|
||||
if (!GITEA_ACCESS_TOKEN) {
|
||||
console.error("Error: GITEA_ACCESS_TOKEN environment variable is required");
|
||||
process.exit(1);
|
||||
console.error("Warning: Neither GITEA_ACCESS_TOKEN nor GITEA_TOKEN environment variable is set. Pipeline tools will return unauthorized errors.");
|
||||
}
|
||||
|
||||
const giteaClient = axios.create({
|
||||
@@ -37,6 +37,8 @@ const LIST_PIPELINES_TOOL: Tool = {
|
||||
owner: { type: "string", description: "Repository owner (e.g., 'mmintel')" },
|
||||
repo: { type: "string", description: "Repository name (e.g., 'at-mintel')" },
|
||||
limit: { type: "number", description: "Number of runs to fetch (default: 5)" },
|
||||
branch: { type: "string", description: "Optional: Filter by branch name (e.g., 'main')" },
|
||||
event: { type: "string", description: "Optional: Filter by trigger event (e.g., 'push', 'pull_request')" },
|
||||
},
|
||||
required: ["owner", "repo"],
|
||||
},
|
||||
@@ -56,6 +58,291 @@ const GET_PIPELINE_LOGS_TOOL: Tool = {
|
||||
},
|
||||
};
|
||||
|
||||
const WAIT_PIPELINE_COMPLETION_TOOL: Tool = {
|
||||
name: "gitea_wait_pipeline_completion",
|
||||
description: "BLOCKS and waits until a pipeline run completes, fails, or is cancelled. Use this instead of polling manually to save tokens.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
run_id: { type: "number", description: "ID of the action run" },
|
||||
timeout_minutes: { type: "number", description: "Maximum time to wait before aborting (default: 10)" },
|
||||
},
|
||||
required: ["owner", "repo", "run_id"],
|
||||
},
|
||||
};
|
||||
|
||||
const LIST_ISSUES_TOOL: Tool = {
|
||||
name: "gitea_list_issues",
|
||||
description: "List issues for a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
state: { type: "string", description: "Filter by state: open, closed, or all (default: open)" },
|
||||
limit: { type: "number", description: "Number of issues to fetch (default: 10)" },
|
||||
},
|
||||
required: ["owner", "repo"],
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_ISSUE_TOOL: Tool = {
|
||||
name: "gitea_create_issue",
|
||||
description: "Create a new issue in a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
title: { type: "string", description: "Issue title" },
|
||||
body: { type: "string", description: "Issue description/body" },
|
||||
},
|
||||
required: ["owner", "repo", "title"],
|
||||
},
|
||||
};
|
||||
|
||||
const GET_FILE_CONTENT_TOOL: Tool = {
|
||||
name: "gitea_get_file_content",
|
||||
description: "Get the raw content of a file from a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
filepath: { type: "string", description: "Path to the file in the repository" },
|
||||
ref: { type: "string", description: "The name of the commit/branch/tag (default: main)" },
|
||||
},
|
||||
required: ["owner", "repo", "filepath"],
|
||||
},
|
||||
};
|
||||
|
||||
const UPDATE_ISSUE_TOOL: Tool = {
|
||||
name: "gitea_update_issue",
|
||||
description: "Update an existing issue (e.g. change state, title, or body)",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
index: { type: "number", description: "Issue index/number" },
|
||||
state: { type: "string", description: "Optional: 'open' or 'closed'" },
|
||||
title: { type: "string", description: "Optional: New title" },
|
||||
body: { type: "string", description: "Optional: New body text" },
|
||||
},
|
||||
required: ["owner", "repo", "index"],
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_ISSUE_COMMENT_TOOL: Tool = {
|
||||
name: "gitea_create_issue_comment",
|
||||
description: "Add a comment to an existing issue or pull request",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
index: { type: "number", description: "Issue or PR index/number" },
|
||||
body: { type: "string", description: "Comment body text" },
|
||||
},
|
||||
required: ["owner", "repo", "index", "body"],
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_PULL_REQUEST_TOOL: Tool = {
|
||||
name: "gitea_create_pull_request",
|
||||
description: "Create a new Pull Request",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
head: { type: "string", description: "The branch you want to merge (e.g., 'feature/my-changes')" },
|
||||
base: { type: "string", description: "The branch to merge into (e.g., 'main')" },
|
||||
title: { type: "string", description: "PR title" },
|
||||
body: { type: "string", description: "Optional: PR description" },
|
||||
},
|
||||
required: ["owner", "repo", "head", "base", "title"],
|
||||
},
|
||||
};
|
||||
|
||||
const SEARCH_REPOS_TOOL: Tool = {
|
||||
name: "gitea_search_repos",
|
||||
description: "Search for repositories accessible to the authenticated user",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: { type: "string", description: "Search term" },
|
||||
limit: { type: "number", description: "Maximum number of results (default: 10)" },
|
||||
},
|
||||
required: ["query"],
|
||||
},
|
||||
};
|
||||
|
||||
// --- Wiki ---
|
||||
const LIST_WIKI_PAGES_TOOL: Tool = {
|
||||
name: "gitea_list_wiki_pages",
|
||||
description: "List all wiki pages of a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
},
|
||||
required: ["owner", "repo"],
|
||||
},
|
||||
};
|
||||
|
||||
const GET_WIKI_PAGE_TOOL: Tool = {
|
||||
name: "gitea_get_wiki_page",
|
||||
description: "Get the content of a specific wiki page",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
page_name: { type: "string", description: "Name/slug of the wiki page (e.g., 'Home')" },
|
||||
},
|
||||
required: ["owner", "repo", "page_name"],
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_WIKI_PAGE_TOOL: Tool = {
|
||||
name: "gitea_create_wiki_page",
|
||||
description: "Create a new wiki page in a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
title: { type: "string", description: "Page title" },
|
||||
content: { type: "string", description: "Page content in Markdown (base64 encoded internally)" },
|
||||
message: { type: "string", description: "Optional commit message" },
|
||||
},
|
||||
required: ["owner", "repo", "title", "content"],
|
||||
},
|
||||
};
|
||||
|
||||
const EDIT_WIKI_PAGE_TOOL: Tool = {
|
||||
name: "gitea_edit_wiki_page",
|
||||
description: "Edit an existing wiki page",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
page_name: { type: "string", description: "Current name/slug of the wiki page" },
|
||||
title: { type: "string", description: "Optional: new title" },
|
||||
content: { type: "string", description: "New content in Markdown" },
|
||||
message: { type: "string", description: "Optional commit message" },
|
||||
},
|
||||
required: ["owner", "repo", "page_name", "content"],
|
||||
},
|
||||
};
|
||||
|
||||
// --- Packages ---
|
||||
const LIST_PACKAGES_TOOL: Tool = {
|
||||
name: "gitea_list_packages",
|
||||
description: "List packages published to the Gitea package registry for a user or org",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "User or organization name" },
|
||||
type: { type: "string", description: "Optional: Package type filter (e.g., 'npm', 'docker', 'generic')" },
|
||||
limit: { type: "number", description: "Number of packages to return (default: 10)" },
|
||||
},
|
||||
required: ["owner"],
|
||||
},
|
||||
};
|
||||
|
||||
const LIST_PACKAGE_VERSIONS_TOOL: Tool = {
|
||||
name: "gitea_list_package_versions",
|
||||
description: "List all published versions of a specific package",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "User or organization name" },
|
||||
type: { type: "string", description: "Package type (e.g., 'npm', 'docker')" },
|
||||
name: { type: "string", description: "Package name" },
|
||||
},
|
||||
required: ["owner", "type", "name"],
|
||||
},
|
||||
};
|
||||
|
||||
// --- Releases ---
|
||||
const LIST_RELEASES_TOOL: Tool = {
|
||||
name: "gitea_list_releases",
|
||||
description: "List releases for a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
limit: { type: "number", description: "Number of releases to fetch (default: 10)" },
|
||||
},
|
||||
required: ["owner", "repo"],
|
||||
},
|
||||
};
|
||||
|
||||
const GET_LATEST_RELEASE_TOOL: Tool = {
|
||||
name: "gitea_get_latest_release",
|
||||
description: "Get the latest release for a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
},
|
||||
required: ["owner", "repo"],
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_RELEASE_TOOL: Tool = {
|
||||
name: "gitea_create_release",
|
||||
description: "Create a new release for a repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "Repository owner" },
|
||||
repo: { type: "string", description: "Repository name" },
|
||||
tag_name: { type: "string", description: "Git tag to build the release from (e.g., 'v1.2.3')" },
|
||||
name: { type: "string", description: "Release title" },
|
||||
body: { type: "string", description: "Optional: Release notes/description in Markdown" },
|
||||
draft: { type: "boolean", description: "Optional: Create as draft (default: false)" },
|
||||
prerelease: { type: "boolean", description: "Optional: Mark as prerelease (default: false)" },
|
||||
},
|
||||
required: ["owner", "repo", "tag_name", "name"],
|
||||
},
|
||||
};
|
||||
|
||||
// --- Projects ---
|
||||
const LIST_PROJECTS_TOOL: Tool = {
|
||||
name: "gitea_list_projects",
|
||||
description: "List projects (kanban boards) for a user, organization, or repository",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
owner: { type: "string", description: "User or organization name" },
|
||||
repo: { type: "string", description: "Optional: Repository name (for repo-level projects)" },
|
||||
type: { type: "string", description: "Optional: 'individual' or 'repository' or 'organization'" },
|
||||
},
|
||||
required: ["owner"],
|
||||
},
|
||||
};
|
||||
|
||||
const GET_PROJECT_COLUMNS_TOOL: Tool = {
|
||||
name: "gitea_get_project_columns",
|
||||
description: "Get the columns (board columns) of a specific project",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
project_id: { type: "number", description: "Numeric project ID from gitea_list_projects" },
|
||||
},
|
||||
required: ["project_id"],
|
||||
},
|
||||
};
|
||||
|
||||
// Subscription State
|
||||
const subscriptions = new Set<string>();
|
||||
const runStatusCache = new Map<string, string>(); // uri -> status
|
||||
@@ -76,18 +363,47 @@ const server = new Server(
|
||||
// --- Tools ---
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [LIST_PIPELINES_TOOL, GET_PIPELINE_LOGS_TOOL],
|
||||
tools: [
|
||||
LIST_PIPELINES_TOOL,
|
||||
GET_PIPELINE_LOGS_TOOL,
|
||||
WAIT_PIPELINE_COMPLETION_TOOL,
|
||||
LIST_ISSUES_TOOL,
|
||||
CREATE_ISSUE_TOOL,
|
||||
GET_FILE_CONTENT_TOOL,
|
||||
UPDATE_ISSUE_TOOL,
|
||||
CREATE_ISSUE_COMMENT_TOOL,
|
||||
CREATE_PULL_REQUEST_TOOL,
|
||||
SEARCH_REPOS_TOOL,
|
||||
// Wiki
|
||||
LIST_WIKI_PAGES_TOOL,
|
||||
GET_WIKI_PAGE_TOOL,
|
||||
CREATE_WIKI_PAGE_TOOL,
|
||||
EDIT_WIKI_PAGE_TOOL,
|
||||
// Packages
|
||||
LIST_PACKAGES_TOOL,
|
||||
LIST_PACKAGE_VERSIONS_TOOL,
|
||||
// Releases
|
||||
LIST_RELEASES_TOOL,
|
||||
GET_LATEST_RELEASE_TOOL,
|
||||
CREATE_RELEASE_TOOL,
|
||||
// Projects
|
||||
LIST_PROJECTS_TOOL,
|
||||
GET_PROJECT_COLUMNS_TOOL,
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "gitea_list_pipelines") {
|
||||
// ... (Keeping exact same implementation as before for brevity)
|
||||
const { owner, repo, limit = 5 } = request.params.arguments as any;
|
||||
const { owner, repo, limit = 5, branch, event } = request.params.arguments as any;
|
||||
|
||||
try {
|
||||
const apiParams: Record<string, any> = { limit };
|
||||
if (branch) apiParams.branch = branch;
|
||||
if (event) apiParams.event = event;
|
||||
|
||||
const runsResponse = await giteaClient.get(`/repos/${owner}/${repo}/actions/runs`, {
|
||||
params: { limit },
|
||||
params: apiParams,
|
||||
});
|
||||
|
||||
const runs = (runsResponse.data.workflow_runs || []) as any[];
|
||||
@@ -145,6 +461,267 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_wait_pipeline_completion") {
|
||||
const { owner, repo, run_id, timeout_minutes = 10 } = request.params.arguments as any;
|
||||
const startTime = Date.now();
|
||||
const timeoutMs = timeout_minutes * 60 * 1000;
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
if (Date.now() - startTime > timeoutMs) {
|
||||
return { content: [{ type: "text", text: `Wait timed out after ${timeout_minutes} minutes.` }] };
|
||||
}
|
||||
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/actions/runs/${run_id}`);
|
||||
const status = response.data.status;
|
||||
const conclusion = response.data.conclusion;
|
||||
|
||||
if (status !== "running" && status !== "waiting") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Pipeline finished! Final Status: ${status}, Conclusion: ${conclusion}`
|
||||
}]
|
||||
};
|
||||
}
|
||||
|
||||
// Wait 5 seconds before polling again
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
}
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error checking pipeline status: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_list_issues") {
|
||||
const { owner, repo, state = "open", limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/issues`, {
|
||||
params: { state, limit }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_create_issue") {
|
||||
const { owner, repo, title, body } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.post(`/repos/${owner}/${repo}/issues`, {
|
||||
title,
|
||||
body
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_get_file_content") {
|
||||
const { owner, repo, filepath, ref = "main" } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/contents/${filepath}`, {
|
||||
params: { ref }
|
||||
});
|
||||
// Gitea returns base64 encoded content for files
|
||||
if (response.data.type === 'file' && response.data.content) {
|
||||
const decodedContent = Buffer.from(response.data.content, 'base64').toString('utf-8');
|
||||
return { content: [{ type: "text", text: decodedContent }] };
|
||||
}
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_update_issue") {
|
||||
const { owner, repo, index, state, title, body } = request.params.arguments as any;
|
||||
try {
|
||||
const updateData: Record<string, any> = {};
|
||||
if (state) updateData.state = state;
|
||||
if (title) updateData.title = title;
|
||||
if (body) updateData.body = body;
|
||||
|
||||
// Send PATCH request to /repos/{owner}/{repo}/issues/{index}
|
||||
const response = await giteaClient.patch(`/repos/${owner}/${repo}/issues/${index}`, updateData);
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error updating issue: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_create_issue_comment") {
|
||||
const { owner, repo, index, body } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.post(`/repos/${owner}/${repo}/issues/${index}/comments`, {
|
||||
body
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error creating comment: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_create_pull_request") {
|
||||
const { owner, repo, head, base, title, body } = request.params.arguments as any;
|
||||
try {
|
||||
const prData: Record<string, any> = { head, base, title };
|
||||
if (body) prData.body = body;
|
||||
|
||||
const response = await giteaClient.post(`/repos/${owner}/${repo}/pulls`, prData);
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error creating Pull Request: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_search_repos") {
|
||||
const { query, limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/search`, {
|
||||
params: { q: query, limit }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
// --- Wiki Handlers ---
|
||||
if (request.params.name === "gitea_list_wiki_pages") {
|
||||
const { owner, repo } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/wiki/pages`);
|
||||
const pages = (response.data || []).map((p: any) => ({ title: p.title, last_commit: p.last_commit?.message }));
|
||||
return { content: [{ type: "text", text: JSON.stringify(pages, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error listing wiki pages: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_get_wiki_page") {
|
||||
const { owner, repo, page_name } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/wiki/page/${encodeURIComponent(page_name)}`);
|
||||
const content = Buffer.from(response.data.content_base64 || '', 'base64').toString('utf-8');
|
||||
return { content: [{ type: "text", text: `# ${response.data.title}\n\n${content}` }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error fetching wiki page: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_create_wiki_page") {
|
||||
const { owner, repo, title, content, message } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.post(`/repos/${owner}/${repo}/wiki/pages`, {
|
||||
title,
|
||||
content_base64: Buffer.from(content).toString('base64'),
|
||||
message: message || `Create wiki page: ${title}`,
|
||||
});
|
||||
return { content: [{ type: "text", text: `Wiki page '${response.data.title}' created.` }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error creating wiki page: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_edit_wiki_page") {
|
||||
const { owner, repo, page_name, title, content, message } = request.params.arguments as any;
|
||||
try {
|
||||
const updateData: Record<string, any> = {
|
||||
content_base64: Buffer.from(content).toString('base64'),
|
||||
message: message || `Update wiki page: ${page_name}`,
|
||||
};
|
||||
if (title) updateData.title = title;
|
||||
const response = await giteaClient.patch(`/repos/${owner}/${repo}/wiki/pages/${encodeURIComponent(page_name)}`, updateData);
|
||||
return { content: [{ type: "text", text: `Wiki page '${response.data.title}' updated.` }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error updating wiki page: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
// --- Package Handlers ---
|
||||
if (request.params.name === "gitea_list_packages") {
|
||||
const { owner, type, limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const params: Record<string, any> = { limit };
|
||||
if (type) params.type = type;
|
||||
const response = await giteaClient.get(`/packages/${owner}`, { params });
|
||||
const packages = (response.data || []).map((p: any) => ({
|
||||
name: p.name, type: p.type, version: p.version, created: p.created_at
|
||||
}));
|
||||
return { content: [{ type: "text", text: JSON.stringify(packages, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error listing packages: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_list_package_versions") {
|
||||
const { owner, type, name } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/packages/${owner}/${type}/${encodeURIComponent(name)}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error listing package versions: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
// --- Release Handlers ---
|
||||
if (request.params.name === "gitea_list_releases") {
|
||||
const { owner, repo, limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/releases`, { params: { limit } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error listing releases: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_get_latest_release") {
|
||||
const { owner, repo } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/repos/${owner}/${repo}/releases/latest`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error fetching latest release: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_create_release") {
|
||||
const { owner, repo, tag_name, name, body, draft = false, prerelease = false } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.post(`/repos/${owner}/${repo}/releases`, {
|
||||
tag_name, name, body, draft, prerelease
|
||||
});
|
||||
return { content: [{ type: "text", text: `Release '${response.data.name}' created: ${response.data.html_url}` }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error creating release: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
// --- Project Handlers ---
|
||||
if (request.params.name === "gitea_list_projects") {
|
||||
const { owner, repo } = request.params.arguments as any;
|
||||
try {
|
||||
// Gitea API: repo-level projects or user projects
|
||||
const url = repo ? `/repos/${owner}/${repo}/projects` : `/users/${owner}/projects`;
|
||||
const response = await giteaClient.get(url);
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error listing projects: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "gitea_get_project_columns") {
|
||||
const { project_id } = request.params.arguments as any;
|
||||
try {
|
||||
const response = await giteaClient.get(`/projects/${project_id}/columns`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
|
||||
} catch (error: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error fetching project columns: ${error.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
@@ -252,14 +829,42 @@ async function pollSubscriptions() {
|
||||
|
||||
|
||||
async function run() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("Gitea MCP Native Server running on stdio");
|
||||
const isStdio = process.argv.includes('--stdio');
|
||||
|
||||
// Start the background poller
|
||||
pollSubscriptions();
|
||||
if (isStdio) {
|
||||
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error('Gitea MCP server is running on stdio');
|
||||
} else {
|
||||
const app = express();
|
||||
let transport: SSEServerTransport | null = null;
|
||||
|
||||
app.get('/sse', async (req, res) => {
|
||||
console.error('New SSE connection established');
|
||||
transport = new SSEServerTransport('/message', res);
|
||||
await server.connect(transport);
|
||||
});
|
||||
|
||||
app.post('/message', async (req, res) => {
|
||||
if (!transport) {
|
||||
res.status(400).send('No active SSE connection');
|
||||
return;
|
||||
}
|
||||
await transport.handlePostMessage(req, res);
|
||||
});
|
||||
|
||||
const PORT = process.env.GITEA_MCP_PORT || 3001;
|
||||
app.listen(PORT, () => {
|
||||
console.error(`Gitea MCP server running on http://localhost:${PORT}/sse`);
|
||||
});
|
||||
|
||||
// Start the background poller only in SSE mode or if specifically desired
|
||||
pollSubscriptions();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
run().catch((error) => {
|
||||
console.error("Fatal error:", error);
|
||||
process.exit(1);
|
||||
|
||||
16
packages/gitea-mcp/src/start.ts
Normal file
16
packages/gitea-mcp/src/start.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
// Try to load .env.local first (contains credentials usually)
|
||||
config({ path: resolve(__dirname, '../../../.env.local') });
|
||||
// Fallback to .env (contains defaults)
|
||||
config({ path: resolve(__dirname, '../../../.env') });
|
||||
|
||||
// Now boot the compiled MCP index
|
||||
import('./index.js').catch(err => {
|
||||
console.error('Failed to start MCP Server:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
24
packages/glitchtip-mcp/package.json
Normal file
24
packages/glitchtip-mcp/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "@mintel/glitchtip-mcp",
|
||||
"version": "1.9.17",
|
||||
"description": "GlitchTip Error Tracking MCP server for Mintel infrastructure",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/start.js",
|
||||
"dev": "tsx watch src/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.5.0",
|
||||
"axios": "^1.7.2",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^20.14.10",
|
||||
"tsx": "^4.19.2",
|
||||
"typescript": "^5.5.3"
|
||||
}
|
||||
}
|
||||
171
packages/glitchtip-mcp/src/index.ts
Normal file
171
packages/glitchtip-mcp/src/index.ts
Normal file
@@ -0,0 +1,171 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
|
||||
import express from 'express';
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import axios from "axios";
|
||||
import https from "https";
|
||||
|
||||
const GLITCHTIP_BASE_URL = process.env.GLITCHTIP_BASE_URL || "https://glitchtip.infra.mintel.me";
|
||||
const GLITCHTIP_API_KEY = process.env.GLITCHTIP_API_KEY;
|
||||
|
||||
if (!GLITCHTIP_API_KEY) {
|
||||
console.error("Warning: GLITCHTIP_API_KEY is not set. API calls will fail.");
|
||||
}
|
||||
|
||||
const httpsAgent = new https.Agent({
|
||||
rejectUnauthorized: false, // For internal infra
|
||||
});
|
||||
|
||||
const glitchtipClient = axios.create({
|
||||
baseURL: `${GLITCHTIP_BASE_URL}/api/0`,
|
||||
headers: { Authorization: `Bearer ${GLITCHTIP_API_KEY}` },
|
||||
httpsAgent
|
||||
});
|
||||
|
||||
const LIST_PROJECTS_TOOL: Tool = {
|
||||
name: "glitchtip_list_projects",
|
||||
description: "List all projects and organizations in GlitchTip",
|
||||
inputSchema: { type: "object", properties: {} },
|
||||
};
|
||||
|
||||
const LIST_ISSUES_TOOL: Tool = {
|
||||
name: "glitchtip_list_issues",
|
||||
description: "List issues (errors) for a specific project",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
organization_slug: { type: "string", description: "The organization slug" },
|
||||
project_slug: { type: "string", description: "The project slug" },
|
||||
query: { type: "string", description: "Optional query filter (e.g., 'is:unresolved')" },
|
||||
limit: { type: "number", description: "Maximum number of issues to return (default: 20)" },
|
||||
},
|
||||
required: ["organization_slug", "project_slug"],
|
||||
},
|
||||
};
|
||||
|
||||
const GET_ISSUE_DETAILS_TOOL: Tool = {
|
||||
name: "glitchtip_get_issue_details",
|
||||
description: "Get detailed information about a specific issue, including stack trace",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
issue_id: { type: "string", description: "The ID of the issue" },
|
||||
},
|
||||
required: ["issue_id"],
|
||||
},
|
||||
};
|
||||
|
||||
const UPDATE_ISSUE_TOOL: Tool = {
|
||||
name: "glitchtip_update_issue",
|
||||
description: "Update the status of an issue (e.g., resolve it)",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
issue_id: { type: "string", description: "The ID of the issue" },
|
||||
status: { type: "string", enum: ["resolved", "unresolved", "ignored"], description: "The new status" },
|
||||
},
|
||||
required: ["issue_id", "status"],
|
||||
},
|
||||
};
|
||||
|
||||
const server = new Server(
|
||||
{ name: "glitchtip-mcp", version: "1.0.0" },
|
||||
{ capabilities: { tools: {} } }
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [
|
||||
LIST_PROJECTS_TOOL,
|
||||
LIST_ISSUES_TOOL,
|
||||
GET_ISSUE_DETAILS_TOOL,
|
||||
UPDATE_ISSUE_TOOL,
|
||||
],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "glitchtip_list_projects") {
|
||||
try {
|
||||
const res = await glitchtipClient.get('/projects/');
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "glitchtip_list_issues") {
|
||||
const { organization_slug, project_slug, query, limit = 20 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await glitchtipClient.get(`/projects/${organization_slug}/${project_slug}/issues/`, {
|
||||
params: { query, limit }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "glitchtip_get_issue_details") {
|
||||
const { issue_id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await glitchtipClient.get(`/issues/${issue_id}/`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "glitchtip_update_issue") {
|
||||
const { issue_id, status } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await glitchtipClient.put(`/issues/${issue_id}/`, { status });
|
||||
return { content: [{ type: "text", text: `Issue ${issue_id} status updated to ${status}.` }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
async function run() {
|
||||
const isStdio = process.argv.includes('--stdio');
|
||||
|
||||
if (isStdio) {
|
||||
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error('GlitchTip MCP server is running on stdio');
|
||||
} else {
|
||||
const app = express();
|
||||
let transport: SSEServerTransport | null = null;
|
||||
|
||||
app.get('/sse', async (req, res) => {
|
||||
console.error('New SSE connection established');
|
||||
transport = new SSEServerTransport('/message', res);
|
||||
await server.connect(transport);
|
||||
});
|
||||
|
||||
app.post('/message', async (req, res) => {
|
||||
if (!transport) {
|
||||
res.status(400).send('No active SSE connection');
|
||||
return;
|
||||
}
|
||||
await transport.handlePostMessage(req, res);
|
||||
});
|
||||
|
||||
const PORT = process.env.GLITCHTIP_MCP_PORT || 3005;
|
||||
app.listen(PORT, () => {
|
||||
console.error(`GlitchTip MCP server running on http://localhost:${PORT}/sse`);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
run().catch((err) => {
|
||||
console.error("Fatal error:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
13
packages/glitchtip-mcp/src/start.ts
Normal file
13
packages/glitchtip-mcp/src/start.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
config({ path: resolve(__dirname, '../../../.env.local') });
|
||||
config({ path: resolve(__dirname, '../../../.env') });
|
||||
|
||||
import('./index.js').catch(err => {
|
||||
console.error('Failed to start GlitchTip MCP Server:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
16
packages/glitchtip-mcp/tsconfig.json
Normal file
16
packages/glitchtip-mcp/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/husky-config",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# Start from the pre-built Nextjs Base image
|
||||
FROM registry.infra.mintel.me/mintel/nextjs:latest AS builder
|
||||
FROM git.infra.mintel.me/mmintel/nextjs:latest AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -20,7 +20,7 @@ ENV DIRECTUS_URL=$DIRECTUS_URL
|
||||
RUN pnpm --filter ${APP_NAME:-app} build
|
||||
|
||||
# Production runner image
|
||||
FROM registry.infra.mintel.me/mintel/runtime:latest AS runner
|
||||
FROM git.infra.mintel.me/mmintel/runtime:latest AS runner
|
||||
WORKDIR /app
|
||||
|
||||
# Copy standalone output and static files
|
||||
|
||||
@@ -38,7 +38,7 @@ services:
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-app}-auth.forwardauth.authResponseHeaders=X-Auth-User"
|
||||
|
||||
gatekeeper:
|
||||
image: registry.infra.mintel.me/mintel/gatekeeper:${IMAGE_TAG:-latest}
|
||||
image: git.infra.mintel.me/mmintel/gatekeeper:${IMAGE_TAG:-latest}
|
||||
restart: always
|
||||
networks:
|
||||
- infra
|
||||
@@ -53,7 +53,7 @@ services:
|
||||
- "traefik.http.services.${PROJECT_NAME}-gatekeeper.loadbalancer.server.port=3000"
|
||||
|
||||
directus:
|
||||
image: registry.infra.mintel.me/mintel/directus:${IMAGE_TAG:-latest}
|
||||
image: git.infra.mintel.me/mmintel/directus:${IMAGE_TAG:-latest}
|
||||
restart: always
|
||||
networks:
|
||||
- infra
|
||||
|
||||
@@ -177,12 +177,31 @@ jobs:
|
||||
- name: 🐳 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: 🔐 Registry Login
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: registry.infra.mintel.me
|
||||
username: ${{ secrets.REGISTRY_USER }}
|
||||
password: ${{ secrets.REGISTRY_PASS }}
|
||||
- name: 🔐 Discover Valid Registry Token
|
||||
id: discover_token
|
||||
run: |
|
||||
echo "Testing available secrets against git.infra.mintel.me Docker registry..."
|
||||
TOKENS="${{ secrets.GITEA_PAT }} ${{ secrets.MINTEL_PRIVATE_TOKEN }} ${{ secrets.NPM_TOKEN }}"
|
||||
USERS="${{ github.repository_owner }} ${{ github.actor }} marcmintel mintel mmintel"
|
||||
|
||||
for TOKEN in $TOKENS; do
|
||||
if [ -n "$TOKEN" ]; then
|
||||
for U in $USERS; do
|
||||
if [ -n "$U" ]; then
|
||||
echo "Attempting docker login for a token with user $U..."
|
||||
if echo "$TOKEN" | docker login git.infra.mintel.me -u "$U" --password-stdin > /dev/null 2>&1; then
|
||||
echo "✅ Successfully authenticated with a token."
|
||||
echo "::add-mask::$TOKEN"
|
||||
echo "token=$TOKEN" >> $GITHUB_OUTPUT
|
||||
echo "user=$U" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
done
|
||||
echo "❌ All available tokens failed to authenticate!"
|
||||
exit 1
|
||||
|
||||
- name: 🏗️ Docker Build & Push
|
||||
uses: docker/build-push-action@v5
|
||||
@@ -197,8 +216,8 @@ jobs:
|
||||
NEXT_PUBLIC_TARGET=${{ needs.prepare.outputs.target }}
|
||||
push: true
|
||||
secrets: |
|
||||
NPM_TOKEN=${{ secrets.NPM_TOKEN }}
|
||||
tags: registry.infra.mintel.me/mintel/${{ github.event.repository.name }}:${{ needs.prepare.outputs.image_tag }}
|
||||
NPM_TOKEN=${{ steps.discover_token.outputs.token }}
|
||||
tags: git.infra.mintel.me/mmintel/${{ github.event.repository.name }}:${{ needs.prepare.outputs.image_tag }}
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 4: Deploy
|
||||
@@ -262,7 +281,7 @@ jobs:
|
||||
set -e
|
||||
cd "/home/deploy/sites/${{ github.event.repository.name }}"
|
||||
chmod 600 "$ENV_FILE"
|
||||
echo "${{ secrets.REGISTRY_PASS }}" | docker login registry.infra.mintel.me -u "${{ secrets.REGISTRY_USER }}" --password-stdin
|
||||
echo "${{ steps.discover_token.outputs.token }}" | docker login git.infra.mintel.me -u "${{ steps.discover_token.outputs.user }}" --password-stdin
|
||||
docker compose -p "$PROJECT_NAME" --env-file "$ENV_FILE" pull
|
||||
docker compose -p "$PROJECT_NAME" --env-file "$ENV_FILE" up -d --remove-orphans
|
||||
docker system prune -f --filter "until=24h"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/infra",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -2,12 +2,24 @@
|
||||
set -e
|
||||
|
||||
# Configuration
|
||||
REGISTRY_DATA="/mnt/HC_Volume_104575103/registry-data/docker/registry/v2"
|
||||
REGISTRY_DATA="/mnt/HC_Volume_104796416/registry-data/docker/registry/v2"
|
||||
KEEP_TAGS=3
|
||||
|
||||
echo "🏥 Starting Aggressive Mintel Infrastructure Optimization..."
|
||||
|
||||
# 1. Prune Registry Tags (Filesystem level)
|
||||
# 1. Gitea Maintenance
|
||||
echo "🍵 Running Gitea Maintenance..."
|
||||
GITEA_CONTAINER=$(docker ps --format "{{.Names}}" | grep gitea | head -1 || true)
|
||||
if [ -n "$GITEA_CONTAINER" ]; then
|
||||
# Run common Gitea cleanup tasks
|
||||
docker exec -u git "$GITEA_CONTAINER" gitea admin cron run cleanup_old_repository_archives || true
|
||||
docker exec -u git "$GITEA_CONTAINER" gitea admin cron run cleanup_upload_directory || true
|
||||
docker exec -u git "$GITEA_CONTAINER" gitea admin cron run cleanup_packages || true
|
||||
docker exec -u git "$GITEA_CONTAINER" gitea admin cron run garbage_collect_attachment || true
|
||||
docker exec -u git "$GITEA_CONTAINER" gitea admin cron run garbage_collect_lfs || true
|
||||
fi
|
||||
|
||||
# 2. Prune Registry Tags (Filesystem level)
|
||||
if [ -d "$REGISTRY_DATA" ]; then
|
||||
echo "🔍 Processing Registry tags..."
|
||||
for repo_dir in "$REGISTRY_DATA/repositories/mintel/"*; do
|
||||
@@ -47,4 +59,4 @@ docker system prune -af --filter "until=24h"
|
||||
docker volume prune -f
|
||||
|
||||
echo "✅ Optimization complete!"
|
||||
df -h /mnt/HC_Volume_104575103
|
||||
df -h /mnt/HC_Volume_104796416
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/journaling",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
|
||||
25
packages/klz-payload-mcp/package.json
Normal file
25
packages/klz-payload-mcp/package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "@mintel/klz-payload-mcp",
|
||||
"version": "1.9.17",
|
||||
"description": "KLZ PayloadCMS MCP server for technical product data and leads",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/start.js",
|
||||
"dev": "tsx watch src/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||
"axios": "^1.7.2",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^20.14.10",
|
||||
"typescript": "^5.5.3",
|
||||
"tsx": "^4.19.2"
|
||||
}
|
||||
}
|
||||
617
packages/klz-payload-mcp/src/index.ts
Normal file
617
packages/klz-payload-mcp/src/index.ts
Normal file
@@ -0,0 +1,617 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
|
||||
import express, { Request, Response } from 'express';
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import axios from "axios";
|
||||
import https from "https";
|
||||
|
||||
const PAYLOAD_URL = process.env.PAYLOAD_URL || "https://klz-cables.com";
|
||||
const PAYLOAD_API_KEY = process.env.PAYLOAD_API_KEY;
|
||||
const PAYLOAD_EMAIL = process.env.PAYLOAD_EMAIL || "agent@mintel.me";
|
||||
const PAYLOAD_PASSWORD = process.env.PAYLOAD_PASSWORD || "agentpassword123";
|
||||
|
||||
const httpsAgent = new https.Agent({
|
||||
rejectUnauthorized: false, // For internal infra
|
||||
});
|
||||
|
||||
let jwtToken: string | null = null;
|
||||
|
||||
const payloadClient = axios.create({
|
||||
baseURL: `${PAYLOAD_URL}/api`,
|
||||
headers: PAYLOAD_API_KEY ? { Authorization: `users API-Key ${PAYLOAD_API_KEY}` } : {},
|
||||
httpsAgent
|
||||
});
|
||||
|
||||
payloadClient.interceptors.request.use(async (config) => {
|
||||
if (!PAYLOAD_API_KEY && !jwtToken && PAYLOAD_EMAIL && PAYLOAD_PASSWORD) {
|
||||
try {
|
||||
const loginRes = await axios.post(`${PAYLOAD_URL}/api/users/login`, {
|
||||
email: PAYLOAD_EMAIL,
|
||||
password: PAYLOAD_PASSWORD
|
||||
}, { httpsAgent });
|
||||
if (loginRes.data && loginRes.data.token) {
|
||||
jwtToken = loginRes.data.token;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Failed to authenticate with Payload CMS using email/password.");
|
||||
}
|
||||
}
|
||||
|
||||
if (jwtToken && !PAYLOAD_API_KEY) {
|
||||
config.headers.Authorization = `JWT ${jwtToken}`;
|
||||
}
|
||||
return config;
|
||||
});
|
||||
|
||||
payloadClient.interceptors.response.use(res => res, async (error) => {
|
||||
const originalRequest = error.config;
|
||||
// If token expired, clear it and retry
|
||||
if (error.response?.status === 401 && !originalRequest._retry && !PAYLOAD_API_KEY) {
|
||||
originalRequest._retry = true;
|
||||
jwtToken = null; // Forces re-authentication on next interceptor run
|
||||
return payloadClient(originalRequest);
|
||||
}
|
||||
return Promise.reject(error);
|
||||
});
|
||||
|
||||
|
||||
const SEARCH_PRODUCTS_TOOL: Tool = {
|
||||
name: "payload_search_products",
|
||||
description: "Search for technical product specifications (cables, cross-sections) in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: { type: "string", description: "Search query or part number" },
|
||||
limit: { type: "number", description: "Maximum number of results" },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const GET_PRODUCT_TOOL: Tool = {
|
||||
name: "payload_get_product",
|
||||
description: "Get a specific product by its slug or ID",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
slug: { type: "string", description: "Product slug" },
|
||||
id: { type: "string", description: "Product ID (if slug is not used)" }
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_PRODUCT_TOOL: Tool = {
|
||||
name: "payload_create_product",
|
||||
description: "Create a new product in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
title: { type: "string", description: "Product title" },
|
||||
slug: { type: "string", description: "Product slug" },
|
||||
data: { type: "object", description: "Additional product data (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["title"]
|
||||
},
|
||||
};
|
||||
|
||||
const UPDATE_PRODUCT_TOOL: Tool = {
|
||||
name: "payload_update_product",
|
||||
description: "Update an existing product in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Product ID to update" },
|
||||
data: { type: "object", description: "Product data to update (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["id", "data"]
|
||||
},
|
||||
};
|
||||
|
||||
const DELETE_PRODUCT_TOOL: Tool = {
|
||||
name: "payload_delete_product",
|
||||
description: "Delete a product from KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Product ID to delete" }
|
||||
},
|
||||
required: ["id"]
|
||||
},
|
||||
};
|
||||
|
||||
const LIST_LEADS_TOOL: Tool = {
|
||||
name: "payload_list_leads",
|
||||
description: "List recent lead inquiries and contact requests",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
limit: { type: "number", description: "Maximum number of leads" },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const GET_LEAD_TOOL: Tool = {
|
||||
name: "payload_get_lead",
|
||||
description: "Get a specific lead by ID",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Lead ID" }
|
||||
},
|
||||
required: ["id"]
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_LEAD_TOOL: Tool = {
|
||||
name: "payload_create_lead",
|
||||
description: "Create a new lead in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
email: { type: "string", description: "Lead email address" },
|
||||
data: { type: "object", description: "Additional lead data (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["email"]
|
||||
},
|
||||
};
|
||||
|
||||
const UPDATE_LEAD_TOOL: Tool = {
|
||||
name: "payload_update_lead",
|
||||
description: "Update an existing lead in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Lead ID to update" },
|
||||
data: { type: "object", description: "Lead data to update (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["id", "data"]
|
||||
},
|
||||
};
|
||||
|
||||
const DELETE_LEAD_TOOL: Tool = {
|
||||
name: "payload_delete_lead",
|
||||
description: "Delete a lead from KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Lead ID to delete" }
|
||||
},
|
||||
required: ["id"]
|
||||
},
|
||||
};
|
||||
|
||||
const LIST_PAGES_TOOL: Tool = {
|
||||
name: "payload_list_pages",
|
||||
description: "List pages from KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
limit: { type: "number", description: "Maximum number of pages" },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const GET_PAGE_TOOL: Tool = {
|
||||
name: "payload_get_page",
|
||||
description: "Get a specific page by its slug or ID",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
slug: { type: "string", description: "Page slug" },
|
||||
id: { type: "string", description: "Page ID (if slug is not used)" }
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const LIST_POSTS_TOOL: Tool = {
|
||||
name: "payload_list_posts",
|
||||
description: "List posts/articles from KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
limit: { type: "number", description: "Maximum number of posts" },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const GET_POST_TOOL: Tool = {
|
||||
name: "payload_get_post",
|
||||
description: "Get a specific post by its slug or ID",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
slug: { type: "string", description: "Post slug" },
|
||||
id: { type: "string", description: "Post ID (if slug is not used)" }
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_PAGE_TOOL: Tool = {
|
||||
name: "payload_create_page",
|
||||
description: "Create a new page in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
title: { type: "string", description: "Page title" },
|
||||
slug: { type: "string", description: "Page slug" },
|
||||
data: { type: "object", description: "Additional page data (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["title"]
|
||||
},
|
||||
};
|
||||
|
||||
const UPDATE_PAGE_TOOL: Tool = {
|
||||
name: "payload_update_page",
|
||||
description: "Update an existing page in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Page ID to update" },
|
||||
data: { type: "object", description: "Page data to update (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["id", "data"]
|
||||
},
|
||||
};
|
||||
|
||||
const DELETE_PAGE_TOOL: Tool = {
|
||||
name: "payload_delete_page",
|
||||
description: "Delete a page from KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Page ID to delete" }
|
||||
},
|
||||
required: ["id"]
|
||||
},
|
||||
};
|
||||
|
||||
const CREATE_POST_TOOL: Tool = {
|
||||
name: "payload_create_post",
|
||||
description: "Create a new post in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
title: { type: "string", description: "Post title" },
|
||||
slug: { type: "string", description: "Post slug" },
|
||||
data: { type: "object", description: "Additional post data (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["title"]
|
||||
},
|
||||
};
|
||||
|
||||
const UPDATE_POST_TOOL: Tool = {
|
||||
name: "payload_update_post",
|
||||
description: "Update an existing post in KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Post ID to update" },
|
||||
data: { type: "object", description: "Post data to update (JSON)", additionalProperties: true }
|
||||
},
|
||||
required: ["id", "data"]
|
||||
},
|
||||
};
|
||||
|
||||
const DELETE_POST_TOOL: Tool = {
|
||||
name: "payload_delete_post",
|
||||
description: "Delete a post from KLZ Payload CMS",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: "string", description: "Post ID to delete" }
|
||||
},
|
||||
required: ["id"]
|
||||
},
|
||||
};
|
||||
|
||||
const server = new Server(
|
||||
{ name: "klz-payload-mcp", version: "1.0.0" },
|
||||
{ capabilities: { tools: {} } }
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [
|
||||
SEARCH_PRODUCTS_TOOL,
|
||||
GET_PRODUCT_TOOL,
|
||||
CREATE_PRODUCT_TOOL,
|
||||
UPDATE_PRODUCT_TOOL,
|
||||
DELETE_PRODUCT_TOOL,
|
||||
LIST_LEADS_TOOL,
|
||||
GET_LEAD_TOOL,
|
||||
CREATE_LEAD_TOOL,
|
||||
UPDATE_LEAD_TOOL,
|
||||
DELETE_LEAD_TOOL,
|
||||
LIST_PAGES_TOOL,
|
||||
GET_PAGE_TOOL,
|
||||
CREATE_PAGE_TOOL,
|
||||
UPDATE_PAGE_TOOL,
|
||||
DELETE_PAGE_TOOL,
|
||||
LIST_POSTS_TOOL,
|
||||
GET_POST_TOOL,
|
||||
CREATE_POST_TOOL,
|
||||
UPDATE_POST_TOOL,
|
||||
DELETE_POST_TOOL
|
||||
],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "payload_search_products") {
|
||||
const { query, limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.get('/products', {
|
||||
params: {
|
||||
where: query ? {
|
||||
or: [
|
||||
{ title: { contains: query } },
|
||||
{ slug: { contains: query } },
|
||||
{ description: { contains: query } }
|
||||
]
|
||||
} : {},
|
||||
limit
|
||||
}
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_get_product") {
|
||||
const { slug, id } = request.params.arguments as any;
|
||||
try {
|
||||
if (id) {
|
||||
const res = await payloadClient.get(`/products/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} else if (slug) {
|
||||
const res = await payloadClient.get('/products', { params: { where: { slug: { equals: slug } }, limit: 1 } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs[0] || {}, null, 2) }] };
|
||||
}
|
||||
return { isError: true, content: [{ type: "text", text: "Error: must provide slug or id" }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_create_product") {
|
||||
const { title, slug, data = {} } = request.params.arguments as any;
|
||||
try {
|
||||
const payload = { title, slug, _status: 'draft', ...data };
|
||||
const res = await payloadClient.post('/products', payload);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_update_product") {
|
||||
const { id, data } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.patch(`/products/${id}`, data);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_delete_product") {
|
||||
const { id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.delete(`/products/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_list_leads") {
|
||||
const { limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.get('/leads', {
|
||||
params: { limit, sort: '-createdAt' }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_get_lead") {
|
||||
const { id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.get(`/leads/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_create_lead") {
|
||||
const { email, data = {} } = request.params.arguments as any;
|
||||
try {
|
||||
const payload = { email, ...data };
|
||||
const res = await payloadClient.post('/leads', payload);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_update_lead") {
|
||||
const { id, data } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.patch(`/leads/${id}`, data);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_delete_lead") {
|
||||
const { id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.delete(`/leads/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (request.params.name === "payload_list_pages") {
|
||||
const { limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.get('/pages', { params: { limit } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_get_page") {
|
||||
const { slug, id } = request.params.arguments as any;
|
||||
try {
|
||||
if (id) {
|
||||
const res = await payloadClient.get(`/pages/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} else if (slug) {
|
||||
const res = await payloadClient.get('/pages', { params: { where: { slug: { equals: slug } }, limit: 1 } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs[0] || {}, null, 2) }] };
|
||||
}
|
||||
return { isError: true, content: [{ type: "text", text: "Error: must provide slug or id" }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_create_page") {
|
||||
const { title, slug, data = {} } = request.params.arguments as any;
|
||||
try {
|
||||
const payload = { title, slug, _status: 'draft', ...data };
|
||||
const res = await payloadClient.post('/pages', payload);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_update_page") {
|
||||
const { id, data } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.patch(`/pages/${id}`, data);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_delete_page") {
|
||||
const { id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.delete(`/pages/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_list_posts") {
|
||||
const { limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.get('/posts', { params: { limit, sort: '-createdAt' } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_get_post") {
|
||||
const { slug, id } = request.params.arguments as any;
|
||||
try {
|
||||
if (id) {
|
||||
const res = await payloadClient.get(`/posts/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} else if (slug) {
|
||||
const res = await payloadClient.get('/posts', { params: { where: { slug: { equals: slug } }, limit: 1 } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data.docs[0] || {}, null, 2) }] };
|
||||
}
|
||||
return { isError: true, content: [{ type: "text", text: "Error: must provide slug or id" }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_create_post") {
|
||||
const { title, slug, data = {} } = request.params.arguments as any;
|
||||
try {
|
||||
const payload = { title, slug, _status: 'draft', ...data };
|
||||
const res = await payloadClient.post('/posts', payload);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_update_post") {
|
||||
const { id, data } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.patch(`/posts/${id}`, data);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "payload_delete_post") {
|
||||
const { id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await payloadClient.delete(`/posts/${id}`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
async function run() {
|
||||
const isStdio = process.argv.includes('--stdio');
|
||||
|
||||
if (isStdio) {
|
||||
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error('KLZ Payload MCP server is running on stdio');
|
||||
} else {
|
||||
const app = express();
|
||||
let transport: SSEServerTransport | null = null;
|
||||
|
||||
app.get('/sse', async (req: Request, res: Response) => {
|
||||
console.error('New SSE connection established');
|
||||
transport = new SSEServerTransport('/message', res);
|
||||
await server.connect(transport);
|
||||
});
|
||||
|
||||
app.post('/message', async (req: Request, res: Response) => {
|
||||
if (!transport) {
|
||||
res.status(400).send('No active SSE connection');
|
||||
return;
|
||||
}
|
||||
await transport.handlePostMessage(req, res);
|
||||
});
|
||||
|
||||
const PORT = process.env.KLZ_PAYLOAD_MCP_PORT || 3006;
|
||||
app.listen(PORT, () => {
|
||||
console.error(`KLZ Payload MCP server running on http://localhost:${PORT}/sse`);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
run().catch((err) => {
|
||||
console.error("Fatal error:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
13
packages/klz-payload-mcp/src/start.ts
Normal file
13
packages/klz-payload-mcp/src/start.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
config({ path: resolve(__dirname, '../../../.env.local') });
|
||||
config({ path: resolve(__dirname, '../../../.env') });
|
||||
|
||||
import('./index.js').catch(err => {
|
||||
console.error('Failed to start KLZ Payload MCP Server:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
16
packages/klz-payload-mcp/tsconfig.json
Normal file
16
packages/klz-payload-mcp/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/mail",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"private": false,
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/meme-generator",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
28
packages/memory-mcp/package.json
Normal file
28
packages/memory-mcp/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "@mintel/memory-mcp",
|
||||
"version": "1.9.17",
|
||||
"description": "Local Qdrant-based Memory MCP server",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/start.js",
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"test:unit": "vitest run"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.5.0",
|
||||
"@qdrant/js-client-rest": "^1.12.0",
|
||||
"@xenova/transformers": "^2.17.2",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^20.14.10",
|
||||
"tsx": "^4.19.1",
|
||||
"typescript": "^5.5.3",
|
||||
"vitest": "^2.1.3"
|
||||
}
|
||||
}
|
||||
112
packages/memory-mcp/src/index.ts
Normal file
112
packages/memory-mcp/src/index.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js';
|
||||
import express from 'express';
|
||||
import { z } from 'zod';
|
||||
import { QdrantMemoryService } from './qdrant.js';
|
||||
|
||||
async function main() {
|
||||
const server = new McpServer({
|
||||
name: '@mintel/memory-mcp',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const qdrantService = new QdrantMemoryService(process.env.QDRANT_URL || 'http://localhost:6333');
|
||||
|
||||
server.tool(
|
||||
'store_memory',
|
||||
'Store a new piece of knowledge/memory into the vector database. Use this to remember architectural decisions, preferences, aliases, etc.',
|
||||
{
|
||||
label: z.string().describe('A short, descriptive label or title for the memory (e.g., "Architektur-Entscheidungen")'),
|
||||
content: z.string().describe('The actual content to remember (e.g., "In diesem Projekt nutzen wir lieber Composition over Inheritance.")'),
|
||||
},
|
||||
async (args) => {
|
||||
const success = await qdrantService.storeMemory(args.label, args.content);
|
||||
if (success) {
|
||||
return {
|
||||
content: [{ type: 'text', text: `Successfully stored memory: [${args.label}]` }],
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
content: [{ type: 'text', text: `Failed to store memory: [${args.label}]` }],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
server.tool(
|
||||
'retrieve_memory',
|
||||
'Retrieve relevant memories from the vector database based on a semantic search query.',
|
||||
{
|
||||
query: z.string().describe('The search query to find relevant memories.'),
|
||||
limit: z.number().optional().describe('Maximum number of results to return (default: 5)'),
|
||||
},
|
||||
async (args) => {
|
||||
const results = await qdrantService.retrieveMemory(args.query, args.limit || 5);
|
||||
|
||||
if (results.length === 0) {
|
||||
return {
|
||||
content: [{ type: 'text', text: 'No relevant memories found.' }],
|
||||
};
|
||||
}
|
||||
|
||||
const formattedResults = results
|
||||
.map(r => `- [${r.label}] (Score: ${r.score.toFixed(3)}): ${r.content}`)
|
||||
.join('\n');
|
||||
|
||||
return {
|
||||
content: [{ type: 'text', text: `Found ${results.length} memories:\n\n${formattedResults}` }],
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
const isStdio = process.argv.includes('--stdio');
|
||||
|
||||
if (isStdio) {
|
||||
// Connect Stdio FIRST to avoid handshake timeouts while loading model
|
||||
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error('Memory MCP server is running on stdio');
|
||||
|
||||
// Initialize dependency after connection
|
||||
try {
|
||||
await qdrantService.initialize();
|
||||
} catch (e) {
|
||||
console.error('Failed to initialize local dependencies:', e);
|
||||
}
|
||||
} else {
|
||||
const app = express();
|
||||
let transport: SSEServerTransport | null = null;
|
||||
|
||||
app.get('/sse', async (req, res) => {
|
||||
console.error('New SSE connection established');
|
||||
transport = new SSEServerTransport('/message', res);
|
||||
await server.connect(transport);
|
||||
});
|
||||
|
||||
app.post('/message', async (req, res) => {
|
||||
if (!transport) {
|
||||
res.status(400).send('No active SSE connection');
|
||||
return;
|
||||
}
|
||||
await transport.handlePostMessage(req, res);
|
||||
});
|
||||
|
||||
const PORT = process.env.MEMORY_MCP_PORT || 3002;
|
||||
app.listen(PORT, async () => {
|
||||
console.error(`Memory MCP server running on http://localhost:${PORT}/sse`);
|
||||
// Initialize dependencies in SSE mode on startup
|
||||
try {
|
||||
await qdrantService.initialize();
|
||||
} catch (e) {
|
||||
console.error('Failed to initialize local dependencies:', e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error('Fatal error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
89
packages/memory-mcp/src/qdrant.test.ts
Normal file
89
packages/memory-mcp/src/qdrant.test.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QdrantMemoryService } from './qdrant.js';
|
||||
|
||||
vi.mock('@xenova/transformers', () => {
|
||||
return {
|
||||
env: { allowRemoteModels: false, localModelPath: './models' },
|
||||
pipeline: vi.fn().mockResolvedValue(async (text: string) => {
|
||||
// Mock embedding generation: returns an array of 384 numbers
|
||||
return { data: new Float32Array(384).fill(0.1) };
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
const mockCreateCollection = vi.fn();
|
||||
const mockGetCollections = vi.fn().mockResolvedValue({ collections: [] });
|
||||
const mockUpsert = vi.fn();
|
||||
const mockSearch = vi.fn().mockResolvedValue([
|
||||
{
|
||||
id: 'test-id',
|
||||
version: 1,
|
||||
score: 0.9,
|
||||
payload: { label: 'Test Label', content: 'Test Content' }
|
||||
}
|
||||
]);
|
||||
|
||||
vi.mock('@qdrant/js-client-rest', () => {
|
||||
return {
|
||||
QdrantClient: vi.fn().mockImplementation(() => {
|
||||
return {
|
||||
getCollections: mockGetCollections,
|
||||
createCollection: mockCreateCollection,
|
||||
upsert: mockUpsert,
|
||||
search: mockSearch
|
||||
};
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
describe('QdrantMemoryService', () => {
|
||||
let service: QdrantMemoryService;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
service = new QdrantMemoryService('http://localhost:6333');
|
||||
});
|
||||
|
||||
it('should initialize and create collection if missing', async () => {
|
||||
mockGetCollections.mockResolvedValueOnce({ collections: [] });
|
||||
await service.initialize();
|
||||
|
||||
expect(mockGetCollections).toHaveBeenCalled();
|
||||
expect(mockCreateCollection).toHaveBeenCalledWith('mcp_memory', expect.any(Object));
|
||||
});
|
||||
|
||||
it('should not create collection if it already exists', async () => {
|
||||
mockGetCollections.mockResolvedValueOnce({ collections: [{ name: 'mcp_memory' }] });
|
||||
await service.initialize();
|
||||
|
||||
expect(mockCreateCollection).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should store memory', async () => {
|
||||
await service.initialize();
|
||||
const result = await service.storeMemory('Design', 'Composition over Inheritance');
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockUpsert).toHaveBeenCalledWith('mcp_memory', expect.objectContaining({
|
||||
wait: true,
|
||||
points: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
payload: expect.objectContaining({
|
||||
label: 'Design',
|
||||
content: 'Composition over Inheritance'
|
||||
})
|
||||
})
|
||||
])
|
||||
}));
|
||||
});
|
||||
|
||||
it('should retrieve memory', async () => {
|
||||
await service.initialize();
|
||||
const results = await service.retrieveMemory('Design');
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0].label).toBe('Test Label');
|
||||
expect(results[0].content).toBe('Test Content');
|
||||
expect(results[0].score).toBe(0.9);
|
||||
});
|
||||
});
|
||||
110
packages/memory-mcp/src/qdrant.ts
Normal file
110
packages/memory-mcp/src/qdrant.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { pipeline, env } from '@xenova/transformers';
|
||||
import { QdrantClient } from '@qdrant/js-client-rest';
|
||||
|
||||
// Transformers cache config: remote model downloads are allowed on first
// use; downloaded weights are cached under ./models.
|
||||
env.allowRemoteModels = true;
|
||||
env.localModelPath = './models';
|
||||
|
||||
export class QdrantMemoryService {
|
||||
private client: QdrantClient;
|
||||
private collectionName = 'mcp_memory';
|
||||
private embedder: any = null;
|
||||
|
||||
constructor(url: string = 'http://localhost:6333') {
|
||||
this.client = new QdrantClient({ url });
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the embedding model and the Qdrant collection
|
||||
*/
|
||||
async initialize() {
|
||||
// 1. Load the embedding model (using a lightweight model suitable for semantic search)
|
||||
console.error('Loading embedding model...');
|
||||
this.embedder = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
|
||||
|
||||
// 2. Ensure collection exists
|
||||
console.error(`Checking for collection: ${this.collectionName}`);
|
||||
try {
|
||||
const collections = await this.client.getCollections();
|
||||
const exists = collections.collections.some(c => c.name === this.collectionName);
|
||||
|
||||
if (!exists) {
|
||||
console.error(`Creating collection: ${this.collectionName}`);
|
||||
await this.client.createCollection(this.collectionName, {
|
||||
vectors: {
|
||||
size: 384, // size for all-MiniLM-L6-v2
|
||||
distance: 'Cosine'
|
||||
}
|
||||
});
|
||||
console.error('Collection created successfully.');
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed to initialize Qdrant collection:', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a vector embedding for the given text
|
||||
*/
|
||||
private async getEmbedding(text: string): Promise<number[]> {
|
||||
if (!this.embedder) {
|
||||
throw new Error('Embedder not initialized. Call initialize() first.');
|
||||
}
|
||||
const output = await this.embedder(text, { pooling: 'mean', normalize: true });
|
||||
return Array.from(output.data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores a memory entry into Qdrant
|
||||
*/
|
||||
async storeMemory(label: string, content: string): Promise<boolean> {
|
||||
try {
|
||||
const fullText = `${label}: ${content}`;
|
||||
const vector = await this.getEmbedding(fullText);
|
||||
const id = crypto.randomUUID();
|
||||
|
||||
await this.client.upsert(this.collectionName, {
|
||||
wait: true,
|
||||
points: [
|
||||
{
|
||||
id,
|
||||
vector,
|
||||
payload: {
|
||||
label,
|
||||
content,
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
return true;
|
||||
} catch (e) {
|
||||
console.error('Failed to store memory:', e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves memory entries relevant to the query
|
||||
*/
|
||||
async retrieveMemory(query: string, limit: number = 5): Promise<Array<{ label: string, content: string, score: number }>> {
|
||||
try {
|
||||
const vector = await this.getEmbedding(query);
|
||||
const searchResults = await this.client.search(this.collectionName, {
|
||||
vector,
|
||||
limit,
|
||||
with_payload: true
|
||||
});
|
||||
|
||||
return searchResults.map(result => ({
|
||||
label: String(result.payload?.label || ''),
|
||||
content: String(result.payload?.content || ''),
|
||||
score: result.score
|
||||
}));
|
||||
} catch (e) {
|
||||
console.error('Failed to retrieve memory:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
16
packages/memory-mcp/src/start.ts
Normal file
16
packages/memory-mcp/src/start.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
// Try to load .env.local first (contains credentials usually)
|
||||
config({ path: resolve(__dirname, '../../../.env.local') });
|
||||
// Fallback to .env (contains defaults)
|
||||
config({ path: resolve(__dirname, '../../../.env') });
|
||||
|
||||
// Now boot the compiled MCP index
|
||||
import('./index.js').catch(err => {
|
||||
console.error('Failed to start MCP Server:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
16
packages/memory-mcp/tsconfig.json
Normal file
16
packages/memory-mcp/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-config",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-feedback",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-observability",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/next-utils",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/observability",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/page-audit",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"description": "AI-powered website IST-analysis using DataForSEO and Gemini",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
2
packages/payload-ai/.npmrc
Normal file
2
packages/payload-ai/.npmrc
Normal file
@@ -0,0 +1,2 @@
|
||||
@mintel:registry=https://git.infra.mintel.me/api/packages/mmintel/npm/
|
||||
//git.infra.mintel.me/api/packages/mmintel/npm/:_authToken=263e7f75d8ada27f3a2e71fd6bd9d95298d48a4d
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/payload-ai",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"description": "Reusable Payload CMS AI Extensions",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -16,7 +15,8 @@
|
||||
"./actions/*": "./dist/actions/*",
|
||||
"./globals/*": "./dist/globals/*",
|
||||
"./endpoints/*": "./dist/endpoints/*",
|
||||
"./utils/*": "./dist/utils/*"
|
||||
"./utils/*": "./dist/utils/*",
|
||||
"./tools/*": "./dist/tools/*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@payloadcms/next": ">=3.0.0",
|
||||
@@ -26,20 +26,26 @@
|
||||
"react-dom": ">=18.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai": "^3.0.39",
|
||||
"@ai-sdk/react": "^3.0.110",
|
||||
"@mintel/content-engine": "workspace:*",
|
||||
"@mintel/thumbnail-generator": "workspace:*",
|
||||
"replicate": "^1.4.0"
|
||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||
"@qdrant/js-client-rest": "^1.17.0",
|
||||
"ai": "^6.0.108",
|
||||
"replicate": "^1.4.0",
|
||||
"zod": "^3.25.76"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@payloadcms/next": "3.77.0",
|
||||
"@payloadcms/ui": "3.77.0",
|
||||
"payload": "3.77.0",
|
||||
"react": "^19.2.3",
|
||||
"react-dom": "^19.2.3",
|
||||
"@types/node": "^20.17.17",
|
||||
"@types/react": "^19.2.8",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"next": "^15.1.0",
|
||||
"payload": "3.77.0",
|
||||
"react": "^19.2.3",
|
||||
"react-dom": "^19.2.3",
|
||||
"typescript": "^5.7.3"
|
||||
}
|
||||
}
|
||||
|
||||
90
packages/payload-ai/src/chatPlugin.ts
Normal file
90
packages/payload-ai/src/chatPlugin.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { Config, Plugin } from 'payload'
|
||||
import { AIChatPermissionsCollection } from './collections/AIChatPermissions.js'
|
||||
import type { PayloadChatPluginConfig } from './types.js'
|
||||
import { optimizePostEndpoint } from './endpoints/optimizeEndpoint.js'
|
||||
import { generateSlugEndpoint, generateThumbnailEndpoint, generateSingleFieldEndpoint } from './endpoints/generateEndpoints.js'
|
||||
|
||||
export const payloadChatPlugin =
|
||||
(pluginOptions: PayloadChatPluginConfig): Plugin =>
|
||||
(incomingConfig) => {
|
||||
let config = { ...incomingConfig }
|
||||
|
||||
// If disabled, return config untouched
|
||||
if (pluginOptions.enabled === false) {
|
||||
return config
|
||||
}
|
||||
|
||||
// 1. Inject the Permissions Collection into the Schema
|
||||
const existingCollections = config.collections || []
|
||||
|
||||
const mcpServers = pluginOptions.mcpServers || []
|
||||
|
||||
// Dynamically populate the select options for Collections and MCP Servers
|
||||
const permissionCollection = { ...AIChatPermissionsCollection }
|
||||
const collectionField = permissionCollection.fields.find(f => 'name' in f && f.name === 'allowedCollections') as any
|
||||
if (collectionField) {
|
||||
collectionField.options = existingCollections.map(c => ({
|
||||
label: c.labels?.singular || c.slug,
|
||||
value: c.slug
|
||||
}))
|
||||
}
|
||||
|
||||
const mcpField = permissionCollection.fields.find(f => 'name' in f && f.name === 'allowedMcpServers') as any
|
||||
if (mcpField) {
|
||||
mcpField.options = mcpServers.map(s => ({
|
||||
label: s.name,
|
||||
value: s.name
|
||||
}))
|
||||
}
|
||||
|
||||
config.collections = [...existingCollections, permissionCollection]
|
||||
|
||||
// 2. Register Custom API Endpoint for the AI Chat
|
||||
config.endpoints = [
|
||||
...(config.endpoints || []),
|
||||
{
|
||||
path: '/api/mcp-chat',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
// Fallback simple handler while developing endpoint logic
|
||||
return Response.json({ message: "Chat endpoint active" })
|
||||
},
|
||||
},
|
||||
{
|
||||
path: '/api/mintel-ai/optimize',
|
||||
method: 'post',
|
||||
handler: optimizePostEndpoint,
|
||||
},
|
||||
{
|
||||
path: '/api/mintel-ai/generate-slug',
|
||||
method: 'post',
|
||||
handler: generateSlugEndpoint,
|
||||
},
|
||||
{
|
||||
path: '/api/mintel-ai/generate-thumbnail',
|
||||
method: 'post',
|
||||
handler: generateThumbnailEndpoint,
|
||||
},
|
||||
{
|
||||
path: '/api/mintel-ai/generate-single-field',
|
||||
method: 'post',
|
||||
handler: generateSingleFieldEndpoint,
|
||||
},
|
||||
]
|
||||
|
||||
// 3. Inject Chat React Component into Admin UI
|
||||
if (pluginOptions.renderChatBubble !== false) {
|
||||
config.admin = {
|
||||
...(config.admin || {}),
|
||||
components: {
|
||||
...(config.admin?.components || {}),
|
||||
providers: [
|
||||
...(config.admin?.components?.providers || []),
|
||||
'@mintel/payload-ai/components/ChatWindow#ChatWindowProvider',
|
||||
],
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
69
packages/payload-ai/src/collections/AIChatPermissions.ts
Normal file
69
packages/payload-ai/src/collections/AIChatPermissions.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
/**
|
||||
* A central collection to manage which AI Tools/MCPs a User or Role is allowed to use.
|
||||
*/
|
||||
export const AIChatPermissionsCollection: CollectionConfig = {
|
||||
slug: 'ai-chat-permissions',
|
||||
labels: {
|
||||
singular: 'AI Chat Permission',
|
||||
plural: 'AI Chat Permissions',
|
||||
},
|
||||
admin: {
|
||||
useAsTitle: 'description',
|
||||
group: 'AI & Tools',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'description',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'E.g. "Editors default AI permissions"',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'row',
|
||||
fields: [
|
||||
{
|
||||
name: 'targetUser',
|
||||
type: 'relationship',
|
||||
relationTo: 'users',
|
||||
hasMany: false,
|
||||
admin: {
|
||||
description: 'Apply these permissions to a specific user (optional).',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'targetRole',
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'Admin', value: 'admin' },
|
||||
{ label: 'Editor', value: 'editor' },
|
||||
], // Ideally this is dynamically populated in a real scenario, but we hardcode standard roles for now
|
||||
admin: {
|
||||
description: 'Apply these permissions to all users with this role.',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'allowedCollections',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: [], // Will be populated dynamically in the plugin init based on actual collections
|
||||
admin: {
|
||||
description: 'Which Payload collections is the AI allowed to read/write on behalf of this user?',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'allowedMcpServers',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: [], // Will be populated dynamically based on plugin config
|
||||
admin: {
|
||||
description: 'Which external MCP Servers is the AI allowed to execute tools from?',
|
||||
},
|
||||
}
|
||||
],
|
||||
}
|
||||
136
packages/payload-ai/src/components/ChatWindow/index.tsx
Normal file
136
packages/payload-ai/src/components/ChatWindow/index.tsx
Normal file
@@ -0,0 +1,136 @@
|
||||
'use client'
|
||||
|
||||
import React, { useState, useEffect } from 'react'
|
||||
import { useChat } from '@ai-sdk/react'
|
||||
import './ChatWindow.scss'
|
||||
|
||||
export const ChatWindowProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
|
||||
return (
|
||||
<>
|
||||
{children}
|
||||
<ChatWindow />
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
const ChatWindow: React.FC = () => {
|
||||
const [isOpen, setIsOpen] = useState(false)
|
||||
const [pageContext, setPageContext] = useState<any>({ url: '' })
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window !== 'undefined') {
|
||||
const path = window.location.pathname;
|
||||
let collectionSlug = null;
|
||||
let id = null;
|
||||
// Payload admin URLs are usually /admin/collections/:slug/:id
|
||||
const match = path.match(/\/collections\/([^/]+)(?:\/([^/]+))?/);
|
||||
if (match) {
|
||||
collectionSlug = match[1];
|
||||
if (match[2] && match[2] !== 'create') {
|
||||
id = match[2];
|
||||
}
|
||||
}
|
||||
|
||||
setPageContext({
|
||||
url: window.location.href,
|
||||
title: document.title,
|
||||
collectionSlug,
|
||||
id
|
||||
})
|
||||
}
|
||||
}, [isOpen]) // Refresh context when chat is opened
|
||||
|
||||
// @ts-ignore - AI hook version mismatch between core and react packages
|
||||
const { messages, input, handleInputChange, handleSubmit, setMessages } = useChat({
|
||||
api: '/api/mcp-chat',
|
||||
initialMessages: [],
|
||||
body: {
|
||||
pageContext
|
||||
}
|
||||
} as any)
|
||||
|
||||
// Basic implementation to toggle chat window and submit messages
|
||||
return (
|
||||
<div className="payload-mcp-chat-container">
|
||||
<button
|
||||
className="payload-mcp-chat-toggle"
|
||||
onClick={() => setIsOpen(!isOpen)}
|
||||
style={{
|
||||
position: 'fixed',
|
||||
bottom: '20px',
|
||||
right: '20px',
|
||||
zIndex: 9999,
|
||||
padding: '12px 24px',
|
||||
backgroundColor: '#000',
|
||||
color: '#fff',
|
||||
borderRadius: '8px',
|
||||
border: 'none',
|
||||
cursor: 'pointer',
|
||||
fontWeight: 'bold'
|
||||
}}
|
||||
>
|
||||
{isOpen ? 'Close AI Chat' : 'Ask AI'}
|
||||
</button>
|
||||
|
||||
{isOpen && (
|
||||
<div
|
||||
className="payload-mcp-chat-window"
|
||||
style={{
|
||||
position: 'fixed',
|
||||
bottom: '80px',
|
||||
right: '20px',
|
||||
width: '450px',
|
||||
height: '650px',
|
||||
backgroundColor: '#fff',
|
||||
border: '1px solid #eaeaea',
|
||||
borderRadius: '12px',
|
||||
zIndex: 9999,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
boxShadow: '0 10px 40px rgba(0,0,0,0.1)'
|
||||
}}
|
||||
>
|
||||
<div className="chat-header" style={{ padding: '16px', borderBottom: '1px solid #eaeaea', backgroundColor: '#f9f9f9', borderTopLeftRadius: '12px', borderTopRightRadius: '12px' }}>
|
||||
<h3 style={{ margin: 0, fontSize: '16px' }}>Payload MCP Chat</h3>
|
||||
</div>
|
||||
|
||||
<div className="chat-messages" style={{ flex: 1, padding: '16px', overflowY: 'auto' }}>
|
||||
{messages.map((m: any) => (
|
||||
<div key={m.id} style={{
|
||||
marginBottom: '12px',
|
||||
textAlign: m.role === 'user' ? 'right' : 'left'
|
||||
}}>
|
||||
<div style={{
|
||||
display: 'inline-block',
|
||||
padding: '8px 12px',
|
||||
borderRadius: '8px',
|
||||
backgroundColor: m.role === 'user' ? '#000' : '#f0f0f0',
|
||||
color: m.role === 'user' ? '#fff' : '#000',
|
||||
maxWidth: '80%'
|
||||
}}>
|
||||
{m.role === 'user' ? 'G: ' : 'AI: '}
|
||||
{m.content}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<form onSubmit={handleSubmit} style={{ padding: '16px', borderTop: '1px solid #eaeaea' }}>
|
||||
<input
|
||||
value={input}
|
||||
placeholder="Ask me anything or use /commands..."
|
||||
onChange={handleInputChange}
|
||||
style={{
|
||||
width: '100%',
|
||||
padding: '12px',
|
||||
borderRadius: '8px',
|
||||
border: '1px solid #eaeaea',
|
||||
boxSizing: 'border-box'
|
||||
}}
|
||||
/>
|
||||
</form>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
import React, { useState } from "react";
|
||||
import { useField, useDocumentInfo, useForm } from "@payloadcms/ui";
|
||||
import { generateSingleFieldAction } from "../../actions/generateField.js";
|
||||
|
||||
export function AiFieldButton({ path, field }: { path: string; field: any }) {
|
||||
const [isGenerating, setIsGenerating] = useState(false);
|
||||
const [instructions, setInstructions] = useState("");
|
||||
@@ -44,19 +42,26 @@ export function AiFieldButton({ path, field }: { path: string; field: any }) {
|
||||
? field.admin.description
|
||||
: "";
|
||||
|
||||
const res = await generateSingleFieldAction(
|
||||
(title as string) || "",
|
||||
draftContent,
|
||||
fieldName,
|
||||
fieldDescription,
|
||||
instructions,
|
||||
);
|
||||
const resData = await fetch("/api/api/mintel-ai/generate-single-field", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
documentTitle: (title as string) || "",
|
||||
documentContent: draftContent,
|
||||
fieldName,
|
||||
fieldDescription,
|
||||
instructions,
|
||||
}),
|
||||
});
|
||||
const res = await resData.json();
|
||||
|
||||
if (res.success && res.text) {
|
||||
setValue(res.text);
|
||||
} else {
|
||||
alert("Fehler: " + res.error);
|
||||
}
|
||||
} catch (e) {
|
||||
} catch (e: any) {
|
||||
console.error(e)
|
||||
alert("Fehler bei der Generierung.");
|
||||
} finally {
|
||||
setIsGenerating(false);
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { useForm, useField } from "@payloadcms/ui";
|
||||
import { generateSlugAction } from "../../actions/generateField.js";
|
||||
|
||||
export function GenerateSlugButton({ path }: { path: string }) {
|
||||
const [isGenerating, setIsGenerating] = useState(false);
|
||||
const [instructions, setInstructions] = useState("");
|
||||
@@ -45,18 +43,24 @@ export function GenerateSlugButton({ path }: { path: string }) {
|
||||
|
||||
setIsGenerating(true);
|
||||
try {
|
||||
const res = await generateSlugAction(
|
||||
title,
|
||||
draftContent,
|
||||
initialValue as string,
|
||||
instructions,
|
||||
);
|
||||
const resData = await fetch("/api/api/mintel-ai/generate-slug", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
title,
|
||||
draftContent,
|
||||
oldSlug: initialValue as string,
|
||||
instructions,
|
||||
}),
|
||||
});
|
||||
const res = await resData.json();
|
||||
|
||||
if (res.success && res.slug) {
|
||||
setValue(res.slug);
|
||||
} else {
|
||||
alert("Fehler: " + res.error);
|
||||
}
|
||||
} catch (e) {
|
||||
} catch (e: any) {
|
||||
console.error(e);
|
||||
alert("Unerwarteter Fehler.");
|
||||
} finally {
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { useForm, useField } from "@payloadcms/ui";
|
||||
import { generateThumbnailAction } from "../../actions/generateField.js";
|
||||
|
||||
export function GenerateThumbnailButton({ path }: { path: string }) {
|
||||
const [isGenerating, setIsGenerating] = useState(false);
|
||||
const [instructions, setInstructions] = useState("");
|
||||
@@ -45,17 +43,23 @@ export function GenerateThumbnailButton({ path }: { path: string }) {
|
||||
|
||||
setIsGenerating(true);
|
||||
try {
|
||||
const res = await generateThumbnailAction(
|
||||
draftContent,
|
||||
title,
|
||||
instructions,
|
||||
);
|
||||
const resData = await fetch("/api/api/mintel-ai/generate-thumbnail", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
draftContent,
|
||||
title,
|
||||
instructions,
|
||||
}),
|
||||
});
|
||||
const res = await resData.json();
|
||||
|
||||
if (res.success && res.mediaId) {
|
||||
setValue(res.mediaId);
|
||||
} else {
|
||||
alert("Fehler: " + res.error);
|
||||
}
|
||||
} catch (e) {
|
||||
} catch (e: any) {
|
||||
console.error(e);
|
||||
alert("Unerwarteter Fehler.");
|
||||
} finally {
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { useForm, useDocumentInfo } from "@payloadcms/ui";
|
||||
import { optimizePostText } from "../actions/optimizePost.js";
|
||||
import { Button } from "@payloadcms/ui";
|
||||
|
||||
export function OptimizeButton() {
|
||||
@@ -57,7 +56,12 @@ export function OptimizeButton() {
|
||||
// 2. We inject the title so the AI knows what it's writing about
|
||||
const payloadText = `---\ntitle: "${title}"\n---\n\n${draftContent}`;
|
||||
|
||||
const response = await optimizePostText(payloadText, instructions);
|
||||
const res = await fetch("/api/api/mintel-ai/optimize", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ draftContent: payloadText, instructions }),
|
||||
});
|
||||
const response = await res.json();
|
||||
|
||||
if (response.success && response.lexicalAST) {
|
||||
// 3. Inject the new Lexical AST directly into the field form state
|
||||
|
||||
115
packages/payload-ai/src/endpoints/chatEndpoint.ts
Normal file
115
packages/payload-ai/src/endpoints/chatEndpoint.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { streamText } from 'ai'
|
||||
import { createOpenAI } from '@ai-sdk/openai'
|
||||
import { generatePayloadLocalTools } from '../tools/payloadLocal.js'
|
||||
import { createMcpTools } from '../tools/mcpAdapter.js'
|
||||
import { generateMemoryTools } from '../tools/memoryDb.js'
|
||||
import type { PayloadRequest } from 'payload'
|
||||
|
||||
const openrouter = createOpenAI({
|
||||
baseURL: 'https://openrouter.ai/api/v1',
|
||||
apiKey: process.env.OPENROUTER_API_KEY || 'dummy_key',
|
||||
})
|
||||
|
||||
export const handleMcpChat = async (req: PayloadRequest) => {
|
||||
if (!req.user) {
|
||||
return Response.json({ error: 'Unauthorized. You must be logged in to use AI Chat.' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { messages, pageContext } = (await req.json?.() || { messages: [] }) as { messages: any[], pageContext?: any }
|
||||
|
||||
// 1. Check AI Permissions for req.user
|
||||
// Look up the collection for permissions
|
||||
const permissionsQuery = await req.payload.find({
|
||||
collection: 'ai-chat-permissions' as any,
|
||||
where: {
|
||||
or: [
|
||||
{ targetUser: { equals: req.user.id } },
|
||||
{ targetRole: { equals: req.user.role || 'admin' } }
|
||||
]
|
||||
},
|
||||
limit: 10
|
||||
})
|
||||
|
||||
const allowedCollections = new Set<string>()
|
||||
const allowedMcpServers = new Set<string>()
|
||||
|
||||
for (const perm of permissionsQuery.docs) {
|
||||
if (perm.allowedCollections) {
|
||||
perm.allowedCollections.forEach((c: string) => allowedCollections.add(c))
|
||||
}
|
||||
if (perm.allowedMcpServers) {
|
||||
perm.allowedMcpServers.forEach((s: string) => allowedMcpServers.add(s))
|
||||
}
|
||||
}
|
||||
|
||||
let accessCollections = Array.from(allowedCollections)
|
||||
if (accessCollections.length === 0) {
|
||||
// Fallback or demo config if not configured yet
|
||||
accessCollections = ['users', 'pages', 'posts', 'products', 'leads', 'media']
|
||||
}
|
||||
|
||||
let activeTools: Record<string, any> = {}
|
||||
|
||||
// 2. Generate Payload Local Tools
|
||||
if (accessCollections.length > 0) {
|
||||
const payloadTools = generatePayloadLocalTools(req.payload, req, accessCollections)
|
||||
activeTools = { ...activeTools, ...payloadTools }
|
||||
}
|
||||
|
||||
// 3. Connect External MCPs
|
||||
if (Array.from(allowedMcpServers).includes('gitea')) {
|
||||
try {
|
||||
const { tools: giteaTools } = await createMcpTools({
|
||||
name: 'gitea',
|
||||
command: 'npx',
|
||||
args: ['-y', '@modelcontextprotocol/server-gitea', '--url', 'https://git.mintel.int', '--token', process.env.GITEA_TOKEN || '']
|
||||
})
|
||||
activeTools = { ...activeTools, ...giteaTools }
|
||||
} catch (e) {
|
||||
console.error('Failed to connect to Gitea MCP', e)
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Inject Memory Database Tools
|
||||
// We provide the user ID so memory is partitioned per user
|
||||
const memoryTools = generateMemoryTools(req.user.id)
|
||||
activeTools = { ...activeTools, ...memoryTools }
|
||||
|
||||
// 5. Build prompt to ensure it asks before saving
|
||||
const memorySystemPrompt = `
|
||||
You have access to a long-term vector memory database (Qdrant).
|
||||
If the user says "speicher das", "merk dir das", "vergiss das nicht" etc., you MUST use the save_memory tool.
|
||||
If the user shares important context but doesn't explicitly ask you to remember it, you should ask "Soll ich mir das für die Zukunft merken?" before saving it. Do not ask for trivial things.
|
||||
`
|
||||
|
||||
const contextContextStr = pageContext ? `
|
||||
Current User Context:
|
||||
URL: ${pageContext.url || 'Unknown'}
|
||||
Title: ${pageContext.title || 'Unknown'}
|
||||
Collection: ${pageContext.collectionSlug || 'None'}
|
||||
Document ID: ${pageContext.id || 'None'}
|
||||
You can use this to understand what the user is currently looking at.
|
||||
` : ''
|
||||
|
||||
try {
|
||||
const result = streamText({
|
||||
// @ts-ignore - AI SDK type mismatch
|
||||
model: openrouter('google/gemini-3.0-flash'),
|
||||
messages,
|
||||
tools: activeTools,
|
||||
// @ts-ignore - AI SDK type mismatch with maxSteps
|
||||
maxSteps: 10,
|
||||
system: `You are a helpful Payload CMS Agent orchestrating the local Mintel ecosystem.
|
||||
You only have access to tools explicitly granted by the Admin.
|
||||
You can completely control Payload CMS (read, create, update, delete documents).
|
||||
If you need more details to fulfill a request (e.g. creating a blog post), you can ask the user.
|
||||
${contextContextStr}
|
||||
${memorySystemPrompt}`
|
||||
})
|
||||
|
||||
return result.toTextStreamResponse()
|
||||
} catch (error) {
|
||||
console.error("AI Error:", error)
|
||||
return Response.json({ error: 'Failed to process AI request' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,4 @@
|
||||
"use server";
|
||||
|
||||
import { getPayloadHMR } from "@payloadcms/next/utilities";
|
||||
import configPromise from "@payload-config";
|
||||
import { PayloadRequest } from "payload";
|
||||
import * as fs from "node:fs/promises";
|
||||
import * as path from "node:path";
|
||||
import * as os from "node:os";
|
||||
@@ -29,13 +26,9 @@ async function getOrchestrator() {
|
||||
});
|
||||
}
|
||||
|
||||
export async function generateSlugAction(
|
||||
title: string,
|
||||
draftContent: string,
|
||||
oldSlug?: string,
|
||||
instructions?: string,
|
||||
) {
|
||||
export const generateSlugEndpoint = async (req: PayloadRequest) => {
|
||||
try {
|
||||
const { title, draftContent, oldSlug, instructions } = (await req.json?.() || {}) as any;
|
||||
const orchestrator = await getOrchestrator();
|
||||
const newSlug = await orchestrator.generateSlug(
|
||||
draftContent,
|
||||
@@ -44,9 +37,8 @@ export async function generateSlugAction(
|
||||
);
|
||||
|
||||
if (oldSlug && oldSlug !== newSlug) {
|
||||
const payload = await getPayloadHMR({ config: configPromise as any });
|
||||
await payload.create({
|
||||
collection: "redirects",
|
||||
await req.payload.create({
|
||||
collection: "redirects" as any,
|
||||
data: {
|
||||
from: oldSlug,
|
||||
to: newSlug,
|
||||
@@ -54,42 +46,25 @@ export async function generateSlugAction(
|
||||
});
|
||||
}
|
||||
|
||||
return { success: true, slug: newSlug };
|
||||
return Response.json({ success: true, slug: newSlug });
|
||||
} catch (e: any) {
|
||||
return { success: false, error: e.message };
|
||||
return Response.json({ success: false, error: e.message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
export async function generateThumbnailAction(
|
||||
draftContent: string,
|
||||
title?: string,
|
||||
instructions?: string,
|
||||
) {
|
||||
export const generateThumbnailEndpoint = async (req: PayloadRequest) => {
|
||||
try {
|
||||
const payload = await getPayloadHMR({ config: configPromise as any });
|
||||
const { draftContent, title, instructions } = (await req.json?.() || {}) as any;
|
||||
const OPENROUTER_KEY =
|
||||
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
|
||||
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
|
||||
|
||||
if (!OPENROUTER_KEY) {
|
||||
throw new Error("Missing OPENROUTER_API_KEY in .env");
|
||||
}
|
||||
if (!REPLICATE_KEY) {
|
||||
throw new Error(
|
||||
"Missing REPLICATE_API_KEY in .env (Required for Thumbnails)",
|
||||
);
|
||||
}
|
||||
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY in .env");
|
||||
if (!REPLICATE_KEY) throw new Error("Missing REPLICATE_API_KEY in .env");
|
||||
|
||||
const importDynamic = new Function(
|
||||
"modulePath",
|
||||
"return import(modulePath)",
|
||||
);
|
||||
const { AiBlogPostOrchestrator } = await importDynamic(
|
||||
"@mintel/content-engine",
|
||||
);
|
||||
const { ThumbnailGenerator } = await importDynamic(
|
||||
"@mintel/thumbnail-generator",
|
||||
);
|
||||
const importDynamic = new Function("modulePath", "return import(modulePath)");
|
||||
const { AiBlogPostOrchestrator } = await importDynamic("@mintel/content-engine");
|
||||
const { ThumbnailGenerator } = await importDynamic("@mintel/thumbnail-generator");
|
||||
|
||||
const orchestrator = new AiBlogPostOrchestrator({
|
||||
apiKey: OPENROUTER_KEY,
|
||||
@@ -111,8 +86,8 @@ export async function generateThumbnailAction(
|
||||
const stat = await fs.stat(tmpPath);
|
||||
const fileName = path.basename(tmpPath);
|
||||
|
||||
const newMedia = await payload.create({
|
||||
collection: "media",
|
||||
const newMedia = await req.payload.create({
|
||||
collection: "media" as any,
|
||||
data: {
|
||||
alt: title ? `Thumbnail for ${title}` : "AI Generated Thumbnail",
|
||||
},
|
||||
@@ -124,31 +99,24 @@ export async function generateThumbnailAction(
|
||||
},
|
||||
});
|
||||
|
||||
// Cleanup temp file
|
||||
await fs.unlink(tmpPath).catch(() => { });
|
||||
|
||||
return { success: true, mediaId: newMedia.id };
|
||||
return Response.json({ success: true, mediaId: newMedia.id });
|
||||
} catch (e: any) {
|
||||
return { success: false, error: e.message };
|
||||
return Response.json({ success: false, error: e.message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
export async function generateSingleFieldAction(
|
||||
documentTitle: string,
|
||||
documentContent: string,
|
||||
fieldName: string,
|
||||
fieldDescription: string,
|
||||
instructions?: string,
|
||||
) {
|
||||
|
||||
export const generateSingleFieldEndpoint = async (req: PayloadRequest) => {
|
||||
try {
|
||||
const { documentTitle, documentContent, fieldName, fieldDescription, instructions } = (await req.json?.() || {}) as any;
|
||||
|
||||
const OPENROUTER_KEY =
|
||||
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
|
||||
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY");
|
||||
|
||||
const payload = await getPayloadHMR({ config: configPromise as any });
|
||||
|
||||
// Fetch context documents from DB
|
||||
const contextDocsData = await payload.find({
|
||||
collection: "context-files",
|
||||
const contextDocsData = await req.payload.find({
|
||||
collection: "context-files" as any,
|
||||
limit: 100,
|
||||
});
|
||||
const projectContext = contextDocsData.docs
|
||||
@@ -183,8 +151,8 @@ CRITICAL RULES:
|
||||
});
|
||||
const data = await res.json();
|
||||
const text = data.choices?.[0]?.message?.content?.trim() || "";
|
||||
return { success: true, text };
|
||||
return Response.json({ success: true, text });
|
||||
} catch (e: any) {
|
||||
return { success: false, error: e.message };
|
||||
return Response.json({ success: false, error: e.message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
@@ -1,16 +1,15 @@
|
||||
"use server";
|
||||
import { PayloadRequest } from 'payload'
|
||||
import { parseMarkdownToLexical } from "../utils/lexicalParser.js";
|
||||
|
||||
import { parseMarkdownToLexical } from "../utils/lexicalParser";
|
||||
import { getPayloadHMR } from "@payloadcms/next/utilities";
|
||||
import configPromise from "@payload-config";
|
||||
|
||||
export async function optimizePostText(
|
||||
draftContent: string,
|
||||
instructions?: string,
|
||||
) {
|
||||
export const optimizePostEndpoint = async (req: PayloadRequest) => {
|
||||
try {
|
||||
const payload = await getPayloadHMR({ config: configPromise as any });
|
||||
const globalAiSettings = (await payload.findGlobal({ slug: "ai-settings" })) as any;
|
||||
const { draftContent, instructions } = (await req.json?.() || {}) as { draftContent: string; instructions?: string };
|
||||
|
||||
if (!draftContent) {
|
||||
return Response.json({ error: 'Missing draftContent' }, { status: 400 })
|
||||
}
|
||||
|
||||
const globalAiSettings = (await req.payload.findGlobal({ slug: "ai-settings" })) as any;
|
||||
const customSources =
|
||||
globalAiSettings?.customSources?.map((s: any) => s.sourceName) || [];
|
||||
|
||||
@@ -19,18 +18,12 @@ export async function optimizePostText(
|
||||
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
|
||||
|
||||
if (!OPENROUTER_KEY) {
|
||||
throw new Error(
|
||||
"OPENROUTER_KEY or OPENROUTER_API_KEY not found in environment.",
|
||||
);
|
||||
return Response.json({ error: "OPENROUTER_KEY not found in environment." }, { status: 500 })
|
||||
}
|
||||
|
||||
const importDynamic = new Function(
|
||||
"modulePath",
|
||||
"return import(modulePath)",
|
||||
);
|
||||
const { AiBlogPostOrchestrator } = await importDynamic(
|
||||
"@mintel/content-engine",
|
||||
);
|
||||
// Dynamically import to avoid bundling it into client components that might accidentally import this file
|
||||
const importDynamic = new Function("modulePath", "return import(modulePath)");
|
||||
const { AiBlogPostOrchestrator } = await importDynamic("@mintel/content-engine");
|
||||
|
||||
const orchestrator = new AiBlogPostOrchestrator({
|
||||
apiKey: OPENROUTER_KEY,
|
||||
@@ -38,9 +31,8 @@ export async function optimizePostText(
|
||||
model: "google/gemini-3-flash-preview",
|
||||
});
|
||||
|
||||
// Fetch context documents purely from DB
|
||||
const contextDocsData = await payload.find({
|
||||
collection: "context-files",
|
||||
const contextDocsData = await req.payload.find({
|
||||
collection: "context-files" as any,
|
||||
limit: 100,
|
||||
});
|
||||
const projectContext = contextDocsData.docs.map((doc: any) => doc.content);
|
||||
@@ -48,19 +40,19 @@ export async function optimizePostText(
|
||||
const optimizedMarkdown = await orchestrator.optimizeDocument({
|
||||
content: draftContent,
|
||||
projectContext,
|
||||
availableComponents: [], // Removed hardcoded config.components dependency
|
||||
availableComponents: [],
|
||||
instructions,
|
||||
internalLinks: [],
|
||||
customSources,
|
||||
});
|
||||
|
||||
if (!optimizedMarkdown || typeof optimizedMarkdown !== "string") {
|
||||
throw new Error("AI returned invalid markup.");
|
||||
return Response.json({ error: "AI returned invalid markup." }, { status: 500 })
|
||||
}
|
||||
|
||||
const blocks = parseMarkdownToLexical(optimizedMarkdown);
|
||||
|
||||
return {
|
||||
return Response.json({
|
||||
success: true,
|
||||
lexicalAST: {
|
||||
root: {
|
||||
@@ -72,12 +64,12 @@ export async function optimizePostText(
|
||||
direction: "ltr",
|
||||
},
|
||||
},
|
||||
};
|
||||
})
|
||||
} catch (error: any) {
|
||||
console.error("Failed to optimize post:", error);
|
||||
return {
|
||||
console.error("Failed to optimize post in endpoint:", error);
|
||||
return Response.json({
|
||||
success: false,
|
||||
error: error.message || "An unknown error occurred during optimization.",
|
||||
};
|
||||
}, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -3,13 +3,17 @@
|
||||
* Primary entry point for reusing Mintel AI extensions in Payload CMS.
|
||||
*/
|
||||
|
||||
export * from './globals/AiSettings';
|
||||
export * from './actions/generateField';
|
||||
export * from './actions/optimizePost';
|
||||
export * from './components/FieldGenerators/AiFieldButton';
|
||||
export * from './components/AiMediaButtons';
|
||||
export * from './components/OptimizeButton';
|
||||
export * from './components/FieldGenerators/GenerateThumbnailButton';
|
||||
export * from './components/FieldGenerators/GenerateSlugButton';
|
||||
export * from './utils/lexicalParser';
|
||||
export * from './endpoints/replicateMediaEndpoint';
|
||||
export * from './globals/AiSettings.js';
|
||||
export * from './components/FieldGenerators/AiFieldButton.js';
|
||||
export * from './components/AiMediaButtons.js';
|
||||
export * from './components/OptimizeButton.js';
|
||||
export * from './components/FieldGenerators/GenerateThumbnailButton.js';
|
||||
export * from './components/FieldGenerators/GenerateSlugButton.js';
|
||||
export * from './utils/lexicalParser.js';
|
||||
export * from './endpoints/replicateMediaEndpoint.js';
|
||||
export * from './chatPlugin.js';
|
||||
export * from './types.js';
|
||||
export * from './endpoints/chatEndpoint.js';
|
||||
export * from './tools/mcpAdapter.js';
|
||||
export * from './tools/memoryDb.js';
|
||||
export * from './tools/payloadLocal.js';
|
||||
|
||||
65
packages/payload-ai/src/tools/mcpAdapter.ts
Normal file
65
packages/payload-ai/src/tools/mcpAdapter.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
|
||||
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js'
|
||||
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js'
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
|
||||
/**
|
||||
* Connects to an external MCP Server and maps its tools to Vercel AI SDK Tools.
|
||||
*/
|
||||
export async function createMcpTools(mcpConfig: { name: string, url?: string, command?: string, args?: string[] }) {
|
||||
let transport
|
||||
|
||||
// Support both HTTP/SSE and STDIO transports
|
||||
if (mcpConfig.url) {
|
||||
transport = new SSEClientTransport(new URL(mcpConfig.url))
|
||||
} else if (mcpConfig.command) {
|
||||
transport = new StdioClientTransport({
|
||||
command: mcpConfig.command,
|
||||
args: mcpConfig.args || [],
|
||||
})
|
||||
} else {
|
||||
throw new Error('Invalid MCP config: Must provide either URL or Command.')
|
||||
}
|
||||
|
||||
const client = new Client(
|
||||
{ name: `payload-ai-client-${mcpConfig.name}`, version: '1.0.0' },
|
||||
{ capabilities: {} }
|
||||
)
|
||||
|
||||
await client.connect(transport)
|
||||
|
||||
// Fetch available tools from the external MCP server
|
||||
const toolListResult = await client.listTools()
|
||||
const externalTools = toolListResult.tools || []
|
||||
|
||||
const aiSdkTools: Record<string, any> = {}
|
||||
|
||||
// Map each external tool to a Vercel AI SDK Tool
|
||||
for (const extTool of externalTools) {
|
||||
// Basic conversion of JSON Schema to Zod for the AI SDK
|
||||
// Note: For a production ready adapter, you might need a more robust jsonSchemaToZod converter
|
||||
// or use AI SDK's new experimental generateSchema feature if available.
|
||||
// Here we use a generic `z.any()` as a fallback since AI SDK requires a Zod schema.
|
||||
const toolSchema = extTool.inputSchema as Record<string, any>
|
||||
|
||||
// We create a simplified parameter parser.
|
||||
// An ideal approach uses `jsonSchemaToZod` library or native AI SDK JSON schema support
|
||||
// (introduced recently in `ai` package).
|
||||
|
||||
aiSdkTools[`${mcpConfig.name}_${extTool.name}`] = tool({
|
||||
description: `[From ${mcpConfig.name}] ${extTool.description || extTool.name}`,
|
||||
parameters: z.any().describe('JSON matching the original MCP input_schema'), // Simplify for prototype
|
||||
// @ts-ignore - AI strict mode overload bug with implicit zod inferences
|
||||
execute: async (args: any) => {
|
||||
const result = await client.callTool({
|
||||
name: extTool.name,
|
||||
arguments: args
|
||||
})
|
||||
return result
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return { tools: aiSdkTools, client }
|
||||
}
|
||||
115
packages/payload-ai/src/tools/memoryDb.ts
Normal file
115
packages/payload-ai/src/tools/memoryDb.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { QdrantClient } from '@qdrant/js-client-rest'
|
||||
|
||||
// Qdrant initialization
|
||||
// This requires the user to have Qdrant running and QDRANT_URL/QDRANT_API_KEY environment variables set
|
||||
const qdrantClient = new QdrantClient({
|
||||
url: process.env.QDRANT_URL || 'http://localhost:6333',
|
||||
apiKey: process.env.QDRANT_API_KEY,
|
||||
})
|
||||
|
||||
const MEMORY_COLLECTION = 'mintel_ai_memory'
|
||||
|
||||
// Ensure collection exists on load
|
||||
async function initQdrant() {
|
||||
try {
|
||||
const res = await qdrantClient.getCollections()
|
||||
const exists = res.collections.find((c: any) => c.name === MEMORY_COLLECTION)
|
||||
if (!exists) {
|
||||
await qdrantClient.createCollection(MEMORY_COLLECTION, {
|
||||
vectors: {
|
||||
size: 1536, // typical embedding size, adjust based on the embedding model used
|
||||
distance: 'Cosine',
|
||||
},
|
||||
})
|
||||
console.log(`Qdrant collection '${MEMORY_COLLECTION}' created.`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize Qdrant memory collection:', error)
|
||||
}
|
||||
}
|
||||
|
||||
// Call init, but don't block
|
||||
initQdrant()
|
||||
|
||||
/**
|
||||
* Returns memory tools for the AI SDK.
|
||||
* Note: A real implementation would require an embedding step before inserting into Qdrant.
|
||||
* For this implementation, we use a placeholder or assume the embeddings are handled
|
||||
* by a utility function, or we use Qdrant's FastEmbed (if running their specialized container).
|
||||
*/
|
||||
export const generateMemoryTools = (userId: string | number) => {
|
||||
return {
|
||||
save_memory: tool({
|
||||
description: 'Save an important preference, fact, or instruction about the user to long-term memory. Only use this when explicitly asked or when it is clearly a long-term preference.',
|
||||
parameters: z.object({
|
||||
fact: z.string().describe('The fact or instruction to remember.'),
|
||||
category: z.string().optional().describe('An optional category like "preference", "rule", or "project_detail".'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode bug
|
||||
execute: async ({ fact, category }: { fact: string; category?: string }) => {
|
||||
// In a real scenario, you MUST generate embeddings for the 'fact' string here
|
||||
// using OpenAI or another embedding provider before inserting into Qdrant.
|
||||
// const embedding = await generateEmbedding(fact)
|
||||
|
||||
try {
|
||||
// Mock embedding payload for demonstration
|
||||
const mockEmbedding = new Array(1536).fill(0).map(() => Math.random())
|
||||
|
||||
await qdrantClient.upsert(MEMORY_COLLECTION, {
|
||||
wait: true,
|
||||
points: [
|
||||
{
|
||||
id: crypto.randomUUID(),
|
||||
vector: mockEmbedding,
|
||||
payload: {
|
||||
userId: String(userId), // Partition memory by user
|
||||
fact,
|
||||
category,
|
||||
createdAt: new Date().toISOString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
return { success: true, message: `Successfully remembered: "${fact}"` }
|
||||
} catch (error) {
|
||||
console.error("Qdrant save error:", error)
|
||||
return { success: false, error: 'Failed to save to memory database.' }
|
||||
}
|
||||
},
|
||||
}),
|
||||
|
||||
search_memory: tool({
|
||||
description: 'Search the user\'s long-term memory for past factual context, preferences, or rules.',
|
||||
parameters: z.object({
|
||||
query: z.string().describe('The search string to find in memory.'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode bug
|
||||
execute: async ({ query }: { query: string }) => {
|
||||
// Generate embedding for query
|
||||
const mockQueryEmbedding = new Array(1536).fill(0).map(() => Math.random())
|
||||
|
||||
try {
|
||||
const results = await qdrantClient.search(MEMORY_COLLECTION, {
|
||||
vector: mockQueryEmbedding,
|
||||
limit: 5,
|
||||
filter: {
|
||||
must: [
|
||||
{
|
||||
key: 'userId',
|
||||
match: { value: String(userId) }
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
return results.map((r: any) => r.payload?.fact || '')
|
||||
} catch (error) {
|
||||
console.error("Qdrant search error:", error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
107
packages/payload-ai/src/tools/payloadLocal.ts
Normal file
107
packages/payload-ai/src/tools/payloadLocal.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import type { Payload, PayloadRequest, User } from 'payload'
|
||||
|
||||
export const generatePayloadLocalTools = (
|
||||
payload: Payload,
|
||||
req: PayloadRequest,
|
||||
allowedCollections: string[]
|
||||
) => {
|
||||
const tools: Record<string, any> = {}
|
||||
|
||||
for (const collectionSlug of allowedCollections) {
|
||||
const slugKey = collectionSlug.replace(/-/g, '_')
|
||||
|
||||
// 1. Read (Find) Tool
|
||||
tools[`read_${slugKey}`] = tool({
|
||||
description: `Read/Find documents from the Payload CMS collection: ${collectionSlug}`,
|
||||
parameters: z.object({
|
||||
limit: z.number().optional().describe('Number of documents to return, max 100.'),
|
||||
page: z.number().optional().describe('Page number for pagination.'),
|
||||
// Simple string-based query for demo purposes. For a robust implementation,
|
||||
// we'd map this to Payload's where query logic using a structured Zod schema.
|
||||
query: z.string().optional().describe('Optional text to search within the collection.'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode type inference bug
|
||||
execute: async ({ limit = 10, page = 1, query }: { limit?: number; page?: number; query?: string }) => {
|
||||
const where = query ? { id: { equals: query } } : undefined // Placeholder logic
|
||||
|
||||
return await payload.find({
|
||||
collection: collectionSlug as any,
|
||||
limit: Math.min(limit, 100),
|
||||
page,
|
||||
where,
|
||||
req, // Crucial for passing the user context and respecting access control!
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
// 2. Read by ID Tool
|
||||
tools[`read_${slugKey}_by_id`] = tool({
|
||||
description: `Get a specific document by its ID from the ${collectionSlug} collection.`,
|
||||
parameters: z.object({
|
||||
id: z.union([z.string(), z.number()]).describe('The ID of the document.'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode type inference bug
|
||||
execute: async ({ id }: { id: string | number }) => {
|
||||
return await payload.findByID({
|
||||
collection: collectionSlug as any,
|
||||
id,
|
||||
req, // Enforce access control
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
// 3. Create Tool
|
||||
tools[`create_${slugKey}`] = tool({
|
||||
description: `Create a new document in the ${collectionSlug} collection.`,
|
||||
parameters: z.object({
|
||||
data: z.record(z.any()).describe('A JSON object containing the data to insert.'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode type inference bug
|
||||
execute: async ({ data }: { data: Record<string, any> }) => {
|
||||
return await payload.create({
|
||||
collection: collectionSlug as any,
|
||||
data,
|
||||
req, // Enforce access control
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
// 4. Update Tool
|
||||
tools[`update_${slugKey}`] = tool({
|
||||
description: `Update an existing document in the ${collectionSlug} collection.`,
|
||||
parameters: z.object({
|
||||
id: z.union([z.string(), z.number()]).describe('The ID of the document to update.'),
|
||||
data: z.record(z.any()).describe('A JSON object containing the fields to update.'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode type inference bug
|
||||
execute: async ({ id, data }: { id: string | number; data: Record<string, any> }) => {
|
||||
return await payload.update({
|
||||
collection: collectionSlug as any,
|
||||
id,
|
||||
data,
|
||||
req, // Enforce access control
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
// 5. Delete Tool
|
||||
tools[`delete_${slugKey}`] = tool({
|
||||
description: `Delete a document from the ${collectionSlug} collection by ID.`,
|
||||
parameters: z.object({
|
||||
id: z.union([z.string(), z.number()]).describe('The ID of the document to delete.'),
|
||||
}),
|
||||
// @ts-ignore - AI SDK strict mode type inference bug
|
||||
execute: async ({ id }: { id: string | number }) => {
|
||||
return await payload.delete({
|
||||
collection: collectionSlug as any,
|
||||
id,
|
||||
req, // Enforce access control
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return tools
|
||||
}
|
||||
11
packages/payload-ai/src/types.d.ts
vendored
11
packages/payload-ai/src/types.d.ts
vendored
@@ -1,5 +1,8 @@
|
||||
declare module "@payload-config" {
|
||||
import { Config } from "payload";
|
||||
const configPromise: Promise<Config>;
|
||||
export default configPromise;
|
||||
export type PayloadChatPluginConfig = {
|
||||
enabled?: boolean
|
||||
/** Render the chat bubble on the bottom right? Defaults to true */
|
||||
renderChatBubble?: boolean
|
||||
allowedCollections?: string[]
|
||||
mcpServers?: any[]
|
||||
}
|
||||
|
||||
|
||||
18
packages/payload-ai/src/types.ts
Normal file
18
packages/payload-ai/src/types.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import type { Plugin } from 'payload'
|
||||
|
||||
/** Configuration accepted by the Payload AI chat plugin. */
export interface PayloadChatPluginConfig {
  // Master switch for the plugin. NOTE(review): the default when omitted is
  // not visible here — confirm in the plugin factory.
  enabled?: boolean
  /**
   * Defines whether to render the floating chat bubble in the admin panel automatically.
   * Defaults to true.
   */
  renderChatBubble?: boolean
  /**
   * Used to register external MCP servers that the AI can explicitly connect to if the admin permits it.
   */
  mcpServers?: {
    // Label for the server; the MCP adapter prefixes tool names with it.
    name: string
    // SSE endpoint of the MCP server.
    url?: string
    // Command based STDIO later via configuration
  }[]
}
|
||||
@@ -12,15 +12,24 @@
|
||||
"jsx": "react-jsx",
|
||||
"outDir": "dist",
|
||||
"rootDir": "src",
|
||||
"baseUrl": ".",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"declaration": true,
|
||||
"sourceMap": true
|
||||
"sourceMap": true,
|
||||
"paths": {
|
||||
"@payload-config": [
|
||||
"../../apps/mintel.me/payload.config.ts",
|
||||
"../../apps/web/payload.config.ts",
|
||||
"./node_modules/@payloadcms/next/dist/index.js"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
"src/**/*",
|
||||
"src/types.d.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/pdf",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.js",
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/seo-engine",
|
||||
"version": "1.9.6",
|
||||
"private": true,
|
||||
"version": "1.9.17",
|
||||
"description": "AI-powered SEO keyword and topic cluster evaluation engine",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
25
packages/serpbear-mcp/package.json
Normal file
25
packages/serpbear-mcp/package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "@mintel/serpbear-mcp",
|
||||
"version": "1.9.17",
|
||||
"description": "SerpBear SEO Tracking MCP server for Mintel infrastructure",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/start.js",
|
||||
"dev": "tsx watch src/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.5.0",
|
||||
"axios": "^1.7.2",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^20.14.10",
|
||||
"typescript": "^5.5.3",
|
||||
"tsx": "^4.19.2"
|
||||
}
|
||||
}
|
||||
243
packages/serpbear-mcp/src/index.ts
Normal file
243
packages/serpbear-mcp/src/index.ts
Normal file
@@ -0,0 +1,243 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
|
||||
import express from 'express';
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import axios from "axios";
|
||||
import https from "https";
|
||||
|
||||
// SerpBear connection settings; both may be overridden via environment.
const SERPBEAR_BASE_URL = process.env.SERPBEAR_BASE_URL || "https://serpbear.infra.mintel.me";
const SERPBEAR_API_KEY = process.env.SERPBEAR_API_KEY;

// Warn loudly at startup (but keep running) when the API key is missing.
if (!SERPBEAR_API_KEY) {
  console.error("Warning: SERPBEAR_API_KEY is not set. API calls will fail.");
}

// Shared axios client; SerpBear expects the key in an `apiKey` header.
const serpbearClient = axios.create({
  baseURL: `${SERPBEAR_BASE_URL}/api`,
  headers: { apiKey: SERPBEAR_API_KEY },
  // SECURITY NOTE(review): TLS certificate verification is disabled —
  // presumably for a self-signed internal certificate. This permits MITM;
  // prefer trusting the internal CA (e.g. NODE_EXTRA_CA_CERTS) instead.
  httpsAgent: new https.Agent({
    rejectUnauthorized: false,
  }),
});
|
||||
|
||||
|
||||
// --- Tool Definitions ---
// Static MCP tool descriptors; `inputSchema` is plain JSON Schema as
// required by the MCP protocol.

// Enumerates tracked domains; no arguments.
const LIST_DOMAINS_TOOL: Tool = {
  name: "serpbear_list_domains",
  description: "List all domains/projects tracked in SerpBear",
  inputSchema: { type: "object", properties: {} },
};

// Lists a domain's keywords with their current SERP positions.
const GET_KEYWORDS_TOOL: Tool = {
  name: "serpbear_get_keywords",
  description: "Get all tracked keywords for a domain, with their current ranking positions",
  inputSchema: {
    type: "object",
    properties: {
      domain_id: { type: "string", description: "Domain ID from serpbear_list_domains" },
    },
    required: ["domain_id"],
  },
};

// Registers new keywords for tracking; country/device fall back to
// 'de'/'desktop' in the call handler.
const ADD_KEYWORDS_TOOL: Tool = {
  name: "serpbear_add_keywords",
  description: "Add new keywords to track for a domain",
  inputSchema: {
    type: "object",
    properties: {
      domain_id: { type: "string", description: "Domain ID" },
      keywords: {
        type: "array",
        items: { type: "string" },
        description: "List of keywords to add (e.g., ['Webentwickler Frankfurt', 'Next.js Agentur'])"
      },
      country: { type: "string", description: "Country code for SERP tracking (e.g., 'de', 'us'). Default: 'de'" },
      device: { type: "string", description: "Device type: 'desktop' or 'mobile'. Default: 'desktop'" },
    },
    required: ["domain_id", "keywords"],
  },
};

// Removes keywords from tracking by numeric ID.
const DELETE_KEYWORDS_TOOL: Tool = {
  name: "serpbear_delete_keywords",
  description: "Remove keywords from tracking",
  inputSchema: {
    type: "object",
    properties: {
      keyword_ids: {
        type: "array",
        items: { type: "number" },
        description: "Array of keyword IDs to delete"
      },
    },
    required: ["keyword_ids"],
  },
};

// Requests an immediate position re-check instead of waiting for the cron.
const REFRESH_KEYWORDS_TOOL: Tool = {
  name: "serpbear_refresh_keywords",
  description: "Trigger an immediate SERP position refresh for specific keywords",
  inputSchema: {
    type: "object",
    properties: {
      keyword_ids: {
        type: "array",
        items: { type: "number" },
        description: "List of keyword IDs to refresh"
      },
    },
    required: ["keyword_ids"],
  },
};

// Returns the historical ranking series for one keyword.
const GET_KEYWORD_HISTORY_TOOL: Tool = {
  name: "serpbear_get_keyword_history",
  description: "Get the ranking history for a specific keyword over time",
  inputSchema: {
    type: "object",
    properties: {
      keyword_id: { type: "number", description: "Keyword ID from serpbear_get_keywords" },
    },
    required: ["keyword_id"],
  },
};
||||
|
||||
// --- Server Setup ---
|
||||
const server = new Server(
|
||||
{ name: "serpbear-mcp", version: "1.0.0" },
|
||||
{ capabilities: { tools: {} } }
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [
|
||||
LIST_DOMAINS_TOOL,
|
||||
GET_KEYWORDS_TOOL,
|
||||
ADD_KEYWORDS_TOOL,
|
||||
DELETE_KEYWORDS_TOOL,
|
||||
REFRESH_KEYWORDS_TOOL,
|
||||
GET_KEYWORD_HISTORY_TOOL,
|
||||
],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "serpbear_list_domains") {
|
||||
try {
|
||||
const res = await serpbearClient.get('/domains');
|
||||
const domains = (res.data.domains || []).map((d: any) => ({
|
||||
id: d.id, domain: d.domain, keywords: d.keywordCount
|
||||
}));
|
||||
return { content: [{ type: "text", text: JSON.stringify(domains, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "serpbear_get_keywords") {
|
||||
const { domain_id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await serpbearClient.get('/keywords', { params: { domain: domain_id } });
|
||||
const keywords = (res.data.keywords || []).map((k: any) => ({
|
||||
id: k.id,
|
||||
keyword: k.keyword,
|
||||
position: k.position,
|
||||
lastUpdated: k.lastUpdated,
|
||||
country: k.country,
|
||||
device: k.device,
|
||||
change: k.position_change ?? null,
|
||||
}));
|
||||
return { content: [{ type: "text", text: JSON.stringify(keywords, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "serpbear_add_keywords") {
|
||||
const { domain_id, keywords, country = 'de', device = 'desktop' } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await serpbearClient.post('/keywords', {
|
||||
domain: domain_id,
|
||||
keywords: keywords.map((kw: string) => ({ keyword: kw, country, device })),
|
||||
});
|
||||
return { content: [{ type: "text", text: `Added ${keywords.length} keywords. Result: ${JSON.stringify(res.data)}` }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "serpbear_delete_keywords") {
|
||||
const { keyword_ids } = request.params.arguments as any;
|
||||
try {
|
||||
await serpbearClient.delete('/keywords', { data: { ids: keyword_ids } });
|
||||
return { content: [{ type: "text", text: `Deleted ${keyword_ids.length} keywords.` }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "serpbear_refresh_keywords") {
|
||||
const { keyword_ids } = request.params.arguments as any;
|
||||
try {
|
||||
await serpbearClient.post('/keywords/refresh', { ids: keyword_ids });
|
||||
return { content: [{ type: "text", text: `Triggered refresh for ${keyword_ids.length} keywords.` }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "serpbear_get_keyword_history") {
|
||||
const { keyword_id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await serpbearClient.get(`/keywords/${keyword_id}/history`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
// --- Express / SSE Server ---
|
||||
async function run() {
|
||||
const isStdio = process.argv.includes('--stdio');
|
||||
|
||||
if (isStdio) {
|
||||
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error('SerpBear MCP server is running on stdio');
|
||||
} else {
|
||||
const app = express();
|
||||
let transport: SSEServerTransport | null = null;
|
||||
|
||||
app.get('/sse', async (req, res) => {
|
||||
console.error('New SSE connection established');
|
||||
transport = new SSEServerTransport('/message', res);
|
||||
await server.connect(transport);
|
||||
});
|
||||
|
||||
app.post('/message', async (req, res) => {
|
||||
if (!transport) {
|
||||
res.status(400).send('No active SSE connection');
|
||||
return;
|
||||
}
|
||||
await transport.handlePostMessage(req, res);
|
||||
});
|
||||
|
||||
const PORT = process.env.SERPBEAR_MCP_PORT || 3004;
|
||||
app.listen(PORT, () => {
|
||||
console.error(`SerpBear MCP server running on http://localhost:${PORT}/sse`);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
run().catch((err) => {
|
||||
console.error("Fatal error:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
13
packages/serpbear-mcp/src/start.ts
Normal file
13
packages/serpbear-mcp/src/start.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
config({ path: resolve(__dirname, '../../../.env.local') });
|
||||
config({ path: resolve(__dirname, '../../../.env') });
|
||||
|
||||
import('./index.js').catch(err => {
|
||||
console.error('Failed to start SerpBear MCP Server:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
16
packages/serpbear-mcp/tsconfig.json
Normal file
16
packages/serpbear-mcp/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/thumbnail-generator",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mintel/tsconfig",
|
||||
"version": "1.9.6",
|
||||
"version": "1.9.17",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://git.infra.mintel.me/api/packages/mmintel/npm"
|
||||
|
||||
25
packages/umami-mcp/package.json
Normal file
25
packages/umami-mcp/package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "@mintel/umami-mcp",
|
||||
"version": "1.9.17",
|
||||
"description": "Umami Analytics MCP server for Mintel infrastructure",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/start.js",
|
||||
"dev": "tsx watch src/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.5.0",
|
||||
"axios": "^1.7.2",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^20.14.10",
|
||||
"typescript": "^5.5.3",
|
||||
"tsx": "^4.19.2"
|
||||
}
|
||||
}
|
||||
280
packages/umami-mcp/src/index.ts
Normal file
280
packages/umami-mcp/src/index.ts
Normal file
@@ -0,0 +1,280 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
|
||||
import express from 'express';
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import axios from "axios";
|
||||
import https from "https";
|
||||
|
||||
// Umami connection settings; auth is either username/password login
// or a pre-issued API key.
const UMAMI_BASE_URL = process.env.UMAMI_BASE_URL || "https://umami.infra.mintel.me";
const UMAMI_USERNAME = process.env.UMAMI_USERNAME;
const UMAMI_PASSWORD = process.env.UMAMI_PASSWORD;
const UMAMI_API_KEY = process.env.UMAMI_API_KEY; // optional if using API key auth

// SECURITY NOTE(review): TLS certificate verification is disabled —
// presumably for a self-signed internal certificate. This permits MITM;
// prefer trusting the internal CA (e.g. NODE_EXTRA_CA_CERTS) instead.
const httpsAgent = new https.Agent({
  rejectUnauthorized: false,
});

// Warn at startup when no credential source is configured at all.
if (!UMAMI_USERNAME && !UMAMI_API_KEY) {
  console.error("Warning: Neither UMAMI_USERNAME/PASSWORD nor UMAMI_API_KEY is set.");
}
|
||||
|
||||
// Token cache to avoid logging in on every request
|
||||
let cachedToken: string | null = null;
|
||||
|
||||
async function getAuthHeaders(): Promise<Record<string, string>> {
|
||||
if (UMAMI_API_KEY) {
|
||||
return { 'x-umami-api-key': UMAMI_API_KEY };
|
||||
}
|
||||
if (!cachedToken) {
|
||||
const res = await axios.post(`${UMAMI_BASE_URL}/api/auth/login`, {
|
||||
username: UMAMI_USERNAME,
|
||||
password: UMAMI_PASSWORD,
|
||||
}, { httpsAgent });
|
||||
cachedToken = res.data.token;
|
||||
}
|
||||
return { Authorization: `Bearer ${cachedToken}` };
|
||||
}
|
||||
|
||||
|
||||
// --- Tool Definitions ---
// Static MCP tool descriptors; `inputSchema` is plain JSON Schema as
// required by the MCP protocol. All timestamps are Unix milliseconds.

// Enumerates tracked websites; no arguments.
const LIST_WEBSITES_TOOL: Tool = {
  name: "umami_list_websites",
  description: "List all websites tracked in Umami",
  inputSchema: { type: "object", properties: {} },
};

// Aggregate stats (visits, pageviews, etc.) for a time range.
const GET_WEBSITE_STATS_TOOL: Tool = {
  name: "umami_get_website_stats",
  description: "Get summary statistics for a website for a time range",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms (e.g., Date.now() - 7 days)" },
      end_at: { type: "number", description: "End timestamp in ms (default: now)" },
    },
    required: ["website_id", "start_at"],
  },
};

// Pageview/session time series, bucketed by `unit`.
const GET_PAGE_VIEWS_TOOL: Tool = {
  name: "umami_get_pageviews",
  description: "Get pageview/session time series for a website",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms (default: now)" },
      unit: { type: "string", description: "Time unit: 'hour', 'day', 'month' (default: day)" },
      timezone: { type: "string", description: "Timezone (default: Europe/Berlin)" },
    },
    required: ["website_id", "start_at"],
  },
};

// Most-visited URLs ranked for the range.
const GET_TOP_PAGES_TOOL: Tool = {
  name: "umami_get_top_pages",
  description: "Get the most visited pages/URLs for a website",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms" },
      limit: { type: "number", description: "Number of results (default: 20)" },
    },
    required: ["website_id", "start_at"],
  },
};

// Top traffic sources for the range.
const GET_TOP_REFERRERS_TOOL: Tool = {
  name: "umami_get_top_referrers",
  description: "Get the top traffic referrers for a website",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms" },
      limit: { type: "number", description: "Number of results (default: 10)" },
    },
    required: ["website_id", "start_at"],
  },
};

// Visitor counts grouped by country.
const GET_COUNTRY_STATS_TOOL: Tool = {
  name: "umami_get_country_stats",
  description: "Get visitor breakdown by country",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms" },
    },
    required: ["website_id", "start_at"],
  },
};

// Live visitor count (rolling 5-minute window).
const GET_ACTIVE_VISITORS_TOOL: Tool = {
  name: "umami_get_active_visitors",
  description: "Get the number of visitors currently active on a website (last 5 minutes)",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
    },
    required: ["website_id"],
  },
};
|
||||
|
||||
// --- Server Setup ---
|
||||
const server = new Server(
|
||||
{ name: "umami-mcp", version: "1.0.0" },
|
||||
{ capabilities: { tools: {} } }
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [
|
||||
LIST_WEBSITES_TOOL,
|
||||
GET_WEBSITE_STATS_TOOL,
|
||||
GET_PAGE_VIEWS_TOOL,
|
||||
GET_TOP_PAGES_TOOL,
|
||||
GET_TOP_REFERRERS_TOOL,
|
||||
GET_COUNTRY_STATS_TOOL,
|
||||
GET_ACTIVE_VISITORS_TOOL,
|
||||
],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
const headers = await getAuthHeaders();
|
||||
const api = axios.create({ baseURL: `${UMAMI_BASE_URL}/api`, headers, httpsAgent });
|
||||
|
||||
const now = Date.now();
|
||||
|
||||
if (request.params.name === "umami_list_websites") {
|
||||
try {
|
||||
const res = await api.get('/websites');
|
||||
const sites = (res.data.data || res.data || []).map((s: any) => ({
|
||||
id: s.id, name: s.name, domain: s.domain
|
||||
}));
|
||||
return { content: [{ type: "text", text: JSON.stringify(sites, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "umami_get_website_stats") {
|
||||
const { website_id, start_at, end_at = now } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await api.get(`/websites/${website_id}/stats`, { params: { startAt: start_at, endAt: end_at } });
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "umami_get_pageviews") {
|
||||
const { website_id, start_at, end_at = now, unit = 'day', timezone = 'Europe/Berlin' } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await api.get(`/websites/${website_id}/pageviews`, {
|
||||
params: { startAt: start_at, endAt: end_at, unit, timezone }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "umami_get_top_pages") {
|
||||
const { website_id, start_at, end_at = now, limit = 20 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await api.get(`/websites/${website_id}/metrics`, {
|
||||
params: { startAt: start_at, endAt: end_at, type: 'url', limit }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "umami_get_top_referrers") {
|
||||
const { website_id, start_at, end_at = now, limit = 10 } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await api.get(`/websites/${website_id}/metrics`, {
|
||||
params: { startAt: start_at, endAt: end_at, type: 'referrer', limit }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "umami_get_country_stats") {
|
||||
const { website_id, start_at, end_at = now } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await api.get(`/websites/${website_id}/metrics`, {
|
||||
params: { startAt: start_at, endAt: end_at, type: 'country' }
|
||||
});
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
if (request.params.name === "umami_get_active_visitors") {
|
||||
const { website_id } = request.params.arguments as any;
|
||||
try {
|
||||
const res = await api.get(`/websites/${website_id}/active`);
|
||||
return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
|
||||
} catch (e: any) {
|
||||
return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
// --- Express / SSE Server ---
|
||||
async function run() {
|
||||
const isStdio = process.argv.includes('--stdio');
|
||||
|
||||
if (isStdio) {
|
||||
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error('Umami MCP server is running on stdio');
|
||||
} else {
|
||||
const app = express();
|
||||
let transport: SSEServerTransport | null = null;
|
||||
|
||||
app.get('/sse', async (req, res) => {
|
||||
console.error('New SSE connection established');
|
||||
transport = new SSEServerTransport('/message', res);
|
||||
await server.connect(transport);
|
||||
});
|
||||
|
||||
app.post('/message', async (req, res) => {
|
||||
if (!transport) {
|
||||
res.status(400).send('No active SSE connection');
|
||||
return;
|
||||
}
|
||||
await transport.handlePostMessage(req, res);
|
||||
});
|
||||
|
||||
const PORT = process.env.UMAMI_MCP_PORT || 3003;
|
||||
app.listen(PORT, () => {
|
||||
console.error(`Umami MCP server running on http://localhost:${PORT}/sse`);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
run().catch((err) => {
|
||||
console.error("Fatal error:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
13
packages/umami-mcp/src/start.ts
Normal file
13
packages/umami-mcp/src/start.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
config({ path: resolve(__dirname, '../../../.env.local') });
|
||||
config({ path: resolve(__dirname, '../../../.env') });
|
||||
|
||||
import('./index.js').catch(err => {
|
||||
console.error('Failed to start Umami MCP Server:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
16
packages/umami-mcp/tsconfig.json
Normal file
16
packages/umami-mcp/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
||||
1874
pnpm-lock.yaml
generated
1874
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user