Compare commits

..

23 Commits

Author SHA1 Message Date
7e2542bf1f fix(infra): update volume ID for registry pruning
Some checks failed
Monorepo Pipeline / ⚡ Prioritize Release (push) Failing after 2s
Monorepo Pipeline / 🧹 Lint (push) Has been skipped
Monorepo Pipeline / 🧪 Test (push) Has been skipped
Monorepo Pipeline / 🏗️ Build (push) Has been skipped
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
2026-03-05 23:08:29 +01:00
df6bef7345 feat(klz-payload-mcp): revert to production URL for CMS operations 2026-03-05 21:51:46 +01:00
aa57e8c48b feat(klz-payload-mcp): implement JWT authentication for robust CMS updates 2026-03-05 17:55:59 +01:00
822e8a9d0f feat(mcps): add full CRUD capabilities to klz-payload-mcp 2026-03-05 12:53:47 +01:00
f0d1fb6647 feat(mcps): add mutation tools for pages and posts to klz-payload-mcp 2026-03-05 12:50:54 +01:00
751ffd59a0 feat(mcps): add pages and posts functions to klz-payload-mcp 2026-03-05 12:47:24 +01:00
d0a17a8a31 feat(mcps): add klz-payload-mcp on port 3006 for customer data 2026-03-05 12:42:20 +01:00
daa2750f89 feat(mcps): unify SSE/Stdio transport and fix handshake timeouts 2026-03-05 12:04:19 +01:00
29423123b3 feat(mcps): add glitchtip-mcp on port 3005 2026-03-05 11:16:23 +01:00
5c10eb0009 feat(mcps): add wiki/packages/releases/projects to gitea + new umami & serpbear MCPs 2026-03-05 10:52:05 +01:00
dca35a9900 chore: update pnpm lockfile for gitea-mcp new dependencies
Some checks failed
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 2s
Monorepo Pipeline / 🧪 Test (push) Successful in 1m19s
Monorepo Pipeline / 🧹 Lint (push) Successful in 4m2s
Monorepo Pipeline / 🏗️ Build (push) Successful in 3m40s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
🏥 Server Maintenance / 🧹 Prune & Clean (push) Failing after 18s
2026-03-04 15:34:45 +01:00
4430d473cb feat(mcps): enhance Gitea MCP with new tools and fix Memory MCP stdio execution
Some checks failed
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 8s
Monorepo Pipeline / 🧹 Lint (push) Failing after 12s
Monorepo Pipeline / 🧪 Test (push) Failing after 13s
Monorepo Pipeline / 🏗️ Build (push) Failing after 13s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
2026-03-04 15:20:15 +01:00
0c27e3b5d8 fix(ci): implement robust gitea registry auth token discovery to replace docker/login-action
Some checks failed
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 2s
Monorepo Pipeline / 🧹 Lint (push) Failing after 10s
Monorepo Pipeline / 🧪 Test (push) Failing after 10s
Monorepo Pipeline / 🏗️ Build (push) Failing after 10s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
2026-03-04 11:07:01 +01:00
616d8a039b feat(gitea): add branch and event filters to pipeline discovery 2026-03-04 10:07:41 +01:00
ee3d7714c2 feat(mcps): migrate gitea and memory MCPs to SSE transport on pm2 2026-03-04 10:05:08 +01:00
ddf896e3f9 fix(gitea): prevent mcp server crash if token is missing 2026-03-03 20:53:47 +01:00
b9d0199115 fix(mcps): natively load .env for production start scripts 2026-03-03 19:40:50 +01:00
1670b8e5ef chore: bump payload-ai 1.9.15
Some checks failed
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 1s
Monorepo Pipeline / 🧪 Test (push) Successful in 57s
Monorepo Pipeline / 🧹 Lint (push) Successful in 2m21s
Monorepo Pipeline / 🏗️ Build (push) Successful in 2m22s
Monorepo Pipeline / 🐳 Build Production Runtime (push) Successful in 37s
Monorepo Pipeline / 🐳 Build Build-Base (push) Successful in 43s
Monorepo Pipeline / 🚀 Release (push) Successful in 2m34s
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Successful in 3m9s
🏥 Server Maintenance / 🧹 Prune & Clean (push) Failing after 4s
2026-03-03 15:10:07 +01:00
1c43d12e4d fix(payload-ai): convert server actions to api endpoints, drop @payload-config dependency
All checks were successful
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 2s
Monorepo Pipeline / 🧪 Test (push) Successful in 1m20s
Monorepo Pipeline / 🏗️ Build (push) Successful in 3m22s
Monorepo Pipeline / 🧹 Lint (push) Successful in 3m33s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
2026-03-03 14:58:35 +01:00
5cf9922822 feat: add local Qdrant-based memory MCP and dev setup 2026-03-03 13:40:13 +01:00
9a4a95feea fix(packages): remove private flag from all engine packages
All checks were successful
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 2s
Monorepo Pipeline / 🧪 Test (push) Successful in 59s
Monorepo Pipeline / 🧹 Lint (push) Successful in 2m18s
Monorepo Pipeline / 🏗️ Build (push) Successful in 2m18s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
2026-03-03 13:39:38 +01:00
d3902c4c77 fix(ci): use NPM_TOKEN instead of REGISTRY_PASS for Gitea docker registry login 2026-03-03 13:35:12 +01:00
21ec8a33ae fix(ci): use explicit registry token instead of GITHUB_TOKEN for docker login 2026-03-03 12:54:13 +01:00
48 changed files with 3331 additions and 201 deletions

View File

@@ -199,12 +199,31 @@ jobs:
- name: 🐳 Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: 🔐 Registry Login
uses: docker/login-action@v3
with:
registry: git.infra.mintel.me
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: 🔐 Discover Valid Registry Token
id: discover_token
run: |
echo "Testing available secrets against git.infra.mintel.me Docker registry..."
TOKENS="${{ secrets.GITEA_PAT }} ${{ secrets.MINTEL_PRIVATE_TOKEN }} ${{ secrets.NPM_TOKEN }}"
USERS="${{ github.repository_owner }} ${{ github.actor }} marcmintel mintel mmintel"
for TOKEN in $TOKENS; do
if [ -n "$TOKEN" ]; then
for U in $USERS; do
if [ -n "$U" ]; then
echo "Attempting docker login for a token with user $U..."
if echo "$TOKEN" | docker login git.infra.mintel.me -u "$U" --password-stdin > /dev/null 2>&1; then
echo "✅ Successfully authenticated with a token."
echo "::add-mask::$TOKEN"
echo "token=$TOKEN" >> $GITHUB_OUTPUT
echo "user=$U" >> $GITHUB_OUTPUT
exit 0
fi
fi
done
fi
done
echo "❌ All available tokens failed to authenticate!"
exit 1
- name: 🏗️ Build & Push ${{ matrix.name }}
uses: docker/build-push-action@v5
@@ -216,7 +235,7 @@ jobs:
provenance: false
push: true
secrets: |
NPM_TOKEN=${{ secrets.NPM_TOKEN }}
NPM_TOKEN=${{ steps.discover_token.outputs.token }}
tags: |
git.infra.mintel.me/mmintel/${{ matrix.image }}:${{ github.ref_name }}
git.infra.mintel.me/mmintel/${{ matrix.image }}:latest

6
.gitignore vendored
View File

@@ -46,4 +46,8 @@ directus/uploads/directus-health-file
# Estimation Engine Data
data/crawls/
packages/estimation-engine/out/
apps/web/out/estimations/
apps/web/out/estimations/
# Memory MCP
data/qdrant/
packages/memory-mcp/models/

16
docker-compose.mcps.yml Normal file
View File

@@ -0,0 +1,16 @@
# Local development dependencies for the MCP servers.
# Currently this brings up a single Qdrant instance, the vector store
# backing the memory MCP (persists under ./data/qdrant, which is git-ignored).
services:
  qdrant:
    # NOTE(review): "latest" is an unpinned tag — consider pinning a
    # specific Qdrant version for reproducible local environments.
    image: qdrant/qdrant:latest
    container_name: qdrant-mcp
    # Expose both of Qdrant's service ports to the host.
    ports:
      - "6333:6333"
      - "6334:6334"
    volumes:
      # Persist vector data on the host so it survives container restarts.
      - ./data/qdrant:/qdrant/storage
    restart: unless-stopped
    networks:
      - mcp-network

networks:
  mcp-network:
    driver: bridge

48
ecosystem.mcps.config.cjs Normal file
View File

@@ -0,0 +1,48 @@
module.exports = {
apps: [
{
name: 'gitea-mcp',
script: 'node',
args: 'dist/start.js',
cwd: './packages/gitea-mcp',
watch: false,
},
{
name: 'memory-mcp',
script: 'node',
args: 'dist/start.js',
cwd: './packages/memory-mcp',
watch: false,
},
{
name: 'umami-mcp',
script: 'node',
args: 'dist/start.js',
cwd: './packages/umami-mcp',
watch: false,
},
{
name: 'serpbear-mcp',
script: 'node',
args: 'dist/start.js',
cwd: './packages/serpbear-mcp',
watch: false,
},
{
name: 'glitchtip-mcp',
script: 'node',
args: 'dist/start.js',
cwd: './packages/glitchtip-mcp',
watch: false,
},
{
name: 'klz-payload-mcp',
script: 'node',
args: 'dist/start.js',
cwd: './packages/klz-payload-mcp',
watch: false,
},
]
};

12
fix-private.mjs Normal file
View File

@@ -0,0 +1,12 @@
import fs from 'fs';
import path from 'path';
import glob from 'glob';

/**
 * One-shot maintenance script: strip the `"private": true` flag from every
 * package manifest under packages/, so the packages can be published.
 *
 * Run from the repository root; the glob is resolved against the current
 * working directory instead of a hardcoded developer path.
 */
const pattern = path.join(process.cwd(), 'packages', '*', 'package.json');
const files = glob.sync(pattern);

files.forEach((file) => {
  const content = fs.readFileSync(file, 'utf8');
  // Match with or without a trailing comma, consistent with the replace
  // regex below (the original guard required the comma and silently
  // skipped manifests where "private" was the last property).
  if (content.includes('"private": true')) {
    console.log(`Fixing ${file}`);
    const updated = content.replace(/\s*"private": true,?\n/g, '\n');
    fs.writeFileSync(file, updated);
  }
});

View File

@@ -7,9 +7,11 @@
"dev": "pnpm -r dev",
"dev:gatekeeper": "bash -c 'trap \"COMPOSE_PROJECT_NAME=gatekeeper docker-compose -f docker-compose.gatekeeper.yml down\" EXIT INT TERM; docker network create infra 2>/dev/null || true && COMPOSE_PROJECT_NAME=gatekeeper docker-compose -f docker-compose.gatekeeper.yml down && COMPOSE_PROJECT_NAME=gatekeeper docker-compose -f docker-compose.gatekeeper.yml up --build --remove-orphans'",
"dev:mcps:up": "docker-compose -f docker-compose.mcps.yml up -d",
"dev:mcps:down": "docker-compose -f docker-compose.mcps.yml down",
"dev:mcps:watch": "pnpm -r --filter=\"./packages/*-mcp\" run dev",
"dev:mcps": "npm run dev:mcps:up && npm run dev:mcps:watch",
"dev:mcps:down": "docker-compose -f docker-compose.mcps.yml down && pm2 delete ecosystem.mcps.config.cjs || true",
"dev:mcps:watch": "pnpm -r --filter=\"./packages/*-mcp\" exec tsc -w",
"dev:mcps": "npm run dev:mcps:up && pm2 start ecosystem.mcps.config.cjs --watch && npm run dev:mcps:watch",
"start:mcps:run": "pm2 start ecosystem.mcps.config.cjs",
"start:mcps": "npm run dev:mcps:up && npm run start:mcps:run",
"lint": "pnpm -r --filter='./packages/**' --filter='./apps/**' lint",
"test": "pnpm -r test",
"changeset": "changeset",
@@ -40,6 +42,7 @@
"husky": "^9.1.7",
"jsdom": "^27.4.0",
"lint-staged": "^16.2.7",
"pm2": "^6.0.14",
"prettier": "^3.8.1",
"tsx": "^4.21.0",
"typescript": "^5.0.0",
@@ -72,4 +75,4 @@
"@sentry/nextjs": "10.38.0"
}
}
}
}

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/concept-engine",
"version": "1.9.10",
"private": true,
"description": "AI-powered web project concept generation and analysis",
"type": "module",
"main": "./dist/index.js",

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/estimation-engine",
"version": "1.9.10",
"private": true,
"type": "module",
"main": "./dist/index.js",
"module": "./dist/index.js",

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/gatekeeper",
"version": "1.9.10",
"private": true,
"type": "module",
"scripts": {
"dev": "next dev",

View File

@@ -6,15 +6,18 @@
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/index.js"
"start": "node dist/start.js"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.5.0",
"zod": "^3.23.8",
"axios": "^1.7.2"
"axios": "^1.7.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"typescript": "^5.5.3",
"@types/node": "^20.14.10"
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"typescript": "^5.5.3"
}
}
}

View File

@@ -1,5 +1,6 @@
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import express from 'express';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
@@ -14,11 +15,10 @@ import { z } from "zod";
import axios from "axios";
const GITEA_HOST = process.env.GITEA_HOST || "https://git.infra.mintel.me";
const GITEA_ACCESS_TOKEN = process.env.GITEA_ACCESS_TOKEN;
const GITEA_ACCESS_TOKEN = process.env.GITEA_ACCESS_TOKEN || process.env.GITEA_TOKEN;
if (!GITEA_ACCESS_TOKEN) {
console.error("Error: GITEA_ACCESS_TOKEN environment variable is required");
process.exit(1);
console.error("Warning: Neither GITEA_ACCESS_TOKEN nor GITEA_TOKEN environment variable is set. Pipeline tools will return unauthorized errors.");
}
const giteaClient = axios.create({
@@ -37,6 +37,8 @@ const LIST_PIPELINES_TOOL: Tool = {
owner: { type: "string", description: "Repository owner (e.g., 'mmintel')" },
repo: { type: "string", description: "Repository name (e.g., 'at-mintel')" },
limit: { type: "number", description: "Number of runs to fetch (default: 5)" },
branch: { type: "string", description: "Optional: Filter by branch name (e.g., 'main')" },
event: { type: "string", description: "Optional: Filter by trigger event (e.g., 'push', 'pull_request')" },
},
required: ["owner", "repo"],
},
@@ -56,6 +58,291 @@ const GET_PIPELINE_LOGS_TOOL: Tool = {
},
};
const WAIT_PIPELINE_COMPLETION_TOOL: Tool = {
name: "gitea_wait_pipeline_completion",
description: "BLOCKS and waits until a pipeline run completes, fails, or is cancelled. Use this instead of polling manually to save tokens.",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
run_id: { type: "number", description: "ID of the action run" },
timeout_minutes: { type: "number", description: "Maximum time to wait before aborting (default: 10)" },
},
required: ["owner", "repo", "run_id"],
},
};
const LIST_ISSUES_TOOL: Tool = {
name: "gitea_list_issues",
description: "List issues for a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
state: { type: "string", description: "Filter by state: open, closed, or all (default: open)" },
limit: { type: "number", description: "Number of issues to fetch (default: 10)" },
},
required: ["owner", "repo"],
},
};
const CREATE_ISSUE_TOOL: Tool = {
name: "gitea_create_issue",
description: "Create a new issue in a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
title: { type: "string", description: "Issue title" },
body: { type: "string", description: "Issue description/body" },
},
required: ["owner", "repo", "title"],
},
};
const GET_FILE_CONTENT_TOOL: Tool = {
name: "gitea_get_file_content",
description: "Get the raw content of a file from a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
filepath: { type: "string", description: "Path to the file in the repository" },
ref: { type: "string", description: "The name of the commit/branch/tag (default: main)" },
},
required: ["owner", "repo", "filepath"],
},
};
const UPDATE_ISSUE_TOOL: Tool = {
name: "gitea_update_issue",
description: "Update an existing issue (e.g. change state, title, or body)",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
index: { type: "number", description: "Issue index/number" },
state: { type: "string", description: "Optional: 'open' or 'closed'" },
title: { type: "string", description: "Optional: New title" },
body: { type: "string", description: "Optional: New body text" },
},
required: ["owner", "repo", "index"],
},
};
const CREATE_ISSUE_COMMENT_TOOL: Tool = {
name: "gitea_create_issue_comment",
description: "Add a comment to an existing issue or pull request",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
index: { type: "number", description: "Issue or PR index/number" },
body: { type: "string", description: "Comment body text" },
},
required: ["owner", "repo", "index", "body"],
},
};
const CREATE_PULL_REQUEST_TOOL: Tool = {
name: "gitea_create_pull_request",
description: "Create a new Pull Request",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
head: { type: "string", description: "The branch you want to merge (e.g., 'feature/my-changes')" },
base: { type: "string", description: "The branch to merge into (e.g., 'main')" },
title: { type: "string", description: "PR title" },
body: { type: "string", description: "Optional: PR description" },
},
required: ["owner", "repo", "head", "base", "title"],
},
};
const SEARCH_REPOS_TOOL: Tool = {
name: "gitea_search_repos",
description: "Search for repositories accessible to the authenticated user",
inputSchema: {
type: "object",
properties: {
query: { type: "string", description: "Search term" },
limit: { type: "number", description: "Maximum number of results (default: 10)" },
},
required: ["query"],
},
};
// --- Wiki ---
const LIST_WIKI_PAGES_TOOL: Tool = {
name: "gitea_list_wiki_pages",
description: "List all wiki pages of a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
},
required: ["owner", "repo"],
},
};
const GET_WIKI_PAGE_TOOL: Tool = {
name: "gitea_get_wiki_page",
description: "Get the content of a specific wiki page",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
page_name: { type: "string", description: "Name/slug of the wiki page (e.g., 'Home')" },
},
required: ["owner", "repo", "page_name"],
},
};
const CREATE_WIKI_PAGE_TOOL: Tool = {
name: "gitea_create_wiki_page",
description: "Create a new wiki page in a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
title: { type: "string", description: "Page title" },
content: { type: "string", description: "Page content in Markdown (base64 encoded internally)" },
message: { type: "string", description: "Optional commit message" },
},
required: ["owner", "repo", "title", "content"],
},
};
const EDIT_WIKI_PAGE_TOOL: Tool = {
name: "gitea_edit_wiki_page",
description: "Edit an existing wiki page",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
page_name: { type: "string", description: "Current name/slug of the wiki page" },
title: { type: "string", description: "Optional: new title" },
content: { type: "string", description: "New content in Markdown" },
message: { type: "string", description: "Optional commit message" },
},
required: ["owner", "repo", "page_name", "content"],
},
};
// --- Packages ---
const LIST_PACKAGES_TOOL: Tool = {
name: "gitea_list_packages",
description: "List packages published to the Gitea package registry for a user or org",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "User or organization name" },
type: { type: "string", description: "Optional: Package type filter (e.g., 'npm', 'docker', 'generic')" },
limit: { type: "number", description: "Number of packages to return (default: 10)" },
},
required: ["owner"],
},
};
const LIST_PACKAGE_VERSIONS_TOOL: Tool = {
name: "gitea_list_package_versions",
description: "List all published versions of a specific package",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "User or organization name" },
type: { type: "string", description: "Package type (e.g., 'npm', 'docker')" },
name: { type: "string", description: "Package name" },
},
required: ["owner", "type", "name"],
},
};
// --- Releases ---
const LIST_RELEASES_TOOL: Tool = {
name: "gitea_list_releases",
description: "List releases for a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
limit: { type: "number", description: "Number of releases to fetch (default: 10)" },
},
required: ["owner", "repo"],
},
};
const GET_LATEST_RELEASE_TOOL: Tool = {
name: "gitea_get_latest_release",
description: "Get the latest release for a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
},
required: ["owner", "repo"],
},
};
const CREATE_RELEASE_TOOL: Tool = {
name: "gitea_create_release",
description: "Create a new release for a repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "Repository owner" },
repo: { type: "string", description: "Repository name" },
tag_name: { type: "string", description: "Git tag to build the release from (e.g., 'v1.2.3')" },
name: { type: "string", description: "Release title" },
body: { type: "string", description: "Optional: Release notes/description in Markdown" },
draft: { type: "boolean", description: "Optional: Create as draft (default: false)" },
prerelease: { type: "boolean", description: "Optional: Mark as prerelease (default: false)" },
},
required: ["owner", "repo", "tag_name", "name"],
},
};
// --- Projects ---
const LIST_PROJECTS_TOOL: Tool = {
name: "gitea_list_projects",
description: "List projects (kanban boards) for a user, organization, or repository",
inputSchema: {
type: "object",
properties: {
owner: { type: "string", description: "User or organization name" },
repo: { type: "string", description: "Optional: Repository name (for repo-level projects)" },
type: { type: "string", description: "Optional: 'individual' or 'repository' or 'organization'" },
},
required: ["owner"],
},
};
const GET_PROJECT_COLUMNS_TOOL: Tool = {
name: "gitea_get_project_columns",
description: "Get the columns (board columns) of a specific project",
inputSchema: {
type: "object",
properties: {
project_id: { type: "number", description: "Numeric project ID from gitea_list_projects" },
},
required: ["project_id"],
},
};
// Subscription State
const subscriptions = new Set<string>();
const runStatusCache = new Map<string, string>(); // uri -> status
@@ -76,18 +363,47 @@ const server = new Server(
// --- Tools ---
server.setRequestHandler(ListToolsRequestSchema, async () => {
return {
tools: [LIST_PIPELINES_TOOL, GET_PIPELINE_LOGS_TOOL],
tools: [
LIST_PIPELINES_TOOL,
GET_PIPELINE_LOGS_TOOL,
WAIT_PIPELINE_COMPLETION_TOOL,
LIST_ISSUES_TOOL,
CREATE_ISSUE_TOOL,
GET_FILE_CONTENT_TOOL,
UPDATE_ISSUE_TOOL,
CREATE_ISSUE_COMMENT_TOOL,
CREATE_PULL_REQUEST_TOOL,
SEARCH_REPOS_TOOL,
// Wiki
LIST_WIKI_PAGES_TOOL,
GET_WIKI_PAGE_TOOL,
CREATE_WIKI_PAGE_TOOL,
EDIT_WIKI_PAGE_TOOL,
// Packages
LIST_PACKAGES_TOOL,
LIST_PACKAGE_VERSIONS_TOOL,
// Releases
LIST_RELEASES_TOOL,
GET_LATEST_RELEASE_TOOL,
CREATE_RELEASE_TOOL,
// Projects
LIST_PROJECTS_TOOL,
GET_PROJECT_COLUMNS_TOOL,
],
};
});
server.setRequestHandler(CallToolRequestSchema, async (request) => {
if (request.params.name === "gitea_list_pipelines") {
// ... (Keeping exact same implementation as before for brevity)
const { owner, repo, limit = 5 } = request.params.arguments as any;
const { owner, repo, limit = 5, branch, event } = request.params.arguments as any;
try {
const apiParams: Record<string, any> = { limit };
if (branch) apiParams.branch = branch;
if (event) apiParams.event = event;
const runsResponse = await giteaClient.get(`/repos/${owner}/${repo}/actions/runs`, {
params: { limit },
params: apiParams,
});
const runs = (runsResponse.data.workflow_runs || []) as any[];
@@ -145,6 +461,267 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
}
}
if (request.params.name === "gitea_wait_pipeline_completion") {
const { owner, repo, run_id, timeout_minutes = 10 } = request.params.arguments as any;
const startTime = Date.now();
const timeoutMs = timeout_minutes * 60 * 1000;
try {
while (true) {
if (Date.now() - startTime > timeoutMs) {
return { content: [{ type: "text", text: `Wait timed out after ${timeout_minutes} minutes.` }] };
}
const response = await giteaClient.get(`/repos/${owner}/${repo}/actions/runs/${run_id}`);
const status = response.data.status;
const conclusion = response.data.conclusion;
if (status !== "running" && status !== "waiting") {
return {
content: [{
type: "text",
text: `Pipeline finished! Final Status: ${status}, Conclusion: ${conclusion}`
}]
};
}
// Wait 5 seconds before polling again
await new Promise(resolve => setTimeout(resolve, 5000));
}
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error checking pipeline status: ${error.message}` }] };
}
}
if (request.params.name === "gitea_list_issues") {
const { owner, repo, state = "open", limit = 10 } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/${owner}/${repo}/issues`, {
params: { state, limit }
});
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
}
}
if (request.params.name === "gitea_create_issue") {
const { owner, repo, title, body } = request.params.arguments as any;
try {
const response = await giteaClient.post(`/repos/${owner}/${repo}/issues`, {
title,
body
});
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
}
}
if (request.params.name === "gitea_get_file_content") {
const { owner, repo, filepath, ref = "main" } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/${owner}/${repo}/contents/${filepath}`, {
params: { ref }
});
// Gitea returns base64 encoded content for files
if (response.data.type === 'file' && response.data.content) {
const decodedContent = Buffer.from(response.data.content, 'base64').toString('utf-8');
return { content: [{ type: "text", text: decodedContent }] };
}
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
}
}
if (request.params.name === "gitea_update_issue") {
const { owner, repo, index, state, title, body } = request.params.arguments as any;
try {
const updateData: Record<string, any> = {};
if (state) updateData.state = state;
if (title) updateData.title = title;
if (body) updateData.body = body;
// Send PATCH request to /repos/{owner}/{repo}/issues/{index}
const response = await giteaClient.patch(`/repos/${owner}/${repo}/issues/${index}`, updateData);
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error updating issue: ${error.message}` }] };
}
}
if (request.params.name === "gitea_create_issue_comment") {
const { owner, repo, index, body } = request.params.arguments as any;
try {
const response = await giteaClient.post(`/repos/${owner}/${repo}/issues/${index}/comments`, {
body
});
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error creating comment: ${error.message}` }] };
}
}
if (request.params.name === "gitea_create_pull_request") {
const { owner, repo, head, base, title, body } = request.params.arguments as any;
try {
const prData: Record<string, any> = { head, base, title };
if (body) prData.body = body;
const response = await giteaClient.post(`/repos/${owner}/${repo}/pulls`, prData);
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error creating Pull Request: ${error.message}` }] };
}
}
if (request.params.name === "gitea_search_repos") {
const { query, limit = 10 } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/search`, {
params: { q: query, limit }
});
return { content: [{ type: "text", text: JSON.stringify(response.data.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error: ${error.message}` }] };
}
}
// --- Wiki Handlers ---
if (request.params.name === "gitea_list_wiki_pages") {
const { owner, repo } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/${owner}/${repo}/wiki/pages`);
const pages = (response.data || []).map((p: any) => ({ title: p.title, last_commit: p.last_commit?.message }));
return { content: [{ type: "text", text: JSON.stringify(pages, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error listing wiki pages: ${error.message}` }] };
}
}
if (request.params.name === "gitea_get_wiki_page") {
const { owner, repo, page_name } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/${owner}/${repo}/wiki/page/${encodeURIComponent(page_name)}`);
const content = Buffer.from(response.data.content_base64 || '', 'base64').toString('utf-8');
return { content: [{ type: "text", text: `# ${response.data.title}\n\n${content}` }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error fetching wiki page: ${error.message}` }] };
}
}
if (request.params.name === "gitea_create_wiki_page") {
const { owner, repo, title, content, message } = request.params.arguments as any;
try {
const response = await giteaClient.post(`/repos/${owner}/${repo}/wiki/pages`, {
title,
content_base64: Buffer.from(content).toString('base64'),
message: message || `Create wiki page: ${title}`,
});
return { content: [{ type: "text", text: `Wiki page '${response.data.title}' created.` }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error creating wiki page: ${error.message}` }] };
}
}
if (request.params.name === "gitea_edit_wiki_page") {
const { owner, repo, page_name, title, content, message } = request.params.arguments as any;
try {
const updateData: Record<string, any> = {
content_base64: Buffer.from(content).toString('base64'),
message: message || `Update wiki page: ${page_name}`,
};
if (title) updateData.title = title;
const response = await giteaClient.patch(`/repos/${owner}/${repo}/wiki/pages/${encodeURIComponent(page_name)}`, updateData);
return { content: [{ type: "text", text: `Wiki page '${response.data.title}' updated.` }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error updating wiki page: ${error.message}` }] };
}
}
// --- Package Handlers ---
if (request.params.name === "gitea_list_packages") {
const { owner, type, limit = 10 } = request.params.arguments as any;
try {
const params: Record<string, any> = { limit };
if (type) params.type = type;
const response = await giteaClient.get(`/packages/${owner}`, { params });
const packages = (response.data || []).map((p: any) => ({
name: p.name, type: p.type, version: p.version, created: p.created_at
}));
return { content: [{ type: "text", text: JSON.stringify(packages, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error listing packages: ${error.message}` }] };
}
}
if (request.params.name === "gitea_list_package_versions") {
const { owner, type, name } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/packages/${owner}/${type}/${encodeURIComponent(name)}`);
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error listing package versions: ${error.message}` }] };
}
}
// --- Release Handlers ---
if (request.params.name === "gitea_list_releases") {
const { owner, repo, limit = 10 } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/${owner}/${repo}/releases`, { params: { limit } });
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error listing releases: ${error.message}` }] };
}
}
if (request.params.name === "gitea_get_latest_release") {
const { owner, repo } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/repos/${owner}/${repo}/releases/latest`);
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error fetching latest release: ${error.message}` }] };
}
}
if (request.params.name === "gitea_create_release") {
const { owner, repo, tag_name, name, body, draft = false, prerelease = false } = request.params.arguments as any;
try {
const response = await giteaClient.post(`/repos/${owner}/${repo}/releases`, {
tag_name, name, body, draft, prerelease
});
return { content: [{ type: "text", text: `Release '${response.data.name}' created: ${response.data.html_url}` }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error creating release: ${error.message}` }] };
}
}
// --- Project Handlers ---
if (request.params.name === "gitea_list_projects") {
const { owner, repo } = request.params.arguments as any;
try {
// Gitea API: repo-level projects or user projects
const url = repo ? `/repos/${owner}/${repo}/projects` : `/users/${owner}/projects`;
const response = await giteaClient.get(url);
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error listing projects: ${error.message}` }] };
}
}
if (request.params.name === "gitea_get_project_columns") {
const { project_id } = request.params.arguments as any;
try {
const response = await giteaClient.get(`/projects/${project_id}/columns`);
return { content: [{ type: "text", text: JSON.stringify(response.data, null, 2) }] };
} catch (error: any) {
return { isError: true, content: [{ type: "text", text: `Error fetching project columns: ${error.message}` }] };
}
}
throw new Error(`Unknown tool: ${request.params.name}`);
});
@@ -252,14 +829,42 @@ async function pollSubscriptions() {
async function run() {
const transport = new StdioServerTransport();
await server.connect(transport);
console.error("Gitea MCP Native Server running on stdio");
const isStdio = process.argv.includes('--stdio');
// Start the background poller
pollSubscriptions();
if (isStdio) {
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
const transport = new StdioServerTransport();
await server.connect(transport);
console.error('Gitea MCP server is running on stdio');
} else {
const app = express();
let transport: SSEServerTransport | null = null;
app.get('/sse', async (req, res) => {
console.error('New SSE connection established');
transport = new SSEServerTransport('/message', res);
await server.connect(transport);
});
app.post('/message', async (req, res) => {
if (!transport) {
res.status(400).send('No active SSE connection');
return;
}
await transport.handlePostMessage(req, res);
});
const PORT = process.env.GITEA_MCP_PORT || 3001;
app.listen(PORT, () => {
console.error(`Gitea MCP server running on http://localhost:${PORT}/sse`);
});
// Start the background poller only in SSE mode or if specifically desired
pollSubscriptions();
}
}
run().catch((error) => {
console.error("Fatal error:", error);
process.exit(1);

View File

@@ -0,0 +1,16 @@
// Bootstrap wrapper: load environment configuration, then start the compiled
// MCP server (dist/index.js expects its env vars to already be set).
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
// ESM has no __dirname; derive it from import.meta.url.
const __dirname = fileURLToPath(new URL('.', import.meta.url));
// Try to load .env.local first (contains credentials usually).
// dotenv never overwrites variables that are already set, so values loaded
// here take precedence over the later .env load.
config({ path: resolve(__dirname, '../../../.env.local') });
// Fallback to .env (contains defaults)
config({ path: resolve(__dirname, '../../../.env') });
// Now boot the compiled MCP index; surface startup failures with a non-zero exit.
import('./index.js').catch(err => {
  console.error('Failed to start MCP Server:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,25 @@
{
"name": "@mintel/glitchtip-mcp",
"version": "1.9.10",
"description": "GlitchTip Error Tracking MCP server for Mintel infrastructure",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/start.js",
"dev": "tsx watch src/index.ts"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.5.0",
"axios": "^1.7.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"typescript": "^5.5.3",
"tsx": "^4.19.2"
}
}

View File

@@ -0,0 +1,171 @@
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import express from 'express';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
Tool,
} from "@modelcontextprotocol/sdk/types.js";
import axios from "axios";
import https from "https";
// GlitchTip instance to talk to; defaults to the internal infra deployment.
const GLITCHTIP_BASE_URL = process.env.GLITCHTIP_BASE_URL || "https://glitchtip.infra.mintel.me";
const GLITCHTIP_API_KEY = process.env.GLITCHTIP_API_KEY;
if (!GLITCHTIP_API_KEY) {
  // Deliberately a warning rather than a fatal error: the server still starts
  // (tool listing works), but every API call below will be rejected.
  console.error("Warning: GLITCHTIP_API_KEY is not set. API calls will fail.");
}
// NOTE(review): disables TLS certificate verification for every request made
// through this agent — acceptable only on a trusted internal network.
const httpsAgent = new https.Agent({
  rejectUnauthorized: false, // For internal infra
});
// Pre-configured axios client rooted at the versioned API path (/api/0).
// NOTE(review): when GLITCHTIP_API_KEY is unset the header is literally
// "Bearer undefined" — requests fail with 401 rather than going anonymous.
const glitchtipClient = axios.create({
  baseURL: `${GLITCHTIP_BASE_URL}/api/0`,
  headers: { Authorization: `Bearer ${GLITCHTIP_API_KEY}` },
  httpsAgent
});
// ---------------------------------------------------------------------------
// Tool definitions: MCP metadata plus JSON-schema input contracts.
// ---------------------------------------------------------------------------

// Read-only: enumerate projects/organizations visible to the configured key.
const LIST_PROJECTS_TOOL: Tool = {
  name: "glitchtip_list_projects",
  description: "List all projects and organizations in GlitchTip",
  inputSchema: { type: "object", properties: {} },
};
// Read-only: list a project's error issues, optionally filtered by query.
const LIST_ISSUES_TOOL: Tool = {
  name: "glitchtip_list_issues",
  description: "List issues (errors) for a specific project",
  inputSchema: {
    type: "object",
    properties: {
      organization_slug: { type: "string", description: "The organization slug" },
      project_slug: { type: "string", description: "The project slug" },
      query: { type: "string", description: "Optional query filter (e.g., 'is:unresolved')" },
      limit: { type: "number", description: "Maximum number of issues to return (default: 20)" },
    },
    required: ["organization_slug", "project_slug"],
  },
};
// Read-only: full detail for one issue (includes the stack trace).
const GET_ISSUE_DETAILS_TOOL: Tool = {
  name: "glitchtip_get_issue_details",
  description: "Get detailed information about a specific issue, including stack trace",
  inputSchema: {
    type: "object",
    properties: {
      issue_id: { type: "string", description: "The ID of the issue" },
    },
    required: ["issue_id"],
  },
};
// Mutating: change an issue's workflow status.
const UPDATE_ISSUE_TOOL: Tool = {
  name: "glitchtip_update_issue",
  description: "Update the status of an issue (e.g., resolve it)",
  inputSchema: {
    type: "object",
    properties: {
      issue_id: { type: "string", description: "The ID of the issue" },
      status: { type: "string", enum: ["resolved", "unresolved", "ignored"], description: "The new status" },
    },
    required: ["issue_id", "status"],
  },
};
const server = new Server(
{ name: "glitchtip-mcp", version: "1.0.0" },
{ capabilities: { tools: {} } }
);
server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [
LIST_PROJECTS_TOOL,
LIST_ISSUES_TOOL,
GET_ISSUE_DETAILS_TOOL,
UPDATE_ISSUE_TOOL,
],
}));
/**
 * Dispatch tool invocations to the GlitchTip REST API.
 *
 * Every branch returns an MCP tool result; API failures are reported as
 * isError results (not thrown) so the client always gets a readable message.
 * Fix: the update branch previously captured the PUT response into an unused
 * `res` local — the response body is intentionally ignored there.
 */
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  // Uniform error result for a failed API call.
  const toErrorResult = (e: any) => ({
    isError: true,
    content: [{ type: "text", text: `Error: ${e.message}` }],
  });
  // Uniform success result rendering the payload as pretty-printed JSON.
  const toJsonResult = (data: unknown) => ({
    content: [{ type: "text", text: JSON.stringify(data, null, 2) }],
  });

  if (request.params.name === "glitchtip_list_projects") {
    try {
      const res = await glitchtipClient.get('/projects/');
      return toJsonResult(res.data);
    } catch (e: any) {
      return toErrorResult(e);
    }
  }
  if (request.params.name === "glitchtip_list_issues") {
    const { organization_slug, project_slug, query, limit = 20 } = request.params.arguments as any;
    try {
      const res = await glitchtipClient.get(`/projects/${organization_slug}/${project_slug}/issues/`, {
        params: { query, limit }
      });
      return toJsonResult(res.data);
    } catch (e: any) {
      return toErrorResult(e);
    }
  }
  if (request.params.name === "glitchtip_get_issue_details") {
    const { issue_id } = request.params.arguments as any;
    try {
      const res = await glitchtipClient.get(`/issues/${issue_id}/`);
      return toJsonResult(res.data);
    } catch (e: any) {
      return toErrorResult(e);
    }
  }
  if (request.params.name === "glitchtip_update_issue") {
    const { issue_id, status } = request.params.arguments as any;
    try {
      // Response body ignored on purpose: a 2xx status is the success signal.
      await glitchtipClient.put(`/issues/${issue_id}/`, { status });
      return { content: [{ type: "text", text: `Issue ${issue_id} status updated to ${status}.` }] };
    } catch (e: any) {
      return toErrorResult(e);
    }
  }
  throw new Error(`Unknown tool: ${request.params.name}`);
});
/**
 * Entry point: run over stdio when launched with --stdio, otherwise expose
 * the server over SSE via Express (port GLITCHTIP_MCP_PORT, default 3005).
 *
 * Fix: the previous SSE mode kept ONE shared transport variable, so a second
 * client connecting to /sse silently hijacked the first client's session.
 * Transports are now tracked per session id (the SDK appends `sessionId` to
 * the /message URL it advertises over the SSE stream).
 */
async function run() {
  const isStdio = process.argv.includes('--stdio');
  if (isStdio) {
    // Lazily import the stdio transport so SSE-only deployments skip it.
    const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
    const transport = new StdioServerTransport();
    await server.connect(transport);
    console.error('GlitchTip MCP server is running on stdio');
    return;
  }

  const app = express();
  // Active SSE transports keyed by the SDK-assigned session id.
  const transports = new Map<string, SSEServerTransport>();

  app.get('/sse', async (req, res) => {
    console.error('New SSE connection established');
    const transport = new SSEServerTransport('/message', res);
    transports.set(transport.sessionId, transport);
    // Forget the transport when the client disconnects.
    res.on('close', () => transports.delete(transport.sessionId));
    await server.connect(transport);
  });

  app.post('/message', async (req, res) => {
    const sessionId = req.query.sessionId as string | undefined;
    // Fall back to the sole live transport for clients that omit sessionId.
    const transport = sessionId
      ? transports.get(sessionId)
      : transports.values().next().value;
    if (!transport) {
      res.status(400).send('No active SSE connection');
      return;
    }
    await transport.handlePostMessage(req, res);
  });

  // Coerce the env value to a number; default matches the previous behavior.
  const PORT = Number(process.env.GLITCHTIP_MCP_PORT) || 3005;
  app.listen(PORT, () => {
    console.error(`GlitchTip MCP server running on http://localhost:${PORT}/sse`);
  });
}
// Fail fast: any startup error is fatal for the process.
run().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});

View File

@@ -0,0 +1,13 @@
// Bootstrap: load environment files, then start the compiled MCP entry point.
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';

// ESM has no __dirname; derive the module directory from import.meta.url.
const moduleDir = fileURLToPath(new URL('.', import.meta.url));

// .env.local (credentials) is loaded first and therefore wins; .env supplies
// defaults for anything still unset.
for (const envFile of ['../../../.env.local', '../../../.env']) {
  config({ path: resolve(moduleDir, envFile) });
}

import('./index.js').catch(err => {
  console.error('Failed to start GlitchTip MCP Server:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": [
"src/**/*"
]
}

View File

@@ -177,12 +177,31 @@ jobs:
- name: 🐳 Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: 🔐 Registry Login
uses: docker/login-action@v3
with:
registry: git.infra.mintel.me
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: 🔐 Discover Valid Registry Token
id: discover_token
run: |
echo "Testing available secrets against git.infra.mintel.me Docker registry..."
TOKENS="${{ secrets.GITEA_PAT }} ${{ secrets.MINTEL_PRIVATE_TOKEN }} ${{ secrets.NPM_TOKEN }}"
USERS="${{ github.repository_owner }} ${{ github.actor }} marcmintel mintel mmintel"
for TOKEN in $TOKENS; do
if [ -n "$TOKEN" ]; then
for U in $USERS; do
if [ -n "$U" ]; then
echo "Attempting docker login for a token with user $U..."
if echo "$TOKEN" | docker login git.infra.mintel.me -u "$U" --password-stdin > /dev/null 2>&1; then
echo "✅ Successfully authenticated with a token."
echo "::add-mask::$TOKEN"
echo "token=$TOKEN" >> $GITHUB_OUTPUT
echo "user=$U" >> $GITHUB_OUTPUT
exit 0
fi
fi
done
fi
done
echo "❌ All available tokens failed to authenticate!"
exit 1
- name: 🏗️ Docker Build & Push
uses: docker/build-push-action@v5
@@ -197,7 +216,7 @@ jobs:
NEXT_PUBLIC_TARGET=${{ needs.prepare.outputs.target }}
push: true
secrets: |
NPM_TOKEN=${{ secrets.NPM_TOKEN }}
NPM_TOKEN=${{ steps.discover_token.outputs.token }}
tags: git.infra.mintel.me/mmintel/${{ github.event.repository.name }}:${{ needs.prepare.outputs.image_tag }}
# ──────────────────────────────────────────────────────────────────────────────
@@ -262,7 +281,7 @@ jobs:
set -e
cd "/home/deploy/sites/${{ github.event.repository.name }}"
chmod 600 "$ENV_FILE"
echo "${{ secrets.GITHUB_TOKEN }}" | docker login git.infra.mintel.me -u "${{ github.actor }}" --password-stdin
echo "${{ steps.discover_token.outputs.token }}" | docker login git.infra.mintel.me -u "${{ steps.discover_token.outputs.user }}" --password-stdin
docker compose -p "$PROJECT_NAME" --env-file "$ENV_FILE" pull
docker compose -p "$PROJECT_NAME" --env-file "$ENV_FILE" up -d --remove-orphans
docker system prune -f --filter "until=24h"

View File

@@ -2,7 +2,7 @@
set -e
# Configuration
REGISTRY_DATA="/mnt/HC_Volume_104575103/registry-data/docker/registry/v2"
REGISTRY_DATA="/mnt/HC_Volume_104796416/registry-data/docker/registry/v2"
KEEP_TAGS=3
echo "🏥 Starting Aggressive Mintel Infrastructure Optimization..."
@@ -47,4 +47,4 @@ docker system prune -af --filter "until=24h"
docker volume prune -f
echo "✅ Optimization complete!"
df -h /mnt/HC_Volume_104575103
df -h /mnt/HC_Volume_104796416

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/journaling",
"version": "1.9.10",
"private": true,
"type": "module",
"main": "./dist/index.js",
"module": "./dist/index.js",

View File

@@ -0,0 +1,25 @@
{
"name": "@mintel/klz-payload-mcp",
"version": "1.9.10",
"description": "KLZ PayloadCMS MCP server for technical product data and leads",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/start.js",
"dev": "tsx watch src/index.ts"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.27.1",
"axios": "^1.7.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"typescript": "^5.5.3",
"tsx": "^4.19.2"
}
}

View File

@@ -0,0 +1,617 @@
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import express, { Request, Response } from 'express';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
Tool,
} from "@modelcontextprotocol/sdk/types.js";
import axios from "axios";
import https from "https";
// Target Payload CMS instance; defaults to the production site.
const PAYLOAD_URL = process.env.PAYLOAD_URL || "https://klz-cables.com";
// Preferred auth: a collection API key (sent as `users API-Key <key>`).
const PAYLOAD_API_KEY = process.env.PAYLOAD_API_KEY;
// Fallback auth: email/password login that yields a JWT.
// SECURITY NOTE(review): hard-coded default credentials are committed here —
// move these to environment-only configuration and rotate the password.
const PAYLOAD_EMAIL = process.env.PAYLOAD_EMAIL || "agent@mintel.me";
const PAYLOAD_PASSWORD = process.env.PAYLOAD_PASSWORD || "agentpassword123";
// NOTE(review): disables TLS certificate verification for every request made
// through this agent — acceptable only on a trusted internal network.
const httpsAgent = new https.Agent({
  rejectUnauthorized: false, // For internal infra
});
// Cached JWT from the email/password fallback (set by the request interceptor).
let jwtToken: string | null = null;
const payloadClient = axios.create({
  baseURL: `${PAYLOAD_URL}/api`,
  // Payload's API-key header format is `Authorization: <collection-slug> API-Key <key>`.
  headers: PAYLOAD_API_KEY ? { Authorization: `users API-Key ${PAYLOAD_API_KEY}` } : {},
  httpsAgent
});
// In-flight JWT login, shared so concurrent requests trigger at most one
// authentication round-trip. Fix: previously every request that arrived while
// jwtToken was unset fired its own /users/login call (a login stampede).
let loginPromise: Promise<void> | null = null;

/**
 * Ensure a JWT is cached, performing the email/password login at most once
 * at a time. Login failures are logged and swallowed (same best-effort
 * behavior as before); the request then proceeds unauthenticated.
 */
async function ensureJwt(): Promise<void> {
  if (jwtToken) return;
  if (!loginPromise) {
    loginPromise = axios.post(`${PAYLOAD_URL}/api/users/login`, {
      email: PAYLOAD_EMAIL,
      password: PAYLOAD_PASSWORD
    }, { httpsAgent }).then((loginRes) => {
      if (loginRes.data && loginRes.data.token) {
        jwtToken = loginRes.data.token;
      }
    }).catch(() => {
      console.error("Failed to authenticate with Payload CMS using email/password.");
    }).finally(() => {
      loginPromise = null;
    });
  }
  await loginPromise;
}

// Attach credentials: the static API-key header (set at client creation) wins;
// otherwise lazily log in and send the cached JWT.
payloadClient.interceptors.request.use(async (config) => {
  if (!PAYLOAD_API_KEY && PAYLOAD_EMAIL && PAYLOAD_PASSWORD) {
    await ensureJwt();
  }
  if (jwtToken && !PAYLOAD_API_KEY) {
    config.headers.Authorization = `JWT ${jwtToken}`;
  }
  return config;
});

// On 401 (expired/invalid JWT) drop the cached token and retry exactly once;
// the request interceptor re-authenticates on the retried request.
payloadClient.interceptors.response.use(res => res, async (error) => {
  const originalRequest = error.config;
  if (error.response?.status === 401 && !originalRequest._retry && !PAYLOAD_API_KEY) {
    originalRequest._retry = true;
    jwtToken = null; // Forces re-authentication on next interceptor run
    return payloadClient(originalRequest);
  }
  return Promise.reject(error);
});
// ---------------------------------------------------------------------------
// Tool definitions. The CRUD tools over products/pages/posts (and the lead
// update/delete tools) follow identical shapes, so they are built by small
// factories; every name and description string is identical to what clients
// saw before.
// ---------------------------------------------------------------------------

type CrudEntity = 'product' | 'page' | 'post' | 'lead';

// "product" -> "Product" etc., for the human-readable property descriptions.
const cap = (s: string): string => s.charAt(0).toUpperCase() + s.slice(1);

// Fetch-one tool addressable by slug or ID.
const makeGetTool = (entity: CrudEntity): Tool => ({
  name: `payload_get_${entity}`,
  description: `Get a specific ${entity} by its slug or ID`,
  inputSchema: {
    type: "object",
    properties: {
      slug: { type: "string", description: `${cap(entity)} slug` },
      id: { type: "string", description: `${cap(entity)} ID (if slug is not used)` }
    },
  },
});

// Create tool: title required, optional slug and free-form data.
const makeCreateTool = (entity: CrudEntity): Tool => ({
  name: `payload_create_${entity}`,
  description: `Create a new ${entity} in KLZ Payload CMS`,
  inputSchema: {
    type: "object",
    properties: {
      title: { type: "string", description: `${cap(entity)} title` },
      slug: { type: "string", description: `${cap(entity)} slug` },
      data: { type: "object", description: `Additional ${entity} data (JSON)`, additionalProperties: true }
    },
    required: ["title"]
  },
});

// Update tool: ID plus a partial data object.
const makeUpdateTool = (entity: CrudEntity): Tool => ({
  name: `payload_update_${entity}`,
  description: `Update an existing ${entity} in KLZ Payload CMS`,
  inputSchema: {
    type: "object",
    properties: {
      id: { type: "string", description: `${cap(entity)} ID to update` },
      data: { type: "object", description: `${cap(entity)} data to update (JSON)`, additionalProperties: true }
    },
    required: ["id", "data"]
  },
});

// Delete tool: ID only.
const makeDeleteTool = (entity: CrudEntity): Tool => ({
  name: `payload_delete_${entity}`,
  description: `Delete a ${entity} from KLZ Payload CMS`,
  inputSchema: {
    type: "object",
    properties: {
      id: { type: "string", description: `${cap(entity)} ID to delete` }
    },
    required: ["id"]
  },
});

// --- Products -------------------------------------------------------------
const SEARCH_PRODUCTS_TOOL: Tool = {
  name: "payload_search_products",
  description: "Search for technical product specifications (cables, cross-sections) in KLZ Payload CMS",
  inputSchema: {
    type: "object",
    properties: {
      query: { type: "string", description: "Search query or part number" },
      limit: { type: "number", description: "Maximum number of results" },
    },
  },
};
const GET_PRODUCT_TOOL = makeGetTool('product');
const CREATE_PRODUCT_TOOL = makeCreateTool('product');
const UPDATE_PRODUCT_TOOL = makeUpdateTool('product');
const DELETE_PRODUCT_TOOL = makeDeleteTool('product');

// --- Leads (ID-addressed only; created from an email address) -------------
const LIST_LEADS_TOOL: Tool = {
  name: "payload_list_leads",
  description: "List recent lead inquiries and contact requests",
  inputSchema: {
    type: "object",
    properties: {
      limit: { type: "number", description: "Maximum number of leads" },
    },
  },
};
const GET_LEAD_TOOL: Tool = {
  name: "payload_get_lead",
  description: "Get a specific lead by ID",
  inputSchema: {
    type: "object",
    properties: {
      id: { type: "string", description: "Lead ID" }
    },
    required: ["id"]
  },
};
const CREATE_LEAD_TOOL: Tool = {
  name: "payload_create_lead",
  description: "Create a new lead in KLZ Payload CMS",
  inputSchema: {
    type: "object",
    properties: {
      email: { type: "string", description: "Lead email address" },
      data: { type: "object", description: "Additional lead data (JSON)", additionalProperties: true }
    },
    required: ["email"]
  },
};
const UPDATE_LEAD_TOOL = makeUpdateTool('lead');
const DELETE_LEAD_TOOL = makeDeleteTool('lead');

// --- Pages ----------------------------------------------------------------
const LIST_PAGES_TOOL: Tool = {
  name: "payload_list_pages",
  description: "List pages from KLZ Payload CMS",
  inputSchema: {
    type: "object",
    properties: {
      limit: { type: "number", description: "Maximum number of pages" },
    },
  },
};
const GET_PAGE_TOOL = makeGetTool('page');
const CREATE_PAGE_TOOL = makeCreateTool('page');
const UPDATE_PAGE_TOOL = makeUpdateTool('page');
const DELETE_PAGE_TOOL = makeDeleteTool('page');

// --- Posts ----------------------------------------------------------------
const LIST_POSTS_TOOL: Tool = {
  name: "payload_list_posts",
  description: "List posts/articles from KLZ Payload CMS",
  inputSchema: {
    type: "object",
    properties: {
      limit: { type: "number", description: "Maximum number of posts" },
    },
  },
};
const GET_POST_TOOL = makeGetTool('post');
const CREATE_POST_TOOL = makeCreateTool('post');
const UPDATE_POST_TOOL = makeUpdateTool('post');
const DELETE_POST_TOOL = makeDeleteTool('post');
// MCP server instance; only the `tools` capability is advertised.
const server = new Server(
  { name: "klz-payload-mcp", version: "1.0.0" },
  { capabilities: { tools: {} } }
);
// tools/list response: the full CRUD surface over products, leads, pages, posts.
server.setRequestHandler(ListToolsRequestSchema, async () => ({
  tools: [
    SEARCH_PRODUCTS_TOOL,
    GET_PRODUCT_TOOL,
    CREATE_PRODUCT_TOOL,
    UPDATE_PRODUCT_TOOL,
    DELETE_PRODUCT_TOOL,
    LIST_LEADS_TOOL,
    GET_LEAD_TOOL,
    CREATE_LEAD_TOOL,
    UPDATE_LEAD_TOOL,
    DELETE_LEAD_TOOL,
    LIST_PAGES_TOOL,
    GET_PAGE_TOOL,
    CREATE_PAGE_TOOL,
    UPDATE_PAGE_TOOL,
    DELETE_PAGE_TOOL,
    LIST_POSTS_TOOL,
    GET_POST_TOOL,
    CREATE_POST_TOOL,
    UPDATE_POST_TOOL,
    DELETE_POST_TOOL
  ],
}));
/**
 * Dispatch MCP tool calls to the Payload CMS REST API.
 *
 * The 20 tools form a uniform CRUD pattern over four collections (products,
 * leads, pages, posts), so the former 20 copy-pasted branches are collapsed
 * into table-driven dispatch: the tool name is parsed into verb + entity and
 * routed generically. Observable behavior is preserved exactly:
 *  - read operations surface Payload's first validation error message,
 *  - write operations surface the raw error response body as JSON,
 *  - get-by-slug falls back to a where-clause query returning docs[0] or {}.
 * Robustness fix: a missing `arguments` object no longer throws on destructure.
 */
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const name = request.params.name;
  const args = (request.params.arguments ?? {}) as any;

  // The exact tool set this server advertises; anything else is unknown.
  const KNOWN_TOOLS = new Set([
    'payload_search_products',
    'payload_get_product', 'payload_create_product', 'payload_update_product', 'payload_delete_product',
    'payload_list_leads', 'payload_get_lead', 'payload_create_lead', 'payload_update_lead', 'payload_delete_lead',
    'payload_list_pages', 'payload_get_page', 'payload_create_page', 'payload_update_page', 'payload_delete_page',
    'payload_list_posts', 'payload_get_post', 'payload_create_post', 'payload_update_post', 'payload_delete_post',
  ]);
  if (!KNOWN_TOOLS.has(name)) {
    throw new Error(`Unknown tool: ${name}`);
  }

  // Result/error helpers preserving the original formatting byte-for-byte.
  const jsonResult = (data: unknown) => ({ content: [{ type: "text", text: JSON.stringify(data, null, 2) }] });
  const readError = (e: any) => ({ isError: true, content: [{ type: "text", text: `Error: ${e.response?.data?.errors?.[0]?.message || e.message}` }] });
  const writeError = (e: any) => ({ isError: true, content: [{ type: "text", text: `Error: ${JSON.stringify(e.response?.data) || e.message}` }] });

  // Parse "payload_<verb>_<noun>"; KNOWN_TOOLS guarantees a match.
  const m = name.match(/^payload_([a-z]+)_([a-z]+)$/);
  if (!m) throw new Error(`Unknown tool: ${name}`);
  const [, verb, noun] = m;
  // list/search tools use the plural noun; normalize to the collection slug.
  const collection = noun.endsWith('s') ? noun : `${noun}s`;

  try {
    switch (verb) {
      case 'search': {
        // Products only: free-text OR-match over title/slug/description.
        const { query, limit = 10 } = args;
        const where = query
          ? { or: [{ title: { contains: query } }, { slug: { contains: query } }, { description: { contains: query } }] }
          : {};
        const res = await payloadClient.get(`/${collection}`, { params: { where, limit } });
        return jsonResult(res.data.docs);
      }
      case 'list': {
        const { limit = 10 } = args;
        // Leads and posts list newest-first; pages keep the default order.
        const params: Record<string, any> = { limit };
        if (collection === 'leads' || collection === 'posts') params.sort = '-createdAt';
        const res = await payloadClient.get(`/${collection}`, { params });
        return jsonResult(res.data.docs);
      }
      case 'get': {
        if (collection === 'leads') {
          // Leads are addressable by ID only.
          const res = await payloadClient.get(`/${collection}/${args.id}`);
          return jsonResult(res.data);
        }
        const { slug, id } = args;
        if (id) {
          const res = await payloadClient.get(`/${collection}/${id}`);
          return jsonResult(res.data);
        }
        if (slug) {
          const res = await payloadClient.get(`/${collection}`, { params: { where: { slug: { equals: slug } }, limit: 1 } });
          return jsonResult(res.data.docs[0] || {});
        }
        return { isError: true, content: [{ type: "text", text: "Error: must provide slug or id" }] };
      }
      case 'create': {
        // Leads key on email; content collections key on title and start as drafts.
        const { data = {} } = args;
        const body = collection === 'leads'
          ? { email: args.email, ...data }
          : { title: args.title, slug: args.slug, _status: 'draft', ...data };
        const res = await payloadClient.post(`/${collection}`, body);
        return jsonResult(res.data);
      }
      case 'update': {
        const res = await payloadClient.patch(`/${collection}/${args.id}`, args.data);
        return jsonResult(res.data);
      }
      case 'delete': {
        const res = await payloadClient.delete(`/${collection}/${args.id}`);
        return jsonResult(res.data);
      }
    }
  } catch (e: any) {
    // Mutations used the raw-body error style; reads used the message style.
    return verb === 'create' || verb === 'update' || verb === 'delete'
      ? writeError(e)
      : readError(e);
  }
  // Unreachable: every known verb returns inside the switch.
  throw new Error(`Unknown tool: ${name}`);
});
async function run() {
const isStdio = process.argv.includes('--stdio');
if (isStdio) {
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
const transport = new StdioServerTransport();
await server.connect(transport);
console.error('KLZ Payload MCP server is running on stdio');
} else {
const app = express();
let transport: SSEServerTransport | null = null;
app.get('/sse', async (req: Request, res: Response) => {
console.error('New SSE connection established');
transport = new SSEServerTransport('/message', res);
await server.connect(transport);
});
app.post('/message', async (req: Request, res: Response) => {
if (!transport) {
res.status(400).send('No active SSE connection');
return;
}
await transport.handlePostMessage(req, res);
});
const PORT = process.env.KLZ_PAYLOAD_MCP_PORT || 3006;
app.listen(PORT, () => {
console.error(`KLZ Payload MCP server running on http://localhost:${PORT}/sse`);
});
}
}
// Top-level entry point: any unhandled startup error is fatal for the process.
run().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});

View File

@@ -0,0 +1,13 @@
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';

// Bootstrap: prepare the environment before the server module is evaluated.
const baseDir = fileURLToPath(new URL('.', import.meta.url));

// dotenv does not override variables that are already set, so .env.local
// (loaded first) takes priority over the shared .env defaults.
for (const envFile of ['../../../.env.local', '../../../.env']) {
  config({ path: resolve(baseDir, envFile) });
}

// Boot the compiled server only after the environment is loaded.
import('./index.js').catch(err => {
  console.error('Failed to start KLZ Payload MCP Server:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": [
"src/**/*"
]
}

View File

@@ -6,7 +6,7 @@
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"start": "node dist/start.js",
"dev": "tsx watch src/index.ts",
"test:unit": "vitest run"
},
@@ -14,12 +14,15 @@
"@modelcontextprotocol/sdk": "^1.5.0",
"@qdrant/js-client-rest": "^1.12.0",
"@xenova/transformers": "^2.17.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"typescript": "^5.5.3",
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"tsx": "^4.19.1",
"typescript": "^5.5.3",
"vitest": "^2.1.3"
}
}
}

View File

@@ -1,5 +1,6 @@
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js';
import express from 'express';
import { z } from 'zod';
import { QdrantMemoryService } from './qdrant.js';
@@ -11,14 +12,6 @@ async function main() {
const qdrantService = new QdrantMemoryService(process.env.QDRANT_URL || 'http://localhost:6333');
// Initialize embedding model and Qdrant connection
try {
await qdrantService.initialize();
} catch (e) {
console.error('Failed to initialize local dependencies. Exiting.');
process.exit(1);
}
server.tool(
'store_memory',
'Store a new piece of knowledge/memory into the vector database. Use this to remember architectural decisions, preferences, aliases, etc.',
@@ -67,12 +60,53 @@ async function main() {
}
);
const transport = new StdioServerTransport();
await server.connect(transport);
console.error('Memory MCP server is running and ready to accept connections over stdio.');
const isStdio = process.argv.includes('--stdio');
if (isStdio) {
// Connect Stdio FIRST to avoid handshake timeouts while loading model
const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
const transport = new StdioServerTransport();
await server.connect(transport);
console.error('Memory MCP server is running on stdio');
// Initialize dependency after connection
try {
await qdrantService.initialize();
} catch (e) {
console.error('Failed to initialize local dependencies:', e);
}
} else {
const app = express();
let transport: SSEServerTransport | null = null;
app.get('/sse', async (req, res) => {
console.error('New SSE connection established');
transport = new SSEServerTransport('/message', res);
await server.connect(transport);
});
app.post('/message', async (req, res) => {
if (!transport) {
res.status(400).send('No active SSE connection');
return;
}
await transport.handlePostMessage(req, res);
});
const PORT = process.env.MEMORY_MCP_PORT || 3002;
app.listen(PORT, async () => {
console.error(`Memory MCP server running on http://localhost:${PORT}/sse`);
// Initialize dependencies in SSE mode on startup
try {
await qdrantService.initialize();
} catch (e) {
console.error('Failed to initialize local dependencies:', e);
}
});
}
}
main().catch((error) => {
console.error('Fatal error in main():', error);
console.error('Fatal error:', error);
process.exit(1);
});

View File

@@ -0,0 +1,16 @@
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
const __dirname = fileURLToPath(new URL('.', import.meta.url));
// Try to load .env.local first (contains credentials usually)
config({ path: resolve(__dirname, '../../../.env.local') });
// Fallback to .env (contains defaults)
config({ path: resolve(__dirname, '../../../.env') });
// Now boot the compiled MCP index
import('./index.js').catch(err => {
console.error('Failed to start MCP Server:', err);
process.exit(1);
});

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/page-audit",
"version": "1.9.10",
"private": true,
"description": "AI-powered website IST-analysis using DataForSEO and Gemini",
"type": "module",
"main": "./dist/index.js",

View File

@@ -0,0 +1,2 @@
@mintel:registry=https://git.infra.mintel.me/api/packages/mmintel/npm/
//git.infra.mintel.me/api/packages/mmintel/npm/:_authToken=263e7f75d8ada27f3a2e71fd6bd9d95298d48a4d

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/payload-ai",
"version": "1.9.10",
"private": true,
"version": "1.9.15",
"description": "Reusable Payload CMS AI Extensions",
"type": "module",
"scripts": {

View File

@@ -1,6 +1,8 @@
import type { Config, Plugin } from 'payload'
import { AIChatPermissionsCollection } from './collections/AIChatPermissions.js'
import type { PayloadChatPluginConfig } from './types.js'
import { optimizePostEndpoint } from './endpoints/optimizeEndpoint.js'
import { generateSlugEndpoint, generateThumbnailEndpoint, generateSingleFieldEndpoint } from './endpoints/generateEndpoints.js'
export const payloadChatPlugin =
(pluginOptions: PayloadChatPluginConfig): Plugin =>
@@ -48,6 +50,26 @@ export const payloadChatPlugin =
return Response.json({ message: "Chat endpoint active" })
},
},
{
path: '/api/mintel-ai/optimize',
method: 'post',
handler: optimizePostEndpoint,
},
{
path: '/api/mintel-ai/generate-slug',
method: 'post',
handler: generateSlugEndpoint,
},
{
path: '/api/mintel-ai/generate-thumbnail',
method: 'post',
handler: generateThumbnailEndpoint,
},
{
path: '/api/mintel-ai/generate-single-field',
method: 'post',
handler: generateSingleFieldEndpoint,
},
]
// 3. Inject Chat React Component into Admin UI

View File

@@ -2,8 +2,6 @@
import React, { useState } from "react";
import { useField, useDocumentInfo, useForm } from "@payloadcms/ui";
import { generateSingleFieldAction } from "../../actions/generateField.js";
export function AiFieldButton({ path, field }: { path: string; field: any }) {
const [isGenerating, setIsGenerating] = useState(false);
const [instructions, setInstructions] = useState("");
@@ -44,19 +42,26 @@ export function AiFieldButton({ path, field }: { path: string; field: any }) {
? field.admin.description
: "";
const res = await generateSingleFieldAction(
(title as string) || "",
draftContent,
fieldName,
fieldDescription,
instructions,
);
const resData = await fetch("/api/api/mintel-ai/generate-single-field", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
documentTitle: (title as string) || "",
documentContent: draftContent,
fieldName,
fieldDescription,
instructions,
}),
});
const res = await resData.json();
if (res.success && res.text) {
setValue(res.text);
} else {
alert("Fehler: " + res.error);
}
} catch (e) {
} catch (e: any) {
console.error(e)
alert("Fehler bei der Generierung.");
} finally {
setIsGenerating(false);

View File

@@ -2,8 +2,6 @@
import React, { useState, useEffect } from "react";
import { useForm, useField } from "@payloadcms/ui";
import { generateSlugAction } from "../../actions/generateField.js";
export function GenerateSlugButton({ path }: { path: string }) {
const [isGenerating, setIsGenerating] = useState(false);
const [instructions, setInstructions] = useState("");
@@ -45,18 +43,24 @@ export function GenerateSlugButton({ path }: { path: string }) {
setIsGenerating(true);
try {
const res = await generateSlugAction(
title,
draftContent,
initialValue as string,
instructions,
);
const resData = await fetch("/api/api/mintel-ai/generate-slug", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
title,
draftContent,
oldSlug: initialValue as string,
instructions,
}),
});
const res = await resData.json();
if (res.success && res.slug) {
setValue(res.slug);
} else {
alert("Fehler: " + res.error);
}
} catch (e) {
} catch (e: any) {
console.error(e);
alert("Unerwarteter Fehler.");
} finally {

View File

@@ -2,8 +2,6 @@
import React, { useState, useEffect } from "react";
import { useForm, useField } from "@payloadcms/ui";
import { generateThumbnailAction } from "../../actions/generateField.js";
export function GenerateThumbnailButton({ path }: { path: string }) {
const [isGenerating, setIsGenerating] = useState(false);
const [instructions, setInstructions] = useState("");
@@ -45,17 +43,23 @@ export function GenerateThumbnailButton({ path }: { path: string }) {
setIsGenerating(true);
try {
const res = await generateThumbnailAction(
draftContent,
title,
instructions,
);
const resData = await fetch("/api/api/mintel-ai/generate-thumbnail", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
draftContent,
title,
instructions,
}),
});
const res = await resData.json();
if (res.success && res.mediaId) {
setValue(res.mediaId);
} else {
alert("Fehler: " + res.error);
}
} catch (e) {
} catch (e: any) {
console.error(e);
alert("Unerwarteter Fehler.");
} finally {

View File

@@ -2,7 +2,6 @@
import React, { useState, useEffect } from "react";
import { useForm, useDocumentInfo } from "@payloadcms/ui";
import { optimizePostText } from "../actions/optimizePost.js";
import { Button } from "@payloadcms/ui";
export function OptimizeButton() {
@@ -57,7 +56,12 @@ export function OptimizeButton() {
// 2. We inject the title so the AI knows what it's writing about
const payloadText = `---\ntitle: "${title}"\n---\n\n${draftContent}`;
const response = await optimizePostText(payloadText, instructions);
const res = await fetch("/api/api/mintel-ai/optimize", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ draftContent: payloadText, instructions }),
});
const response = await res.json();
if (response.success && response.lexicalAST) {
// 3. Inject the new Lexical AST directly into the field form state

View File

@@ -1,8 +1,4 @@
"use server";
import { getPayloadHMR } from "@payloadcms/next/utilities";
// @ts-ignore - dynamic config resolution from next.js payload plugin
import configPromise from "@payload-config";
import { PayloadRequest } from "payload";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import * as os from "node:os";
@@ -30,13 +26,9 @@ async function getOrchestrator() {
});
}
export async function generateSlugAction(
title: string,
draftContent: string,
oldSlug?: string,
instructions?: string,
) {
export const generateSlugEndpoint = async (req: PayloadRequest) => {
try {
const { title, draftContent, oldSlug, instructions } = (await req.json?.() || {}) as any;
const orchestrator = await getOrchestrator();
const newSlug = await orchestrator.generateSlug(
draftContent,
@@ -45,9 +37,8 @@ export async function generateSlugAction(
);
if (oldSlug && oldSlug !== newSlug) {
const payload = await getPayloadHMR({ config: configPromise as any });
await payload.create({
collection: "redirects",
await req.payload.create({
collection: "redirects" as any,
data: {
from: oldSlug,
to: newSlug,
@@ -55,42 +46,25 @@ export async function generateSlugAction(
});
}
return { success: true, slug: newSlug };
return Response.json({ success: true, slug: newSlug });
} catch (e: any) {
return { success: false, error: e.message };
return Response.json({ success: false, error: e.message }, { status: 500 });
}
}
export async function generateThumbnailAction(
draftContent: string,
title?: string,
instructions?: string,
) {
export const generateThumbnailEndpoint = async (req: PayloadRequest) => {
try {
const payload = await getPayloadHMR({ config: configPromise as any });
const { draftContent, title, instructions } = (await req.json?.() || {}) as any;
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) {
throw new Error("Missing OPENROUTER_API_KEY in .env");
}
if (!REPLICATE_KEY) {
throw new Error(
"Missing REPLICATE_API_KEY in .env (Required for Thumbnails)",
);
}
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY in .env");
if (!REPLICATE_KEY) throw new Error("Missing REPLICATE_API_KEY in .env");
const importDynamic = new Function(
"modulePath",
"return import(modulePath)",
);
const { AiBlogPostOrchestrator } = await importDynamic(
"@mintel/content-engine",
);
const { ThumbnailGenerator } = await importDynamic(
"@mintel/thumbnail-generator",
);
const importDynamic = new Function("modulePath", "return import(modulePath)");
const { AiBlogPostOrchestrator } = await importDynamic("@mintel/content-engine");
const { ThumbnailGenerator } = await importDynamic("@mintel/thumbnail-generator");
const orchestrator = new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
@@ -112,8 +86,8 @@ export async function generateThumbnailAction(
const stat = await fs.stat(tmpPath);
const fileName = path.basename(tmpPath);
const newMedia = await payload.create({
collection: "media",
const newMedia = await req.payload.create({
collection: "media" as any,
data: {
alt: title ? `Thumbnail for ${title}` : "AI Generated Thumbnail",
},
@@ -125,31 +99,24 @@ export async function generateThumbnailAction(
},
});
// Cleanup temp file
await fs.unlink(tmpPath).catch(() => { });
return { success: true, mediaId: newMedia.id };
return Response.json({ success: true, mediaId: newMedia.id });
} catch (e: any) {
return { success: false, error: e.message };
return Response.json({ success: false, error: e.message }, { status: 500 });
}
}
export async function generateSingleFieldAction(
documentTitle: string,
documentContent: string,
fieldName: string,
fieldDescription: string,
instructions?: string,
) {
export const generateSingleFieldEndpoint = async (req: PayloadRequest) => {
try {
const { documentTitle, documentContent, fieldName, fieldDescription, instructions } = (await req.json?.() || {}) as any;
const OPENROUTER_KEY =
process.env.OPENROUTER_KEY || process.env.OPENROUTER_API_KEY;
if (!OPENROUTER_KEY) throw new Error("Missing OPENROUTER_API_KEY");
const payload = await getPayloadHMR({ config: configPromise as any });
// Fetch context documents from DB
const contextDocsData = await payload.find({
collection: "context-files",
const contextDocsData = await req.payload.find({
collection: "context-files" as any,
limit: 100,
});
const projectContext = contextDocsData.docs
@@ -184,8 +151,8 @@ CRITICAL RULES:
});
const data = await res.json();
const text = data.choices?.[0]?.message?.content?.trim() || "";
return { success: true, text };
return Response.json({ success: true, text });
} catch (e: any) {
return { success: false, error: e.message };
return Response.json({ success: false, error: e.message }, { status: 500 });
}
}

View File

@@ -1,17 +1,15 @@
"use server";
import { PayloadRequest } from 'payload'
import { parseMarkdownToLexical } from "../utils/lexicalParser.js";
import { parseMarkdownToLexical } from "../utils/lexicalParser";
import { getPayloadHMR } from "@payloadcms/next/utilities";
// @ts-ignore - dynamic config resolution from next.js payload plugin
import configPromise from "@payload-config";
export async function optimizePostText(
draftContent: string,
instructions?: string,
) {
export const optimizePostEndpoint = async (req: PayloadRequest) => {
try {
const payload = await getPayloadHMR({ config: configPromise as any });
const globalAiSettings = (await payload.findGlobal({ slug: "ai-settings" })) as any;
const { draftContent, instructions } = (await req.json?.() || {}) as { draftContent: string; instructions?: string };
if (!draftContent) {
return Response.json({ error: 'Missing draftContent' }, { status: 400 })
}
const globalAiSettings = (await req.payload.findGlobal({ slug: "ai-settings" })) as any;
const customSources =
globalAiSettings?.customSources?.map((s: any) => s.sourceName) || [];
@@ -20,18 +18,12 @@ export async function optimizePostText(
const REPLICATE_KEY = process.env.REPLICATE_API_KEY;
if (!OPENROUTER_KEY) {
throw new Error(
"OPENROUTER_KEY or OPENROUTER_API_KEY not found in environment.",
);
return Response.json({ error: "OPENROUTER_KEY not found in environment." }, { status: 500 })
}
const importDynamic = new Function(
"modulePath",
"return import(modulePath)",
);
const { AiBlogPostOrchestrator } = await importDynamic(
"@mintel/content-engine",
);
// Dynamically import to avoid bundling it into client components that might accidentally import this file
const importDynamic = new Function("modulePath", "return import(modulePath)");
const { AiBlogPostOrchestrator } = await importDynamic("@mintel/content-engine");
const orchestrator = new AiBlogPostOrchestrator({
apiKey: OPENROUTER_KEY,
@@ -39,9 +31,8 @@ export async function optimizePostText(
model: "google/gemini-3-flash-preview",
});
// Fetch context documents purely from DB
const contextDocsData = await payload.find({
collection: "context-files",
const contextDocsData = await req.payload.find({
collection: "context-files" as any,
limit: 100,
});
const projectContext = contextDocsData.docs.map((doc: any) => doc.content);
@@ -49,19 +40,19 @@ export async function optimizePostText(
const optimizedMarkdown = await orchestrator.optimizeDocument({
content: draftContent,
projectContext,
availableComponents: [], // Removed hardcoded config.components dependency
availableComponents: [],
instructions,
internalLinks: [],
customSources,
});
if (!optimizedMarkdown || typeof optimizedMarkdown !== "string") {
throw new Error("AI returned invalid markup.");
return Response.json({ error: "AI returned invalid markup." }, { status: 500 })
}
const blocks = parseMarkdownToLexical(optimizedMarkdown);
return {
return Response.json({
success: true,
lexicalAST: {
root: {
@@ -73,12 +64,12 @@ export async function optimizePostText(
direction: "ltr",
},
},
};
})
} catch (error: any) {
console.error("Failed to optimize post:", error);
return {
console.error("Failed to optimize post in endpoint:", error);
return Response.json({
success: false,
error: error.message || "An unknown error occurred during optimization.",
};
}, { status: 500 })
}
}

View File

@@ -3,16 +3,14 @@
* Primary entry point for reusing Mintel AI extensions in Payload CMS.
*/
export * from './globals/AiSettings';
export * from './actions/generateField';
export * from './actions/optimizePost';
export * from './components/FieldGenerators/AiFieldButton';
export * from './components/AiMediaButtons';
export * from './components/OptimizeButton';
export * from './components/FieldGenerators/GenerateThumbnailButton';
export * from './components/FieldGenerators/GenerateSlugButton';
export * from './utils/lexicalParser';
export * from './endpoints/replicateMediaEndpoint';
export * from './globals/AiSettings.js';
export * from './components/FieldGenerators/AiFieldButton.js';
export * from './components/AiMediaButtons.js';
export * from './components/OptimizeButton.js';
export * from './components/FieldGenerators/GenerateThumbnailButton.js';
export * from './components/FieldGenerators/GenerateSlugButton.js';
export * from './utils/lexicalParser.js';
export * from './endpoints/replicateMediaEndpoint.js';
export * from './chatPlugin.js';
export * from './types.js';
export * from './endpoints/chatEndpoint.js';

View File

@@ -1,5 +1,8 @@
declare module "@payload-config" {
import { Config } from "payload";
const configPromise: Promise<any>;
export default configPromise;
export type PayloadChatPluginConfig = {
enabled?: boolean
/** Render the chat bubble on the bottom right? Defaults to true */
renderChatBubble?: boolean
allowedCollections?: string[]
mcpServers?: any[]
}

View File

@@ -1,7 +1,6 @@
{
"name": "@mintel/seo-engine",
"version": "1.9.10",
"private": true,
"description": "AI-powered SEO keyword and topic cluster evaluation engine",
"type": "module",
"main": "./dist/index.js",

View File

@@ -0,0 +1,25 @@
{
"name": "@mintel/serpbear-mcp",
"version": "1.9.10",
"description": "SerpBear SEO Tracking MCP server for Mintel infrastructure",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/start.js",
"dev": "tsx watch src/index.ts"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.5.0",
"axios": "^1.7.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"typescript": "^5.5.3",
"tsx": "^4.19.2"
}
}

View File

@@ -0,0 +1,243 @@
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import express from 'express';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
Tool,
} from "@modelcontextprotocol/sdk/types.js";
import axios from "axios";
import https from "https";
// --- Configuration ---
// Base URL of the SerpBear instance; overridable via environment for non-default deployments.
const SERPBEAR_BASE_URL = process.env.SERPBEAR_BASE_URL || "https://serpbear.infra.mintel.me";
const SERPBEAR_API_KEY = process.env.SERPBEAR_API_KEY;
if (!SERPBEAR_API_KEY) {
  // Warn early instead of failing: the server can still start and report errors per call.
  console.error("Warning: SERPBEAR_API_KEY is not set. API calls will fail.");
}
// Pre-configured axios client for the SerpBear REST API.
// NOTE(review): rejectUnauthorized=false disables TLS certificate validation —
// presumably for a self-signed internal certificate; confirm this is intentional.
const serpbearClient = axios.create({
  baseURL: `${SERPBEAR_BASE_URL}/api`,
  headers: { apiKey: SERPBEAR_API_KEY },
  httpsAgent: new https.Agent({
    rejectUnauthorized: false,
  }),
});
// --- Tool Definitions ---
// Static MCP Tool schemas advertised via ListTools. Each `inputSchema` is a
// JSON Schema object describing the arguments the tool accepts.
const LIST_DOMAINS_TOOL: Tool = {
  name: "serpbear_list_domains",
  description: "List all domains/projects tracked in SerpBear",
  inputSchema: { type: "object", properties: {} },
};
const GET_KEYWORDS_TOOL: Tool = {
  name: "serpbear_get_keywords",
  description: "Get all tracked keywords for a domain, with their current ranking positions",
  inputSchema: {
    type: "object",
    properties: {
      domain_id: { type: "string", description: "Domain ID from serpbear_list_domains" },
    },
    required: ["domain_id"],
  },
};
const ADD_KEYWORDS_TOOL: Tool = {
  name: "serpbear_add_keywords",
  description: "Add new keywords to track for a domain",
  inputSchema: {
    type: "object",
    properties: {
      domain_id: { type: "string", description: "Domain ID" },
      keywords: {
        type: "array",
        items: { type: "string" },
        description: "List of keywords to add (e.g., ['Webentwickler Frankfurt', 'Next.js Agentur'])"
      },
      country: { type: "string", description: "Country code for SERP tracking (e.g., 'de', 'us'). Default: 'de'" },
      device: { type: "string", description: "Device type: 'desktop' or 'mobile'. Default: 'desktop'" },
    },
    required: ["domain_id", "keywords"],
  },
};
const DELETE_KEYWORDS_TOOL: Tool = {
  name: "serpbear_delete_keywords",
  description: "Remove keywords from tracking",
  inputSchema: {
    type: "object",
    properties: {
      keyword_ids: {
        type: "array",
        items: { type: "number" },
        description: "Array of keyword IDs to delete"
      },
    },
    required: ["keyword_ids"],
  },
};
const REFRESH_KEYWORDS_TOOL: Tool = {
  name: "serpbear_refresh_keywords",
  description: "Trigger an immediate SERP position refresh for specific keywords",
  inputSchema: {
    type: "object",
    properties: {
      keyword_ids: {
        type: "array",
        items: { type: "number" },
        description: "List of keyword IDs to refresh"
      },
    },
    required: ["keyword_ids"],
  },
};
const GET_KEYWORD_HISTORY_TOOL: Tool = {
  name: "serpbear_get_keyword_history",
  description: "Get the ranking history for a specific keyword over time",
  inputSchema: {
    type: "object",
    properties: {
      keyword_id: { type: "number", description: "Keyword ID from serpbear_get_keywords" },
    },
    required: ["keyword_id"],
  },
};
// --- Server Setup ---
const server = new Server(
{ name: "serpbear-mcp", version: "1.0.0" },
{ capabilities: { tools: {} } }
);
server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [
LIST_DOMAINS_TOOL,
GET_KEYWORDS_TOOL,
ADD_KEYWORDS_TOOL,
DELETE_KEYWORDS_TOOL,
REFRESH_KEYWORDS_TOOL,
GET_KEYWORD_HISTORY_TOOL,
],
}));
// Route each tool invocation to the corresponding SerpBear REST endpoint.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const args = request.params.arguments as any;
  // Helpers wrapping a payload / an error into the MCP text-content envelope.
  const ok = (text: string) => ({ content: [{ type: "text" as const, text }] });
  const fail = (e: any) => ({ isError: true, content: [{ type: "text" as const, text: `Error: ${e.message}` }] });
  switch (request.params.name) {
    case "serpbear_list_domains": {
      try {
        const res = await serpbearClient.get('/domains');
        // Reduce the response to the fields useful for follow-up tool calls.
        const domains = (res.data.domains || []).map((d: any) => ({
          id: d.id, domain: d.domain, keywords: d.keywordCount
        }));
        return ok(JSON.stringify(domains, null, 2));
      } catch (e: any) {
        return fail(e);
      }
    }
    case "serpbear_get_keywords": {
      const { domain_id } = args;
      try {
        const res = await serpbearClient.get('/keywords', { params: { domain: domain_id } });
        const keywords = (res.data.keywords || []).map((k: any) => ({
          id: k.id,
          keyword: k.keyword,
          position: k.position,
          lastUpdated: k.lastUpdated,
          country: k.country,
          device: k.device,
          change: k.position_change ?? null,
        }));
        return ok(JSON.stringify(keywords, null, 2));
      } catch (e: any) {
        return fail(e);
      }
    }
    case "serpbear_add_keywords": {
      const { domain_id, keywords, country = 'de', device = 'desktop' } = args;
      try {
        const res = await serpbearClient.post('/keywords', {
          domain: domain_id,
          keywords: keywords.map((kw: string) => ({ keyword: kw, country, device })),
        });
        return ok(`Added ${keywords.length} keywords. Result: ${JSON.stringify(res.data)}`);
      } catch (e: any) {
        return fail(e);
      }
    }
    case "serpbear_delete_keywords": {
      const { keyword_ids } = args;
      try {
        // axios DELETE carries the request body via the `data` config field.
        await serpbearClient.delete('/keywords', { data: { ids: keyword_ids } });
        return ok(`Deleted ${keyword_ids.length} keywords.`);
      } catch (e: any) {
        return fail(e);
      }
    }
    case "serpbear_refresh_keywords": {
      const { keyword_ids } = args;
      try {
        await serpbearClient.post('/keywords/refresh', { ids: keyword_ids });
        return ok(`Triggered refresh for ${keyword_ids.length} keywords.`);
      } catch (e: any) {
        return fail(e);
      }
    }
    case "serpbear_get_keyword_history": {
      const { keyword_id } = args;
      try {
        const res = await serpbearClient.get(`/keywords/${keyword_id}/history`);
        return ok(JSON.stringify(res.data, null, 2));
      } catch (e: any) {
        return fail(e);
      }
    }
    default:
      throw new Error(`Unknown tool: ${request.params.name}`);
  }
});
// --- Express / SSE Server ---
/**
 * Boot the MCP server on the selected transport: stdio when launched with
 * `--stdio` (for local MCP clients), otherwise an Express app serving the SSE
 * handshake on GET /sse and message delivery on POST /message.
 */
async function run() {
  const isStdio = process.argv.includes('--stdio');
  if (isStdio) {
    const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
    const transport = new StdioServerTransport();
    await server.connect(transport);
    console.error('SerpBear MCP server is running on stdio');
  } else {
    const app = express();
    // One transport per SSE session: a single shared variable would be
    // overwritten by each new client, silently breaking earlier sessions.
    const transports = new Map<string, SSEServerTransport>();
    app.get('/sse', async (req, res) => {
      console.error('New SSE connection established');
      const transport = new SSEServerTransport('/message', res);
      transports.set(transport.sessionId, transport);
      // Clean up when the client disconnects so dead sessions don't accumulate.
      res.on('close', () => transports.delete(transport.sessionId));
      await server.connect(transport);
    });
    app.post('/message', async (req, res) => {
      // SSEServerTransport advertises /message?sessionId=...; fall back to the
      // first live transport for clients that omit the query parameter.
      const sessionId = typeof req.query.sessionId === 'string' ? req.query.sessionId : undefined;
      const transport = sessionId
        ? transports.get(sessionId)
        : transports.values().next().value;
      if (!transport) {
        res.status(400).send('No active SSE connection');
        return;
      }
      await transport.handlePostMessage(req, res);
    });
    const PORT = process.env.SERPBEAR_MCP_PORT || 3004;
    app.listen(PORT, () => {
      console.error(`SerpBear MCP server running on http://localhost:${PORT}/sse`);
    });
  }
}
// Top-level entry point: any unhandled startup error is fatal for the process.
run().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});

View File

@@ -0,0 +1,13 @@
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
// ESM has no __dirname; derive it from this module's URL.
const __dirname = fileURLToPath(new URL('.', import.meta.url));
// Load .env.local first — dotenv does not override variables it already set,
// so values here take priority over the shared .env defaults loaded next.
config({ path: resolve(__dirname, '../../../.env.local') });
config({ path: resolve(__dirname, '../../../.env') });
// Boot the compiled server only after the environment is prepared.
import('./index.js').catch(err => {
  console.error('Failed to start SerpBear MCP Server:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": [
"src/**/*"
]
}

View File

@@ -0,0 +1,25 @@
{
"name": "@mintel/umami-mcp",
"version": "1.9.10",
"description": "Umami Analytics MCP server for Mintel infrastructure",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/start.js",
"dev": "tsx watch src/index.ts"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.5.0",
"axios": "^1.7.2",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^20.14.10",
"typescript": "^5.5.3",
"tsx": "^4.19.2"
}
}

View File

@@ -0,0 +1,280 @@
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import express from 'express';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
Tool,
} from "@modelcontextprotocol/sdk/types.js";
import axios from "axios";
import https from "https";
// --- Configuration ---
// Base URL of the Umami instance; overridable via environment.
const UMAMI_BASE_URL = process.env.UMAMI_BASE_URL || "https://umami.infra.mintel.me";
const UMAMI_USERNAME = process.env.UMAMI_USERNAME;
const UMAMI_PASSWORD = process.env.UMAMI_PASSWORD;
const UMAMI_API_KEY = process.env.UMAMI_API_KEY; // optional if using API key auth
// NOTE(review): certificate validation is disabled — presumably for a
// self-signed internal certificate; confirm this is intentional.
const httpsAgent = new https.Agent({
  rejectUnauthorized: false,
});
if (!UMAMI_USERNAME && !UMAMI_API_KEY) {
  // Warn but keep running; individual API calls will surface auth failures.
  console.error("Warning: Neither UMAMI_USERNAME/PASSWORD nor UMAMI_API_KEY is set.");
}
// Token cache to avoid logging in on every request
let cachedToken: string | null = null;
// In-flight login request, shared so concurrent first callers don't each
// trigger their own /api/auth/login round-trip.
let loginPromise: Promise<string> | null = null;

/**
 * Build auth headers for Umami API calls.
 *
 * Prefers the static API key when configured; otherwise logs in once with
 * username/password and caches the bearer token for subsequent calls.
 * Concurrent first calls share a single login request, and a failed login is
 * not cached, so the next call retries.
 */
async function getAuthHeaders(): Promise<Record<string, string>> {
  if (UMAMI_API_KEY) {
    return { 'x-umami-api-key': UMAMI_API_KEY };
  }
  if (!cachedToken) {
    if (!loginPromise) {
      loginPromise = axios
        .post(`${UMAMI_BASE_URL}/api/auth/login`, {
          username: UMAMI_USERNAME,
          password: UMAMI_PASSWORD,
        }, { httpsAgent })
        .then((res) => res.data.token as string)
        .finally(() => {
          // Clear the slot so a failed login can be retried on the next call.
          loginPromise = null;
        });
    }
    cachedToken = await loginPromise;
  }
  return { Authorization: `Bearer ${cachedToken}` };
}
// --- Tool Definitions ---
// Each tool maps 1:1 onto a single Umami REST endpoint; inputSchema is plain
// JSON Schema as required by the MCP tools/list contract. Timestamps are Unix
// epoch milliseconds throughout, matching Umami's startAt/endAt parameters.

// GET /api/websites
const LIST_WEBSITES_TOOL: Tool = {
  name: "umami_list_websites",
  description: "List all websites tracked in Umami",
  inputSchema: { type: "object", properties: {} },
};

// GET /api/websites/:id/stats — aggregate totals for a time range.
const GET_WEBSITE_STATS_TOOL: Tool = {
  name: "umami_get_website_stats",
  description: "Get summary statistics for a website for a time range",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms (e.g., Date.now() - 7 days)" },
      end_at: { type: "number", description: "End timestamp in ms (default: now)" },
    },
    required: ["website_id", "start_at"],
  },
};

// GET /api/websites/:id/pageviews — bucketed time series.
const GET_PAGE_VIEWS_TOOL: Tool = {
  name: "umami_get_pageviews",
  description: "Get pageview/session time series for a website",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms (default: now)" },
      unit: { type: "string", description: "Time unit: 'hour', 'day', 'month' (default: day)" },
      timezone: { type: "string", description: "Timezone (default: Europe/Berlin)" },
    },
    required: ["website_id", "start_at"],
  },
};

// GET /api/websites/:id/metrics?type=url
const GET_TOP_PAGES_TOOL: Tool = {
  name: "umami_get_top_pages",
  description: "Get the most visited pages/URLs for a website",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms" },
      limit: { type: "number", description: "Number of results (default: 20)" },
    },
    required: ["website_id", "start_at"],
  },
};

// GET /api/websites/:id/metrics?type=referrer
const GET_TOP_REFERRERS_TOOL: Tool = {
  name: "umami_get_top_referrers",
  description: "Get the top traffic referrers for a website",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms" },
      limit: { type: "number", description: "Number of results (default: 10)" },
    },
    required: ["website_id", "start_at"],
  },
};

// GET /api/websites/:id/metrics?type=country
const GET_COUNTRY_STATS_TOOL: Tool = {
  name: "umami_get_country_stats",
  description: "Get visitor breakdown by country",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
      start_at: { type: "number", description: "Start timestamp in ms" },
      end_at: { type: "number", description: "End timestamp in ms" },
    },
    required: ["website_id", "start_at"],
  },
};

// GET /api/websites/:id/active — no time range; Umami defines "active" itself.
const GET_ACTIVE_VISITORS_TOOL: Tool = {
  name: "umami_get_active_visitors",
  description: "Get the number of visitors currently active on a website (last 5 minutes)",
  inputSchema: {
    type: "object",
    properties: {
      website_id: { type: "string", description: "Umami website UUID" },
    },
    required: ["website_id"],
  },
};
// --- Server Setup ---
const server = new Server(
{ name: "umami-mcp", version: "1.0.0" },
{ capabilities: { tools: {} } }
);
server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [
LIST_WEBSITES_TOOL,
GET_WEBSITE_STATS_TOOL,
GET_PAGE_VIEWS_TOOL,
GET_TOP_PAGES_TOOL,
GET_TOP_REFERRERS_TOOL,
GET_COUNTRY_STATS_TOOL,
GET_ACTIVE_VISITORS_TOOL,
],
}));
/**
 * tools/call dispatcher: every tool is one GET against the Umami REST API.
 *
 * Successful responses are returned verbatim as pretty-printed JSON text;
 * API failures are reported as `isError` results (message only) so the MCP
 * client sees them instead of a transport-level exception. Unknown tool
 * names still throw, matching the original contract.
 */
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  // Auth headers are resolved first on every call (as before); the bearer
  // token itself is cached inside getAuthHeaders(), so this is cheap.
  const headers = await getAuthHeaders();
  const api = axios.create({ baseURL: `${UMAMI_BASE_URL}/api`, headers, httpsAgent });
  const now = Date.now();

  // Shared wrapper for the one-GET-per-tool pattern: success → pretty JSON
  // text content, failure → isError result with the axios error message.
  const call = async (url: string, params?: Record<string, unknown>) => {
    try {
      const res = await api.get(url, params ? { params } : undefined);
      return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
    } catch (e: any) {
      return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
    }
  };

  // Arguments are unvalidated JSON from the client; default to {} so
  // destructuring below cannot throw on a missing arguments object.
  const args = (request.params.arguments ?? {}) as any;

  switch (request.params.name) {
    case "umami_list_websites": {
      try {
        const res = await api.get('/websites');
        // Response shape differs between Umami versions: paginated { data }
        // vs a bare array. Normalize to a slim id/name/domain list.
        const sites = (res.data.data || res.data || []).map((s: any) => ({
          id: s.id, name: s.name, domain: s.domain
        }));
        return { content: [{ type: "text", text: JSON.stringify(sites, null, 2) }] };
      } catch (e: any) {
        return { isError: true, content: [{ type: "text", text: `Error: ${e.message}` }] };
      }
    }
    case "umami_get_website_stats": {
      const { website_id, start_at, end_at = now } = args;
      return call(`/websites/${website_id}/stats`, { startAt: start_at, endAt: end_at });
    }
    case "umami_get_pageviews": {
      const { website_id, start_at, end_at = now, unit = 'day', timezone = 'Europe/Berlin' } = args;
      return call(`/websites/${website_id}/pageviews`, { startAt: start_at, endAt: end_at, unit, timezone });
    }
    case "umami_get_top_pages": {
      const { website_id, start_at, end_at = now, limit = 20 } = args;
      return call(`/websites/${website_id}/metrics`, { startAt: start_at, endAt: end_at, type: 'url', limit });
    }
    case "umami_get_top_referrers": {
      const { website_id, start_at, end_at = now, limit = 10 } = args;
      return call(`/websites/${website_id}/metrics`, { startAt: start_at, endAt: end_at, type: 'referrer', limit });
    }
    case "umami_get_country_stats": {
      const { website_id, start_at, end_at = now } = args;
      return call(`/websites/${website_id}/metrics`, { startAt: start_at, endAt: end_at, type: 'country' });
    }
    case "umami_get_active_visitors": {
      const { website_id } = args;
      return call(`/websites/${website_id}/active`);
    }
    default:
      throw new Error(`Unknown tool: ${request.params.name}`);
  }
});
// --- Express / SSE Server ---

/**
 * Start the MCP server on the transport selected by the CLI.
 *
 * With `--stdio` the server speaks MCP over stdin/stdout (for local clients).
 * Otherwise it exposes an SSE endpoint on /sse plus the paired /message POST
 * endpoint. Transports are keyed by session ID so concurrent SSE clients do
 * not clobber each other — the previous single shared `transport` variable
 * meant a second connecting client silently hijacked the first client's
 * /message channel.
 */
async function run(): Promise<void> {
  const isStdio = process.argv.includes('--stdio');
  if (isStdio) {
    // Lazy import keeps the stdio transport out of the SSE code path.
    const { StdioServerTransport } = await import('@modelcontextprotocol/sdk/server/stdio.js');
    const transport = new StdioServerTransport();
    await server.connect(transport);
    // Logs go to stderr: stdout is the MCP protocol channel in stdio mode.
    console.error('Umami MCP server is running on stdio');
    return;
  }

  const app = express();
  // One live transport per SSE session, keyed by the SDK-generated sessionId.
  const transports = new Map<string, SSEServerTransport>();

  app.get('/sse', async (req, res) => {
    console.error('New SSE connection established');
    const transport = new SSEServerTransport('/message', res);
    transports.set(transport.sessionId, transport);
    // Remove the entry when the client disconnects so the map cannot leak.
    res.on('close', () => transports.delete(transport.sessionId));
    await server.connect(transport);
  });

  app.post('/message', async (req, res) => {
    // The SDK appends ?sessionId=… to the /message URL it advertises to the
    // client over SSE. Fall back to the most recent transport for clients
    // that omit it (matches the old single-transport behavior).
    const sessionId = String(req.query.sessionId ?? '');
    const transport = transports.get(sessionId) ?? [...transports.values()].pop();
    if (!transport) {
      res.status(400).send('No active SSE connection');
      return;
    }
    await transport.handlePostMessage(req, res);
  });

  const PORT = Number(process.env.UMAMI_MCP_PORT || 3003);
  app.listen(PORT, () => {
    console.error(`Umami MCP server running on http://localhost:${PORT}/sse`);
  });
}

run().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});

View File

@@ -0,0 +1,13 @@
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
// Bootstrap: load environment files from the repo root BEFORE the server
// module is evaluated, because index.ts reads process.env at import time.
const __dirname = fileURLToPath(new URL('.', import.meta.url));

// .env.local is loaded first so its values win — dotenv never overwrites
// variables that are already set in process.env.
for (const envFile of ['../../../.env.local', '../../../.env']) {
  config({ path: resolve(__dirname, envFile) });
}

import('./index.js').catch(err => {
  console.error('Failed to start Umami MCP Server:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": [
"src/**/*"
]
}

816
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff