fix: bump @mintel/payload-ai to 1.9.13 and apply CSS loader shim for Next.js dev server
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 6s
Build & Deploy / 🧪 QA (push) Successful in 4m25s
Build & Deploy / 🏗️ Build (push) Failing after 17s
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🧪 Post-Deploy Verification (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 1s

This commit is contained in:
2026-03-03 14:58:14 +01:00
parent 89d258e63d
commit 6174b44570
8 changed files with 1041 additions and 10 deletions

2
apps/web/ignore-css.js Normal file
View File

@@ -0,0 +1,2 @@
const Module = require("module");
Module._extensions[".css"] = function () {};

12
apps/web/ignore-css.mjs Normal file
View File

@@ -0,0 +1,12 @@
import { extname } from 'node:path';
export async function load(url, context, nextLoad) {
if (url.endsWith('.css') || url.endsWith('.scss')) {
return {
format: 'module',
shortCircuit: true,
source: 'export default {};'
};
}
return nextLoad(url, context);
}

View File

@@ -6,7 +6,7 @@
"scripts": {
"dev": "pnpm run seed:context && next dev --webpack --hostname 0.0.0.0",
"dev:native": "DATABASE_URI=postgres://payload:payload@127.0.0.1:54321/payload PAYLOAD_SECRET=dev-secret pnpm run seed:context && DATABASE_URI=postgres://payload:payload@127.0.0.1:54321/payload PAYLOAD_SECRET=dev-secret next dev --webpack",
"seed:context": "tsx ./seed-context.ts",
"seed:context": "node --import tsx --experimental-loader ./ignore-css.mjs ./seed-context.ts",
"build": "next build --webpack",
"start": "next start",
"lint": "eslint app src scripts video",
@@ -26,8 +26,11 @@
"check:forms": "tsx scripts/check-forms.ts",
"cms:push:testing": "bash ./scripts/cms-sync.sh push testing",
"cms:pull:testing": "bash ./scripts/cms-sync.sh pull testing",
"cms:push:staging": "bash ./scripts/cms-sync.sh push staging",
"cms:pull:staging": "bash ./scripts/cms-sync.sh pull staging",
"cms:push:prod": "bash ./scripts/cms-sync.sh push prod",
"cms:pull:prod": "bash ./scripts/cms-sync.sh pull prod"
"cms:pull:prod": "bash ./scripts/cms-sync.sh pull prod",
"db:restore": "bash ./scripts/restore-db.sh"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.750.0",
@@ -39,7 +42,7 @@
"@mintel/content-engine": "link:../../../at-mintel/packages/content-engine",
"@mintel/estimation-engine": "link:../../../at-mintel/packages/estimation-engine",
"@mintel/meme-generator": "link:../../../at-mintel/packages/meme-generator",
"@mintel/payload-ai": "^1.9.10",
"@mintel/payload-ai": "^1.9.13",
"@mintel/pdf": "link:../../../at-mintel/packages/pdf-library",
"@mintel/thumbnail-generator": "link:../../../at-mintel/packages/thumbnail-generator",
"@next/mdx": "^16.1.6",

View File

@@ -108,8 +108,12 @@ resolve_target() {
# Auto-detect remote DB credentials from the env file on the server
echo "🔍 Detecting $TARGET database credentials..."
REMOTE_DB_USER="directus"
REMOTE_DB_NAME="directus"
# Try specific environment file first, then fallback to .env and .env.*
REMOTE_DB_USER=$(ssh "$SSH_HOST" "grep -h '^\(POSTGRES_USER\|postgres_DB_USER\)=' $REMOTE_SITE_DIR/.env.$TARGET $REMOTE_SITE_DIR/.env 2>/dev/null | head -1 | cut -d= -f2" || echo "")
REMOTE_DB_NAME=$(ssh "$SSH_HOST" "grep -h '^\(POSTGRES_DB\|postgres_DB_NAME\)=' $REMOTE_SITE_DIR/.env.$TARGET $REMOTE_SITE_DIR/.env 2>/dev/null | head -1 | cut -d= -f2" || echo "")
# Fallback if empty
REMOTE_DB_USER="${REMOTE_DB_USER:-payload}"
REMOTE_DB_NAME="${REMOTE_DB_NAME:-payload}"
echo " User: $REMOTE_DB_USER | DB: $REMOTE_DB_NAME"

View File

@@ -0,0 +1,168 @@
import fs from "node:fs";
import * as xlsxImport from "xlsx";
// xlsx ships CommonJS; under ESM interop the callable API may sit on `.default`.
const xlsx = (xlsxImport as any).default || xlsxImport;
import { getPayload } from "payload";
import configPromise from "../payload.config";

// Default source workbook; pass a path as the first CLI argument to override.
const DEFAULT_FILE_PATH = "/Users/marcmintel/Downloads/Akquise_Branchen.xlsx";

/** Extracts a human-readable message from an unknown thrown value. */
function errorMessage(err: unknown): string {
  return err instanceof Error ? err.message : String(err);
}

/** Maps the free-text German status column to one of the canonical values. */
function normalizeWebsiteStatus(statusRaw: string | undefined): string {
  if (statusRaw === "gut") return "gut";
  if (statusRaw === "ok" || statusRaw === "okay") return "ok";
  if (
    statusRaw === "schlecht" ||
    statusRaw === "alt" ||
    statusRaw === "veraltet"
  )
    return "schlecht";
  return "unknown";
}

/**
 * Derives { firstName, lastName } from the contact cell.
 * Falls back to a generic "Team <company>" contact when no name is given.
 */
function splitContactName(
  contactName: string | undefined,
  companyName: string,
): { firstName: string; lastName: string } {
  let firstName = "Team";
  let lastName = companyName; // fallback: address the whole company
  if (contactName) {
    // If multiple contacts are listed, just take the first one
    const firstContact = contactName.split(",")[0].trim();
    const parts = firstContact.split(" ");
    if (parts.length > 1) {
      lastName = parts.pop()!; // safe: length > 1 guarantees a value
      firstName = parts.join(" ");
    } else {
      firstName = firstContact;
      lastName = "Contact";
    }
  }
  return { firstName, lastName };
}

/**
 * One-shot migration: imports CRM accounts and contacts from an Excel
 * workbook (one sheet per industry) into Payload's crm-accounts and
 * crm-contacts collections. Idempotent: existing accounts (matched by
 * website, else by name) and contacts (matched by email) are skipped.
 */
async function run() {
  try {
    console.log("Initializing Payload...");
    const payload = await getPayload({ config: configPromise });
    const filePath = process.argv[2] ?? DEFAULT_FILE_PATH;
    if (!fs.existsSync(filePath)) {
      console.error("File not found:", filePath);
      process.exit(1);
    }
    console.log(`Reading Excel file: ${filePath}`);
    const wb = xlsx.readFile(filePath);
    let accountsCreated = 0;
    let contactsCreated = 0;
    for (const sheetName of wb.SheetNames) {
      // Skip non-lead sheets.
      if (
        sheetName === "Weitere Kundenideen" ||
        sheetName.includes("BKF Firmen")
      )
        continue;
      // Sheet names look like "01_Handwerk" / "1. Handwerk" — strip the
      // numeric prefix and turn underscores into spaces.
      let industry = sheetName
        .replace(/^\d+_/, "")
        .replace(/^\d+\.\s*/, "")
        .replace(/_/g, " ");
      console.log(
        `\n--- Importing Sheet: ${sheetName} -> Industry: ${industry} ---`,
      );
      const rows = xlsx.utils.sheet_to_json(wb.Sheets[sheetName]);
      for (const row of rows) {
        const companyName = row["Unternehmen"]?.trim();
        const website = row["Webseitenlink"]?.trim();
        let email = row["Emailadresse"]?.trim();
        const contactName = row["Ansprechpartner"]?.trim();
        const position = row["Position"]?.trim();
        const statusRaw = row["Webseiten-Status (alt/gut/schlecht)"]
          ?.trim()
          ?.toLowerCase();
        const notes = row["Notizen"]?.trim();
        if (!companyName) continue;
        const websiteStatus = normalizeWebsiteStatus(statusRaw);
        // Find or create account — prefer the website as the dedupe key
        // since company names are less stable.
        let accountId;
        const whereClause = website
          ? { website: { equals: website } }
          : { name: { equals: companyName } };
        const existingAccounts = await payload.find({
          collection: "crm-accounts",
          where: whereClause,
        });
        if (existingAccounts.docs.length > 0) {
          accountId = existingAccounts.docs[0].id;
          console.log(`[SKIP] Account exists: ${companyName}`);
        } else {
          try {
            const newAccount = await payload.create({
              collection: "crm-accounts",
              data: {
                name: companyName,
                website: website || "",
                status: "lead",
                leadTemperature: "cold",
                industry,
                websiteStatus,
                notes,
              } as any,
            });
            accountId = newAccount.id;
            accountsCreated++;
            console.log(`[OK] Created account: ${companyName}`);
          } catch (err) {
            console.error(
              `[ERROR] Failed to create account ${companyName}:`,
              errorMessage(err),
            );
            continue; // Skip contact creation if account failed
          }
        }
        // Handle contact
        if (email) {
          // Some rows have multiple emails or contacts. Let's just pick the first email if there are commas.
          if (email.includes(",")) email = email.split(",")[0].trim();
          const existingContacts = await payload.find({
            collection: "crm-contacts",
            where: { email: { equals: email } },
          });
          if (existingContacts.docs.length === 0) {
            const { firstName, lastName } = splitContactName(
              contactName,
              companyName,
            );
            try {
              await payload.create({
                collection: "crm-contacts",
                data: {
                  email,
                  firstName,
                  lastName,
                  role: position,
                  account: accountId as any,
                },
              });
              contactsCreated++;
              console.log(`  -> [OK] Created contact: ${email}`);
            } catch (err) {
              console.error(
                `  -> [ERROR] Failed to create contact ${email}:`,
                errorMessage(err),
              );
            }
          } else {
            console.log(`  -> [SKIP] Contact exists: ${email}`);
          }
        }
      }
    }
    console.log(`\nMigration completed successfully!`);
    console.log(
      `Created ${accountsCreated} Accounts and ${contactsCreated} Contacts.`,
    );
    process.exit(0);
  } catch (e) {
    console.error("Migration failed:", e);
    process.exit(1);
  }
}
run();

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# Payload CMS Database Restore
# Restores a backup created by backup-db.sh
# Usage: pnpm run db:restore <backup-file>
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail

# Load environment variables: repo root first, then local overrides.
if [ -f ../../.env ]; then
  set -a; source ../../.env; set +a
fi
if [ -f .env ]; then
  set -a; source .env; set +a
fi

# Variable names match the postgres_* convention used by the sibling scripts.
DB_NAME="${postgres_DB_NAME:-payload}"
DB_USER="${postgres_DB_USER:-payload}"
DB_CONTAINER="mintel-me-postgres-db-1"

BACKUP_FILE="${1:-}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BACKUP_DIR="${SCRIPT_DIR}/../../../../backups"

# No argument: show usage plus the backups that are actually available.
if [ -z "$BACKUP_FILE" ]; then
  echo "❌ Usage: pnpm run db:restore <backup-file>"
  echo ""
  echo "📋 Available backups in $BACKUP_DIR:"
  ls -lh "$BACKUP_DIR"/*.dump 2>/dev/null | awk '{print " " $NF " (" $5 ")"}' || echo " No backups found."
  exit 1
fi

if [ ! -f "$BACKUP_FILE" ]; then
  echo "❌ Backup file not found: $BACKUP_FILE"
  exit 1
fi

# Check if container is running — exact name match so that a similarly
# prefixed container does not satisfy the check.
if ! docker ps --format '{{.Names}}' | grep -Fxq -- "$DB_CONTAINER"; then
  echo "❌ Database container '$DB_CONTAINER' is not running."
  echo " Start it with: pnpm dev:docker"
  exit 1
fi

# Destructive operation: require explicit confirmation.
echo "⚠️ WARNING: This will REPLACE ALL DATA in the '$DB_NAME' database!"
echo " Backup file: $BACKUP_FILE"
echo ""
read -p "Are you sure? (y/N) " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
  echo "Cancelled."
  exit 0
fi

echo "🔄 Restoring database from $BACKUP_FILE..."
# Uses pg_restore for custom format dumps (-F c) produced by backup-db.sh.
# --clean --if-exists drops existing objects before recreating them.
docker exec -i "$DB_CONTAINER" pg_restore -U "$DB_USER" -d "$DB_NAME" --clean --if-exists < "$BACKUP_FILE"
echo "✅ Database restored successfully!"

View File

@@ -16,6 +16,13 @@
"lint:yaml": "node scripts/lint-yaml.js",
"optimize-blog": "tsx --env-file=.env apps/web/scripts/optimize-blog-post.ts",
"db:backup": "bash apps/web/scripts/backup-db.sh",
"db:restore": "bash apps/web/scripts/restore-db.sh",
"cms:push:testing": "bash apps/web/scripts/cms-sync.sh push testing",
"cms:pull:testing": "bash apps/web/scripts/cms-sync.sh pull testing",
"cms:push:staging": "bash apps/web/scripts/cms-sync.sh push staging",
"cms:pull:staging": "bash apps/web/scripts/cms-sync.sh pull staging",
"cms:push:prod": "bash apps/web/scripts/cms-sync.sh push prod",
"cms:pull:prod": "bash apps/web/scripts/cms-sync.sh pull prod",
"prepare": "husky"
},
"devDependencies": {
@@ -60,6 +67,7 @@
"dependencies": {
"@eslint/compat": "^2.0.2",
"@mintel/acquisition": "link:../at-mintel/packages/acquisition-library",
"@mintel/payload-ai": "1.9.13",
"tsx": "^4.21.0",
"turbo": "^2.8.10"
}

783
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff