fix: bump @mintel/payload-ai to 1.9.13 and apply CSS loader shim for Next.js dev server
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 6s
Build & Deploy / 🧪 QA (push) Successful in 4m25s
Build & Deploy / 🏗️ Build (push) Failing after 17s
Build & Deploy / 🚀 Deploy (push) Has been skipped
Build & Deploy / 🧪 Post-Deploy Verification (push) Has been skipped
Build & Deploy / 🔔 Notify (push) Successful in 1s

This commit is contained in:
2026-03-03 14:58:14 +01:00
parent 89d258e63d
commit 6174b44570
8 changed files with 1041 additions and 10 deletions

View File

@@ -108,8 +108,12 @@ resolve_target() {
# Auto-detect remote DB credentials from the env file on the server
echo "🔍 Detecting $TARGET database credentials..."
# Try specific environment file first, then fallback to .env and .env.*
# grep -h suppresses filenames, head -1 keeps the first match, and a failed
# ssh/grep yields an empty string (|| echo "") so the defaults below apply.
REMOTE_DB_USER=$(ssh "$SSH_HOST" "grep -h '^\(POSTGRES_USER\|postgres_DB_USER\)=' $REMOTE_SITE_DIR/.env.$TARGET $REMOTE_SITE_DIR/.env 2>/dev/null | head -1 | cut -d= -f2" || echo "")
REMOTE_DB_NAME=$(ssh "$SSH_HOST" "grep -h '^\(POSTGRES_DB\|postgres_DB_NAME\)=' $REMOTE_SITE_DIR/.env.$TARGET $REMOTE_SITE_DIR/.env 2>/dev/null | head -1 | cut -d= -f2" || echo "")
# Fallback if empty. (The old "directus" pre-seed was dead code: both
# variables were unconditionally overwritten above, so it is removed.)
REMOTE_DB_USER="${REMOTE_DB_USER:-payload}"
REMOTE_DB_NAME="${REMOTE_DB_NAME:-payload}"
echo " User: $REMOTE_DB_USER | DB: $REMOTE_DB_NAME"

View File

@@ -0,0 +1,168 @@
import fs from "node:fs";
import * as xlsxImport from "xlsx";
const xlsx = (xlsxImport as any).default || xlsxImport;
import { getPayload } from "payload";
import configPromise from "../payload.config";
/**
 * One-shot migration: imports accounts and contacts from an Excel workbook
 * into the Payload CMS collections `crm-accounts` and `crm-contacts`.
 *
 * Each worksheet maps to one industry (derived from the sheet name); each row
 * becomes an account (deduplicated by website, falling back to company name)
 * and, when an email is present, a contact (deduplicated by email).
 *
 * Exits the process with 0 on success, 1 on any fatal error.
 */
async function run() {
  try {
    console.log("Initializing Payload...");
    const payload = await getPayload({ config: configPromise });
    // Generalized: the workbook path may be passed as the first CLI argument;
    // the original hard-coded path remains the default for compatibility.
    const filePath =
      process.argv[2] ?? "/Users/marcmintel/Downloads/Akquise_Branchen.xlsx";
    if (!fs.existsSync(filePath)) {
      console.error("File not found:", filePath);
      process.exit(1);
    }
    console.log(`Reading Excel file: ${filePath}`);
    const wb = xlsx.readFile(filePath);
    let accountsCreated = 0;
    let contactsCreated = 0;
    for (const sheetName of wb.SheetNames) {
      // Skip sheets that are idea lists, not real lead data.
      if (
        sheetName === "Weitere Kundenideen" ||
        sheetName.includes("BKF Firmen")
      )
        continue;
      // Sheet names look like "01_Some Industry" / "1. Some_Industry";
      // strip the numeric prefix and turn underscores into spaces.
      const industry = sheetName
        .replace(/^\d+_/, "")
        .replace(/^\d+\.\s*/, "")
        .replace(/_/g, " ");
      console.log(
        `\n--- Importing Sheet: ${sheetName} -> Industry: ${industry} ---`,
      );
      const rows = xlsx.utils.sheet_to_json(wb.Sheets[sheetName]);
      for (const row of rows) {
        // Column headers are German; all cells are optional strings.
        const companyName = row["Unternehmen"]?.trim();
        const website = row["Webseitenlink"]?.trim();
        let email = row["Emailadresse"]?.trim();
        const contactName = row["Ansprechpartner"]?.trim();
        const position = row["Position"]?.trim();
        const statusRaw = row["Webseiten-Status (alt/gut/schlecht)"]
          ?.trim()
          ?.toLowerCase();
        const notes = row["Notizen"]?.trim();
        // A row without a company name carries no usable data.
        if (!companyName) continue;
        // Normalize the free-text website status into a small enum.
        let websiteStatus = "unknown";
        if (statusRaw === "gut") websiteStatus = "gut";
        else if (statusRaw === "ok" || statusRaw === "okay")
          websiteStatus = "ok";
        else if (
          statusRaw === "schlecht" ||
          statusRaw === "alt" ||
          statusRaw === "veraltet"
        )
          websiteStatus = "schlecht";
        // Find or create account. Website is the stronger dedupe key when
        // present; otherwise fall back to the company name.
        let accountId;
        const whereClause = website
          ? { website: { equals: website } }
          : { name: { equals: companyName } };
        const existingAccounts = await payload.find({
          collection: "crm-accounts",
          where: whereClause,
        });
        if (existingAccounts.docs.length > 0) {
          accountId = existingAccounts.docs[0].id;
          console.log(`[SKIP] Account exists: ${companyName}`);
        } else {
          try {
            const newAccount = await payload.create({
              collection: "crm-accounts",
              data: {
                name: companyName,
                website: website || "",
                status: "lead",
                leadTemperature: "cold",
                industry,
                websiteStatus,
                notes,
              } as any,
            });
            accountId = newAccount.id;
            accountsCreated++;
            console.log(`[OK] Created account: ${companyName}`);
          } catch (err: any) {
            console.error(
              `[ERROR] Failed to create account ${companyName}:`,
              err.message,
            );
            continue; // Skip contact creation if account failed
          }
        }
        // Handle contact
        if (email) {
          // Some rows have multiple emails or contacts. Let's just pick the first email if there are commas.
          if (email.includes(",")) email = email.split(",")[0].trim();
          const existingContacts = await payload.find({
            collection: "crm-contacts",
            where: { email: { equals: email } },
          });
          if (existingContacts.docs.length === 0) {
            // Fallbacks when no contact person is listed in the sheet.
            let firstName = "Team";
            let lastName = companyName; // fallback
            if (contactName) {
              // If multiple contacts are listed, just take the first one
              const firstContact = contactName.split(",")[0].trim();
              const parts = firstContact.split(" ");
              if (parts.length > 1) {
                // pop() is typed string|undefined; the length check above
                // guarantees a value, so the ?? branch is a type-level guard.
                lastName = parts.pop() ?? "Contact";
                firstName = parts.join(" ");
              } else {
                firstName = firstContact;
                lastName = "Contact";
              }
            }
            try {
              await payload.create({
                collection: "crm-contacts",
                data: {
                  email,
                  firstName,
                  lastName,
                  role: position,
                  account: accountId as any,
                },
              });
              contactsCreated++;
              console.log(` -> [OK] Created contact: ${email}`);
            } catch (err: any) {
              console.error(
                ` -> [ERROR] Failed to create contact ${email}:`,
                err.message,
              );
            }
          } else {
            console.log(` -> [SKIP] Contact exists: ${email}`);
          }
        }
      }
    }
    console.log(`\nMigration completed successfully!`);
    console.log(
      `Created ${accountsCreated} Accounts and ${contactsCreated} Contacts.`,
    );
    process.exit(0);
  } catch (e) {
    console.error("Migration failed:", e);
    process.exit(1);
  }
}
run();

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# Payload CMS Database Restore
# Restores a backup created by backup-db.sh
# Usage: pnpm run db:restore <backup-file>
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail
# Load environment variables (repo root first, then local overrides)
if [ -f ../../.env ]; then
  set -a; source ../../.env; set +a
fi
if [ -f .env ]; then
  set -a; source .env; set +a
fi
DB_NAME="${postgres_DB_NAME:-payload}"
DB_USER="${postgres_DB_USER:-payload}"
DB_CONTAINER="mintel-me-postgres-db-1"
BACKUP_FILE="${1:-}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BACKUP_DIR="${SCRIPT_DIR}/../../../../backups"
if [ -z "$BACKUP_FILE" ]; then
  echo "❌ Usage: pnpm run db:restore <backup-file>"
  echo ""
  echo "📋 Available backups in $BACKUP_DIR:"
  ls -lh "$BACKUP_DIR"/*.dump 2>/dev/null | awk '{print " " $NF " (" $5 ")"}' || echo " No backups found."
  exit 1
fi
# Allow a bare filename (as printed by the listing above) to be resolved
# against the backup directory, in addition to a full/relative path.
if [ ! -f "$BACKUP_FILE" ] && [ -f "$BACKUP_DIR/$BACKUP_FILE" ]; then
  BACKUP_FILE="$BACKUP_DIR/$BACKUP_FILE"
fi
if [ ! -f "$BACKUP_FILE" ]; then
  echo "❌ Backup file not found: $BACKUP_FILE"
  exit 1
fi
# Check if container is running. -x requires an exact name match so a
# container whose name merely contains $DB_CONTAINER is not mistaken for it.
if ! docker ps --format '{{.Names}}' | grep -qx "$DB_CONTAINER"; then
  echo "❌ Database container '$DB_CONTAINER' is not running."
  echo " Start it with: pnpm dev:docker"
  exit 1
fi
echo "⚠️ WARNING: This will REPLACE ALL DATA in the '$DB_NAME' database!"
echo " Backup file: $BACKUP_FILE"
echo ""
read -p "Are you sure? (y/N) " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
  echo "Cancelled."
  exit 0
fi
echo "🔄 Restoring database from $BACKUP_FILE..."
# Uses pg_restore for custom format dumps (-F c) produced by backup-db.sh.
# Feed the dump via stdin redirection instead of a useless `cat |` pipe.
docker exec -i "$DB_CONTAINER" pg_restore -U "$DB_USER" -d "$DB_NAME" --clean --if-exists < "$BACKUP_FILE"
echo "✅ Database restored successfully!"