feat: implement Project Management with Gantt Chart, Milestones, and CRM enhancements

This commit is contained in:
2026-03-01 00:26:59 +01:00
parent 4b5609a75e
commit 6444cf1e81
47 changed files with 15312 additions and 7373 deletions

21
apps/web/scripts/backup-db.sh Executable file
View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Dump the Payload Postgres database from its Docker container into a
# timestamped custom-format (-F c) archive under <repo-root>/backups.
#
# Fix: quote all variable expansions in the docker/pg_dump command line
# (word-splitting hazard), and fail on unset vars / pipeline errors.
set -euo pipefail

DB_CONTAINER="mintel-me-postgres-db-1"
DB_USER="payload"
DB_NAME="payload"

# Resolve backup dir relative to this script's location
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BACKUP_DIR="${SCRIPT_DIR}/../../../backups"
TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_FILE="${BACKUP_DIR}/payload_backup_${TIMESTAMP}.dump"

echo "Creating backup directory at ${BACKUP_DIR}..."
mkdir -p "${BACKUP_DIR}"

echo "Dumping database '${DB_NAME}' from container '${DB_CONTAINER}'..."
# pg_dump custom format (-F c) is emitted on stdout and redirected locally.
docker exec "${DB_CONTAINER}" pg_dump -U "${DB_USER}" -F c "${DB_NAME}" > "${BACKUP_FILE}"

echo "✅ Backup successful: ${BACKUP_FILE}"
ls -lh "${BACKUP_FILE}"

View File

@@ -0,0 +1,42 @@
import { getPayload } from "payload";
import configPromise from "../payload.config";
/**
 * Idempotent admin-user bootstrap for the Payload "users" collection:
 * updates the password if the account already exists, otherwise creates it.
 *
 * SECURITY(review): the original script hard-coded the email and password.
 * They can now be overridden via ADMIN_EMAIL / ADMIN_PASSWORD env vars;
 * the committed fallback password should be rotated and purged from
 * version-control history.
 */
const ADMIN_EMAIL = process.env.ADMIN_EMAIL ?? "marc@mintel.me";
const ADMIN_PASSWORD = process.env.ADMIN_PASSWORD ?? "Tim300493.";

async function run(): Promise<void> {
  try {
    const payload = await getPayload({ config: configPromise });

    // Look the user up by email to decide between update and create.
    const existing = await payload.find({
      collection: "users",
      where: { email: { equals: ADMIN_EMAIL } },
    });

    if (existing.totalDocs > 0) {
      console.log("User already exists, updating password...");
      await payload.update({
        collection: "users",
        where: { email: { equals: ADMIN_EMAIL } },
        data: {
          password: ADMIN_PASSWORD,
        },
      });
      console.log("Password updated.");
    } else {
      console.log("Creating user...");
      await payload.create({
        collection: "users",
        data: {
          email: ADMIN_EMAIL,
          password: ADMIN_PASSWORD,
          name: "Marc Mintel",
        },
      });
      console.log(`User ${ADMIN_EMAIL} created.`);
    }
    // Explicit exit: payload keeps DB connections open otherwise.
    process.exit(0);
  } catch (err) {
    console.error("Failed to create user:", err);
    process.exit(1);
  }
}

run();

View File

@@ -0,0 +1,99 @@
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import fs from "fs";
import path from "path";
import dotenv from "dotenv";
import { fileURLToPath } from "url";
// Load .env so the S3_* settings used below are available.
dotenv.config();
// ESM modules have no __dirname; reconstruct it from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// S3-compatible client. Defaults ("fsn1") suggest a Hetzner Object Storage
// setup — NOTE(review): confirm the endpoint requires path-style addressing,
// which forcePathStyle enables (common for non-AWS S3 endpoints).
const client = new S3Client({
region: process.env.S3_REGION || "fsn1",
endpoint: process.env.S3_ENDPOINT,
credentials: {
accessKeyId: process.env.S3_ACCESS_KEY || "",
secretAccessKey: process.env.S3_SECRET_KEY || "",
},
forcePathStyle: true,
});
/**
 * Download a single S3 object to a local path, creating parent directories
 * as needed. Errors are logged and swallowed so one failed download does
 * not abort the whole batch (matches the original best-effort behavior).
 *
 * Bug fixed: in the web-stream fallback branch the original wrote the file
 * with writeFileSync but still awaited a "finish" event on a write stream
 * that was never written to or ended — the returned promise never settled
 * and the script hung. The write stream is now only created in the branch
 * that actually pipes into it, and read-side errors reject the promise.
 */
async function downloadFile(key: string, localPath: string): Promise<void> {
  try {
    const bucket = process.env.S3_BUCKET || "mintel";
    const command = new GetObjectCommand({
      Bucket: bucket,
      Key: key,
    });
    const response = await client.send(command);
    if (!response.Body) {
      return;
    }

    // Ensure the destination directory exists (idempotent).
    fs.mkdirSync(path.dirname(localPath), { recursive: true });

    const reader = response.Body as any;
    if (typeof reader.pipe === "function") {
      // Node.js Readable: stream straight to disk and wait for completion.
      const stream = fs.createWriteStream(localPath);
      await new Promise<void>((resolve, reject) => {
        stream.on("finish", () => resolve());
        stream.on("error", reject);
        reader.on("error", reject); // propagate read-side failures too
        reader.pipe(stream);
      });
    } else {
      // Web stream fallback: buffer fully, then write synchronously.
      const arr = await response.Body.transformToByteArray();
      fs.writeFileSync(localPath, arr);
    }
  } catch (err) {
    console.error(`Failed to download ${key}:`, err);
  }
}
/**
 * Minimal front-matter parser: splits a leading `---`-fenced block into
 * key/value pairs and returns the remaining body (trimmed).
 *
 * Only the first `:` on a line separates key from value, so values may
 * contain colons; a single leading/trailing quote character is stripped.
 * Improvement: accepts CRLF as well as LF line endings (the original
 * regex only matched LF, so Windows-edited files fell through unparsed).
 */
function parseMatter(content: string): { data: Record<string, any>; content: string } {
  const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n([\s\S]*)$/);
  if (!match) return { data: {}, content };

  const data: Record<string, any> = {};
  for (const line of match[1].split(/\r?\n/)) {
    const [key, ...rest] = line.split(":");
    if (key && rest.length) {
      const field = key.trim();
      const val = rest.join(":").trim();
      data[field] = val.replace(/^["']|["']$/g, "");
    }
  }
  return { data, content: match[2].trim() };
}
/**
 * Walk every .mdx post under content/blog, read its front matter, and
 * fetch the referenced thumbnail from S3 into the matching path under
 * public/. Downloads run sequentially, one file at a time.
 */
async function run() {
  const webDir = path.resolve(__dirname, "..");
  const contentDir = path.join(webDir, "content", "blog");
  const publicDir = path.join(webDir, "public");
  const prefix = `${process.env.S3_PREFIX || "mintel-me"}/media/`;

  const mdxFiles = fs
    .readdirSync(contentDir)
    .filter((name) => name.endsWith(".mdx"));

  for (const mdxFile of mdxFiles) {
    const raw = fs.readFileSync(path.join(contentDir, mdxFile), "utf-8");
    const { data: frontMatter } = parseMatter(raw);
    const thumbnail = frontMatter.thumbnail;
    if (!thumbnail) continue;

    const s3Key = `${prefix}${path.basename(thumbnail)}`;
    const localPath = path.join(publicDir, thumbnail.replace(/^\//, ""));
    console.log(`Downloading ${s3Key} to ${localPath}...`);
    await downloadFile(s3Key, localPath);
  }

  console.log("Downloads complete.");
}

run();

View File

@@ -0,0 +1,44 @@
import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";
import dotenv from "dotenv";
// Load .env so the S3_* settings used below are available.
dotenv.config();
// S3-compatible client. Defaults ("fsn1") suggest a Hetzner Object Storage
// setup — NOTE(review): confirm the endpoint requires path-style addressing,
// which forcePathStyle enables (common for non-AWS S3 endpoints).
const client = new S3Client({
region: process.env.S3_REGION || "fsn1",
endpoint: process.env.S3_ENDPOINT,
credentials: {
accessKeyId: process.env.S3_ACCESS_KEY || "",
secretAccessKey: process.env.S3_SECRET_KEY || "",
},
forcePathStyle: true,
});
/**
 * List every object under the configured media prefix and print its key
 * and size. Errors are logged rather than rethrown.
 * NOTE(review): a single ListObjectsV2 call is made — results beyond the
 * first page (no ContinuationToken handling) are not reported.
 */
async function run() {
  try {
    const bucket = process.env.S3_BUCKET || "mintel";
    const prefix = `${process.env.S3_PREFIX || "mintel-me"}/media/`;
    console.log(`Listing objects in bucket: ${bucket}, prefix: ${prefix}`);

    const result = await client.send(
      new ListObjectsV2Command({ Bucket: bucket, Prefix: prefix }),
    );

    const objects = result.Contents;
    if (!objects) {
      console.log("No objects found.");
      return;
    }

    console.log(`Found ${objects.length} objects:`);
    for (const obj of objects) {
      console.log(` - ${obj.Key} (${obj.Size} bytes)`);
    }
  } catch (err) {
    console.error("Error listing S3 objects:", err);
  }
}

run();