Compare commits
27 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 963e572291 | |||
| 9887324469 | |||
| 78da0fdea9 | |||
| 91db336c0e | |||
| cfbff88e45 | |||
| 90b41d2a15 | |||
| 3f45293c2e | |||
| 7e957d6fb4 | |||
| 4334d31445 | |||
| 1559037029 | |||
| b7438f2718 | |||
| a090373825 | |||
| 3c3d019924 | |||
| c6d20119c7 | |||
| 04a19742da | |||
| 39b96a51db | |||
| d27e1f91ad | |||
| 18cd576ee9 | |||
| 3d2f240cf6 | |||
| 6260b40b91 | |||
| 109c8389f3 | |||
| 55cb073a6d | |||
| fb87fd52f7 | |||
| da9b2fb9cf | |||
| 5032700c2c | |||
| d44838254c | |||
| 1742604a7a |
@@ -31,7 +31,6 @@ jobs:
|
||||
traefik_host: ${{ steps.determine.outputs.traefik_host }}
|
||||
traefik_rule: ${{ steps.determine.outputs.traefik_rule }}
|
||||
next_public_url: ${{ steps.determine.outputs.next_public_url }}
|
||||
directus_url: ${{ steps.determine.outputs.directus_url }}
|
||||
project_name: ${{ steps.determine.outputs.project_name }}
|
||||
short_sha: ${{ steps.determine.outputs.short_sha }}
|
||||
container:
|
||||
@@ -96,7 +95,6 @@ jobs:
|
||||
echo "traefik_host=$PRIMARY_HOST"
|
||||
echo "traefik_rule=$TRAEFIK_RULE"
|
||||
echo "next_public_url=https://$PRIMARY_HOST"
|
||||
echo "directus_url=https://cms.$PRIMARY_HOST"
|
||||
echo "project_name=$PRJ-$TARGET"
|
||||
echo "short_sha=$SHORT_SHA"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
@@ -163,7 +161,9 @@ jobs:
|
||||
echo "@mintel:registry=https://${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}" > .npmrc
|
||||
echo "//${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}/:_authToken=${{ secrets.REGISTRY_PASS }}" >> .npmrc
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
run: |
|
||||
pnpm store prune
|
||||
pnpm install --no-frozen-lockfile
|
||||
- name: 🧪 QA Checks
|
||||
if: github.event.inputs.skip_checks != 'true'
|
||||
run: |
|
||||
@@ -196,11 +196,10 @@ jobs:
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
platforms: linux/arm64
|
||||
platforms: linux/amd64
|
||||
build-args: |
|
||||
NEXT_PUBLIC_BASE_URL=${{ needs.prepare.outputs.next_public_url }}
|
||||
NEXT_PUBLIC_TARGET=${{ needs.prepare.outputs.target }}
|
||||
DIRECTUS_URL=${{ needs.prepare.outputs.directus_url }}
|
||||
NPM_TOKEN=${{ secrets.REGISTRY_PASS }}
|
||||
tags: registry.infra.mintel.me/mintel/mb-grid-solutions:${{ needs.prepare.outputs.image_tag }}
|
||||
cache-from: type=registry,ref=registry.infra.mintel.me/mintel/mb-grid-solutions:buildcache
|
||||
@@ -222,18 +221,14 @@ jobs:
|
||||
IMAGE_TAG: ${{ needs.prepare.outputs.image_tag }}
|
||||
PROJECT_NAME: ${{ needs.prepare.outputs.project_name }}
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
DIRECTUS_URL: ${{ needs.prepare.outputs.directus_url }}
|
||||
DIRECTUS_HOST: cms.${{ needs.prepare.outputs.traefik_host }}
|
||||
|
||||
# Secrets mapping (Directus)
|
||||
DIRECTUS_KEY: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DIRECTUS_KEY) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DIRECTUS_KEY) || secrets.DIRECTUS_KEY || vars.DIRECTUS_KEY }}
|
||||
DIRECTUS_SECRET: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DIRECTUS_SECRET) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DIRECTUS_SECRET) || secrets.DIRECTUS_SECRET || vars.DIRECTUS_SECRET }}
|
||||
DIRECTUS_ADMIN_EMAIL: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DIRECTUS_ADMIN_EMAIL) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DIRECTUS_ADMIN_EMAIL) || secrets.DIRECTUS_ADMIN_EMAIL || vars.DIRECTUS_ADMIN_EMAIL || 'admin@mintel.me' }}
|
||||
DIRECTUS_ADMIN_PASSWORD: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DIRECTUS_ADMIN_PASSWORD) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DIRECTUS_ADMIN_PASSWORD) || secrets.DIRECTUS_ADMIN_PASSWORD || vars.DIRECTUS_ADMIN_PASSWORD }}
|
||||
# Secrets mapping (Database & CMS)
|
||||
PAYLOAD_SECRET: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_PAYLOAD_SECRET) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_PAYLOAD_SECRET) || secrets.PAYLOAD_SECRET || secrets.DIRECTUS_SECRET || vars.PAYLOAD_SECRET || 'you-need-to-set-a-payload-secret' }}
|
||||
DATABASE_URI: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DATABASE_URI) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DATABASE_URI) || secrets.DATABASE_URI || vars.DATABASE_URI }}
|
||||
DIRECTUS_DB_NAME: ${{ secrets.DIRECTUS_DB_NAME || vars.DIRECTUS_DB_NAME || 'directus' }}
|
||||
DIRECTUS_DB_USER: ${{ secrets.DIRECTUS_DB_USER || vars.DIRECTUS_DB_USER || 'directus' }}
|
||||
DIRECTUS_DB_PASSWORD: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DIRECTUS_DB_PASSWORD) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DIRECTUS_DB_PASSWORD) || secrets.DIRECTUS_DB_PASSWORD || vars.DIRECTUS_DB_PASSWORD || 'directus' }}
|
||||
DIRECTUS_API_TOKEN: ${{ (needs.prepare.outputs.target == 'testing' && secrets.TESTING_DIRECTUS_API_TOKEN) || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_DIRECTUS_API_TOKEN) || secrets.DIRECTUS_API_TOKEN || vars.DIRECTUS_API_TOKEN }}
|
||||
|
||||
|
||||
# Secrets mapping (Mail)
|
||||
MAIL_HOST: ${{ secrets.SMTP_HOST || vars.SMTP_HOST }}
|
||||
@@ -259,7 +254,6 @@ jobs:
|
||||
- name: 📝 Generate Environment
|
||||
shell: bash
|
||||
env:
|
||||
TRAEFIK_RULE: ${{ needs.prepare.outputs.traefik_rule }}
|
||||
TRAEFIK_HOST: ${{ needs.prepare.outputs.traefik_host }}
|
||||
ENV_FILE: ${{ needs.prepare.outputs.env_file }}
|
||||
run: |
|
||||
@@ -271,8 +265,6 @@ jobs:
|
||||
AUTH_MIDDLEWARE="$STD_MW"
|
||||
COMPOSE_PROFILES=""
|
||||
else
|
||||
# Exclude Gatekeeper from the main app router to prevent redirect loops
|
||||
TRAEFIK_RULE="Host(\`${TRAEFIK_HOST}\`) && !PathPrefix(\`/gatekeeper\`)"
|
||||
# Order: Forward (Proto) -> Auth -> Compression
|
||||
AUTH_MIDDLEWARE="${PROJECT_NAME}-forward,${PROJECT_NAME}-auth,compress"
|
||||
COMPOSE_PROFILES="gatekeeper"
|
||||
@@ -292,18 +284,12 @@ jobs:
|
||||
PROJECT_COLOR=$PROJECT_COLOR
|
||||
LOG_LEVEL=$LOG_LEVEL
|
||||
|
||||
# Directus
|
||||
DIRECTUS_URL=$DIRECTUS_URL
|
||||
DIRECTUS_HOST=$DIRECTUS_HOST
|
||||
DIRECTUS_KEY=$DIRECTUS_KEY
|
||||
DIRECTUS_SECRET=$DIRECTUS_SECRET
|
||||
DIRECTUS_ADMIN_EMAIL=$DIRECTUS_ADMIN_EMAIL
|
||||
DIRECTUS_ADMIN_PASSWORD=$DIRECTUS_ADMIN_PASSWORD
|
||||
# Database & Payload
|
||||
DATABASE_URI=\${DATABASE_URI:-postgresql://$DIRECTUS_DB_USER:$DIRECTUS_DB_PASSWORD@mb-grid-db:5432/$DIRECTUS_DB_NAME}
|
||||
PAYLOAD_SECRET=${PAYLOAD_SECRET:-you-need-to-set-a-payload-secret}
|
||||
DIRECTUS_DB_NAME=$DIRECTUS_DB_NAME
|
||||
DIRECTUS_DB_USER=$DIRECTUS_DB_USER
|
||||
DIRECTUS_DB_PASSWORD=$DIRECTUS_DB_PASSWORD
|
||||
DIRECTUS_API_TOKEN=$DIRECTUS_API_TOKEN
|
||||
INTERNAL_DIRECTUS_URL=http://${PROJECT_NAME}-directus:8055
|
||||
|
||||
# Mail
|
||||
MAIL_HOST=$MAIL_HOST
|
||||
@@ -327,8 +313,7 @@ jobs:
|
||||
SENTRY_ENVIRONMENT=$TARGET
|
||||
PROJECT_NAME=$PROJECT_NAME
|
||||
ENV_FILE=$ENV_FILE
|
||||
TRAEFIK_RULE="${TRAEFIK_RULE}"
|
||||
TRAEFIK_HOST="${TRAEFIK_HOST}"
|
||||
TRAEFIK_HOST=$TRAEFIK_HOST
|
||||
COMPOSE_PROFILES=$COMPOSE_PROFILES
|
||||
TRAEFIK_MIDDLEWARES=$AUTH_MIDDLEWARE
|
||||
EOF
|
||||
@@ -345,7 +330,7 @@ jobs:
|
||||
|
||||
# Transfer and Restart
|
||||
SITE_DIR="/home/deploy/sites/mb-grid-solutions.com"
|
||||
ssh root@alpha.mintel.me "mkdir -p $SITE_DIR/directus/schema $SITE_DIR/directus/uploads $SITE_DIR/directus/extensions"
|
||||
ssh root@alpha.mintel.me "mkdir -p $SITE_DIR"
|
||||
|
||||
scp .env.deploy root@alpha.mintel.me:$SITE_DIR/$ENV_FILE
|
||||
scp docker-compose.yaml root@alpha.mintel.me:$SITE_DIR/docker-compose.yaml
|
||||
@@ -354,8 +339,14 @@ jobs:
|
||||
ssh root@alpha.mintel.me "cd $SITE_DIR && docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' pull"
|
||||
ssh root@alpha.mintel.me "cd $SITE_DIR && docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' up -d --remove-orphans"
|
||||
|
||||
# Apply Directus Schema Snapshot if available
|
||||
ssh root@alpha.mintel.me "cd $SITE_DIR && if docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' exec -T directus ls /directus/schema/snapshot.yaml >/dev/null 2>&1; then echo '→ Applying Directus Schema Snapshot...' && docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' exec -T directus npx directus schema apply /directus/schema/snapshot.yaml --yes; fi"
|
||||
# Apply Payload Migrations using the target app container's programmatic endpoint
|
||||
ssh root@alpha.mintel.me "cd $SITE_DIR && echo '→ Waiting for DB and Running Payload Migrations...' && \
|
||||
for i in {1..5}; do \
|
||||
echo \"Attempt \$i...\"; \
|
||||
docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' exec -T mb-grid-app sh -c 'curl -s -f -X POST -H \"Authorization: Bearer \$PAYLOAD_SECRET\" http://localhost:3000/api/payload/migrate \
|
||||
|| { echo \"HTTP error or DB not ready.\"; exit 1; }' && { echo '✅ Migrations successful!'; break; } \
|
||||
|| { if [ \$i -eq 5 ]; then echo '❌ Migration failed after 5 attempts!'; exit 1; else echo '⏳ Retrying in 5s...'; sleep 5; fi; }; \
|
||||
done"
|
||||
|
||||
ssh root@alpha.mintel.me "docker system prune -f --filter 'until=24h'"
|
||||
|
||||
@@ -364,49 +355,149 @@ jobs:
|
||||
run: docker builder prune -f --filter "until=1h"
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 5: Health Check
|
||||
# JOB 5: Post-Deploy Verification (Smoke Tests + Quality Gates)
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
healthcheck:
|
||||
name: 🩺 Health Check
|
||||
post_deploy_checks:
|
||||
name: 🧪 Post-Deploy Verification
|
||||
needs: [prepare, deploy]
|
||||
if: needs.deploy.result == 'success'
|
||||
if: needs.deploy.result == 'success' && needs.prepare.outputs.target != 'branch'
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: 🔍 Smoke Test
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v3
|
||||
with:
|
||||
version: 10
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: 🔐 Registry Auth
|
||||
run: |
|
||||
URL="${{ needs.prepare.outputs.next_public_url }}"
|
||||
echo "Checking health of $URL..."
|
||||
for i in {1..12}; do
|
||||
if curl -s -f -k -L "$URL" > /dev/null; then
|
||||
echo "✅ Health check passed!"
|
||||
exit 0
|
||||
fi
|
||||
echo "Waiting for service to be ready... ($i/12)"
|
||||
sleep 10
|
||||
done
|
||||
echo "❌ Health check failed after 2 minutes."
|
||||
exit 1
|
||||
echo "@mintel:registry=https://${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}" > .npmrc
|
||||
echo "//${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}/:_authToken=${{ secrets.REGISTRY_PASS }}" >> .npmrc
|
||||
- name: Install dependencies
|
||||
id: deps
|
||||
run: |
|
||||
pnpm store prune
|
||||
pnpm install --no-frozen-lockfile
|
||||
- name: 📦 Cache APT Packages
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /var/cache/apt/archives
|
||||
key: apt-cache-${{ runner.os }}-${{ runner.arch }}-chromium
|
||||
|
||||
- name: 💾 Cache Chromium
|
||||
id: cache-chromium
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /usr/bin/chromium
|
||||
key: ${{ runner.os }}-chromium-native-${{ hashFiles('package.json') }}
|
||||
|
||||
- name: 🔍 Install Chromium (Native & ARM64)
|
||||
if: steps.cache-chromium.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
rm -f /etc/apt/apt.conf.d/docker-clean
|
||||
apt-get update
|
||||
apt-get install -y gnupg wget ca-certificates
|
||||
OS_ID=$(. /etc/os-release && echo $ID)
|
||||
CODENAME=$(. /etc/os-release && echo $VERSION_CODENAME)
|
||||
if [ "$OS_ID" = "debian" ]; then
|
||||
apt-get install -y chromium
|
||||
else
|
||||
mkdir -p /etc/apt/keyrings
|
||||
KEY_ID="82BB6851C64F6880"
|
||||
wget -qO- "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x$KEY_ID" | gpg --dearmor > /etc/apt/keyrings/xtradeb.gpg
|
||||
echo "deb [signed-by=/etc/apt/keyrings/xtradeb.gpg] http://ppa.launchpad.net/xtradeb/apps/ubuntu $CODENAME main" > /etc/apt/sources.list.d/xtradeb-ppa.list
|
||||
printf "Package: *\nPin: release o=LP-PPA-xtradeb-apps\nPin-Priority: 1001\n" > /etc/apt/preferences.d/xtradeb
|
||||
apt-get update
|
||||
apt-get install -y --allow-downgrades chromium
|
||||
fi
|
||||
[ -f /usr/bin/chromium ] && ln -sf /usr/bin/chromium /usr/bin/google-chrome
|
||||
[ -f /usr/bin/chromium ] && ln -sf /usr/bin/chromium /usr/bin/chromium-browser
|
||||
|
||||
# ── Critical Smoke Tests (MUST pass) ──────────────────────────────────
|
||||
- name: 🏥 CMS Deep Health Check
|
||||
env:
|
||||
DEPLOY_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GK_PASS: ${{ secrets.GATEKEEPER_PASSWORD || vars.GATEKEEPER_PASSWORD }}
|
||||
run: |
|
||||
echo "Waiting 10s for app to fully start..."
|
||||
sleep 10
|
||||
echo "Checking basic health..."
|
||||
curl -sf "$DEPLOY_URL/api/health" || { echo "❌ Basic health check failed"; exit 1; }
|
||||
echo "✅ Basic health OK"
|
||||
|
||||
- name: 🌐 Core Smoke Tests (HTTP, API, Locale)
|
||||
if: always() && steps.deps.outcome == 'success'
|
||||
uses: https://git.infra.mintel.me/mmintel/at-mintel/.gitea/actions/core-smoke-tests@main
|
||||
with:
|
||||
TARGET_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || vars.GATEKEEPER_PASSWORD }}
|
||||
UMAMI_API_ENDPOINT: ${{ secrets.UMAMI_API_ENDPOINT || secrets.NEXT_PUBLIC_UMAMI_SCRIPT_URL || vars.UMAMI_API_ENDPOINT || 'https://analytics.infra.mintel.me' }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN || vars.SENTRY_DSN }}
|
||||
|
||||
- name: 📝 E2E Form Submission Test
|
||||
if: always() && steps.deps.outcome == 'success'
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || vars.GATEKEEPER_PASSWORD }}
|
||||
PUPPETEER_EXECUTABLE_PATH: /usr/bin/chromium
|
||||
run: pnpm test run
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 6: Notifications
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
notifications:
|
||||
name: 🔔 Notify
|
||||
needs: [prepare, deploy, healthcheck]
|
||||
needs: [prepare, deploy, post_deploy_checks]
|
||||
if: always()
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: 🔔 Gotify
|
||||
shell: bash
|
||||
run: |
|
||||
STATUS="${{ needs.deploy.result }}"
|
||||
TITLE="mb-grid-solutions.com: $STATUS"
|
||||
[[ "$STATUS" == "success" ]] && PRIORITY=5 || PRIORITY=8
|
||||
|
||||
DEPLOY="${{ needs.deploy.result }}"
|
||||
SMOKE="${{ needs.post_deploy_checks.result }}"
|
||||
PERF="${{ needs.post_deploy_checks.result }}"
|
||||
TARGET="${{ needs.prepare.outputs.target }}"
|
||||
VERSION="${{ needs.prepare.outputs.image_tag }}"
|
||||
URL="${{ needs.prepare.outputs.next_public_url }}"
|
||||
|
||||
# Gotify priority scale:
|
||||
# 1-3 = low (silent/info)
|
||||
# 4-5 = normal
|
||||
# 6-7 = high (warning)
|
||||
# 8-10 = critical (alarm)
|
||||
if [[ "$DEPLOY" != "success" ]]; then
|
||||
PRIORITY=10
|
||||
EMOJI="🚨"
|
||||
STATUS_LINE="DEPLOY FAILED"
|
||||
elif [[ "$SMOKE" != "success" ]]; then
|
||||
PRIORITY=8
|
||||
EMOJI="⚠️"
|
||||
STATUS_LINE="Smoke tests failed"
|
||||
elif [[ "$PERF" != "success" ]]; then
|
||||
PRIORITY=5
|
||||
EMOJI="📉"
|
||||
STATUS_LINE="Performance degraded"
|
||||
else
|
||||
PRIORITY=2
|
||||
EMOJI="✅"
|
||||
STATUS_LINE="All checks passed"
|
||||
fi
|
||||
|
||||
TITLE="$EMOJI mb-grid-solutions.com $VERSION → $TARGET"
|
||||
MESSAGE="$STATUS_LINE
|
||||
Deploy: $DEPLOY | Smoke: $SMOKE | Perf: $PERF
|
||||
$URL"
|
||||
|
||||
curl -s -k -X POST "${{ secrets.GOTIFY_URL }}/message?token=${{ secrets.GOTIFY_TOKEN }}" \
|
||||
-F "title=$TITLE" \
|
||||
-F "message=Deploy to ${{ needs.prepare.outputs.target }} finished with status $STATUS.\nVersion: ${{ needs.prepare.outputs.image_tag }}" \
|
||||
-F "message=$MESSAGE" \
|
||||
-F "priority=$PRIORITY" || true
|
||||
|
||||
17
.gitea/workflows/qa.yml
Normal file
17
.gitea/workflows/qa.yml
Normal file
@@ -0,0 +1,17 @@
|
||||
name: Nightly QA
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 4 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
call-qa-workflow:
|
||||
uses: mmintel/at-mintel/.gitea/workflows/quality-assurance-template.yml@main
|
||||
with:
|
||||
TARGET_URL: 'https://testing.mb-grid-solutions.com'
|
||||
PROJECT_NAME: 'mb-grid-solutions'
|
||||
secrets:
|
||||
GOTIFY_URL: ${{ secrets.GOTIFY_URL }}
|
||||
GOTIFY_TOKEN: ${{ secrets.GOTIFY_TOKEN }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || vars.GATEKEEPER_PASSWORD }}
|
||||
@@ -32,7 +32,8 @@ COPY pnpm-lock.yaml package.json .npmrc* ./
|
||||
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
|
||||
--mount=type=secret,id=NPM_TOKEN \
|
||||
export NPM_TOKEN=$(cat /run/secrets/NPM_TOKEN 2>/dev/null || echo $NPM_TOKEN) && \
|
||||
pnpm install --frozen-lockfile
|
||||
pnpm store prune && \
|
||||
pnpm install --no-frozen-lockfile
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
17
app/(payload)/admin/[[...segments]]/page.tsx
Normal file
17
app/(payload)/admin/[[...segments]]/page.tsx
Normal file
@@ -0,0 +1,17 @@
|
||||
import configPromise from "@payload-config";
|
||||
import { RootPage } from "@payloadcms/next/views";
|
||||
import { importMap } from "../importMap";
|
||||
|
||||
type Args = {
|
||||
params: Promise<{
|
||||
segments: string[];
|
||||
}>;
|
||||
searchParams: Promise<{
|
||||
[key: string]: string | string[];
|
||||
}>;
|
||||
};
|
||||
|
||||
const Page = ({ params, searchParams }: Args) =>
|
||||
RootPage({ config: configPromise, importMap, params, searchParams });
|
||||
|
||||
export default Page;
|
||||
78
app/(payload)/admin/importMap.js
Normal file
78
app/(payload)/admin/importMap.js
Normal file
@@ -0,0 +1,78 @@
|
||||
import { RscEntryLexicalCell as RscEntryLexicalCell_44fe37237e0ebf4470c9990d8cb7b07e } from "@payloadcms/richtext-lexical/rsc";
|
||||
import { RscEntryLexicalField as RscEntryLexicalField_44fe37237e0ebf4470c9990d8cb7b07e } from "@payloadcms/richtext-lexical/rsc";
|
||||
import { LexicalDiffComponent as LexicalDiffComponent_44fe37237e0ebf4470c9990d8cb7b07e } from "@payloadcms/richtext-lexical/rsc";
|
||||
import { BlocksFeatureClient as BlocksFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { InlineToolbarFeatureClient as InlineToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { HorizontalRuleFeatureClient as HorizontalRuleFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { UploadFeatureClient as UploadFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { BlockquoteFeatureClient as BlockquoteFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { RelationshipFeatureClient as RelationshipFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { LinkFeatureClient as LinkFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { ChecklistFeatureClient as ChecklistFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { OrderedListFeatureClient as OrderedListFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { UnorderedListFeatureClient as UnorderedListFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { IndentFeatureClient as IndentFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { AlignFeatureClient as AlignFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { HeadingFeatureClient as HeadingFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { ParagraphFeatureClient as ParagraphFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { InlineCodeFeatureClient as InlineCodeFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { SuperscriptFeatureClient as SuperscriptFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { SubscriptFeatureClient as SubscriptFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { StrikethroughFeatureClient as StrikethroughFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { UnderlineFeatureClient as UnderlineFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { BoldFeatureClient as BoldFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { ItalicFeatureClient as ItalicFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from "@payloadcms/richtext-lexical/client";
|
||||
import { CollectionCards as CollectionCards_f9c02e79a4aed9a3924487c0cd4cafb1 } from "@payloadcms/next/rsc";
|
||||
|
||||
export const importMap = {
|
||||
"@payloadcms/richtext-lexical/rsc#RscEntryLexicalCell":
|
||||
RscEntryLexicalCell_44fe37237e0ebf4470c9990d8cb7b07e,
|
||||
"@payloadcms/richtext-lexical/rsc#RscEntryLexicalField":
|
||||
RscEntryLexicalField_44fe37237e0ebf4470c9990d8cb7b07e,
|
||||
"@payloadcms/richtext-lexical/rsc#LexicalDiffComponent":
|
||||
LexicalDiffComponent_44fe37237e0ebf4470c9990d8cb7b07e,
|
||||
"@payloadcms/richtext-lexical/client#BlocksFeatureClient":
|
||||
BlocksFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#InlineToolbarFeatureClient":
|
||||
InlineToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#HorizontalRuleFeatureClient":
|
||||
HorizontalRuleFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#UploadFeatureClient":
|
||||
UploadFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#BlockquoteFeatureClient":
|
||||
BlockquoteFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#RelationshipFeatureClient":
|
||||
RelationshipFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#LinkFeatureClient":
|
||||
LinkFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#ChecklistFeatureClient":
|
||||
ChecklistFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#OrderedListFeatureClient":
|
||||
OrderedListFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#UnorderedListFeatureClient":
|
||||
UnorderedListFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#IndentFeatureClient":
|
||||
IndentFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#AlignFeatureClient":
|
||||
AlignFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#HeadingFeatureClient":
|
||||
HeadingFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#ParagraphFeatureClient":
|
||||
ParagraphFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#InlineCodeFeatureClient":
|
||||
InlineCodeFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#SuperscriptFeatureClient":
|
||||
SuperscriptFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#SubscriptFeatureClient":
|
||||
SubscriptFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#StrikethroughFeatureClient":
|
||||
StrikethroughFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#UnderlineFeatureClient":
|
||||
UnderlineFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#BoldFeatureClient":
|
||||
BoldFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/richtext-lexical/client#ItalicFeatureClient":
|
||||
ItalicFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
"@payloadcms/next/rsc#CollectionCards":
|
||||
CollectionCards_f9c02e79a4aed9a3924487c0cd4cafb1,
|
||||
};
|
||||
1
app/(payload)/admin/importMap.ts
Normal file
1
app/(payload)/admin/importMap.ts
Normal file
@@ -0,0 +1 @@
|
||||
export const importMap = {};
|
||||
14
app/(payload)/api/[...slug]/route.ts
Normal file
14
app/(payload)/api/[...slug]/route.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import config from "@payload-config";
|
||||
import {
|
||||
REST_GET,
|
||||
REST_OPTIONS,
|
||||
REST_PATCH,
|
||||
REST_POST,
|
||||
REST_DELETE,
|
||||
} from "@payloadcms/next/routes";
|
||||
|
||||
export const GET = REST_GET(config);
|
||||
export const POST = REST_POST(config);
|
||||
export const DELETE = REST_DELETE(config);
|
||||
export const PATCH = REST_PATCH(config);
|
||||
export const OPTIONS = REST_OPTIONS(config);
|
||||
1
app/(payload)/custom.scss
Normal file
1
app/(payload)/custom.scss
Normal file
@@ -0,0 +1 @@
|
||||
/* Custom SCSS for Payload Admin Panel */
|
||||
36
app/(payload)/layout.tsx
Normal file
36
app/(payload)/layout.tsx
Normal file
@@ -0,0 +1,36 @@
|
||||
import configPromise from "@payload-config";
|
||||
import { RootLayout } from "@payloadcms/next/layouts";
|
||||
import React from "react";
|
||||
|
||||
import "@payloadcms/next/css";
|
||||
import "./custom.scss";
|
||||
import { handleServerFunctions } from "@payloadcms/next/layouts";
|
||||
import { importMap } from "./admin/importMap";
|
||||
|
||||
type Args = {
|
||||
children: React.ReactNode;
|
||||
};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const serverFunction: any = async function (args: any) {
|
||||
"use server";
|
||||
return handleServerFunctions({
|
||||
...args,
|
||||
config: configPromise,
|
||||
importMap,
|
||||
});
|
||||
};
|
||||
|
||||
const Layout = ({ children }: Args) => {
|
||||
return (
|
||||
<RootLayout
|
||||
config={configPromise}
|
||||
importMap={importMap}
|
||||
serverFunction={serverFunction}
|
||||
>
|
||||
{children}
|
||||
</RootLayout>
|
||||
);
|
||||
};
|
||||
|
||||
export default Layout;
|
||||
@@ -1,15 +1,19 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import * as nodemailer from "nodemailer";
|
||||
import directus, { ensureAuthenticated } from "@/lib/directus";
|
||||
import { createItem } from "@directus/sdk";
|
||||
import { getPayload } from "payload";
|
||||
import configPromise from "@payload-config";
|
||||
import { getServerAppServices } from "@/lib/services/create-services.server";
|
||||
import {
|
||||
render,
|
||||
ContactFormNotification,
|
||||
ConfirmationMessage,
|
||||
} from "@mintel/mail";
|
||||
import React from "react";
|
||||
|
||||
export async function POST(req: Request) {
|
||||
const services = getServerAppServices();
|
||||
const logger = services.logger.child({ action: "contact_submission" });
|
||||
|
||||
// Set analytics context from request headers for high-fidelity server-side tracking
|
||||
// This fulfills the "server-side via nextjs proxy" requirement
|
||||
if (services.analytics.setServerContext) {
|
||||
services.analytics.setServerContext({
|
||||
userAgent: req.headers.get("user-agent") || undefined,
|
||||
@@ -41,70 +45,87 @@ export async function POST(req: Request) {
|
||||
if (!message || message.length < 20) {
|
||||
return NextResponse.json({ error: "message_too_short" }, { status: 400 });
|
||||
}
|
||||
|
||||
if (message.length > 4000) {
|
||||
return NextResponse.json({ error: "message_too_long" }, { status: 400 });
|
||||
}
|
||||
|
||||
// 1. Directus save
|
||||
let directusSaved = false;
|
||||
const payload = await getPayload({ config: configPromise });
|
||||
|
||||
// 1. Payload save
|
||||
let payloadSaved = false;
|
||||
try {
|
||||
await ensureAuthenticated();
|
||||
await directus.request(
|
||||
createItem("contact_submissions", {
|
||||
await payload.create({
|
||||
collection: "form-submissions",
|
||||
data: {
|
||||
name,
|
||||
email,
|
||||
company: company || "Nicht angegeben",
|
||||
message,
|
||||
}),
|
||||
);
|
||||
logger.info("Contact submission saved to Directus");
|
||||
directusSaved = true;
|
||||
} catch (directusError) {
|
||||
const errorMessage =
|
||||
directusError instanceof Error
|
||||
? directusError.message
|
||||
: String(directusError);
|
||||
logger.error("Failed to save to Directus", {
|
||||
error: errorMessage,
|
||||
details: directusError,
|
||||
});
|
||||
services.errors.captureException(directusError, {
|
||||
phase: "directus_save",
|
||||
});
|
||||
// We still try to send the email even if Directus fails
|
||||
}
|
||||
|
||||
// 2. Email sending
|
||||
try {
|
||||
const { config } = await import("@/lib/config");
|
||||
const transporter = nodemailer.createTransport({
|
||||
host: config.mail.host,
|
||||
port: config.mail.port,
|
||||
secure: config.mail.port === 465,
|
||||
auth: {
|
||||
user: config.mail.user,
|
||||
pass: config.mail.pass,
|
||||
},
|
||||
});
|
||||
logger.info("Contact submission saved to PayloadCMS");
|
||||
payloadSaved = true;
|
||||
} catch (payloadError) {
|
||||
const errorMessage =
|
||||
payloadError instanceof Error
|
||||
? payloadError.message
|
||||
: String(payloadError);
|
||||
logger.error("Failed to save to Payload", {
|
||||
error: errorMessage,
|
||||
details: payloadError,
|
||||
});
|
||||
services.errors.captureException(payloadError, { phase: "payload_save" });
|
||||
}
|
||||
|
||||
await transporter.sendMail({
|
||||
// 2. Email sending via Payload (which uses configured nodemailer)
|
||||
try {
|
||||
const { config } = await import("@/lib/config");
|
||||
const clientName = "MB Grid Solutions";
|
||||
|
||||
// 2a. Notification to MB Grid
|
||||
const notificationHtml = await render(
|
||||
React.createElement(ContactFormNotification, {
|
||||
name,
|
||||
email,
|
||||
message,
|
||||
company,
|
||||
}),
|
||||
);
|
||||
|
||||
await payload.sendEmail({
|
||||
from: config.mail.from,
|
||||
to: config.mail.recipients.join(",") || "info@mb-grid-solutions.com",
|
||||
to:
|
||||
config.mail.recipients.join(",") ||
|
||||
process.env.CONTACT_RECIPIENT ||
|
||||
"info@mb-grid-solutions.com",
|
||||
replyTo: email,
|
||||
subject: `Kontaktanfrage von ${name}`,
|
||||
text: `
|
||||
Name: ${name}
|
||||
Firma: ${company || "Nicht angegeben"}
|
||||
E-Mail: ${email}
|
||||
Zeitpunkt: ${new Date().toISOString()}
|
||||
|
||||
Nachricht:
|
||||
${message}
|
||||
`,
|
||||
html: notificationHtml,
|
||||
});
|
||||
|
||||
logger.info("Email sent successfully");
|
||||
// 2b. Confirmation to the User
|
||||
try {
|
||||
const confirmationHtml = await render(
|
||||
React.createElement(ConfirmationMessage, {
|
||||
name,
|
||||
clientName,
|
||||
}),
|
||||
);
|
||||
|
||||
await payload.sendEmail({
|
||||
from: config.mail.from,
|
||||
to: email,
|
||||
subject: `Ihre Kontaktanfrage bei ${clientName}`,
|
||||
html: confirmationHtml,
|
||||
});
|
||||
} catch (confirmError) {
|
||||
logger.warn(
|
||||
"Failed to send confirmation email, but notification was sent",
|
||||
{ error: confirmError },
|
||||
);
|
||||
}
|
||||
|
||||
logger.info("Emails sent successfully");
|
||||
|
||||
// Notify success for important leads
|
||||
await services.notifications.notify({
|
||||
@@ -116,18 +137,16 @@ ${message}
|
||||
logger.error("SMTP Error", { error: smtpError });
|
||||
services.errors.captureException(smtpError, { phase: "smtp_send" });
|
||||
|
||||
// If Directus failed AND SMTP failed, then we really have a problem
|
||||
if (!directusSaved) {
|
||||
if (!payloadSaved) {
|
||||
return NextResponse.json(
|
||||
{ error: "Systemfehler (Speicherung und Versand fehlgeschlagen)" },
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
|
||||
// If Directus was successful, we tell the user "Ok" but we know internally it was a partial failure
|
||||
await services.notifications.notify({
|
||||
title: "🚨 SMTP Fehler (Kontaktformular)",
|
||||
message: `Anfrage von ${name} (${email}) in Directus gespeichert, aber E-Mail-Versand fehlgeschlagen: ${smtpError instanceof Error ? smtpError.message : String(smtpError)}`,
|
||||
message: `Anfrage von ${name} (${email}) in Payload gespeichert, aber E-Mail-Versand fehlgeschlagen: ${smtpError instanceof Error ? smtpError.message : String(smtpError)}`,
|
||||
priority: 8,
|
||||
});
|
||||
}
|
||||
|
||||
35
app/api/payload/migrate/route.ts
Normal file
35
app/api/payload/migrate/route.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { getPayload } from "payload";
|
||||
import configPromise from "@payload-config";
|
||||
import { getServerAppServices } from "@/lib/services/create-services.server";
|
||||
|
||||
export async function POST(req: Request) {
|
||||
const authHeader = req.headers.get("authorization");
|
||||
if (authHeader !== `Bearer ${process.env.PAYLOAD_SECRET}`) {
|
||||
return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
|
||||
}
|
||||
|
||||
const { logger } = getServerAppServices();
|
||||
|
||||
try {
|
||||
logger.info("Starting programmatic Payload migrations...");
|
||||
const payload = await getPayload({ config: configPromise });
|
||||
|
||||
await payload.db.migrate();
|
||||
|
||||
logger.info("Successfully executed Payload migrations.");
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: "Migrations executed successfully.",
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error("Failed to run migrations remotely", { error });
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
},
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -8,13 +8,12 @@ services:
|
||||
- .:/app
|
||||
environment:
|
||||
NODE_ENV: development
|
||||
# Docker Internal Communication
|
||||
DIRECTUS_URL: http://directus:8055
|
||||
DATABASE_URI: postgresql://directus:directus@mb-grid-db:5432/directus
|
||||
# Build / dependency installation
|
||||
NPM_TOKEN: ${NPM_TOKEN}
|
||||
CI: 'true'
|
||||
ports:
|
||||
- "3000:3000"
|
||||
# ports:
|
||||
# - "3000:3000"
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
# Clear all production-related TLS/Middleware settings for the main routers
|
||||
@@ -26,15 +25,3 @@ services:
|
||||
# Actually, gatekeeper is a separate service. We can keep it or ignore it.
|
||||
# But the app router normally points to gatekeeper middleware.
|
||||
# By clearing middlewares above, we bypass gatekeeper for local dev.
|
||||
|
||||
directus:
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid-solutions}-directus.entrypoints=web"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid-solutions}-directus.rule=Host(`${DIRECTUS_HOST:-cms.mb-grid-solutions.localhost}`)"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid-solutions}-directus.tls=false"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid-solutions}-directus.middlewares="
|
||||
ports:
|
||||
- "8055:8055"
|
||||
environment:
|
||||
PUBLIC_URL: http://${DIRECTUS_HOST:-cms.mb-grid-solutions.localhost}
|
||||
|
||||
@@ -1,38 +1,36 @@
|
||||
services:
|
||||
app:
|
||||
mb-grid-app:
|
||||
image: registry.infra.mintel.me/mintel/mb-grid-solutions:${IMAGE_TAG:-latest}
|
||||
restart: always
|
||||
networks:
|
||||
- default
|
||||
- infra
|
||||
env_file:
|
||||
- ${ENV_FILE:-.env}
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME}.rule=${TRAEFIK_RULE:-Host(`${TRAEFIK_HOST:-mb-grid-solutions.localhost}`)}"
|
||||
- "traefik.http.routers.${PROJECT_NAME}.entrypoints=websecure"
|
||||
- "traefik.http.routers.${PROJECT_NAME}.tls.certresolver=le"
|
||||
- "traefik.http.routers.${PROJECT_NAME}.tls=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME}.priority=1000"
|
||||
- "traefik.http.services.${PROJECT_NAME}.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.${PROJECT_NAME}.middlewares=${TRAEFIK_MIDDLEWARES:-${PROJECT_NAME}-auth,${PROJECT_NAME}-forward,compress}"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.rule=Host(`${TRAEFIK_HOST:-mb-grid-solutions.localhost}`)"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.entrypoints=websecure"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.tls.certresolver=le"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.tls=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.priority=1000"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.service=${PROJECT_NAME:-mb-grid}-app-svc"
|
||||
- "traefik.http.services.${PROJECT_NAME:-mb-grid}-app-svc.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}.middlewares=${TRAEFIK_MIDDLEWARES:-mb-grid-auth,mb-grid-forward,compress}"
|
||||
- "traefik.docker.network=infra"
|
||||
|
||||
# Public Router – paths that bypass Gatekeeper auth
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-public.rule=Host(`${TRAEFIK_HOST:-mb-grid-solutions.localhost}`) && PathRegexp(`^/([a-z]{2}/)?(health|login|gatekeeper|uploads|media|robots\\.txt|manifest\\.webmanifest|sitemap(-[0-9]+)?\\.xml|(.*/)?api/og(/.*)?|(.*/)?opengraph-image.*)`)"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-public.entrypoints=websecure"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-public.tls.certresolver=le"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-public.tls=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-public.service=${PROJECT_NAME:-mb-grid}-app-svc"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-public.priority=2000"
|
||||
|
||||
# Forwarded Headers (Protocol Normalization)
|
||||
- "traefik.http.middlewares.${PROJECT_NAME}-forward.headers.customrequestheaders.X-Forwarded-Proto=https"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME}-forward.headers.customrequestheaders.X-Forwarded-Ssl=on"
|
||||
|
||||
# Gatekeeper Router (Path-based)
|
||||
- "traefik.http.routers.${PROJECT_NAME}-gatekeeper.rule=(Host(`${TRAEFIK_HOST}`) && PathPrefix(`/gatekeeper`))"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-gatekeeper.entrypoints=websecure"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-gatekeeper.tls.certresolver=le"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-gatekeeper.tls=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-gatekeeper.priority=2000"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-gatekeeper.service=${PROJECT_NAME}-gatekeeper"
|
||||
|
||||
- "traefik.http.middlewares.${PROJECT_NAME}-auth.forwardauth.address=http://${PROJECT_NAME}-gatekeeper:3000/gatekeeper/api/verify"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME}-auth.forwardauth.trustForwardHeader=true"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME}-auth.forwardauth.authRequestHeaders=X-Forwarded-Host,X-Forwarded-Proto,X-Forwarded-For,Cookie"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME}-auth.forwardauth.authResponseHeaders=X-Auth-User"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-mb-grid}-forward.headers.customrequestheaders.X-Forwarded-Proto=https"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-mb-grid}-forward.headers.customrequestheaders.X-Forwarded-Ssl=on"
|
||||
- "traefik.http.middlewares.compress.compress=true"
|
||||
healthcheck:
|
||||
test: [ "CMD", "node", "-e", "fetch('http://127.0.0.1:3000/api/health').then(r => r.ok ? process.exit(0) : process.exit(1)).catch(() => process.exit(1))" ]
|
||||
interval: 10s
|
||||
@@ -40,15 +38,15 @@ services:
|
||||
retries: 5
|
||||
start_period: 30s
|
||||
|
||||
gatekeeper:
|
||||
image: registry.infra.mintel.me/mintel/gatekeeper:v1.7.12
|
||||
mb-grid-gatekeeper:
|
||||
image: registry.infra.mintel.me/mintel/gatekeeper:testing
|
||||
container_name: ${PROJECT_NAME:-mb-grid-solutions}-gatekeeper
|
||||
profiles: [ "gatekeeper" ]
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
infra:
|
||||
aliases:
|
||||
- ${PROJECT_NAME:-mb-grid-solutions}-gatekeeper
|
||||
- mb-grid-gatekeeper
|
||||
env_file:
|
||||
- ${ENV_FILE:-.env}
|
||||
environment:
|
||||
@@ -66,61 +64,28 @@ services:
|
||||
retries: 5
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.${PROJECT_NAME}-gatekeeper.loadbalancer.server.port=3000"
|
||||
- "traefik.http.services.mb-grid-gatekeeper-svc.loadbalancer.server.port=3000"
|
||||
|
||||
# Gatekeeper Verification Middleware
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-mb-grid}-auth.forwardauth.address=http://${PROJECT_NAME:-mb-grid}-gatekeeper:3000/gatekeeper/api/verify"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-mb-grid}-auth.forwardauth.trustForwardHeader=true"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-mb-grid}-auth.forwardauth.authRequestHeaders=X-Forwarded-Host,X-Forwarded-Proto,X-Forwarded-For,Cookie"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-mb-grid}-auth.forwardauth.authResponseHeaders=X-Auth-User"
|
||||
|
||||
# Gatekeeper Public Router (Login/Auth UI)
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-gatekeeper.rule=(Host(`${TRAEFIK_HOST:-mb-grid-solutions.localhost}`) && PathPrefix(`/gatekeeper`))"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-gatekeeper.entrypoints=websecure"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-gatekeeper.tls.certresolver=le"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-gatekeeper.tls=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-gatekeeper.priority=2000"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-mb-grid}-gatekeeper.service=${PROJECT_NAME:-mb-grid}-gatekeeper-svc"
|
||||
- "traefik.docker.network=infra"
|
||||
|
||||
directus:
|
||||
image: directus/directus:11
|
||||
restart: always
|
||||
networks:
|
||||
infra:
|
||||
aliases:
|
||||
- ${PROJECT_NAME:-mb-grid-solutions}-directus
|
||||
testing-backend:
|
||||
env_file:
|
||||
- ${ENV_FILE:-.env}
|
||||
environment:
|
||||
DB_CLIENT: 'pg'
|
||||
DB_HOST: 'directus-db'
|
||||
DB_PORT: '5432'
|
||||
WEBSOCKETS_ENABLED: 'true'
|
||||
PUBLIC_URL: ${DIRECTUS_URL}
|
||||
KEY: ${DIRECTUS_KEY:-01234567-89ab-cdef-0123-456789abcdef}
|
||||
SECRET: ${DIRECTUS_SECRET:-long-secret-for-signing-tokens-must-be-32-chars}
|
||||
ADMIN_EMAIL: ${DIRECTUS_ADMIN_EMAIL}
|
||||
ADMIN_PASSWORD: ${DIRECTUS_ADMIN_PASSWORD}
|
||||
DB_DATABASE: ${DIRECTUS_DB_NAME:-directus}
|
||||
DB_USER: ${DIRECTUS_DB_USER:-directus}
|
||||
DB_PASSWORD: ${DIRECTUS_DB_PASSWORD:-directus}
|
||||
# Telemetry & Performance
|
||||
LOGGER_LEVEL: ${LOG_LEVEL:-info}
|
||||
SENTRY_DSN: ${SENTRY_DSN}
|
||||
SENTRY_ENVIRONMENT: ${TARGET:-development}
|
||||
volumes:
|
||||
- ./directus/uploads:/directus/uploads
|
||||
- ./directus/extensions:/directus/extensions
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-directus.rule=Host(`${DIRECTUS_HOST:-cms.mb-grid-solutions.localhost}`)"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-directus.entrypoints=websecure"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-directus.tls.certresolver=le"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-directus.tls=true"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-directus.priority=1000"
|
||||
- "traefik.http.routers.${PROJECT_NAME}-directus.middlewares=${PROJECT_NAME}-forward,compress"
|
||||
- "traefik.http.services.${PROJECT_NAME}-directus.loadbalancer.server.port=8055"
|
||||
- "traefik.docker.network=infra"
|
||||
healthcheck:
|
||||
test: [ "CMD", "node", "-e", "fetch('http://localhost:8055/admin').then(r => r.ok ? process.exit(0) : process.exit(1)).catch(() => process.exit(1))" ]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 30s
|
||||
|
||||
directus-db:
|
||||
mb-grid-db:
|
||||
image: postgres:15-alpine
|
||||
restart: always
|
||||
networks:
|
||||
- testing-backend
|
||||
- default
|
||||
env_file:
|
||||
- ${ENV_FILE:-.env}
|
||||
environment:
|
||||
@@ -128,13 +93,13 @@ services:
|
||||
POSTGRES_USER: ${DIRECTUS_DB_USER:-directus}
|
||||
POSTGRES_PASSWORD: ${DIRECTUS_DB_PASSWORD:-directus}
|
||||
volumes:
|
||||
- directus-db-data:/var/lib/postgresql/data
|
||||
- mb-grid-db-data:/var/lib/postgresql/data
|
||||
|
||||
networks:
|
||||
default:
|
||||
name: mb-grid-solutions-internal
|
||||
infra:
|
||||
external: true
|
||||
testing-backend:
|
||||
internal: true
|
||||
|
||||
volumes:
|
||||
directus-db-data:
|
||||
mb-grid-db-data:
|
||||
|
||||
@@ -61,14 +61,6 @@ function createConfig() {
|
||||
from: env.MAIL_FROM,
|
||||
recipients: env.MAIL_RECIPIENTS,
|
||||
},
|
||||
directus: {
|
||||
url: env.DIRECTUS_URL,
|
||||
adminEmail: env.DIRECTUS_ADMIN_EMAIL,
|
||||
password: env.DIRECTUS_ADMIN_PASSWORD,
|
||||
token: env.DIRECTUS_API_TOKEN,
|
||||
internalUrl: env.INTERNAL_DIRECTUS_URL,
|
||||
proxyPath: "/cms",
|
||||
},
|
||||
notifications: {
|
||||
gotify: {
|
||||
url: env.GOTIFY_URL,
|
||||
@@ -131,9 +123,6 @@ export const config = {
|
||||
get mail() {
|
||||
return getConfig().mail;
|
||||
},
|
||||
get directus() {
|
||||
return getConfig().directus;
|
||||
},
|
||||
get notifications() {
|
||||
return getConfig().notifications;
|
||||
},
|
||||
@@ -176,12 +165,6 @@ export function getMaskedConfig() {
|
||||
from: c.mail.from,
|
||||
recipients: c.mail.recipients,
|
||||
},
|
||||
directus: {
|
||||
url: c.directus.url,
|
||||
adminEmail: mask(c.directus.adminEmail),
|
||||
password: mask(c.directus.password),
|
||||
token: mask(c.directus.token),
|
||||
},
|
||||
notifications: {
|
||||
gotify: {
|
||||
url: c.notifications.gotify.url,
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
import {
|
||||
createMintelDirectusClient,
|
||||
ensureDirectusAuthenticated,
|
||||
} from "@mintel/next-utils";
|
||||
import { getServerAppServices } from "./services/create-services.server";
|
||||
|
||||
// Initialize client using Mintel standards (environment-aware)
|
||||
const client = createMintelDirectusClient();
|
||||
|
||||
/**
|
||||
* Ensures the client is authenticated.
|
||||
* Standardized using @mintel/next-utils ensureDirectusAuthenticated.
|
||||
*/
|
||||
export async function ensureAuthenticated() {
|
||||
try {
|
||||
await ensureDirectusAuthenticated(client);
|
||||
} catch (e) {
|
||||
if (typeof window === "undefined") {
|
||||
getServerAppServices().errors.captureException(e, {
|
||||
phase: "directus_auth_standardized",
|
||||
});
|
||||
}
|
||||
console.error("Failed to authenticate with Directus:", e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
export default client;
|
||||
2
next-env.d.ts
vendored
2
next-env.d.ts
vendored
@@ -1,6 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
import "./.next/types/routes.d.ts";
|
||||
import "./.next/dev/types/routes.d.ts";
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { withPayload } from "@payloadcms/next/withPayload";
|
||||
import withMintelConfig from "@mintel/next-config";
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
@@ -33,4 +34,4 @@ const nextConfig = {
|
||||
},
|
||||
};
|
||||
|
||||
export default withMintelConfig(nextConfig);
|
||||
export default withPayload(withMintelConfig(nextConfig));
|
||||
|
||||
49
package.json
49
package.json
@@ -4,48 +4,58 @@
|
||||
"type": "module",
|
||||
"packageManager": "pnpm@10.18.3",
|
||||
"scripts": {
|
||||
"dev": "docker network create infra 2>/dev/null || true && echo '\\n🚀 Development Environment Starting...\\n\\n📱 App: http://mb-grid-solutions.localhost\\n🗄️ CMS: http://cms.mb-grid-solutions.localhost/admin\\n🚦 Traefik: http://localhost:8080\\n\\n(Press Ctrl+C to stop)\\n' && docker compose down --remove-orphans && docker compose up app directus directus-db",
|
||||
"dev": "docker network create infra 2>/dev/null || true && echo '\\n🚀 Development Environment Starting...\\n\\n📱 App: http://mb-grid-solutions.localhost\\n🗄️ CMS: http://mb-grid-solutions.localhost/admin\\n🚦 Traefik: http://localhost:8080\\n\\n(Press Ctrl+C to stop)\\n' && docker compose down --remove-orphans && docker compose up app mb-grid-db",
|
||||
"dev:next": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "eslint app components lib scripts",
|
||||
"test": "vitest",
|
||||
"prepare": "husky",
|
||||
"cms:bootstrap": "DIRECTUS_URL=http://localhost:8055 npx tsx --env-file=.env scripts/setup-directus.ts",
|
||||
"cms:push:staging": "./scripts/sync-directus.sh push staging",
|
||||
"cms:pull:staging": "./scripts/sync-directus.sh pull staging",
|
||||
"cms:push:testing": "./scripts/sync-directus.sh push testing",
|
||||
"cms:pull:testing": "./scripts/sync-directus.sh pull testing",
|
||||
"cms:push:prod": "./scripts/sync-directus.sh push production",
|
||||
"cms:pull:prod": "./scripts/sync-directus.sh pull production",
|
||||
"pagespeed:test": "mintel pagespeed test"
|
||||
"generate:types": "payload generate:types",
|
||||
"generate:importmap": "payload generate:importmap",
|
||||
"pagespeed:test": "mintel pagespeed test",
|
||||
"check:http": "tsx ./scripts/check-http.ts",
|
||||
"check:apis": "tsx ./scripts/check-apis.ts",
|
||||
"check:locale": "tsx ./scripts/check-locale.ts"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"@mintel/next-config": "^1.8.21",
|
||||
"@mintel/next-utils": "^1.8.21",
|
||||
"@aws-sdk/client-s3": "^3.999.0",
|
||||
"@mintel/mail": "^1.8.21",
|
||||
"@mintel/next-config": "^1.8.20",
|
||||
"@mintel/next-utils": "^1.8.20",
|
||||
"@payloadcms/db-postgres": "^3.77.0",
|
||||
"@payloadcms/email-nodemailer": "^3.77.0",
|
||||
"@payloadcms/next": "^3.77.0",
|
||||
"@payloadcms/richtext-lexical": "^3.77.0",
|
||||
"@payloadcms/storage-s3": "^3.77.0",
|
||||
"@payloadcms/ui": "^3.77.0",
|
||||
"@react-email/components": "^1.0.8",
|
||||
"@sentry/nextjs": "^10.38.0",
|
||||
"framer-motion": "^12.29.2",
|
||||
"graphql": "^16.13.0",
|
||||
"lucide-react": "^0.562.0",
|
||||
"next": "^16.1.6",
|
||||
"next-intl": "^4.8.2",
|
||||
"nodemailer": "^7.0.12",
|
||||
"payload": "^3.77.0",
|
||||
"pino": "^10.3.0",
|
||||
"react": "^19.2.4",
|
||||
"react-dom": "^19.2.4",
|
||||
"react-email": "^5.2.8",
|
||||
"sharp": "^0.34.5",
|
||||
"zod": "^3.24.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@commitlint/cli": "^20.4.0",
|
||||
"@commitlint/config-conventional": "^20.4.0",
|
||||
"@directus/sdk": "^21.0.0",
|
||||
"@mintel/cli": "^1.8.21",
|
||||
"@mintel/eslint-config": "^1.8.21",
|
||||
"@mintel/husky-config": "^1.8.21",
|
||||
"@mintel/tsconfig": "^1.8.21",
|
||||
"@mintel/cli": "^1.8.20",
|
||||
"@mintel/eslint-config": "^1.8.20",
|
||||
"@mintel/husky-config": "^1.8.20",
|
||||
"@mintel/tsconfig": "^1.8.20",
|
||||
"@tailwindcss/postcss": "^4.1.18",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
@@ -55,6 +65,8 @@
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.2",
|
||||
"autoprefixer": "^10.4.23",
|
||||
"axios": "^1.13.5",
|
||||
"cheerio": "^1.2.0",
|
||||
"eslint": "^8.57.1",
|
||||
"eslint-config-next": "15.1.6",
|
||||
"happy-dom": "^20.6.1",
|
||||
@@ -65,7 +77,12 @@
|
||||
"postcss": "^8.5.6",
|
||||
"prettier": "^3.5.0",
|
||||
"tailwindcss": "^4.1.18",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.9.3",
|
||||
"vitest": "^4.0.18"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "ssh://git@git.infra.mintel.me:2222/mmintel/mb-grid-solutions.com.git"
|
||||
}
|
||||
}
|
||||
|
||||
5879
pnpm-lock.yaml
generated
5879
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
136
scripts/check-apis.ts
Normal file
136
scripts/check-apis.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import axios from "axios";
|
||||
import dns from "dns";
|
||||
import { promisify } from "util";
|
||||
import url from "url";
|
||||
|
||||
const resolve4 = promisify(dns.resolve4);
|
||||
|
||||
// This script verifies that external logging and analytics APIs are reachable
|
||||
// from the deployment environment (which could be behind corporate firewalls or VPNs).
|
||||
|
||||
const umamiEndpoint =
|
||||
process.env.UMAMI_API_ENDPOINT || "https://analytics.infra.mintel.me";
|
||||
const sentryDsn = process.env.SENTRY_DSN || "";
|
||||
|
||||
async function checkUmami() {
|
||||
console.log(`\n🔍 Checking Umami Analytics API Availability...`);
|
||||
console.log(` Endpoint: ${umamiEndpoint}`);
|
||||
|
||||
try {
|
||||
// Umami usually exposes a /api/heartbeat or /api/health if we know the route.
|
||||
// Trying root or /api/auth/verify (which will give 401 but proves routing works).
|
||||
// A simple GET to the configured endpoint should return a 200 or 401, not a 5xx/timeout.
|
||||
const response = await axios.get(
|
||||
`${umamiEndpoint.replace(/\/$/, "")}/api/health`,
|
||||
{
|
||||
timeout: 5000,
|
||||
validateStatus: () => true, // Accept any status, we just want to know it's reachable and not 5xx
|
||||
},
|
||||
);
|
||||
|
||||
// As long as it's not a 502/503/504 Bad Gateway/Timeout, the service is "up" from our perspective
|
||||
if (response.status >= 500) {
|
||||
throw new Error(
|
||||
`Umami API responded with server error HTTP ${response.status}`,
|
||||
);
|
||||
}
|
||||
|
||||
console.log(` ✅ Umami Analytics is reachable (HTTP ${response.status})`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
// If /api/health fails completely, maybe try a DNS check as a fallback
|
||||
try {
|
||||
console.warn(
|
||||
` ⚠️ HTTP check failed, falling back to DNS resolution...`,
|
||||
);
|
||||
const umamiHost = new url.URL(umamiEndpoint).hostname;
|
||||
await resolve4(umamiHost);
|
||||
console.log(
|
||||
` ✅ Umami Analytics DNS resolved successfully (${umamiHost})`,
|
||||
);
|
||||
return true;
|
||||
} catch (error) {
|
||||
const dnsErr = error as Error;
|
||||
console.error(
|
||||
` ❌ CRITICAL: Umami Analytics is completely unreachable! ${err.message} | DNS: ${dnsErr.message}`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function checkSentry() {
|
||||
console.log(`\n🔍 Checking Glitchtip/Sentry Error Tracking Availability...`);
|
||||
|
||||
if (!sentryDsn) {
|
||||
console.log(` ℹ️ No SENTRY_DSN provided in environment. Skipping.`);
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsedDsn = new url.URL(sentryDsn);
|
||||
const host = parsedDsn.hostname;
|
||||
console.log(` Host: ${host}`);
|
||||
|
||||
// We do a DNS lookup to ensure the runner can actually resolve the tracking server
|
||||
const addresses = await resolve4(host);
|
||||
|
||||
if (addresses && addresses.length > 0) {
|
||||
console.log(` ✅ Glitchtip/Sentry domain resolved: ${addresses[0]}`);
|
||||
|
||||
// Optional: Quick TCP/HTTP check to the host root (Glitchtip usually runs on 80/443 root)
|
||||
try {
|
||||
const proto = parsedDsn.protocol || "https:";
|
||||
await axios.get(`${proto}//${host}/api/0/`, {
|
||||
timeout: 5000,
|
||||
validateStatus: () => true,
|
||||
});
|
||||
console.log(` ✅ Glitchtip/Sentry API root responds to HTTP.`);
|
||||
} catch {
|
||||
console.log(
|
||||
` ⚠️ Glitchtip/Sentry HTTP ping failed or timed out, but DNS is valid. Proceeding.`,
|
||||
);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
throw new Error("No IP addresses found for DSN host");
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
console.error(
|
||||
` ❌ CRITICAL: Glitchtip/Sentry DSN is invalid or hostname is unresolvable! ${err.message}`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log("🚀 Starting External API Connectivity Smoke Test...");
|
||||
|
||||
let hasErrors = false;
|
||||
|
||||
const umamiOk = await checkUmami();
|
||||
if (!umamiOk) hasErrors = true;
|
||||
|
||||
const sentryOk = await checkSentry();
|
||||
if (!sentryOk) hasErrors = true;
|
||||
|
||||
if (hasErrors) {
|
||||
console.error(
|
||||
`\n🚨 POST-DEPLOY CHECK FAILED: One or more critical external APIs are unreachable.`,
|
||||
);
|
||||
console.error(
|
||||
` This might mean the deployment environment lacks outbound internet access, `,
|
||||
);
|
||||
console.error(
|
||||
` DNS is misconfigured, or the upstream services are down.`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`\n🎉 SUCCESS: All required external APIs are reachable!`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
main();
|
||||
91
scripts/check-http.ts
Normal file
91
scripts/check-http.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import axios from "axios";
|
||||
import * as cheerio from "cheerio";
|
||||
|
||||
const targetUrl =
|
||||
process.argv[2] ||
|
||||
process.env.NEXT_PUBLIC_BASE_URL ||
|
||||
"http://localhost:3000";
|
||||
const gatekeeperPassword = process.env.GATEKEEPER_PASSWORD || "lassmichrein";
|
||||
|
||||
async function main() {
|
||||
console.log(`\n🚀 Starting HTTP Sitemap Validation for: ${targetUrl}\n`);
|
||||
|
||||
try {
|
||||
const sitemapUrl = `${targetUrl.replace(/\/$/, "")}/sitemap.xml`;
|
||||
console.log(`📥 Fetching sitemap from ${sitemapUrl}...`);
|
||||
|
||||
const response = await axios.get(sitemapUrl, {
|
||||
headers: { Cookie: `mintel_gatekeeper_session=${gatekeeperPassword}` },
|
||||
validateStatus: (status) => status < 400,
|
||||
});
|
||||
|
||||
const $ = cheerio.load(response.data, { xmlMode: true });
|
||||
let urls = $("url loc")
|
||||
.map((i, el) => $(el).text())
|
||||
.get();
|
||||
|
||||
const urlPattern = /https?:\/\/[^\/]+/;
|
||||
urls = [...new Set(urls)]
|
||||
.filter((u) => u.startsWith("http"))
|
||||
.map((u) => u.replace(urlPattern, targetUrl.replace(/\/$/, "")))
|
||||
.sort();
|
||||
|
||||
console.log(`✅ Found ${urls.length} target URLs in sitemap.`);
|
||||
|
||||
if (urls.length === 0) {
|
||||
console.error("❌ No URLs found in sitemap. Is the site up?");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`\n🔍 Verifying HTTP Status Codes (Limit: None)...`);
|
||||
let hasErrors = false;
|
||||
|
||||
// Run fetches sequentially to avoid overwhelming the server during CI
|
||||
for (let i = 0; i < urls.length; i++) {
|
||||
const u = urls[i];
|
||||
try {
|
||||
const res = await axios.get(u, {
|
||||
headers: {
|
||||
Cookie: `mintel_gatekeeper_session=${gatekeeperPassword}`,
|
||||
},
|
||||
validateStatus: null, // Don't throw on error status
|
||||
});
|
||||
|
||||
if (res.status >= 400) {
|
||||
console.error(`❌ ERROR ${res.status}: ${res.statusText} -> ${u}`);
|
||||
hasErrors = true;
|
||||
} else {
|
||||
console.log(`✅ OK ${res.status} -> ${u}`);
|
||||
}
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
console.error(`❌ NETWORK ERROR: ${err.message} -> ${u}`);
|
||||
hasErrors = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasErrors) {
|
||||
console.error(
|
||||
`\n❌ HTTP Sitemap Validation Failed. One or more pages returned an error.`,
|
||||
);
|
||||
process.exit(1);
|
||||
} else {
|
||||
console.log(
|
||||
`\n✨ Success: All ${urls.length} pages are healthy! (HTTP 200)`,
|
||||
);
|
||||
process.exit(0);
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
if (axios.isAxiosError(e) && e.response) {
|
||||
console.error(
|
||||
`\n❌ Critical Error during Sitemap Fetch: HTTP ${e.response.status} ${e.response.statusText}`,
|
||||
);
|
||||
} else {
|
||||
const errorMsg = e instanceof Error ? e.message : String(e);
|
||||
console.error(`\n❌ Critical Error during Sitemap Fetch: ${errorMsg}`);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
199
scripts/check-locale.ts
Normal file
199
scripts/check-locale.ts
Normal file
@@ -0,0 +1,199 @@
|
||||
import axios from "axios";
|
||||
import * as cheerio from "cheerio";
|
||||
|
||||
/**
|
||||
* Locale & Language Switcher Smoke Test
|
||||
*
|
||||
* For every URL in the sitemap:
|
||||
* 1. Fetches the page HTML
|
||||
* 2. Extracts <link rel="alternate" hreflang="..." href="..."> tags
|
||||
* 3. Verifies each alternate URL uses correctly translated slugs
|
||||
* 4. Verifies each alternate URL returns HTTP 200
|
||||
*/
|
||||
|
||||
const targetUrl =
|
||||
process.argv[2] ||
|
||||
process.env.NEXT_PUBLIC_BASE_URL ||
|
||||
"http://localhost:3000";
|
||||
const gatekeeperPassword = process.env.GATEKEEPER_PASSWORD || "lassmichrein";
|
||||
|
||||
// Expected slug translations: German key → English value
|
||||
const SLUG_MAP: Record<string, string> = {
|
||||
// Add translations if mb-grid translates URLs: e.g. produkte: 'products'
|
||||
};
|
||||
|
||||
// Reverse map: English → German
|
||||
const REVERSE_SLUG_MAP: Record<string, string> = Object.fromEntries(
|
||||
Object.entries(SLUG_MAP).map(([de, en]) => [en, de]),
|
||||
);
|
||||
|
||||
const headers = { Cookie: `mintel_gatekeeper_session=${gatekeeperPassword}` };
|
||||
|
||||
function getExpectedTranslation(
|
||||
sourcePath: string,
|
||||
sourceLocale: string,
|
||||
targetLocale: string,
|
||||
): string {
|
||||
const segments = sourcePath.split("/").filter(Boolean);
|
||||
// First segment is locale
|
||||
segments[0] = targetLocale;
|
||||
|
||||
const map = sourceLocale === "de" ? SLUG_MAP : REVERSE_SLUG_MAP;
|
||||
|
||||
return (
|
||||
"/" +
|
||||
segments
|
||||
.map((seg, i) => {
|
||||
if (i === 0) return seg; // locale
|
||||
return map[seg] || seg; // translate or keep (product names like n2x2y stay the same)
|
||||
})
|
||||
.join("/")
|
||||
);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log(`\n🌐 Starting Locale Smoke Test for: ${targetUrl}\n`);
|
||||
|
||||
// 1. Fetch sitemap
|
||||
const sitemapUrl = `${targetUrl.replace(/\/$/, "")}/sitemap.xml`;
|
||||
console.log(`📥 Fetching sitemap from ${sitemapUrl}...`);
|
||||
const sitemapRes = await axios.get(sitemapUrl, {
|
||||
headers,
|
||||
validateStatus: (s) => s < 400,
|
||||
});
|
||||
const $sitemap = cheerio.load(sitemapRes.data, { xmlMode: true });
|
||||
|
||||
let urls = $sitemap("url loc")
|
||||
.map((_i, el) => $sitemap(el).text())
|
||||
.get();
|
||||
|
||||
const urlPattern = /https?:\/\/[^/]+/;
|
||||
urls = [...new Set(urls)]
|
||||
.filter((u) => u.startsWith("http"))
|
||||
.map((u) => u.replace(urlPattern, targetUrl.replace(/\/$/, "")))
|
||||
.sort();
|
||||
|
||||
console.log(`✅ Found ${urls.length} URLs in sitemap.\n`);
|
||||
|
||||
let totalChecked = 0;
|
||||
let totalPassed = 0;
|
||||
let totalFailed = 0;
|
||||
const failures: string[] = [];
|
||||
|
||||
for (const url of urls) {
|
||||
const path = new URL(url).pathname;
|
||||
const locale = path.split("/")[1];
|
||||
if (!locale || !["de", "en"].includes(locale)) continue;
|
||||
|
||||
try {
|
||||
const res = await axios.get(url, { headers, validateStatus: null });
|
||||
if (res.status >= 400) continue; // Skip pages that are already broken (check-http catches those)
|
||||
|
||||
const $ = cheerio.load(res.data);
|
||||
|
||||
// Extract hreflang alternate links
|
||||
const alternates: { hreflang: string; href: string }[] = [];
|
||||
$('link[rel="alternate"][hreflang]').each((_i, el) => {
|
||||
const hreflang = $(el).attr("hreflang") || "";
|
||||
let href = $(el).attr("href") || "";
|
||||
if (href && hreflang && hreflang !== "x-default") {
|
||||
href = href.replace(urlPattern, targetUrl.replace(/\/$/, ""));
|
||||
alternates.push({ hreflang, href });
|
||||
}
|
||||
});
|
||||
|
||||
if (alternates.length === 0) {
|
||||
// Some pages may not have alternates, that's OK
|
||||
continue;
|
||||
}
|
||||
|
||||
totalChecked++;
|
||||
|
||||
// Validate each alternate
|
||||
let pageOk = true;
|
||||
|
||||
for (const alt of alternates) {
|
||||
if (alt.hreflang === locale) continue; // Same locale, skip
|
||||
|
||||
// 1. Check slug translation is correct
|
||||
const expectedPath = getExpectedTranslation(path, locale, alt.hreflang);
|
||||
const actualPath = new URL(alt.href).pathname;
|
||||
|
||||
if (actualPath !== expectedPath) {
|
||||
console.error(
|
||||
`❌ SLUG MISMATCH: ${path} → hreflang="${alt.hreflang}" expected ${expectedPath} but got ${actualPath}`,
|
||||
);
|
||||
failures.push(
|
||||
`Slug mismatch: ${path} → ${alt.hreflang}: expected ${expectedPath}, got ${actualPath}`,
|
||||
);
|
||||
pageOk = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
// 2. Check alternate URL returns 200
|
||||
try {
|
||||
const altRes = await axios.get(alt.href, {
|
||||
headers,
|
||||
validateStatus: null,
|
||||
maxRedirects: 5,
|
||||
});
|
||||
if (altRes.status >= 400) {
|
||||
console.error(
|
||||
`❌ BROKEN ALTERNATE: ${path} → ${alt.href} returned ${altRes.status}`,
|
||||
);
|
||||
failures.push(
|
||||
`Broken alternate: ${path} → ${alt.href} (${altRes.status})`,
|
||||
);
|
||||
pageOk = false;
|
||||
}
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
console.error(
|
||||
`❌ NETWORK ERROR: ${path} → ${alt.href}: ${err.message}`,
|
||||
);
|
||||
failures.push(`Network error: ${path} → ${alt.href}: ${err.message}`);
|
||||
pageOk = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (pageOk) {
|
||||
console.log(
|
||||
`✅ ${path} — alternates OK (${alternates
|
||||
.map((a) => a.hreflang)
|
||||
.filter((h) => h !== locale)
|
||||
.join(", ")})`,
|
||||
);
|
||||
totalPassed++;
|
||||
} else {
|
||||
totalFailed++;
|
||||
}
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
console.error(`❌ NETWORK ERROR fetching ${url}: ${err.message}`);
|
||||
totalFailed++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n${"─".repeat(60)}`);
|
||||
console.log(`📊 Locale Smoke Test Results:`);
|
||||
console.log(` Pages checked: ${totalChecked}`);
|
||||
console.log(` Passed: ${totalPassed}`);
|
||||
console.log(` Failed: ${totalFailed}`);
|
||||
|
||||
if (failures.length > 0) {
|
||||
console.log(`\n❌ Failures:`);
|
||||
failures.forEach((f) => console.log(` • ${f}`));
|
||||
console.log(`\n❌ Locale Smoke Test FAILED.`);
|
||||
process.exit(1);
|
||||
} else {
|
||||
console.log(
|
||||
`\n✨ All locale alternates are correctly translated and reachable!`,
|
||||
);
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(`\n❌ Critical error:`, err.message);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,150 +0,0 @@
|
||||
import {
|
||||
createMintelDirectusClient,
|
||||
ensureDirectusAuthenticated,
|
||||
} from "@mintel/next-utils";
|
||||
import { createCollection, createField, updateSettings } from "@directus/sdk";
|
||||
|
||||
const client = createMintelDirectusClient();
|
||||
|
||||
async function setupBranding() {
|
||||
const prjName = process.env.PROJECT_NAME || "MB Grid Solutions";
|
||||
const prjColor = process.env.PROJECT_COLOR || "#82ed20";
|
||||
|
||||
console.log(`🎨 Refining Directus Branding for ${prjName}...`);
|
||||
await ensureDirectusAuthenticated(client);
|
||||
|
||||
const cssInjection = `
|
||||
<style>
|
||||
@import url('https://fonts.googleapis.com/css2?family=Outfit:wght@400;500;600;700&display=swap');
|
||||
|
||||
body, .v-app { font-family: 'Outfit', sans-serif !important; }
|
||||
|
||||
.public-view .v-card {
|
||||
backdrop-filter: blur(20px);
|
||||
background: rgba(255, 255, 255, 0.9) !important;
|
||||
border-radius: 32px !important;
|
||||
box-shadow: 0 50px 100px -20px rgba(0, 0, 0, 0.4) !important;
|
||||
border: 1px solid rgba(255, 255, 255, 0.3) !important;
|
||||
}
|
||||
|
||||
.v-navigation-drawer { background: #000c24 !important; }
|
||||
|
||||
.v-list-item--active {
|
||||
color: ${prjColor} !important;
|
||||
background: rgba(130, 237, 32, 0.1) !important;
|
||||
}
|
||||
</style>
|
||||
<div style="font-family: 'Outfit', sans-serif; text-align: center; margin-top: 24px;">
|
||||
<p style="color: rgba(255,255,255,0.6); font-size: 11px; letter-spacing: 2px; margin-bottom: 4px; font-weight: 600; text-transform: uppercase;">Mintel Infrastructure Engine</p>
|
||||
<h1 style="color: #ffffff; font-size: 20px; font-weight: 700; margin: 0; letter-spacing: -0.5px;">${prjName.toUpperCase()} <span style="color: ${prjColor};">SYNC.</span></h1>
|
||||
</div>
|
||||
`;
|
||||
|
||||
try {
|
||||
await client.request(
|
||||
updateSettings({
|
||||
project_name: prjName,
|
||||
project_color: prjColor,
|
||||
public_note: cssInjection,
|
||||
module_bar_background: "#00081a",
|
||||
theme_light_overrides: {
|
||||
primary: prjColor,
|
||||
borderRadius: "12px",
|
||||
navigationBackground: "#000c24",
|
||||
navigationForeground: "#ffffff",
|
||||
moduleBarBackground: "#00081a",
|
||||
},
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} as any),
|
||||
);
|
||||
console.log("✨ Branding applied!");
|
||||
|
||||
await createCollectionAndFields();
|
||||
console.log("🏗️ Schema alignment complete!");
|
||||
} catch (error) {
|
||||
console.error("❌ Error during bootstrap:", error);
|
||||
}
|
||||
}
|
||||
|
||||
async function createCollectionAndFields() {
|
||||
const collectionName = "contact_submissions";
|
||||
|
||||
try {
|
||||
await client.request(
|
||||
createCollection({
|
||||
collection: collectionName,
|
||||
schema: {},
|
||||
meta: {
|
||||
icon: "contact_mail",
|
||||
display_template: "{{name}} <{{email}}>",
|
||||
group: null,
|
||||
sort: null,
|
||||
collapse: "open",
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
// Add ID field
|
||||
await client.request(
|
||||
createField(collectionName, {
|
||||
field: "id",
|
||||
type: "integer",
|
||||
meta: { hidden: true },
|
||||
schema: { is_primary_key: true, has_auto_increment: true },
|
||||
}),
|
||||
);
|
||||
console.log(`✅ Collection ${collectionName} created.`);
|
||||
} catch {
|
||||
console.log(`ℹ️ Collection ${collectionName} exists.`);
|
||||
}
|
||||
|
||||
const safeAddField = async (
|
||||
field: string,
|
||||
type: string,
|
||||
meta: Record<string, unknown> = {},
|
||||
) => {
|
||||
try {
|
||||
await client.request(createField(collectionName, { field, type, meta }));
|
||||
console.log(`✅ Field ${field} added.`);
|
||||
} catch {
|
||||
// Ignore if exists
|
||||
}
|
||||
};
|
||||
|
||||
await safeAddField("name", "string", {
|
||||
interface: "input",
|
||||
display: "raw",
|
||||
width: "half",
|
||||
});
|
||||
await safeAddField("email", "string", {
|
||||
interface: "input",
|
||||
display: "raw",
|
||||
width: "half",
|
||||
});
|
||||
await safeAddField("company", "string", {
|
||||
interface: "input",
|
||||
display: "raw",
|
||||
width: "half",
|
||||
});
|
||||
await safeAddField("message", "text", {
|
||||
interface: "textarea",
|
||||
display: "raw",
|
||||
width: "full",
|
||||
});
|
||||
await safeAddField("date_created", "timestamp", {
|
||||
interface: "datetime",
|
||||
special: ["date-created"],
|
||||
display: "datetime",
|
||||
display_options: { relative: true },
|
||||
width: "half",
|
||||
});
|
||||
}
|
||||
|
||||
setupBranding()
|
||||
.then(() => {
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("🚨 Fatal bootstrap error:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,131 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Configuration
|
||||
REMOTE_HOST="${SSH_HOST:-root@alpha.mintel.me}"
|
||||
ACTION=$1
|
||||
ENV=$2
|
||||
|
||||
# Help
|
||||
if [ -z "$ACTION" ] || [ -z "$ENV" ]; then
|
||||
echo "Usage: ./scripts/sync-directus.sh [push|pull] [testing|staging|production]"
|
||||
echo ""
|
||||
echo "Commands:"
|
||||
echo " push Sync LOCAL data -> REMOTE"
|
||||
echo " pull Sync REMOTE data -> LOCAL"
|
||||
echo ""
|
||||
echo "Environments:"
|
||||
echo " testing, staging, production"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Project Configuration (extracted from package.json and aligned with deploy.yml)
|
||||
PRJ_ID=$(jq -r .name package.json | sed 's/@mintel\///' | sed 's/\.com$//')
|
||||
REMOTE_DIR="/home/deploy/sites/${PRJ_ID}.com"
|
||||
|
||||
case $ENV in
|
||||
testing) PROJECT_NAME="${PRJ_ID}-testing"; ENV_FILE=".env.testing" ;;
|
||||
staging) PROJECT_NAME="${PRJ_ID}-staging"; ENV_FILE=".env.staging" ;;
|
||||
production) PROJECT_NAME="${PRJ_ID}-production"; ENV_FILE=".env.prod" ;;
|
||||
*) echo "❌ Invalid environment: $ENV"; exit 1 ;;
|
||||
esac
|
||||
|
||||
# DB Details (matching docker-compose defaults)
|
||||
DB_USER="directus"
|
||||
DB_NAME="directus"
|
||||
|
||||
echo "🔍 Detecting local database..."
|
||||
LOCAL_DB_CONTAINER=$(docker compose ps -q directus-db)
|
||||
|
||||
if [ -z "$LOCAL_DB_CONTAINER" ]; then
|
||||
# Check if it exists but is stopped
|
||||
LOCAL_DB_EXISTS=$(docker compose ps -a -q directus-db)
|
||||
if [ -n "$LOCAL_DB_EXISTS" ]; then
|
||||
echo "⏳ Local directus-db is stopped. Starting it..."
|
||||
docker compose up -d directus-db
|
||||
# Wait a few seconds for PG to be ready
|
||||
sleep 2
|
||||
LOCAL_DB_CONTAINER=$(docker compose ps -q directus-db)
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$LOCAL_DB_CONTAINER" ]; then
|
||||
echo "❌ Local directus-db container not found. Is it defined in docker-compose.yaml?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$ACTION" == "push" ]; then
|
||||
echo "🚀 Pushing LOCAL -> $ENV ($PROJECT_NAME)..."
|
||||
|
||||
# 1. DB Dump
|
||||
echo "📦 Dumping local database..."
|
||||
docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$DB_USER" --clean --if-exists --no-owner --no-privileges "$DB_NAME" > dump.sql
|
||||
|
||||
# 2. Upload Dump
|
||||
echo "📤 Uploading dump to remote server..."
|
||||
scp dump.sql "$REMOTE_HOST:$REMOTE_DIR/dump.sql"
|
||||
|
||||
# 3. Restore on Remote
|
||||
echo "🔄 Restoring dump on $ENV..."
|
||||
REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-db")
|
||||
|
||||
if [ -z "$REMOTE_DB_CONTAINER" ]; then
|
||||
echo "❌ Remote $ENV-db container not found!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "🧹 Wiping remote database schema..."
|
||||
ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'"
|
||||
|
||||
echo "⚡ Restoring database..."
|
||||
ssh "$REMOTE_HOST" "docker exec -i $REMOTE_DB_CONTAINER psql -U $DB_USER $DB_NAME < $REMOTE_DIR/dump.sql"
|
||||
|
||||
# 4. Sync Uploads
|
||||
echo "📁 Syncing uploads (Local -> $ENV)..."
|
||||
rsync -avz --progress ./directus/uploads/ "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/"
|
||||
|
||||
# Clean up
|
||||
rm dump.sql
|
||||
ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"
|
||||
|
||||
# 5. Restart Directus to trigger migrations and refresh schema cache
|
||||
echo "🔄 Restarting remote Directus to apply migrations..."
|
||||
ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME restart directus"
|
||||
|
||||
echo "✨ Push to $ENV complete!"
|
||||
|
||||
elif [ "$ACTION" == "pull" ]; then
|
||||
echo "📥 Pulling $ENV Data -> LOCAL..."
|
||||
|
||||
# 1. DB Dump on Remote
|
||||
echo "📦 Dumping remote database ($ENV)..."
|
||||
REMOTE_DB_CONTAINER=$(ssh "$REMOTE_HOST" "cd $REMOTE_DIR && docker compose -p $PROJECT_NAME ps -q directus-db")
|
||||
|
||||
if [ -z "$REMOTE_DB_CONTAINER" ]; then
|
||||
echo "❌ Remote $ENV-db container not found!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ssh "$REMOTE_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $DB_USER --clean --if-exists --no-owner --no-privileges $DB_NAME > $REMOTE_DIR/dump.sql"
|
||||
|
||||
# 2. Download Dump
|
||||
echo "📥 Downloading dump..."
|
||||
scp "$REMOTE_HOST:$REMOTE_DIR/dump.sql" dump.sql
|
||||
|
||||
# 3. Restore Locally
|
||||
echo "🧹 Wiping local database schema..."
|
||||
docker exec "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'
|
||||
|
||||
echo "⚡ Restoring database locally..."
|
||||
docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" < dump.sql
|
||||
|
||||
# 4. Sync Uploads
|
||||
echo "📁 Syncing uploads ($ENV -> Local)..."
|
||||
rsync -avz --progress "$REMOTE_HOST:$REMOTE_DIR/directus/uploads/" ./directus/uploads/
|
||||
|
||||
# Clean up
|
||||
rm dump.sql
|
||||
ssh "$REMOTE_HOST" "rm $REMOTE_DIR/dump.sql"
|
||||
|
||||
echo "✨ Pull to Local complete!"
|
||||
fi
|
||||
|
||||
86
scripts/upload-s3.ts
Normal file
86
scripts/upload-s3.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
const S3_ENDPOINT = process.env.S3_ENDPOINT;
|
||||
const S3_REGION = process.env.S3_REGION || "fsn1";
|
||||
const S3_BUCKET = process.env.S3_BUCKET;
|
||||
const S3_PREFIX = process.env.S3_PREFIX;
|
||||
const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;
|
||||
const S3_SECRET_KEY = process.env.S3_SECRET_KEY;
|
||||
|
||||
if (!S3_ENDPOINT || !S3_BUCKET || !S3_ACCESS_KEY || !S3_SECRET_KEY) {
|
||||
console.error("Missing S3 credentials in environment");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const s3Client = new S3Client({
|
||||
region: S3_REGION,
|
||||
endpoint: S3_ENDPOINT,
|
||||
credentials: {
|
||||
accessKeyId: S3_ACCESS_KEY,
|
||||
secretAccessKey: S3_SECRET_KEY,
|
||||
},
|
||||
forcePathStyle: true,
|
||||
});
|
||||
|
||||
async function uploadDirectory(dirPath: string, prefix: string) {
|
||||
const files = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||
|
||||
for (const file of files) {
|
||||
if (file.name === ".DS_Store" || file.name === ".gitkeep") continue;
|
||||
|
||||
const fullPath = path.join(dirPath, file.name);
|
||||
// Combine prefix with filename, ensuring no double slashes, e.g., mb-grid-solutions/media/filename.ext
|
||||
const s3Key = `${prefix}/${file.name}`.replace(/\/+/g, "/");
|
||||
|
||||
if (file.isDirectory()) {
|
||||
await uploadDirectory(fullPath, s3Key);
|
||||
} else {
|
||||
const fileContent = fs.readFileSync(fullPath);
|
||||
let contentType = "application/octet-stream";
|
||||
if (file.name.endsWith(".png")) contentType = "image/png";
|
||||
else if (file.name.endsWith(".jpg") || file.name.endsWith(".jpeg"))
|
||||
contentType = "image/jpeg";
|
||||
else if (file.name.endsWith(".svg")) contentType = "image/svg+xml";
|
||||
else if (file.name.endsWith(".webp")) contentType = "image/webp";
|
||||
else if (file.name.endsWith(".pdf")) contentType = "application/pdf";
|
||||
|
||||
try {
|
||||
await s3Client.send(
|
||||
new PutObjectCommand({
|
||||
Bucket: S3_BUCKET,
|
||||
Key: s3Key,
|
||||
Body: fileContent,
|
||||
ContentType: contentType,
|
||||
ACL: "public-read", // Hetzner requires public-read for public access usually
|
||||
}),
|
||||
);
|
||||
console.log(`✅ Uploaded ${file.name} to ${S3_BUCKET}/${s3Key}`);
|
||||
} catch (err) {
|
||||
console.error(`❌ Failed to upload ${file.name}:`, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const mediaDir = path.resolve(process.cwd(), "public/media");
|
||||
if (fs.existsSync(mediaDir)) {
|
||||
console.log("Uploading public/media...");
|
||||
// Media inside Payload CMS uses prefix/media usually, like mb-grid-solutions/media
|
||||
await uploadDirectory(mediaDir, `${S3_PREFIX}/media`);
|
||||
} else {
|
||||
console.log("No public/media directory found.");
|
||||
}
|
||||
|
||||
const assetsDir = path.resolve(process.cwd(), "public/assets");
|
||||
if (fs.existsSync(assetsDir)) {
|
||||
console.log("Uploading public/assets...");
|
||||
await uploadDirectory(assetsDir, `${S3_PREFIX}/assets`);
|
||||
} else {
|
||||
console.log("No public/assets directory found.");
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
1278
src/migrations/20260227_113637_v1_initial.json
Normal file
1278
src/migrations/20260227_113637_v1_initial.json
Normal file
File diff suppressed because it is too large
Load Diff
183
src/migrations/20260227_113637_v1_initial.ts
Normal file
183
src/migrations/20260227_113637_v1_initial.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { MigrateUpArgs, MigrateDownArgs, sql } from "@payloadcms/db-postgres";
|
||||
|
||||
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
|
||||
await db.execute(sql`
|
||||
CREATE TABLE "users_sessions" (
|
||||
"_order" integer NOT NULL,
|
||||
"_parent_id" integer NOT NULL,
|
||||
"id" varchar PRIMARY KEY NOT NULL,
|
||||
"created_at" timestamp(3) with time zone,
|
||||
"expires_at" timestamp(3) with time zone NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "users" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"email" varchar NOT NULL,
|
||||
"reset_password_token" varchar,
|
||||
"reset_password_expiration" timestamp(3) with time zone,
|
||||
"salt" varchar,
|
||||
"hash" varchar,
|
||||
"login_attempts" numeric DEFAULT 0,
|
||||
"lock_until" timestamp(3) with time zone
|
||||
);
|
||||
|
||||
CREATE TABLE "media" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"alt" varchar NOT NULL,
|
||||
"prefix" varchar DEFAULT 'mb-grid-solutions/media',
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"url" varchar,
|
||||
"thumbnail_u_r_l" varchar,
|
||||
"filename" varchar,
|
||||
"mime_type" varchar,
|
||||
"filesize" numeric,
|
||||
"width" numeric,
|
||||
"height" numeric,
|
||||
"focal_x" numeric,
|
||||
"focal_y" numeric,
|
||||
"sizes_thumbnail_url" varchar,
|
||||
"sizes_thumbnail_width" numeric,
|
||||
"sizes_thumbnail_height" numeric,
|
||||
"sizes_thumbnail_mime_type" varchar,
|
||||
"sizes_thumbnail_filesize" numeric,
|
||||
"sizes_thumbnail_filename" varchar,
|
||||
"sizes_card_url" varchar,
|
||||
"sizes_card_width" numeric,
|
||||
"sizes_card_height" numeric,
|
||||
"sizes_card_mime_type" varchar,
|
||||
"sizes_card_filesize" numeric,
|
||||
"sizes_card_filename" varchar
|
||||
);
|
||||
|
||||
CREATE TABLE "form_submissions" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"name" varchar NOT NULL,
|
||||
"email" varchar NOT NULL,
|
||||
"company" varchar,
|
||||
"message" varchar NOT NULL,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "pages" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"title" varchar NOT NULL,
|
||||
"slug" varchar NOT NULL,
|
||||
"content" jsonb NOT NULL,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "payload_kv" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"key" varchar NOT NULL,
|
||||
"data" jsonb NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "payload_locked_documents" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"global_slug" varchar,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "payload_locked_documents_rels" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"order" integer,
|
||||
"parent_id" integer NOT NULL,
|
||||
"path" varchar NOT NULL,
|
||||
"users_id" integer,
|
||||
"media_id" integer,
|
||||
"form_submissions_id" integer,
|
||||
"pages_id" integer
|
||||
);
|
||||
|
||||
CREATE TABLE "payload_preferences" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"key" varchar,
|
||||
"value" jsonb,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "payload_preferences_rels" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"order" integer,
|
||||
"parent_id" integer NOT NULL,
|
||||
"path" varchar NOT NULL,
|
||||
"users_id" integer
|
||||
);
|
||||
|
||||
CREATE TABLE "payload_migrations" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"name" varchar,
|
||||
"batch" numeric,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE "users_sessions" ADD CONSTRAINT "users_sessions_parent_id_fk" FOREIGN KEY ("_parent_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_parent_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."payload_locked_documents"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_users_fk" FOREIGN KEY ("users_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_media_fk" FOREIGN KEY ("media_id") REFERENCES "public"."media"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_form_submissions_fk" FOREIGN KEY ("form_submissions_id") REFERENCES "public"."form_submissions"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_pages_fk" FOREIGN KEY ("pages_id") REFERENCES "public"."pages"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_preferences_rels" ADD CONSTRAINT "payload_preferences_rels_parent_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."payload_preferences"("id") ON DELETE cascade ON UPDATE no action;
|
||||
ALTER TABLE "payload_preferences_rels" ADD CONSTRAINT "payload_preferences_rels_users_fk" FOREIGN KEY ("users_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
CREATE INDEX "users_sessions_order_idx" ON "users_sessions" USING btree ("_order");
|
||||
CREATE INDEX "users_sessions_parent_id_idx" ON "users_sessions" USING btree ("_parent_id");
|
||||
CREATE INDEX "users_updated_at_idx" ON "users" USING btree ("updated_at");
|
||||
CREATE INDEX "users_created_at_idx" ON "users" USING btree ("created_at");
|
||||
CREATE UNIQUE INDEX "users_email_idx" ON "users" USING btree ("email");
|
||||
CREATE INDEX "media_updated_at_idx" ON "media" USING btree ("updated_at");
|
||||
CREATE INDEX "media_created_at_idx" ON "media" USING btree ("created_at");
|
||||
CREATE UNIQUE INDEX "media_filename_idx" ON "media" USING btree ("filename");
|
||||
CREATE INDEX "media_sizes_thumbnail_sizes_thumbnail_filename_idx" ON "media" USING btree ("sizes_thumbnail_filename");
|
||||
CREATE INDEX "media_sizes_card_sizes_card_filename_idx" ON "media" USING btree ("sizes_card_filename");
|
||||
CREATE INDEX "form_submissions_updated_at_idx" ON "form_submissions" USING btree ("updated_at");
|
||||
CREATE INDEX "form_submissions_created_at_idx" ON "form_submissions" USING btree ("created_at");
|
||||
CREATE INDEX "pages_updated_at_idx" ON "pages" USING btree ("updated_at");
|
||||
CREATE INDEX "pages_created_at_idx" ON "pages" USING btree ("created_at");
|
||||
CREATE UNIQUE INDEX "payload_kv_key_idx" ON "payload_kv" USING btree ("key");
|
||||
CREATE INDEX "payload_locked_documents_global_slug_idx" ON "payload_locked_documents" USING btree ("global_slug");
|
||||
CREATE INDEX "payload_locked_documents_updated_at_idx" ON "payload_locked_documents" USING btree ("updated_at");
|
||||
CREATE INDEX "payload_locked_documents_created_at_idx" ON "payload_locked_documents" USING btree ("created_at");
|
||||
CREATE INDEX "payload_locked_documents_rels_order_idx" ON "payload_locked_documents_rels" USING btree ("order");
|
||||
CREATE INDEX "payload_locked_documents_rels_parent_idx" ON "payload_locked_documents_rels" USING btree ("parent_id");
|
||||
CREATE INDEX "payload_locked_documents_rels_path_idx" ON "payload_locked_documents_rels" USING btree ("path");
|
||||
CREATE INDEX "payload_locked_documents_rels_users_id_idx" ON "payload_locked_documents_rels" USING btree ("users_id");
|
||||
CREATE INDEX "payload_locked_documents_rels_media_id_idx" ON "payload_locked_documents_rels" USING btree ("media_id");
|
||||
CREATE INDEX "payload_locked_documents_rels_form_submissions_id_idx" ON "payload_locked_documents_rels" USING btree ("form_submissions_id");
|
||||
CREATE INDEX "payload_locked_documents_rels_pages_id_idx" ON "payload_locked_documents_rels" USING btree ("pages_id");
|
||||
CREATE INDEX "payload_preferences_key_idx" ON "payload_preferences" USING btree ("key");
|
||||
CREATE INDEX "payload_preferences_updated_at_idx" ON "payload_preferences" USING btree ("updated_at");
|
||||
CREATE INDEX "payload_preferences_created_at_idx" ON "payload_preferences" USING btree ("created_at");
|
||||
CREATE INDEX "payload_preferences_rels_order_idx" ON "payload_preferences_rels" USING btree ("order");
|
||||
CREATE INDEX "payload_preferences_rels_parent_idx" ON "payload_preferences_rels" USING btree ("parent_id");
|
||||
CREATE INDEX "payload_preferences_rels_path_idx" ON "payload_preferences_rels" USING btree ("path");
|
||||
CREATE INDEX "payload_preferences_rels_users_id_idx" ON "payload_preferences_rels" USING btree ("users_id");
|
||||
CREATE INDEX "payload_migrations_updated_at_idx" ON "payload_migrations" USING btree ("updated_at");
|
||||
CREATE INDEX "payload_migrations_created_at_idx" ON "payload_migrations" USING btree ("created_at");`);
|
||||
}
|
||||
|
||||
export async function down({
|
||||
db,
|
||||
payload,
|
||||
req,
|
||||
}: MigrateDownArgs): Promise<void> {
|
||||
await db.execute(sql`
|
||||
DROP TABLE "users_sessions" CASCADE;
|
||||
DROP TABLE "users" CASCADE;
|
||||
DROP TABLE "media" CASCADE;
|
||||
DROP TABLE "form_submissions" CASCADE;
|
||||
DROP TABLE "pages" CASCADE;
|
||||
DROP TABLE "payload_kv" CASCADE;
|
||||
DROP TABLE "payload_locked_documents" CASCADE;
|
||||
DROP TABLE "payload_locked_documents_rels" CASCADE;
|
||||
DROP TABLE "payload_preferences" CASCADE;
|
||||
DROP TABLE "payload_preferences_rels" CASCADE;
|
||||
DROP TABLE "payload_migrations" CASCADE;`);
|
||||
}
|
||||
9
src/migrations/index.ts
Normal file
9
src/migrations/index.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import * as migration_20260227_113637_v1_initial from "./20260227_113637_v1_initial";
|
||||
|
||||
export const migrations = [
|
||||
{
|
||||
up: migration_20260227_113637_v1_initial.up,
|
||||
down: migration_20260227_113637_v1_initial.down,
|
||||
name: "20260227_113637_v1_initial",
|
||||
},
|
||||
];
|
||||
4
src/payload/blocks/allBlocks.ts
Normal file
4
src/payload/blocks/allBlocks.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
import { Block } from "payload";
|
||||
|
||||
// Define any custom blocks you want here. Leaving empty for now.
|
||||
export const payloadBlocks: Block[] = [];
|
||||
44
src/payload/collections/FormSubmissions.ts
Normal file
44
src/payload/collections/FormSubmissions.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { CollectionConfig } from "payload";
|
||||
|
||||
export const FormSubmissions: CollectionConfig = {
|
||||
slug: "form-submissions",
|
||||
admin: {
|
||||
useAsTitle: "name",
|
||||
defaultColumns: ["name", "email", "company", "createdAt"],
|
||||
description: "Captured leads from Contact Form.",
|
||||
},
|
||||
access: {
|
||||
read: ({ req: { user } }) =>
|
||||
Boolean(user) || process.env.NODE_ENV === "development",
|
||||
update: ({ req: { user } }) =>
|
||||
Boolean(user) || process.env.NODE_ENV === "development",
|
||||
delete: ({ req: { user } }) =>
|
||||
Boolean(user) || process.env.NODE_ENV === "development",
|
||||
create: () => false, // Only system creates submissions
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: "name",
|
||||
type: "text",
|
||||
required: true,
|
||||
admin: { readOnly: true },
|
||||
},
|
||||
{
|
||||
name: "email",
|
||||
type: "email",
|
||||
required: true,
|
||||
admin: { readOnly: true },
|
||||
},
|
||||
{
|
||||
name: "company",
|
||||
type: "text",
|
||||
admin: { readOnly: true },
|
||||
},
|
||||
{
|
||||
name: "message",
|
||||
type: "textarea",
|
||||
required: true,
|
||||
admin: { readOnly: true },
|
||||
},
|
||||
],
|
||||
};
|
||||
42
src/payload/collections/Media.ts
Normal file
42
src/payload/collections/Media.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import type { CollectionConfig } from "payload";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const filename = fileURLToPath(import.meta.url);
|
||||
const dirname = path.dirname(filename);
|
||||
|
||||
export const Media: CollectionConfig = {
|
||||
slug: "media",
|
||||
admin: {
|
||||
useAsTitle: "filename",
|
||||
defaultColumns: ["filename", "alt", "updatedAt"],
|
||||
},
|
||||
access: {
|
||||
read: () => true, // Publicly readable
|
||||
},
|
||||
upload: {
|
||||
staticDir: path.resolve(dirname, "../../../public/media"),
|
||||
adminThumbnail: "thumbnail",
|
||||
imageSizes: [
|
||||
{
|
||||
name: "thumbnail",
|
||||
width: 400,
|
||||
height: 300,
|
||||
position: "centre",
|
||||
},
|
||||
{
|
||||
name: "card",
|
||||
width: 768,
|
||||
height: undefined,
|
||||
position: "centre",
|
||||
},
|
||||
],
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: "alt",
|
||||
type: "text",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
42
src/payload/collections/Pages.ts
Normal file
42
src/payload/collections/Pages.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { CollectionConfig } from "payload";
|
||||
import { lexicalEditor, BlocksFeature } from "@payloadcms/richtext-lexical";
|
||||
import { payloadBlocks } from "../blocks/allBlocks";
|
||||
|
||||
export const Pages: CollectionConfig = {
|
||||
slug: "pages",
|
||||
admin: {
|
||||
useAsTitle: "title",
|
||||
defaultColumns: ["title", "slug", "updatedAt"],
|
||||
},
|
||||
access: {
|
||||
read: () => true, // Publicly readable
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: "title",
|
||||
type: "text",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "slug",
|
||||
type: "text",
|
||||
required: true,
|
||||
admin: {
|
||||
position: "sidebar",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "content",
|
||||
type: "richText",
|
||||
editor: lexicalEditor({
|
||||
features: ({ defaultFeatures }) => [
|
||||
...defaultFeatures,
|
||||
BlocksFeature({
|
||||
blocks: payloadBlocks,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
12
src/payload/collections/Users.ts
Normal file
12
src/payload/collections/Users.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import type { CollectionConfig } from "payload";
|
||||
|
||||
export const Users: CollectionConfig = {
|
||||
slug: "users",
|
||||
admin: {
|
||||
useAsTitle: "email",
|
||||
},
|
||||
auth: true,
|
||||
fields: [
|
||||
// Email added by default
|
||||
],
|
||||
};
|
||||
453
src/payload/payload-types.ts
Normal file
453
src/payload/payload-types.ts
Normal file
@@ -0,0 +1,453 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* This file was automatically generated by Payload.
|
||||
* DO NOT MODIFY IT BY HAND. Instead, modify your source Payload config,
|
||||
* and re-run `payload generate:types` to regenerate this file.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Supported timezones in IANA format.
|
||||
*
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "supportedTimezones".
|
||||
*/
|
||||
export type SupportedTimezones =
|
||||
| "Pacific/Midway"
|
||||
| "Pacific/Niue"
|
||||
| "Pacific/Honolulu"
|
||||
| "Pacific/Rarotonga"
|
||||
| "America/Anchorage"
|
||||
| "Pacific/Gambier"
|
||||
| "America/Los_Angeles"
|
||||
| "America/Tijuana"
|
||||
| "America/Denver"
|
||||
| "America/Phoenix"
|
||||
| "America/Chicago"
|
||||
| "America/Guatemala"
|
||||
| "America/New_York"
|
||||
| "America/Bogota"
|
||||
| "America/Caracas"
|
||||
| "America/Santiago"
|
||||
| "America/Buenos_Aires"
|
||||
| "America/Sao_Paulo"
|
||||
| "Atlantic/South_Georgia"
|
||||
| "Atlantic/Azores"
|
||||
| "Atlantic/Cape_Verde"
|
||||
| "Europe/London"
|
||||
| "Europe/Berlin"
|
||||
| "Africa/Lagos"
|
||||
| "Europe/Athens"
|
||||
| "Africa/Cairo"
|
||||
| "Europe/Moscow"
|
||||
| "Asia/Riyadh"
|
||||
| "Asia/Dubai"
|
||||
| "Asia/Baku"
|
||||
| "Asia/Karachi"
|
||||
| "Asia/Tashkent"
|
||||
| "Asia/Calcutta"
|
||||
| "Asia/Dhaka"
|
||||
| "Asia/Almaty"
|
||||
| "Asia/Jakarta"
|
||||
| "Asia/Bangkok"
|
||||
| "Asia/Shanghai"
|
||||
| "Asia/Singapore"
|
||||
| "Asia/Tokyo"
|
||||
| "Asia/Seoul"
|
||||
| "Australia/Brisbane"
|
||||
| "Australia/Sydney"
|
||||
| "Pacific/Guam"
|
||||
| "Pacific/Noumea"
|
||||
| "Pacific/Auckland"
|
||||
| "Pacific/Fiji";
|
||||
|
||||
export interface Config {
|
||||
auth: {
|
||||
users: UserAuthOperations;
|
||||
};
|
||||
blocks: {};
|
||||
collections: {
|
||||
users: User;
|
||||
media: Media;
|
||||
"form-submissions": FormSubmission;
|
||||
pages: Page;
|
||||
"payload-kv": PayloadKv;
|
||||
"payload-locked-documents": PayloadLockedDocument;
|
||||
"payload-preferences": PayloadPreference;
|
||||
"payload-migrations": PayloadMigration;
|
||||
};
|
||||
collectionsJoins: {};
|
||||
collectionsSelect: {
|
||||
users: UsersSelect<false> | UsersSelect<true>;
|
||||
media: MediaSelect<false> | MediaSelect<true>;
|
||||
"form-submissions":
|
||||
| FormSubmissionsSelect<false>
|
||||
| FormSubmissionsSelect<true>;
|
||||
pages: PagesSelect<false> | PagesSelect<true>;
|
||||
"payload-kv": PayloadKvSelect<false> | PayloadKvSelect<true>;
|
||||
"payload-locked-documents":
|
||||
| PayloadLockedDocumentsSelect<false>
|
||||
| PayloadLockedDocumentsSelect<true>;
|
||||
"payload-preferences":
|
||||
| PayloadPreferencesSelect<false>
|
||||
| PayloadPreferencesSelect<true>;
|
||||
"payload-migrations":
|
||||
| PayloadMigrationsSelect<false>
|
||||
| PayloadMigrationsSelect<true>;
|
||||
};
|
||||
db: {
|
||||
defaultIDType: number;
|
||||
};
|
||||
fallbackLocale: null;
|
||||
globals: {};
|
||||
globalsSelect: {};
|
||||
locale: null;
|
||||
user: User;
|
||||
jobs: {
|
||||
tasks: unknown;
|
||||
workflows: unknown;
|
||||
};
|
||||
}
|
||||
export interface UserAuthOperations {
|
||||
forgotPassword: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
login: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
registerFirstUser: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
unlock: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "users".
|
||||
*/
|
||||
export interface User {
|
||||
id: number;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
email: string;
|
||||
resetPasswordToken?: string | null;
|
||||
resetPasswordExpiration?: string | null;
|
||||
salt?: string | null;
|
||||
hash?: string | null;
|
||||
loginAttempts?: number | null;
|
||||
lockUntil?: string | null;
|
||||
sessions?:
|
||||
| {
|
||||
id: string;
|
||||
createdAt?: string | null;
|
||||
expiresAt: string;
|
||||
}[]
|
||||
| null;
|
||||
password?: string | null;
|
||||
collection: "users";
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "media".
|
||||
*/
|
||||
export interface Media {
|
||||
id: number;
|
||||
alt: string;
|
||||
prefix?: string | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
url?: string | null;
|
||||
thumbnailURL?: string | null;
|
||||
filename?: string | null;
|
||||
mimeType?: string | null;
|
||||
filesize?: number | null;
|
||||
width?: number | null;
|
||||
height?: number | null;
|
||||
focalX?: number | null;
|
||||
focalY?: number | null;
|
||||
sizes?: {
|
||||
thumbnail?: {
|
||||
url?: string | null;
|
||||
width?: number | null;
|
||||
height?: number | null;
|
||||
mimeType?: string | null;
|
||||
filesize?: number | null;
|
||||
filename?: string | null;
|
||||
};
|
||||
card?: {
|
||||
url?: string | null;
|
||||
width?: number | null;
|
||||
height?: number | null;
|
||||
mimeType?: string | null;
|
||||
filesize?: number | null;
|
||||
filename?: string | null;
|
||||
};
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Captured leads from Contact Form.
|
||||
*
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "form-submissions".
|
||||
*/
|
||||
export interface FormSubmission {
|
||||
id: number;
|
||||
name: string;
|
||||
email: string;
|
||||
company?: string | null;
|
||||
message: string;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "pages".
|
||||
*/
|
||||
export interface Page {
|
||||
id: number;
|
||||
title: string;
|
||||
slug: string;
|
||||
content: {
|
||||
root: {
|
||||
type: string;
|
||||
children: {
|
||||
type: any;
|
||||
version: number;
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
direction: ("ltr" | "rtl") | null;
|
||||
format: "left" | "start" | "center" | "right" | "end" | "justify" | "";
|
||||
indent: number;
|
||||
version: number;
|
||||
};
|
||||
[k: string]: unknown;
|
||||
};
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-kv".
|
||||
*/
|
||||
export interface PayloadKv {
|
||||
id: number;
|
||||
key: string;
|
||||
data:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-locked-documents".
|
||||
*/
|
||||
export interface PayloadLockedDocument {
|
||||
id: number;
|
||||
document?:
|
||||
| ({
|
||||
relationTo: "users";
|
||||
value: number | User;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: "media";
|
||||
value: number | Media;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: "form-submissions";
|
||||
value: number | FormSubmission;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: "pages";
|
||||
value: number | Page;
|
||||
} | null);
|
||||
globalSlug?: string | null;
|
||||
user: {
|
||||
relationTo: "users";
|
||||
value: number | User;
|
||||
};
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-preferences".
|
||||
*/
|
||||
export interface PayloadPreference {
|
||||
id: number;
|
||||
user: {
|
||||
relationTo: "users";
|
||||
value: number | User;
|
||||
};
|
||||
key?: string | null;
|
||||
value?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-migrations".
|
||||
*/
|
||||
export interface PayloadMigration {
|
||||
id: number;
|
||||
name?: string | null;
|
||||
batch?: number | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "users_select".
|
||||
*/
|
||||
export interface UsersSelect<T extends boolean = true> {
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
email?: T;
|
||||
resetPasswordToken?: T;
|
||||
resetPasswordExpiration?: T;
|
||||
salt?: T;
|
||||
hash?: T;
|
||||
loginAttempts?: T;
|
||||
lockUntil?: T;
|
||||
sessions?:
|
||||
| T
|
||||
| {
|
||||
id?: T;
|
||||
createdAt?: T;
|
||||
expiresAt?: T;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "media_select".
|
||||
*/
|
||||
export interface MediaSelect<T extends boolean = true> {
|
||||
alt?: T;
|
||||
prefix?: T;
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
url?: T;
|
||||
thumbnailURL?: T;
|
||||
filename?: T;
|
||||
mimeType?: T;
|
||||
filesize?: T;
|
||||
width?: T;
|
||||
height?: T;
|
||||
focalX?: T;
|
||||
focalY?: T;
|
||||
sizes?:
|
||||
| T
|
||||
| {
|
||||
thumbnail?:
|
||||
| T
|
||||
| {
|
||||
url?: T;
|
||||
width?: T;
|
||||
height?: T;
|
||||
mimeType?: T;
|
||||
filesize?: T;
|
||||
filename?: T;
|
||||
};
|
||||
card?:
|
||||
| T
|
||||
| {
|
||||
url?: T;
|
||||
width?: T;
|
||||
height?: T;
|
||||
mimeType?: T;
|
||||
filesize?: T;
|
||||
filename?: T;
|
||||
};
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "form-submissions_select".
|
||||
*/
|
||||
export interface FormSubmissionsSelect<T extends boolean = true> {
|
||||
name?: T;
|
||||
email?: T;
|
||||
company?: T;
|
||||
message?: T;
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "pages_select".
|
||||
*/
|
||||
export interface PagesSelect<T extends boolean = true> {
|
||||
title?: T;
|
||||
slug?: T;
|
||||
content?: T;
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-kv_select".
|
||||
*/
|
||||
export interface PayloadKvSelect<T extends boolean = true> {
|
||||
key?: T;
|
||||
data?: T;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-locked-documents_select".
|
||||
*/
|
||||
export interface PayloadLockedDocumentsSelect<T extends boolean = true> {
|
||||
document?: T;
|
||||
globalSlug?: T;
|
||||
user?: T;
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-preferences_select".
|
||||
*/
|
||||
export interface PayloadPreferencesSelect<T extends boolean = true> {
|
||||
user?: T;
|
||||
key?: T;
|
||||
value?: T;
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "payload-migrations_select".
|
||||
*/
|
||||
export interface PayloadMigrationsSelect<T extends boolean = true> {
|
||||
name?: T;
|
||||
batch?: T;
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "auth".
|
||||
*/
|
||||
export interface Auth {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
|
||||
// Register this project's generated Config with Payload's module
// augmentation so `getPayload()` and the Local API are typed against the
// collections declared above.
declare module "payload" {
export interface GeneratedTypes extends Config {}
}
|
||||
92
src/payload/payload.config.ts
Normal file
92
src/payload/payload.config.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import { buildConfig } from "payload";
|
||||
import { postgresAdapter } from "@payloadcms/db-postgres";
|
||||
import { lexicalEditor, BlocksFeature } from "@payloadcms/richtext-lexical";
|
||||
import { nodemailerAdapter } from "@payloadcms/email-nodemailer";
|
||||
import { s3Storage } from "@payloadcms/storage-s3";
|
||||
import sharp from "sharp";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import { payloadBlocks } from "./blocks/allBlocks";
|
||||
|
||||
import { Users } from "./collections/Users";
|
||||
import { Media } from "./collections/Media";
|
||||
import { FormSubmissions } from "./collections/FormSubmissions";
|
||||
import { Pages } from "./collections/Pages";
|
||||
|
||||
const filename = fileURLToPath(import.meta.url);
|
||||
const dirname = path.dirname(filename);
|
||||
|
||||
export default buildConfig({
|
||||
admin: {
|
||||
user: Users.slug,
|
||||
importMap: {
|
||||
baseDir: path.resolve(dirname),
|
||||
},
|
||||
meta: {
|
||||
titleSuffix: " – MB Grid Solutions",
|
||||
},
|
||||
},
|
||||
collections: [Users, Media, FormSubmissions, Pages],
|
||||
editor: lexicalEditor({
|
||||
features: ({ defaultFeatures }) => [
|
||||
...defaultFeatures,
|
||||
BlocksFeature({
|
||||
blocks: payloadBlocks,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
secret: process.env.PAYLOAD_SECRET || "fallback-secret-for-dev",
|
||||
typescript: {
|
||||
outputFile: path.resolve(dirname, "payload-types.ts"),
|
||||
},
|
||||
db: postgresAdapter({
|
||||
pool: {
|
||||
connectionString:
|
||||
process.env.DATABASE_URI ||
|
||||
process.env.POSTGRES_URI ||
|
||||
`postgresql://${process.env.DIRECTUS_DB_USER || "directus"}:${process.env.DIRECTUS_DB_PASSWORD || "directus"}@127.0.0.1:5432/${process.env.DIRECTUS_DB_NAME || "directus"}`,
|
||||
},
|
||||
}),
|
||||
...(process.env.SMTP_HOST
|
||||
? {
|
||||
email: nodemailerAdapter({
|
||||
defaultFromAddress:
|
||||
process.env.SMTP_FROM || "info@mb-grid-solutions.com",
|
||||
defaultFromName: "MB Grid Solutions CMS",
|
||||
transportOptions: {
|
||||
host: process.env.SMTP_HOST,
|
||||
port: parseInt(process.env.SMTP_PORT || "587"),
|
||||
auth: {
|
||||
user: process.env.SMTP_USER,
|
||||
pass: process.env.SMTP_PASS,
|
||||
},
|
||||
secure: process.env.SMTP_SECURE === "true",
|
||||
},
|
||||
}),
|
||||
}
|
||||
: {}),
|
||||
sharp,
|
||||
plugins: [
|
||||
...(process.env.S3_ENDPOINT
|
||||
? [
|
||||
s3Storage({
|
||||
collections: {
|
||||
media: {
|
||||
prefix: `${process.env.S3_PREFIX || "mb-grid-solutions"}/media`,
|
||||
},
|
||||
},
|
||||
bucket: process.env.S3_BUCKET || "",
|
||||
config: {
|
||||
credentials: {
|
||||
accessKeyId: process.env.S3_ACCESS_KEY || "",
|
||||
secretAccessKey: process.env.S3_SECRET_KEY || "",
|
||||
},
|
||||
region: process.env.S3_REGION || "fsn1",
|
||||
endpoint: process.env.S3_ENDPOINT,
|
||||
forcePathStyle: true,
|
||||
},
|
||||
}),
|
||||
]
|
||||
: []),
|
||||
],
|
||||
});
|
||||
1
tests/__mocks__/payload-config.ts
Normal file
1
tests/__mocks__/payload-config.ts
Normal file
@@ -0,0 +1 @@
|
||||
// Minimal stand-in for `@payload-config` so unit tests never load the real
// Payload config (which would pull in DB/S3/email adapters).
const mockPayloadConfig = {};
export default mockPayloadConfig;
|
||||
168
tests/api-contact.test.ts
Normal file
168
tests/api-contact.test.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||
|
||||
// Mock Payload CMS
|
||||
const { mockCreate, mockSendEmail } = vi.hoisted(() => ({
|
||||
mockCreate: vi.fn(),
|
||||
mockSendEmail: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("payload", () => ({
|
||||
getPayload: vi.fn().mockResolvedValue({
|
||||
create: mockCreate,
|
||||
sendEmail: mockSendEmail,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock Email Template renders
|
||||
vi.mock("@mintel/mail", () => ({
|
||||
render: vi.fn().mockResolvedValue("<html>Mocked Email HTML</html>"),
|
||||
ContactFormNotification: () => "ContactFormNotification",
|
||||
ConfirmationMessage: () => "ConfirmationMessage",
|
||||
}));
|
||||
|
||||
// Mock Notifications and Analytics
|
||||
const { mockNotify, mockTrack, mockCaptureException } = vi.hoisted(() => ({
|
||||
mockNotify: vi.fn(),
|
||||
mockTrack: vi.fn(),
|
||||
mockCaptureException: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/services/create-services.server", () => ({
|
||||
getServerAppServices: () => ({
|
||||
logger: {
|
||||
child: () => ({
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
}),
|
||||
},
|
||||
analytics: {
|
||||
setServerContext: vi.fn(),
|
||||
track: mockTrack,
|
||||
},
|
||||
notifications: {
|
||||
notify: mockNotify,
|
||||
},
|
||||
errors: {
|
||||
captureException: mockCaptureException,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
// Import the route handler we want to test
|
||||
import { POST } from "../app/api/contact/route";
|
||||
import { NextResponse } from "next/server";
|
||||
import type { Mock } from "vitest";
|
||||
|
||||
describe("Contact API Integration", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
(NextResponse.json as Mock).mockImplementation((body: any, init?: any) => ({
|
||||
status: init?.status || 200,
|
||||
json: async () => body,
|
||||
}));
|
||||
});
|
||||
|
||||
it("should validate and decline empty or short messages", async () => {
|
||||
const req = new Request("http://localhost/api/contact", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
message: "too short",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await POST(req);
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.error).toBe("message_too_short");
|
||||
|
||||
// Ensure payload and email were NOT called
|
||||
expect(mockCreate).not.toHaveBeenCalled();
|
||||
expect(mockSendEmail).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should catch honeypot submissions", async () => {
|
||||
const req = new Request("http://localhost/api/contact", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
name: "Spam Bot",
|
||||
email: "spam@example.com",
|
||||
message: "This is a very long spam message that passes length checks.",
|
||||
website: "http://spam.com", // Honeypot filled
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await POST(req);
|
||||
// Honeypot returns 200 OK so the bot thinks it succeeded
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
// But it actually does NOTHING internally
|
||||
expect(mockCreate).not.toHaveBeenCalled();
|
||||
expect(mockSendEmail).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should successfully save to Payload and send emails", async () => {
|
||||
const req = new Request("http://localhost/api/contact", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"user-agent": "vitest",
|
||||
"x-forwarded-for": "127.0.0.1",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: "Jane Doe",
|
||||
email: "jane@example.com",
|
||||
company: "Jane Tech",
|
||||
message:
|
||||
"Hello, I am interested in exploring your high-voltage grid solutions.",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await POST(req);
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.message).toBe("Ok");
|
||||
|
||||
// 1. Verify Payload creation
|
||||
expect(mockCreate).toHaveBeenCalledTimes(1);
|
||||
expect(mockCreate).toHaveBeenCalledWith({
|
||||
collection: "form-submissions",
|
||||
data: {
|
||||
name: "Jane Doe",
|
||||
email: "jane@example.com",
|
||||
company: "Jane Tech",
|
||||
message:
|
||||
"Hello, I am interested in exploring your high-voltage grid solutions.",
|
||||
},
|
||||
});
|
||||
|
||||
// 2. Verify Email Sending
|
||||
// Note: sendEmail is called twice (Notification + User Confirmation)
|
||||
expect(mockSendEmail).toHaveBeenCalledTimes(2);
|
||||
|
||||
expect(mockSendEmail).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
expect.objectContaining({
|
||||
subject: "Kontaktanfrage von Jane Doe",
|
||||
replyTo: "jane@example.com",
|
||||
}),
|
||||
);
|
||||
|
||||
expect(mockSendEmail).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
expect.objectContaining({
|
||||
to: "jane@example.com",
|
||||
subject: "Ihre Kontaktanfrage bei MB Grid Solutions",
|
||||
}),
|
||||
);
|
||||
|
||||
// 3. Verify notification and analytics
|
||||
expect(mockNotify).toHaveBeenCalledTimes(1);
|
||||
expect(mockTrack).toHaveBeenCalledWith("contact-form-success", {
|
||||
has_company: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -3,7 +3,8 @@
|
||||
"compilerOptions": {
|
||||
"strict": true,
|
||||
"paths": {
|
||||
"@/*": ["./*"]
|
||||
"@/*": ["./*"],
|
||||
"@payload-config": ["./src/payload/payload.config.ts"]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
|
||||
5
types/mintel-mail.d.ts
vendored
Normal file
5
types/mintel-mail.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
declare module "@mintel/mail" {
|
||||
export const render: any;
|
||||
export const ContactFormNotification: any;
|
||||
export const ConfirmationMessage: any;
|
||||
}
|
||||
@@ -5,10 +5,21 @@ export default defineConfig({
|
||||
plugins: [react()],
|
||||
test: {
|
||||
environment: 'happy-dom',
|
||||
environmentOptions: {
|
||||
happyDOM: {
|
||||
settings: {
|
||||
disableIframePageLoading: true,
|
||||
disableJavaScriptFileLoading: true,
|
||||
disableCSSFileLoading: true,
|
||||
}
|
||||
}
|
||||
},
|
||||
globals: true,
|
||||
setupFiles: ['./tests/setup.tsx'],
|
||||
alias: {
|
||||
'next/server': 'next/server.js',
|
||||
'@payload-config': new URL('./tests/__mocks__/payload-config.ts', import.meta.url).pathname,
|
||||
'@': new URL('./', import.meta.url).pathname,
|
||||
},
|
||||
exclude: ['**/node_modules/**', '**/.next/**'],
|
||||
server: {
|
||||
|
||||
Reference in New Issue
Block a user