Compare commits
32 Commits
v2.0.2
...
v2.0.7-rc.
| Author | SHA1 | Date | |
|---|---|---|---|
| 2ba67af68a | |||
| b0f088a1dc | |||
| f358492a99 | |||
| 32576b5391 | |||
| 1e9cf7d9ab | |||
| f0f840ad5a | |||
| ca352fea3a | |||
| 323886443f | |||
| c5851370bf | |||
| 0186dd2dc9 | |||
| 82156d30f7 | |||
| 3dcde28071 | |||
| c4fca24eca | |||
| 2435b968cc | |||
| b6a1ebd236 | |||
| aa0c9cd9f5 | |||
| a3899f6cdd | |||
| a960a7b139 | |||
| 824ee3cb75 | |||
| 28633f187c | |||
| 51e0d86a6c | |||
| 923ff2071b | |||
| 30eb2e6e0e | |||
| dd830f9077 | |||
| ba16f1d7aa | |||
| 0842c136a6 | |||
| 36b8e64d69 | |||
| 4833af81f4 | |||
| 5f766589c4 | |||
| 56a7613e85 | |||
| c7c345eaad | |||
| ec99dc0317 |
@@ -5,8 +5,6 @@ node_modules
|
||||
.gitignore
|
||||
.gitea
|
||||
.github
|
||||
public/uploads
|
||||
directus/uploads
|
||||
.turbo
|
||||
reference/
|
||||
.next
|
||||
|
||||
@@ -3,6 +3,10 @@ name: CI - Lint, Typecheck & Test
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: deploy-pipeline
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
quality-assurance:
|
||||
runs-on: docker
|
||||
@@ -45,3 +49,4 @@ jobs:
|
||||
|
||||
- name: ♿ WCAG Sitemap Audit
|
||||
run: pnpm start-server-and-test start http://localhost:3000 "pnpm run check:wcag http://localhost:3000"
|
||||
# monitor trigger
|
||||
|
||||
@@ -15,6 +15,7 @@ on:
|
||||
|
||||
env:
|
||||
PUPPETEER_SKIP_DOWNLOAD: "true"
|
||||
COREPACK_NPM_REGISTRY: "https://registry.npmmirror.com"
|
||||
|
||||
concurrency:
|
||||
group: deploy-pipeline
|
||||
@@ -186,7 +187,7 @@ jobs:
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
build:
|
||||
name: 🏗️ Build
|
||||
needs: [prepare]
|
||||
needs: [prepare, qa]
|
||||
if: needs.prepare.outputs.target != 'skip'
|
||||
runs-on: docker
|
||||
container:
|
||||
@@ -211,7 +212,7 @@ jobs:
|
||||
UMAMI_WEBSITE_ID=${{ secrets.UMAMI_WEBSITE_ID || vars.UMAMI_WEBSITE_ID }}
|
||||
UMAMI_API_ENDPOINT=${{ secrets.UMAMI_API_ENDPOINT || vars.UMAMI_API_ENDPOINT || 'https://analytics.infra.mintel.me' }}
|
||||
NPM_TOKEN=${{ secrets.REGISTRY_PASS }}
|
||||
tags: registry.infra.mintel.me/mintel/klz-cables.com:${{ needs.prepare.outputs.image_tag }}
|
||||
tags: registry.infra.mintel.me/mintel/klz-2026:${{ needs.prepare.outputs.image_tag }}
|
||||
secrets: |
|
||||
"NPM_TOKEN=${{ secrets.REGISTRY_PASS }}"
|
||||
|
||||
@@ -357,6 +358,43 @@ jobs:
|
||||
ssh root@alpha.mintel.me "cd $SITE_DIR && docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' pull"
|
||||
ssh root@alpha.mintel.me "cd $SITE_DIR && docker compose -p '${{ needs.prepare.outputs.project_name }}' --env-file '$ENV_FILE' up -d --remove-orphans"
|
||||
|
||||
# Sanitize Payload Migrations: Replace 'dev' push entries with proper migration names.
|
||||
# Without this, Payload prompts interactively for confirmation and blocks forever in Docker.
|
||||
DB_CONTAINER="${{ needs.prepare.outputs.project_name }}-klz-db-1"
|
||||
echo "⏳ Waiting for database container to be ready..."
|
||||
for i in $(seq 1 15); do
|
||||
if ssh root@alpha.mintel.me "docker exec $DB_CONTAINER pg_isready -U payload -q 2>/dev/null"; then
|
||||
echo "✅ Database is ready."
|
||||
break
|
||||
fi
|
||||
echo " Attempt $i/15..."
|
||||
sleep 2
|
||||
done
|
||||
|
||||
echo "🔧 Sanitizing payload_migrations table (if exists)..."
|
||||
REMOTE_DB_USER=$(ssh root@alpha.mintel.me "grep -h '^PAYLOAD_DB_USER=' $SITE_DIR/.env* 2>/dev/null | tail -1 | cut -d= -f2" || echo "payload")
|
||||
REMOTE_DB_NAME=$(ssh root@alpha.mintel.me "grep -h '^PAYLOAD_DB_NAME=' $SITE_DIR/.env* 2>/dev/null | tail -1 | cut -d= -f2" || echo "payload")
|
||||
REMOTE_DB_USER="${REMOTE_DB_USER:-payload}"
|
||||
REMOTE_DB_NAME="${REMOTE_DB_NAME:-payload}"
|
||||
ssh root@alpha.mintel.me "docker exec $DB_CONTAINER psql -U $REMOTE_DB_USER -d $REMOTE_DB_NAME -c \"
|
||||
DO \\\$\\\$ BEGIN
|
||||
DELETE FROM payload_migrations WHERE batch = -1;
|
||||
INSERT INTO payload_migrations (name, batch)
|
||||
SELECT name, batch FROM (VALUES
|
||||
('20260223_195005_products_collection', 1),
|
||||
('20260223_195151_remove_sku_unique', 2),
|
||||
('20260225_003500_add_pages_collection', 3)
|
||||
) AS v(name, batch)
|
||||
WHERE NOT EXISTS (SELECT 1 FROM payload_migrations pm WHERE pm.name = v.name);
|
||||
EXCEPTION WHEN undefined_table THEN
|
||||
RAISE NOTICE 'payload_migrations table does not exist yet — skipping sanitization';
|
||||
END \\\$\\\$;
|
||||
\"" || echo "⚠️ Migration sanitization skipped (table may not exist yet)"
|
||||
|
||||
# Restart app to pick up clean migration state
|
||||
APP_CONTAINER="${{ needs.prepare.outputs.project_name }}-klz-app-1"
|
||||
ssh root@alpha.mintel.me "docker restart $APP_CONTAINER"
|
||||
|
||||
ssh root@alpha.mintel.me "docker system prune -f --filter 'until=24h'"
|
||||
|
||||
- name: 🧹 Post-Deploy Cleanup (Runner)
|
||||
@@ -364,12 +402,11 @@ jobs:
|
||||
run: docker builder prune -f --filter "until=1h"
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 5: Smoke Test (OG Images)
|
||||
# JOB 5: Post-Deploy Verification (Smoke Tests + Quality Gates)
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
smoke_test:
|
||||
name: 🧪 Smoke Test
|
||||
post_deploy_checks:
|
||||
name: 🧪 Post-Deploy Verification
|
||||
needs: [prepare, deploy]
|
||||
continue-on-error: true
|
||||
if: needs.deploy.result == 'success' && needs.prepare.outputs.target != 'branch'
|
||||
runs-on: docker
|
||||
container:
|
||||
@@ -391,19 +428,52 @@ jobs:
|
||||
echo "//${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}/:_authToken=${{ secrets.REGISTRY_PASS }}" >> .npmrc
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: 🚀 Run OG Image Check
|
||||
|
||||
# ── Critical Smoke Tests (MUST pass) ──────────────────────────────────
|
||||
- name: 🚀 OG Image Check
|
||||
env:
|
||||
TEST_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
run: pnpm run check:og
|
||||
- name: 🌐 Full Sitemap HTTP Validation
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm run check:http
|
||||
|
||||
# ── Quality Gates (informational, don't block pipeline) ───────────────
|
||||
- name: 🌐 HTML DOM Validation
|
||||
continue-on-error: true
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:html
|
||||
- name: 🔒 Security Headers Scan
|
||||
continue-on-error: true
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:security
|
||||
- name: 🔗 Lychee Deep Link Crawl
|
||||
continue-on-error: true
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:links
|
||||
- name: 🖼️ Dynamic Asset & Image Integrity Scan
|
||||
continue-on-error: true
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:assets
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 6: Lighthouse (Performance & Accessibility)
|
||||
# JOB 6: Performance & Accessibility (Lighthouse + WCAG)
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
lighthouse:
|
||||
name: ⚡ Lighthouse
|
||||
needs: [prepare, deploy]
|
||||
performance:
|
||||
name: ⚡ Performance & Accessibility
|
||||
needs: [prepare, post_deploy_checks]
|
||||
continue-on-error: true
|
||||
if: success() && needs.prepare.outputs.target != 'skip' && needs.prepare.outputs.target != 'branch'
|
||||
if: needs.post_deploy_checks.result == 'success' && needs.prepare.outputs.target != 'branch'
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
@@ -418,7 +488,6 @@ jobs:
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: 🔐 Registry Auth
|
||||
run: |
|
||||
echo "@mintel:registry=https://${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}" > .npmrc
|
||||
@@ -459,78 +528,14 @@ jobs:
|
||||
# Standardize binary paths
|
||||
[ -f /usr/bin/chromium ] && ln -sf /usr/bin/chromium /usr/bin/google-chrome
|
||||
[ -f /usr/bin/chromium ] && ln -sf /usr/bin/chromium /usr/bin/chromium-browser
|
||||
- name: ⚡ Run Lighthouse CI
|
||||
- name: ⚡ Lighthouse CI
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
CHROME_PATH: /usr/bin/chromium
|
||||
PAGESPEED_LIMIT: 8
|
||||
run: pnpm run pagespeed:test
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 7: WCAG Audit
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
wcag:
|
||||
name: ♿ WCAG
|
||||
needs: [prepare, deploy, smoke_test]
|
||||
continue-on-error: true
|
||||
if: success() && needs.prepare.outputs.target != 'skip' && needs.prepare.outputs.target != 'branch'
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v3
|
||||
with:
|
||||
version: 10
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: 🔐 Registry Auth
|
||||
run: |
|
||||
echo "@mintel:registry=https://${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}" > .npmrc
|
||||
echo "//${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}/:_authToken=${{ secrets.REGISTRY_PASS }}" >> .npmrc
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: 🔍 Install Chromium (Native & ARM64)
|
||||
run: |
|
||||
rm -f /etc/apt/apt.conf.d/docker-clean
|
||||
apt-get update
|
||||
apt-get install -y gnupg wget ca-certificates
|
||||
|
||||
# Detect OS
|
||||
OS_ID=$(. /etc/os-release && echo $ID)
|
||||
CODENAME=$(. /etc/os-release && echo $VERSION_CODENAME)
|
||||
|
||||
if [ "$OS_ID" = "debian" ]; then
|
||||
echo "🎯 Debian detected - installing native chromium"
|
||||
apt-get install -y chromium
|
||||
else
|
||||
echo "🎯 Ubuntu detected - adding xtradeb PPA"
|
||||
mkdir -p /etc/apt/keyrings
|
||||
KEY_ID="82BB6851C64F6880"
|
||||
|
||||
# Fetch PPA key
|
||||
wget -qO- "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x$KEY_ID" | gpg --dearmor > /etc/apt/keyrings/xtradeb.gpg
|
||||
|
||||
# Add PPA repository
|
||||
echo "deb [signed-by=/etc/apt/keyrings/xtradeb.gpg] http://ppa.launchpad.net/xtradeb/apps/ubuntu $CODENAME main" > /etc/apt/sources.list.d/xtradeb-ppa.list
|
||||
|
||||
# PRIORITY PINNING: Force PPA over Snap-dummy
|
||||
printf "Package: *\nPin: release o=LP-PPA-xtradeb-apps\nPin-Priority: 1001\n" > /etc/apt/preferences.d/xtradeb
|
||||
|
||||
apt-get update
|
||||
apt-get install -y --allow-downgrades chromium
|
||||
fi
|
||||
|
||||
# Standardize binary paths
|
||||
[ -f /usr/bin/chromium ] && ln -sf /usr/bin/chromium /usr/bin/google-chrome
|
||||
[ -f /usr/bin/chromium ] && ln -sf /usr/bin/chromium /usr/bin/chromium-browser
|
||||
- name: ♿ Run WCAG Audit
|
||||
- name: ♿ WCAG Audit
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
@@ -539,76 +544,24 @@ jobs:
|
||||
run: pnpm run check:wcag
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 9: Quality Assertions
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
quality_assertions:
|
||||
name: 🛡️ Quality Gates
|
||||
needs: [prepare, deploy, smoke_test]
|
||||
continue-on-error: true
|
||||
if: success() && needs.prepare.outputs.target != 'skip' && needs.prepare.outputs.target != 'branch'
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v3
|
||||
with:
|
||||
version: 10
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: 🔐 Registry Auth
|
||||
run: |
|
||||
echo "@mintel:registry=https://${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}" > .npmrc
|
||||
echo "//${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}/:_authToken=${{ secrets.REGISTRY_PASS }}" >> .npmrc
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: 🌐 HTML DOM Validation
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:html
|
||||
- name: 🔒 Security Headers Scan
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:security
|
||||
- name: 🔗 Lychee Deep Link Crawl
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:links
|
||||
- name: 🖼️ Dynamic Asset & Image Integrity Scan
|
||||
env:
|
||||
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
|
||||
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
|
||||
run: pnpm check:assets
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 10: Notifications
|
||||
# JOB 7: Notifications
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
notifications:
|
||||
name: 🔔 Notify
|
||||
needs: [prepare, deploy, smoke_test, lighthouse, wcag, quality_assertions]
|
||||
needs: [prepare, deploy, post_deploy_checks, performance]
|
||||
if: always()
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: 🔔 Gotify
|
||||
run: |
|
||||
STATUS="${{ needs.deploy.result }}"
|
||||
TITLE="klz-cables.com: $STATUS"
|
||||
[[ "$STATUS" == "success" ]] && PRIORITY=5 || PRIORITY=8
|
||||
SMOKE="${{ needs.post_deploy_checks.result }}"
|
||||
TITLE="klz-cables.com: deploy=$STATUS smoke=$SMOKE"
|
||||
[[ "$STATUS" == "success" && "$SMOKE" == "success" ]] && PRIORITY=5 || PRIORITY=8
|
||||
|
||||
curl -s -k -X POST "${{ secrets.GOTIFY_URL }}/message?token=${{ secrets.GOTIFY_TOKEN }}" \
|
||||
-F "title=$TITLE" \
|
||||
-F "message=Deploy to ${{ needs.prepare.outputs.target }} finished with status $STATUS.\nVersion: ${{ needs.prepare.outputs.image_tag }}" \
|
||||
-F "message=Deploy to ${{ needs.prepare.outputs.target }} finished.\nDeploy: $STATUS | Smoke: $SMOKE\nVersion: ${{ needs.prepare.outputs.image_tag }}" \
|
||||
-F "priority=$PRIORITY" || true
|
||||
|
||||
@@ -59,6 +59,7 @@ export default async function ProductsPage({ params }: ProductsPageProps) {
|
||||
const solarSlug = await mapFileSlugToTranslated('solar-cables', locale);
|
||||
|
||||
const productsSlug = await mapFileSlugToTranslated('products', locale);
|
||||
const contactSlug = await mapFileSlugToTranslated('contact', locale);
|
||||
|
||||
const categories = [
|
||||
{
|
||||
@@ -230,10 +231,10 @@ export default async function ProductsPage({ params }: ProductsPageProps) {
|
||||
</p>
|
||||
</div>
|
||||
<Button
|
||||
href={`/${locale}/contact`}
|
||||
href={`/${locale}/${contactSlug}`}
|
||||
variant="accent"
|
||||
size="lg"
|
||||
className="group whitespace-nowrap w-full md:w-auto md:h-16 md:px-10 md:text-xl"
|
||||
className="group whitespace-nowrap w-full md:w-auto md:h-16 px-6 md:px-10 text-sm md:text-xl"
|
||||
>
|
||||
{t('cta.button')}
|
||||
<span className="ml-2 md:ml-4 transition-transform group-hover:translate-x-2">
|
||||
|
||||
@@ -40,7 +40,8 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const dsnUrl = new URL(realDsn);
|
||||
const projectId = dsnUrl.pathname.replace('/', '');
|
||||
const relayUrl = `${dsnUrl.protocol}//${dsnUrl.host}/api/${projectId}/envelope/`;
|
||||
const sentryKey = dsnUrl.username;
|
||||
const relayUrl = `${dsnUrl.protocol}//${dsnUrl.host}/api/${projectId}/envelope/?sentry_key=${sentryKey}`;
|
||||
|
||||
logger.debug('Relaying Sentry envelope', {
|
||||
projectId,
|
||||
@@ -57,22 +58,18 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
if (!process.env.CI) {
|
||||
logger.error('Sentry/GlitchTip API responded with error', {
|
||||
status: response.status,
|
||||
error: errorText.slice(0, 100),
|
||||
});
|
||||
}
|
||||
logger.error('Sentry/GlitchTip API responded with error', {
|
||||
status: response.status,
|
||||
error: errorText.slice(0, 100),
|
||||
});
|
||||
return new NextResponse(errorText, { status: response.status });
|
||||
}
|
||||
|
||||
return NextResponse.json({ status: 'ok' });
|
||||
} catch (error) {
|
||||
if (!process.env.CI) {
|
||||
logger.error('Failed to relay Sentry request', {
|
||||
error: (error as Error).message,
|
||||
});
|
||||
}
|
||||
logger.error('Failed to relay Sentry request', {
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return NextResponse.json({ error: 'Internal Server Error' }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,7 +78,13 @@
|
||||
"Kabel",
|
||||
"Deutsch",
|
||||
"Spannung",
|
||||
"unbekannt"
|
||||
"unbekannt",
|
||||
"payloadcms",
|
||||
"imgproxy",
|
||||
"Leitungen",
|
||||
"impressum",
|
||||
"datenschutz",
|
||||
"agbs"
|
||||
],
|
||||
"ignorePaths": [
|
||||
"node_modules",
|
||||
|
||||
@@ -12,6 +12,8 @@ services:
|
||||
environment:
|
||||
POSTGRES_URI: postgres://${PAYLOAD_DB_USER:-payload}:${PAYLOAD_DB_PASSWORD:-120in09oenaoinsd9iaidon}@klz-db:5432/${PAYLOAD_DB_NAME:-payload}
|
||||
PAYLOAD_SECRET: ${PAYLOAD_SECRET:-fallback-secret-for-production-needs-change}
|
||||
volumes:
|
||||
- klz_media_data:/app/public/media
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
# HTTP ⇒ HTTPS redirect
|
||||
@@ -26,8 +28,8 @@ services:
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}.service=${PROJECT_NAME:-klz}-app-svc"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}.middlewares=${AUTH_MIDDLEWARE:-klz-ratelimit,klz-forward,klz-compress}"
|
||||
|
||||
# Public Router (Whitelist)
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}-public.rule=(${TRAEFIK_HOST_RULE:-Host(`${TRAEFIK_HOST:-klz-cables.com}`)}) && (PathPrefix(`/health`) || PathPrefix(`/sitemap.xml`) || PathPrefix(`/robots.txt`) || PathPrefix(`/manifest.webmanifest`) || PathPrefix(`/api/og`) || PathRegexp(`.*opengraph-image.*`) || PathRegexp(`^/sitemap(-[0-9]+)?\\.xml$`))"
|
||||
# Public Router – paths that bypass Gatekeeper auth (health, SEO, static assets, OG images)
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}-public.rule=(${TRAEFIK_HOST_RULE:-Host(`${TRAEFIK_HOST:-klz-cables.com}`)}) && PathRegexp(`^/(health|uploads|media|robots\\.txt|manifest\\.webmanifest|sitemap(-[0-9]+)?\\.xml|(.*/)?api/og(/.*)?|(.*/)?opengraph-image.*)`)"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}-public.entrypoints=${TRAEFIK_ENTRYPOINT:-web}"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}-public.tls.certresolver=${TRAEFIK_CERT_RESOLVER:-}"
|
||||
- "traefik.http.routers.${PROJECT_NAME:-klz}-public.tls=${TRAEFIK_TLS:-false}"
|
||||
@@ -59,6 +61,9 @@ services:
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.${PROJECT_NAME:-klz}-gatekeeper-svc.loadbalancer.server.port=3000"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-klz}-auth.forwardauth.address=http://${PROJECT_NAME:-klz}-gatekeeper:3000/gatekeeper/api/verify"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-klz}-auth.forwardauth.trustForwardHeader=true"
|
||||
- "traefik.http.middlewares.${PROJECT_NAME:-klz}-auth.forwardauth.authResponseHeaders=X-Auth-User"
|
||||
- "traefik.docker.network=infra"
|
||||
|
||||
klz-db:
|
||||
@@ -74,8 +79,6 @@ services:
|
||||
- klz_db_data:/var/lib/postgresql/data
|
||||
networks:
|
||||
- default
|
||||
ports:
|
||||
- "54322:5432"
|
||||
|
||||
networks:
|
||||
default:
|
||||
@@ -86,3 +89,5 @@ networks:
|
||||
volumes:
|
||||
klz_db_data:
|
||||
external: false
|
||||
klz_media_data:
|
||||
external: false
|
||||
|
||||
78
lib/mdx.ts
78
lib/mdx.ts
@@ -193,22 +193,28 @@ export async function getAllProducts(locale: string): Promise<ProductMdx[]> {
|
||||
.filter(Boolean);
|
||||
return resolvedImages.length > 0;
|
||||
})
|
||||
.map((doc) => ({
|
||||
slug: doc.slug,
|
||||
frontmatter: {
|
||||
title: doc.title,
|
||||
sku: doc.sku || '',
|
||||
description: doc.description || '',
|
||||
categories: Array.isArray(doc.categories)
|
||||
? doc.categories.map((c: any) => c.category)
|
||||
: [],
|
||||
images: ((doc.images as any[]) || [])
|
||||
.map((img) => (typeof img === 'string' ? img : img.url))
|
||||
.filter(Boolean),
|
||||
locale: doc.locale,
|
||||
},
|
||||
content: null,
|
||||
}));
|
||||
.map((doc) => {
|
||||
const resolvedImages = ((doc.images as any[]) || [])
|
||||
.map((img) => (typeof img === 'string' ? img : img.url))
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
const plainCategories = Array.isArray(doc.categories)
|
||||
? doc.categories.map((c: any) => String(c.category))
|
||||
: [];
|
||||
|
||||
return {
|
||||
slug: String(doc.slug),
|
||||
frontmatter: {
|
||||
title: String(doc.title),
|
||||
sku: doc.sku ? String(doc.sku) : '',
|
||||
description: doc.description ? String(doc.description) : '',
|
||||
categories: plainCategories,
|
||||
images: resolvedImages,
|
||||
locale: String(doc.locale),
|
||||
},
|
||||
content: null,
|
||||
};
|
||||
});
|
||||
|
||||
// Also include English fallbacks for slugs not in this locale
|
||||
if (locale !== 'en') {
|
||||
@@ -229,23 +235,29 @@ export async function getAllProducts(locale: string): Promise<ProductMdx[]> {
|
||||
.filter(Boolean);
|
||||
return resolvedImages.length > 0;
|
||||
})
|
||||
.map((doc) => ({
|
||||
slug: doc.slug,
|
||||
frontmatter: {
|
||||
title: doc.title,
|
||||
sku: doc.sku || '',
|
||||
description: doc.description || '',
|
||||
categories: Array.isArray(doc.categories)
|
||||
? doc.categories.map((c: any) => c.category)
|
||||
: [],
|
||||
images: ((doc.images as any[]) || [])
|
||||
.map((img) => (typeof img === 'string' ? img : img.url))
|
||||
.filter(Boolean),
|
||||
locale: doc.locale,
|
||||
isFallback: true,
|
||||
},
|
||||
content: null,
|
||||
}));
|
||||
.map((doc) => {
|
||||
const resolvedImages = ((doc.images as any[]) || [])
|
||||
.map((img) => (typeof img === 'string' ? img : img.url))
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
const plainCategories = Array.isArray(doc.categories)
|
||||
? doc.categories.map((c: any) => String(c.category))
|
||||
: [];
|
||||
|
||||
return {
|
||||
slug: String(doc.slug),
|
||||
frontmatter: {
|
||||
title: String(doc.title),
|
||||
sku: doc.sku ? String(doc.sku) : '',
|
||||
description: doc.description ? String(doc.description) : '',
|
||||
categories: plainCategories,
|
||||
images: resolvedImages,
|
||||
locale: String(doc.locale),
|
||||
isFallback: true,
|
||||
},
|
||||
content: null,
|
||||
};
|
||||
});
|
||||
|
||||
products = [...products, ...fallbacks];
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ export default async function middleware(request: NextRequest) {
|
||||
pathname.startsWith('/stats') ||
|
||||
pathname.startsWith('/errors') ||
|
||||
pathname.startsWith('/health') ||
|
||||
pathname.startsWith('/uploads') ||
|
||||
pathname.includes('/api/og') ||
|
||||
pathname.includes('opengraph-image') ||
|
||||
pathname.endsWith('sitemap.xml') ||
|
||||
|
||||
2
next-env.d.ts
vendored
2
next-env.d.ts
vendored
@@ -1,6 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
import "./.next/types/routes.d.ts";
|
||||
import "./.next/dev/types/routes.d.ts";
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
||||
@@ -424,6 +424,10 @@ const nextConfig = {
|
||||
source: '/de/produkte/:path*',
|
||||
destination: '/de/products/:path*',
|
||||
},
|
||||
{
|
||||
source: '/de/kontakt',
|
||||
destination: '/de/contact',
|
||||
},
|
||||
];
|
||||
},
|
||||
};
|
||||
|
||||
18
package.json
18
package.json
@@ -105,6 +105,7 @@
|
||||
"check:a11y": "pa11y-ci",
|
||||
"check:wcag": "tsx ./scripts/wcag-sitemap.ts",
|
||||
"check:html": "tsx ./scripts/check-html.ts",
|
||||
"check:http": "tsx ./scripts/check-http.ts",
|
||||
"check:spell": "cspell \"content/**/*.{md,mdx}\" \"app/**/*.tsx\" \"components/**/*.tsx\"",
|
||||
"check:security": "tsx ./scripts/check-security.ts",
|
||||
"check:links": "bash ./scripts/check-links.sh",
|
||||
@@ -116,22 +117,17 @@
|
||||
"cms:bootstrap": "pnpm run cms:branding:local",
|
||||
"pdf:datasheets": "tsx ./scripts/generate-pdf-datasheets.ts",
|
||||
"pdf:datasheets:legacy": "tsx ./scripts/generate-pdf-datasheets-pdf-lib.ts",
|
||||
"cms:schema:snapshot": "./scripts/cms-snapshot.sh",
|
||||
"cms:schema:apply": "./scripts/cms-apply.sh local",
|
||||
"cms:schema:apply:testing": "./scripts/cms-apply.sh testing",
|
||||
"cms:schema:apply:staging": "./scripts/cms-apply.sh staging",
|
||||
"cms:schema:apply:prod": "./scripts/cms-apply.sh production",
|
||||
"cms:pull:testing": "./scripts/sync-directus.sh pull testing",
|
||||
"cms:pull:staging": "./scripts/sync-directus.sh pull staging",
|
||||
"cms:pull:prod": "./scripts/sync-directus.sh pull production",
|
||||
"cms:push:staging:DANGER": "./scripts/sync-directus.sh push staging",
|
||||
"cms:push:testing:DANGER": "./scripts/sync-directus.sh push testing",
|
||||
"cms:push:prod:DANGER": "./scripts/sync-directus.sh push production",
|
||||
"cms:migrate": "payload migrate",
|
||||
"cms:seed": "tsx ./scripts/seed-payload.ts",
|
||||
"pagespeed:test": "tsx ./scripts/pagespeed-sitemap.ts",
|
||||
"pagespeed:audit": "./scripts/audit-local.sh",
|
||||
"pagespeed:urls": "tsx -e \"import sitemap from './app/sitemap'; sitemap().then(urls => console.log(urls.map(u => u.url).join('\\n')))\"",
|
||||
"backup:db": "bash ./scripts/backup-db.sh",
|
||||
"restore:db": "bash ./scripts/restore-db.sh",
|
||||
"cms:push:testing": "bash ./scripts/cms-sync.sh push testing",
|
||||
"cms:push:prod": "bash ./scripts/cms-sync.sh push prod",
|
||||
"cms:pull:testing": "bash ./scripts/cms-sync.sh pull testing",
|
||||
"cms:pull:prod": "bash ./scripts/cms-sync.sh pull prod",
|
||||
"prepare": "husky",
|
||||
"preinstall": "npx only-allow pnpm"
|
||||
},
|
||||
|
||||
@@ -7,6 +7,7 @@ import { fileURLToPath } from 'url';
|
||||
import { nodemailerAdapter } from '@payloadcms/email-nodemailer';
|
||||
import { BlocksFeature } from '@payloadcms/richtext-lexical';
|
||||
import { payloadBlocks } from './src/payload/blocks/allBlocks';
|
||||
import { migrations } from './src/migrations';
|
||||
|
||||
// Only disable sharp cache in production to prevent memory leaks.
|
||||
// In dev, the cache avoids 41s+ re-processing per image through VirtioFS.
|
||||
@@ -20,11 +21,17 @@ import { Posts } from './src/payload/collections/Posts';
|
||||
import { FormSubmissions } from './src/payload/collections/FormSubmissions';
|
||||
import { Products } from './src/payload/collections/Products';
|
||||
import { Pages } from './src/payload/collections/Pages';
|
||||
import { seedDatabase } from './src/payload/seed';
|
||||
|
||||
const filename = fileURLToPath(import.meta.url);
|
||||
const dirname = path.dirname(filename);
|
||||
|
||||
export default buildConfig({
|
||||
onInit: async (payload) => {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
await seedDatabase(payload);
|
||||
}
|
||||
},
|
||||
admin: {
|
||||
user: Users.slug,
|
||||
importMap: {
|
||||
@@ -45,6 +52,7 @@ export default buildConfig({
|
||||
outputFile: path.resolve(dirname, 'payload-types.ts'),
|
||||
},
|
||||
db: postgresAdapter({
|
||||
prodMigrations: migrations,
|
||||
pool: {
|
||||
connectionString:
|
||||
process.env.DATABASE_URI ||
|
||||
|
||||
74
scripts/check-http.ts
Normal file
74
scripts/check-http.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import axios from 'axios';
|
||||
import * as cheerio from 'cheerio';
|
||||
|
||||
const targetUrl = process.argv[2] || process.env.NEXT_PUBLIC_BASE_URL || 'http://localhost:3000';
|
||||
const gatekeeperPassword = process.env.GATEKEEPER_PASSWORD || 'klz2026';
|
||||
|
||||
async function main() {
|
||||
console.log(`\n🚀 Starting HTTP Sitemap Validation for: ${targetUrl}\n`);
|
||||
|
||||
try {
|
||||
const sitemapUrl = `${targetUrl.replace(/\/$/, '')}/sitemap.xml`;
|
||||
console.log(`📥 Fetching sitemap from ${sitemapUrl}...`);
|
||||
|
||||
const response = await axios.get(sitemapUrl, {
|
||||
headers: { Cookie: `klz_gatekeeper_session=${gatekeeperPassword}` },
|
||||
validateStatus: (status) => status < 400,
|
||||
});
|
||||
|
||||
const $ = cheerio.load(response.data, { xmlMode: true });
|
||||
let urls = $('url loc')
|
||||
.map((i, el) => $(el).text())
|
||||
.get();
|
||||
|
||||
const urlPattern = /https?:\/\/[^\/]+/;
|
||||
urls = [...new Set(urls)]
|
||||
.filter((u) => u.startsWith('http'))
|
||||
.map((u) => u.replace(urlPattern, targetUrl.replace(/\/$/, '')))
|
||||
.sort();
|
||||
|
||||
console.log(`✅ Found ${urls.length} target URLs in sitemap.`);
|
||||
|
||||
if (urls.length === 0) {
|
||||
console.error('❌ No URLs found in sitemap. Is the site up?');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`\n🔍 Verifying HTTP Status Codes (Limit: None)...`);
|
||||
let hasErrors = false;
|
||||
|
||||
// Run fetches sequentially to avoid overwhelming the server during CI
|
||||
for (let i = 0; i < urls.length; i++) {
|
||||
const u = urls[i];
|
||||
try {
|
||||
const res = await axios.get(u, {
|
||||
headers: { Cookie: `klz_gatekeeper_session=${gatekeeperPassword}` },
|
||||
validateStatus: null, // Don't throw on error status
|
||||
});
|
||||
|
||||
if (res.status >= 400) {
|
||||
console.error(`❌ ERROR ${res.status}: ${res.statusText} -> ${u}`);
|
||||
hasErrors = true;
|
||||
} else {
|
||||
console.log(`✅ OK ${res.status} -> ${u}`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error(`❌ NETWORK ERROR: ${err.message} -> ${u}`);
|
||||
hasErrors = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasErrors) {
|
||||
console.error(`\n❌ HTTP Sitemap Validation Failed. One or more pages returned an error.`);
|
||||
process.exit(1);
|
||||
} else {
|
||||
console.log(`\n✨ Success: All ${urls.length} pages are healthy! (HTTP 200)`);
|
||||
process.exit(0);
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.error(`\n❌ Critical Error during Sitemap Fetch:`, error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -8,7 +8,7 @@ const routes = [
|
||||
'/de/opengraph-image',
|
||||
'/en/opengraph-image',
|
||||
'/de/blog/opengraph-image',
|
||||
'/de/api/og/product?slug=nay2y',
|
||||
'/de/api/og/product?slug=low-voltage-cables',
|
||||
'/en/api/og/product?slug=medium-voltage-cables',
|
||||
];
|
||||
|
||||
|
||||
246
scripts/cms-sync.sh
Executable file
246
scripts/cms-sync.sh
Executable file
@@ -0,0 +1,246 @@
|
||||
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────
# CMS Data Sync Tool
# Moves Payload CMS data (Postgres DB + uploaded media) between environments.
#
# Invoked through the pnpm wrappers:
#   cms:push:testing  – Push local → testing
#   cms:push:prod     – Push local → production
#   cms:pull:testing  – Pull testing → local
#   cms:pull:prod     – Pull production → local
# ────────────────────────────────────────────────────────────────────────────
set -euo pipefail

# Export any local overrides (DB user/name, …) from .env into the environment.
if [ -f .env ]; then
  set -a; source .env; set +a
fi

# ── Configuration ──────────────────────────────────────────────────────────
DIRECTION="${1:-}" # push | pull
TARGET="${2:-}"    # testing | prod
SSH_HOST="root@alpha.mintel.me"
LOCAL_DB_USER="${PAYLOAD_DB_USER:-payload}"
LOCAL_DB_NAME="${PAYLOAD_DB_NAME:-payload}"
LOCAL_DB_CONTAINER="klz-2026-klz-db-1"
LOCAL_MEDIA_DIR="./public/media"
BACKUP_DIR="./backups"
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")

# Remote DB credentials — populated by resolve_target from the server's
# env files (fall back to "payload" when not found).
REMOTE_DB_USER=""
REMOTE_DB_NAME=""

# "name:batch" pairs re-inserted into payload_migrations after a restore so
# Payload does not prompt to re-run migrations that are already applied.
MIGRATIONS=(
  "20260223_195005_products_collection:1"
  "20260223_195151_remove_sku_unique:2"
  "20260225_003500_add_pages_collection:3"
)
|
||||
|
||||
# ── Resolve target environment ─────────────────────────────────────────────
|
||||
# ── Resolve target environment ─────────────────────────────────────────────
# Maps $TARGET (testing | prod) onto the remote project, container, media
# volume and site directory, then auto-detects the remote DB credentials
# from the env files on the server. Exits non-zero on an unknown target.
resolve_target() {
  case "$TARGET" in
    testing)
      REMOTE_PROJECT="klz-testing"
      REMOTE_DB_CONTAINER="klz-testing-klz-db-1"
      REMOTE_APP_CONTAINER="klz-testing-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-testing_klz_media_data/_data"
      REMOTE_SITE_DIR="/home/deploy/sites/testing.klz-cables.com"
      ;;
    prod|production)
      REMOTE_PROJECT="klz-cablescom"
      REMOTE_DB_CONTAINER="klz-cablescom-klz-db-1"
      REMOTE_APP_CONTAINER="klz-cablescom-klz-app-1"
      REMOTE_MEDIA_VOLUME="/var/lib/docker/volumes/klz-cablescom_klz_media_data/_data"
      REMOTE_SITE_DIR="/home/deploy/sites/klz-cables.com"
      ;;
    *)
      echo "❌ Unknown target: $TARGET"
      echo "   Valid targets: testing, prod"
      exit 1
      ;;
  esac

  # Auto-detect remote DB credentials from the env file on the server.
  # cut -d= -f2- (open-ended range) instead of -f2, so values that themselves
  # contain '=' are not silently truncated at the first '='.
  echo "🔍 Detecting $TARGET database credentials..."
  REMOTE_DB_USER=$(ssh "$SSH_HOST" "grep -h '^PAYLOAD_DB_USER=' $REMOTE_SITE_DIR/.env* 2>/dev/null | tail -1 | cut -d= -f2-" || echo "")
  REMOTE_DB_NAME=$(ssh "$SSH_HOST" "grep -h '^PAYLOAD_DB_NAME=' $REMOTE_SITE_DIR/.env* 2>/dev/null | tail -1 | cut -d= -f2-" || echo "")
  REMOTE_DB_USER="${REMOTE_DB_USER:-payload}"
  REMOTE_DB_NAME="${REMOTE_DB_NAME:-payload}"
  echo "   User: $REMOTE_DB_USER | DB: $REMOTE_DB_NAME"
}
|
||||
|
||||
# ── Ensure local DB is running ─────────────────────────────────────────────
|
||||
# ── Ensure local DB is running ─────────────────────────────────────────────
# No-op when the local Postgres container is already up; otherwise starts it
# via docker compose and polls pg_isready for up to 10 seconds.
ensure_local_db() {
  # Guard clause: nothing to do when the container is already listed.
  if docker ps --format '{{.Names}}' | grep -q "$LOCAL_DB_CONTAINER"; then
    return
  fi

  echo "⏳ Local DB container not running. Starting..."
  docker compose up -d klz-db
  echo "⏳ Waiting for local DB to be ready..."
  for attempt in {1..10}; do
    if docker exec "$LOCAL_DB_CONTAINER" pg_isready -U "$LOCAL_DB_USER" -q 2>/dev/null; then
      echo "✅ Local DB is ready."
      return
    fi
    sleep 1
  done
  echo "❌ Local DB failed to start."
  exit 1
}
|
||||
|
||||
# ── Sanitize migrations table ──────────────────────────────────────────────
|
||||
# ── Sanitize migrations table ──────────────────────────────────────────────
# Rewrites payload_migrations after a restore: removes rows with batch = -1
# (presumably transient/in-progress rows — verify against Payload's migration
# semantics) and inserts each known "name:batch" pair from $MIGRATIONS unless
# it already exists, so Payload does not prompt to re-run applied migrations.
#
# Args:
#   $1 container  – Docker container name running Postgres
#   $2 db_user    – psql user
#   $3 db_name    – psql database name
#   $4 is_remote  – "true" to execute via ssh on $SSH_HOST, "false" locally
sanitize_migrations() {
  local container="$1"
  local db_user="$2"
  local db_name="$3"
  local is_remote="$4" # "true" or "false"

  echo "🔧 Sanitizing payload_migrations table..."
  local SQL="DELETE FROM payload_migrations WHERE batch = -1;"
  for entry in "${MIGRATIONS[@]}"; do
    # Split "name:batch" on the colon (prefix/suffix parameter expansion).
    local name="${entry%%:*}"
    local batch="${entry##*:}"
    # INSERT … WHERE NOT EXISTS keeps the whole statement idempotent.
    SQL="$SQL INSERT INTO payload_migrations (name, batch) SELECT '$name', $batch WHERE NOT EXISTS (SELECT 1 FROM payload_migrations WHERE name = '$name');"
  done

  if [ "$is_remote" = "true" ]; then
    # Escaped inner quotes keep $SQL a single argument to psql -c after the
    # ssh command string is re-parsed by the remote shell.
    ssh "$SSH_HOST" "docker exec $container psql -U $db_user -d $db_name -c \"$SQL\""
  else
    docker exec "$container" psql -U "$db_user" -d "$db_name" -c "$SQL"
  fi
}
|
||||
|
||||
# ── Safety: Create backup before overwriting ───────────────────────────────
|
||||
# ── Safety: Create backup before overwriting ───────────────────────────────
# Dumps the local DB to a timestamped .sql.gz under $BACKUP_DIR and reports
# the resulting file size.
backup_local_db() {
  local file="$BACKUP_DIR/payload_pre_sync_${TIMESTAMP}.sql.gz"
  mkdir -p "$BACKUP_DIR"
  echo "📦 Creating safety backup of local DB → $file"
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --clean --if-exists | gzip > "$file"
  echo "✅ Backup: $file ($(du -h "$file" | cut -f1))"
}
|
||||
|
||||
# Dumps the remote ($TARGET) DB to /tmp on the server before a push.
backup_remote_db() {
  local file="/tmp/payload_pre_sync_${TIMESTAMP}.sql.gz"
  echo "📦 Creating safety backup of $TARGET DB → $SSH_HOST:$file"
  # set -o pipefail on the remote side: otherwise ssh returns gzip's status
  # and a failed pg_dump yields a silently-empty "successful" backup.
  # (Assumes the remote login shell is bash — TODO confirm for $SSH_HOST.)
  ssh "$SSH_HOST" "set -o pipefail; docker exec $REMOTE_DB_CONTAINER pg_dump -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --clean --if-exists | gzip > $file"
  echo "✅ Remote backup: $file"
}
|
||||
|
||||
# ── PUSH: local → remote ──────────────────────────────────────────────────
|
||||
# ── PUSH: local → remote ──────────────────────────────────────────────────
# Overwrites the $TARGET database and media with the local state.
# Flow: confirm → backup remote → dump local → scp → restore →
#       sanitize migrations → rsync media → restart app → cleanup.
do_push() {
  echo ""
  echo "┌──────────────────────────────────────────────────┐"
  echo "│ 📤 PUSH: local → $TARGET "
  echo "│ This will OVERWRITE the $TARGET database! "
  echo "│ A safety backup will be created first. "
  echo "└──────────────────────────────────────────────────┘"
  echo ""
  # Single-keystroke confirmation; anything but y/Y aborts cleanly.
  read -p "Are you sure? (y/N) " -n 1 -r
  echo ""
  [[ ! $REPLY =~ ^[Yy]$ ]] && { echo "Cancelled."; exit 0; }

  # 0. Ensure local DB is running
  ensure_local_db

  # 1. Safety backup of remote
  backup_remote_db

  # 2. Dump local DB (--clean --if-exists so the restore drops/recreates
  #    objects instead of colliding with existing ones)
  echo "📤 Dumping local database..."
  local dump="/tmp/payload_push_${TIMESTAMP}.sql.gz"
  docker exec "$LOCAL_DB_CONTAINER" pg_dump -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --clean --if-exists | gzip > "$dump"

  # 3. Transfer and restore
  echo "📤 Transferring to $SSH_HOST..."
  scp "$dump" "$SSH_HOST:/tmp/payload_push.sql.gz"

  # NOTE(review): psql without -v ON_ERROR_STOP=1 keeps going past SQL
  # errors during restore — confirm this best-effort behavior is intended.
  echo "🔄 Restoring database on $TARGET..."
  ssh "$SSH_HOST" "gunzip -c /tmp/payload_push.sql.gz | docker exec -i $REMOTE_DB_CONTAINER psql -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --quiet"

  # 4. Sanitize migrations (keeps Payload from re-prompting after restore)
  sanitize_migrations "$REMOTE_DB_CONTAINER" "$REMOTE_DB_USER" "$REMOTE_DB_NAME" "true"

  # 5. Sync media (--delete makes the remote volume mirror the local dir)
  echo "🖼️ Syncing media files..."
  rsync -az --delete --progress "$LOCAL_MEDIA_DIR/" "$SSH_HOST:$REMOTE_MEDIA_VOLUME/"

  # 6. Restart app so it picks up the restored data
  echo "🔄 Restarting $TARGET app container..."
  ssh "$SSH_HOST" "docker restart $REMOTE_APP_CONTAINER"

  # Cleanup temp dumps on both ends
  rm -f "$dump"
  ssh "$SSH_HOST" "rm -f /tmp/payload_push.sql.gz"

  echo ""
  echo "✅ Push to $TARGET complete!"
}
|
||||
|
||||
# ── PULL: remote → local ──────────────────────────────────────────────────
|
||||
# ── PULL: remote → local ──────────────────────────────────────────────────
# Overwrites the local database and media with the $TARGET state.
# Flow: confirm → backup local → dump remote → scp → restore → rsync media
#       → cleanup. (Unlike push, no migration sanitize / app restart here.)
do_pull() {
  echo ""
  echo "┌──────────────────────────────────────────────────┐"
  echo "│ 📥 PULL: $TARGET → local "
  echo "│ This will OVERWRITE your local database! "
  echo "│ A safety backup will be created first. "
  echo "└──────────────────────────────────────────────────┘"
  echo ""
  # Single-keystroke confirmation; anything but y/Y aborts cleanly.
  read -p "Are you sure? (y/N) " -n 1 -r
  echo ""
  [[ ! $REPLY =~ ^[Yy]$ ]] && { echo "Cancelled."; exit 0; }

  # 0. Ensure local DB is running
  ensure_local_db

  # 1. Safety backup of local
  backup_local_db

  # 2. Dump remote DB (piped to gzip on the server before transfer)
  echo "📥 Dumping $TARGET database..."
  ssh "$SSH_HOST" "docker exec $REMOTE_DB_CONTAINER pg_dump -U $REMOTE_DB_USER -d $REMOTE_DB_NAME --clean --if-exists | gzip > /tmp/payload_pull.sql.gz"

  # 3. Transfer and restore
  echo "📥 Downloading from $SSH_HOST..."
  scp "$SSH_HOST:/tmp/payload_pull.sql.gz" "/tmp/payload_pull.sql.gz"

  # NOTE(review): psql without -v ON_ERROR_STOP=1 keeps going past SQL
  # errors during restore — confirm this best-effort behavior is intended.
  echo "🔄 Restoring database locally..."
  gunzip -c "/tmp/payload_pull.sql.gz" | docker exec -i "$LOCAL_DB_CONTAINER" psql -U "$LOCAL_DB_USER" -d "$LOCAL_DB_NAME" --quiet

  # 4. Sync media (--delete makes the local dir mirror the remote volume)
  echo "🖼️ Syncing media files..."
  mkdir -p "$LOCAL_MEDIA_DIR"
  rsync -az --delete --info=progress2 "$SSH_HOST:$REMOTE_MEDIA_VOLUME/" "$LOCAL_MEDIA_DIR/"

  # Cleanup temp dumps on both ends
  rm -f "/tmp/payload_pull.sql.gz"
  ssh "$SSH_HOST" "rm -f /tmp/payload_pull.sql.gz"

  echo ""
  echo "✅ Pull from $TARGET complete! Restart dev server to see changes."
}
|
||||
|
||||
# ── Main ───────────────────────────────────────────────────────────────────
|
||||
# ── Main ───────────────────────────────────────────────────────────────────
# Both positional arguments are required; otherwise print usage and exit 1.
if [[ -z "$DIRECTION" || -z "$TARGET" ]]; then
  echo "📦 CMS Data Sync Tool"
  echo ""
  echo "Usage:"
  echo "  pnpm cms:push:testing   Push local DB + media → testing"
  echo "  pnpm cms:push:prod      Push local DB + media → production"
  echo "  pnpm cms:pull:testing   Pull testing DB + media → local"
  echo "  pnpm cms:pull:prod      Pull production DB + media → local"
  echo ""
  echo "Safety: A backup is always created before overwriting."
  exit 1
fi

# Resolve remote names/credentials first; both directions need them.
resolve_target

# Dispatch on the requested direction.
case "$DIRECTION" in
  push) do_push ;;
  pull) do_pull ;;
  *)
    echo "❌ Unknown direction: $DIRECTION (use 'push' or 'pull')"
    exit 1
    ;;
esac
|
||||
5
src/migrations/20260225_003500_add_pages_collection.json
Normal file
5
src/migrations/20260225_003500_add_pages_collection.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"id": "20260225_003500_add_pages_collection",
|
||||
"name": "20260225_003500_add_pages_collection",
|
||||
"batch": 3
|
||||
}
|
||||
48
src/migrations/20260225_003500_add_pages_collection.ts
Normal file
48
src/migrations/20260225_003500_add_pages_collection.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres';
|
||||
|
||||
export async function up({ db }: MigrateUpArgs): Promise<void> {
|
||||
await db.execute(sql`
|
||||
DO $$ BEGIN
|
||||
CREATE TYPE "public"."enum_pages_locale" AS ENUM('en', 'de');
|
||||
EXCEPTION
|
||||
WHEN duplicate_object THEN null;
|
||||
END $$;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "pages" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"title" varchar NOT NULL,
|
||||
"slug" varchar NOT NULL,
|
||||
"locale" "enum_pages_locale" NOT NULL,
|
||||
"excerpt" varchar,
|
||||
"featured_image_id" integer,
|
||||
"content" jsonb NOT NULL,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
|
||||
ALTER TABLE "pages" ADD CONSTRAINT "pages_featured_image_id_media_id_fk"
|
||||
FOREIGN KEY ("featured_image_id") REFERENCES "public"."media"("id")
|
||||
ON DELETE set null ON UPDATE no action;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS "pages_featured_image_idx" ON "pages" USING btree ("featured_image_id");
|
||||
CREATE INDEX IF NOT EXISTS "pages_updated_at_idx" ON "pages" USING btree ("updated_at");
|
||||
CREATE INDEX IF NOT EXISTS "pages_created_at_idx" ON "pages" USING btree ("created_at");
|
||||
|
||||
-- Add pages_id to payload_locked_documents_rels if not already present
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD COLUMN IF NOT EXISTS "pages_id" integer;
|
||||
ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_pages_fk"
|
||||
FOREIGN KEY ("pages_id") REFERENCES "public"."pages"("id")
|
||||
ON DELETE cascade ON UPDATE no action;
|
||||
CREATE INDEX IF NOT EXISTS "payload_locked_documents_rels_pages_id_idx"
|
||||
ON "payload_locked_documents_rels" USING btree ("pages_id");
|
||||
`);
|
||||
}
|
||||
|
||||
export async function down({ db }: MigrateDownArgs): Promise<void> {
|
||||
await db.execute(sql`
|
||||
ALTER TABLE "payload_locked_documents_rels" DROP CONSTRAINT IF EXISTS "payload_locked_documents_rels_pages_fk";
|
||||
ALTER TABLE "payload_locked_documents_rels" DROP COLUMN IF EXISTS "pages_id";
|
||||
DROP TABLE IF EXISTS "pages" CASCADE;
|
||||
DROP TYPE IF EXISTS "public"."enum_pages_locale";
|
||||
`);
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import * as migration_20260223_195005_products_collection from './20260223_195005_products_collection';
|
||||
import * as migration_20260223_195151_remove_sku_unique from './20260223_195151_remove_sku_unique';
|
||||
import * as migration_20260225_003500_add_pages_collection from './20260225_003500_add_pages_collection';
|
||||
|
||||
export const migrations = [
|
||||
{
|
||||
@@ -12,4 +13,9 @@ export const migrations = [
|
||||
down: migration_20260223_195151_remove_sku_unique.down,
|
||||
name: '20260223_195151_remove_sku_unique',
|
||||
},
|
||||
{
|
||||
up: migration_20260225_003500_add_pages_collection.up,
|
||||
down: migration_20260225_003500_add_pages_collection.down,
|
||||
name: '20260225_003500_add_pages_collection',
|
||||
},
|
||||
];
|
||||
|
||||
44
src/payload/seed.ts
Normal file
44
src/payload/seed.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { Payload } from 'payload';
|
||||
|
||||
export async function seedDatabase(payload: Payload) {
|
||||
// Check if any users exist
|
||||
const { totalDocs: totalUsers } = await payload.find({
|
||||
collection: 'users',
|
||||
limit: 1,
|
||||
});
|
||||
|
||||
if (totalUsers === 0) {
|
||||
payload.logger.info('👤 No users found. Creating default admin user...');
|
||||
await payload.create({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: 'admin@mintel.me',
|
||||
password: 'klz-admin-setup',
|
||||
},
|
||||
});
|
||||
payload.logger.info('✅ Default admin user created successfully.');
|
||||
}
|
||||
|
||||
// Check if any products exist
|
||||
const { totalDocs: totalProducts } = await payload.find({
|
||||
collection: 'products',
|
||||
limit: 1,
|
||||
});
|
||||
|
||||
if (totalProducts === 0) {
|
||||
payload.logger.info('📦 No products found. Creating smoke test product (NAY2Y)...');
|
||||
await payload.create({
|
||||
collection: 'products',
|
||||
data: {
|
||||
title: 'NAY2Y Smoke Test',
|
||||
sku: 'SMOKE-TEST-001',
|
||||
slug: 'nay2y',
|
||||
description: 'A dummy product for CI/CD smoke testing and OG image verification.',
|
||||
locale: 'de',
|
||||
categories: [{ category: 'Power Cables' }],
|
||||
_status: 'published',
|
||||
},
|
||||
});
|
||||
payload.logger.info('✅ Smoke test product created successfully.');
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user