Compare commits
16 Commits
v1.0.0-rc.
...
v1.0.0-rc.
| Author | SHA1 | Date | |
|---|---|---|---|
| 198944649a | |||
| 6aa741ab0a | |||
| f69952a5da | |||
| 81af9bf3dd | |||
| f1b617e967 | |||
| d6be9beebf | |||
| 0a797260e3 | |||
| 2a4cc76292 | |||
| f87eb27f41 | |||
| acd86099e5 | |||
| 5ab9791c72 | |||
| 8152ccd5df | |||
| 8eeb571c2d | |||
| b1854d5255 | |||
| 7f4f970a38 | |||
| e5908c757c |
6
.env
6
.env
@@ -28,4 +28,8 @@ DIRECTUS_ADMIN_EMAIL=marc@mintel.me
|
||||
DIRECTUS_ADMIN_PASSWORD=Tim300493.
|
||||
DIRECTUS_DB_NAME=directus
|
||||
DIRECTUS_DB_USER=directus
|
||||
DIRECTUS_DB_PASSWORD=directus
|
||||
# Local Development
|
||||
PROJECT_NAME=klz-cables
|
||||
TRAEFIK_HOST=klz.localhost
|
||||
DIRECTUS_HOST=cms.klz.localhost
|
||||
GATEKEEPER_PASSWORD=klz2026
|
||||
|
||||
@@ -10,6 +10,11 @@
|
||||
# ────────────────────────────────────────────────────────────────────────────
|
||||
NODE_ENV=development
|
||||
NEXT_PUBLIC_BASE_URL=http://localhost:3000
|
||||
# TARGET is used to differentiate between environments (testing, staging, production)
|
||||
# NEXT_PUBLIC_TARGET makes this information available to the frontend
|
||||
NEXT_PUBLIC_TARGET=development
|
||||
# TARGET is used server-side
|
||||
TARGET=development
|
||||
|
||||
# ────────────────────────────────────────────────────────────────────────────
|
||||
# Analytics (Umami)
|
||||
|
||||
@@ -14,8 +14,8 @@ on:
|
||||
default: 'false'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}
|
||||
cancel-in-progress: false
|
||||
group: ${{ github.workflow }}-${{ (github.ref_type == 'tag' && !contains(github.ref_name, '-')) && 'prod' || (github.ref_type == 'tag' && 'staging' || 'testing') }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
@@ -38,11 +38,38 @@ jobs:
|
||||
gotify_priority: ${{ steps.determine.outputs.gotify_priority }}
|
||||
short_sha: ${{ steps.determine.outputs.short_sha }}
|
||||
commit_msg: ${{ steps.determine.outputs.commit_msg }}
|
||||
gatekeeper_changed: ${{ steps.changes.outputs.gatekeeper_changed }}
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: 🧹 Maintenance (High Density Cleanup)
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Purging old build layers and dangling images..."
|
||||
docker image prune -f
|
||||
docker builder prune -f --filter "until=6h"
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-depth: 2
|
||||
|
||||
- name: 🔍 Check for Gatekeeper changes
|
||||
id: changes
|
||||
shell: bash
|
||||
run: |
|
||||
if git rev-parse HEAD~1 >/dev/null 2>&1; then
|
||||
if git diff --quiet HEAD~1 HEAD -- gatekeeper; then
|
||||
echo "gatekeeper_changed=false" >> $GITHUB_OUTPUT
|
||||
echo "ℹ️ No changes in gatekeeper/"
|
||||
else
|
||||
echo "gatekeeper_changed=true" >> $GITHUB_OUTPUT
|
||||
echo "⚠️ Changes detected in gatekeeper/"
|
||||
fi
|
||||
else
|
||||
echo "gatekeeper_changed=true" >> $GITHUB_OUTPUT
|
||||
echo "🆕 First commit or no history, building gatekeeper."
|
||||
fi
|
||||
|
||||
- name: 🔍 Environment & Version ermitteln
|
||||
id: determine
|
||||
@@ -105,19 +132,21 @@ jobs:
|
||||
TARGET="skip"
|
||||
fi
|
||||
|
||||
echo "target=$TARGET" >> $GITHUB_OUTPUT
|
||||
echo "image_tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
|
||||
echo "env_file=$ENV_FILE" >> $GITHUB_OUTPUT
|
||||
echo "traefik_host=$TRAEFIK_HOST" >> $GITHUB_OUTPUT
|
||||
echo "next_public_base_url=$NEXT_PUBLIC_BASE_URL" >> $GITHUB_OUTPUT
|
||||
echo "directus_url=$DIRECTUS_URL" >> $GITHUB_OUTPUT
|
||||
echo "directus_host=$DIRECTUS_HOST" >> $GITHUB_OUTPUT
|
||||
echo "project_name=$PROJECT_NAME" >> $GITHUB_OUTPUT
|
||||
echo "is_prod=$IS_PROD" >> $GITHUB_OUTPUT
|
||||
echo "gotify_title=$GOTIFY_TITLE" >> $GITHUB_OUTPUT
|
||||
echo "gotify_priority=$GOTIFY_PRIORITY" >> $GITHUB_OUTPUT
|
||||
echo "short_sha=$SHORT_SHA" >> $GITHUB_OUTPUT
|
||||
echo "commit_msg=$COMMIT_MSG" >> $GITHUB_OUTPUT
|
||||
{
|
||||
echo "target=$TARGET"
|
||||
echo "image_tag=$IMAGE_TAG"
|
||||
echo "env_file=$ENV_FILE"
|
||||
echo "traefik_host=$TRAEFIK_HOST"
|
||||
echo "next_public_base_url=$NEXT_PUBLIC_BASE_URL"
|
||||
echo "directus_url=$DIRECTUS_URL"
|
||||
echo "directus_host=$DIRECTUS_HOST"
|
||||
echo "project_name=$PROJECT_NAME"
|
||||
echo "is_prod=$IS_PROD"
|
||||
echo "gotify_title=$GOTIFY_TITLE"
|
||||
echo "gotify_priority=$GOTIFY_PRIORITY"
|
||||
echo "short_sha=$SHORT_SHA"
|
||||
echo "commit_msg=$COMMIT_MSG"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 2: Quality Assurance (Lint & Test)
|
||||
@@ -127,23 +156,18 @@ jobs:
|
||||
needs: prepare
|
||||
if: needs.prepare.outputs.target != 'skip'
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
|
||||
- name: 📦 Restore npm cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-node-
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
@@ -166,20 +190,27 @@ jobs:
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 3: Build & Push Docker Image
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
build:
|
||||
name: 🏗️ Build & Push
|
||||
build-app:
|
||||
name: 🏗️ Build App
|
||||
needs: prepare
|
||||
if: ${{ needs.prepare.outputs.target != 'skip' }}
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: 🐳 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: 🔐 Registry Login
|
||||
run: |
|
||||
echo "${{ secrets.REGISTRY_PASS }}" | docker login registry.infra.mintel.me -u "${{ secrets.REGISTRY_USER }}" --password-stdin
|
||||
|
||||
- name: 🏗️ Docker Image bauen & pushen
|
||||
- name: 🏗️ App bauen & pushen
|
||||
env:
|
||||
IMAGE_TAG: ${{ needs.prepare.outputs.image_tag }}
|
||||
TARGET: ${{ needs.prepare.outputs.target }}
|
||||
@@ -188,28 +219,63 @@ jobs:
|
||||
NEXT_PUBLIC_UMAMI_SCRIPT_URL: ${{ needs.prepare.outputs.target == 'production' && secrets.NEXT_PUBLIC_UMAMI_SCRIPT_URL || (needs.prepare.outputs.target == 'staging' && secrets.STAGING_NEXT_PUBLIC_UMAMI_SCRIPT_URL || secrets.TESTING_NEXT_PUBLIC_UMAMI_SCRIPT_URL || secrets.NEXT_PUBLIC_UMAMI_SCRIPT_URL) }}
|
||||
DIRECTUS_URL: ${{ needs.prepare.outputs.directus_url }}
|
||||
run: |
|
||||
echo "🏗️ Building → $TARGET / $IMAGE_TAG"
|
||||
docker buildx build \
|
||||
--pull \
|
||||
--platform linux/arm64 \
|
||||
--build-arg NEXT_PUBLIC_BASE_URL="$NEXT_PUBLIC_BASE_URL" \
|
||||
--build-arg NEXT_PUBLIC_UMAMI_WEBSITE_ID="$NEXT_PUBLIC_UMAMI_WEBSITE_ID" \
|
||||
--build-arg NEXT_PUBLIC_UMAMI_SCRIPT_URL="$NEXT_PUBLIC_UMAMI_SCRIPT_URL" \
|
||||
--build-arg NEXT_PUBLIC_TARGET="$TARGET" \
|
||||
--build-arg DIRECTUS_URL="$DIRECTUS_URL" \
|
||||
-t registry.infra.mintel.me/mintel/klz-cables.com:$IMAGE_TAG \
|
||||
--cache-from type=registry,ref=registry.infra.mintel.me/mintel/klz-cables.com:buildcache \
|
||||
--cache-to type=registry,ref=registry.infra.mintel.me/mintel/klz-cables.com:buildcache,mode=max \
|
||||
--push .
|
||||
|
||||
build-gatekeeper:
|
||||
name: 🏗️ Build Gatekeeper
|
||||
needs: prepare
|
||||
if: ${{ needs.prepare.outputs.target != 'skip' }}
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: 🐳 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: 🔐 Registry Login
|
||||
run: |
|
||||
echo "${{ secrets.REGISTRY_PASS }}" | docker login registry.infra.mintel.me -u "${{ secrets.REGISTRY_USER }}" --password-stdin
|
||||
|
||||
- name: 🏗️ Gatekeeper bauen & pushen
|
||||
env:
|
||||
IMAGE_TAG: ${{ needs.prepare.outputs.image_tag }}
|
||||
CHG: ${{ needs.prepare.outputs.gatekeeper_changed }}
|
||||
run: |
|
||||
docker buildx build \
|
||||
--pull \
|
||||
--platform linux/arm64 \
|
||||
-t registry.infra.mintel.me/mintel/klz-cables-gatekeeper:$IMAGE_TAG \
|
||||
--push ./gatekeeper
|
||||
if [ "$CHG" == "true" ]; then
|
||||
echo "🏗️ Building Gatekeeper (Changes detected)..."
|
||||
docker buildx build \
|
||||
--pull \
|
||||
--platform linux/arm64 \
|
||||
-t registry.infra.mintel.me/mintel/klz-cables-gatekeeper:$IMAGE_TAG \
|
||||
--cache-from type=registry,ref=registry.infra.mintel.me/mintel/klz-cables-gatekeeper:buildcache \
|
||||
--cache-to type=registry,ref=registry.infra.mintel.me/mintel/klz-cables-gatekeeper:buildcache,mode=max \
|
||||
--push ./gatekeeper
|
||||
else
|
||||
echo "⏩ Skipping build, just re-tagging existing image..."
|
||||
# Fast-track: tag the latest (or buildcache) as the new version
|
||||
# We use buildx with cache but without rebuild triggers - it's near instant
|
||||
docker buildx build \
|
||||
--platform linux/arm64 \
|
||||
-t registry.infra.mintel.me/mintel/klz-cables-gatekeeper:$IMAGE_TAG \
|
||||
--cache-from type=registry,ref=registry.infra.mintel.me/mintel/klz-cables-gatekeeper:buildcache \
|
||||
--push ./gatekeeper
|
||||
fi
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
@@ -217,9 +283,11 @@ jobs:
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
deploy:
|
||||
name: 🚀 Deploy
|
||||
needs: [prepare, build, qa]
|
||||
needs: [prepare, build-app, build-gatekeeper, qa]
|
||||
if: ${{ needs.prepare.outputs.target != 'skip' }}
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
env:
|
||||
TARGET: ${{ needs.prepare.outputs.target }}
|
||||
IMAGE_TAG: ${{ needs.prepare.outputs.image_tag }}
|
||||
@@ -250,6 +318,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: 🚀 Deploy to ${{ env.TARGET }}
|
||||
run: |
|
||||
@@ -264,9 +334,11 @@ jobs:
|
||||
# Generated by CI - $TARGET - $(date -u)
|
||||
NODE_ENV=production
|
||||
NEXT_PUBLIC_BASE_URL=$NEXT_PUBLIC_BASE_URL
|
||||
NEXT_PUBLIC_TARGET=$TARGET
|
||||
NEXT_PUBLIC_UMAMI_WEBSITE_ID=$NEXT_PUBLIC_UMAMI_WEBSITE_ID
|
||||
NEXT_PUBLIC_UMAMI_SCRIPT_URL=$NEXT_PUBLIC_UMAMI_SCRIPT_URL
|
||||
SENTRY_DSN=$SENTRY_DSN
|
||||
LOG_LEVEL=$( [[ "$TARGET" == "testing" || "$TARGET" == "development" ]] && echo "debug" || echo "info" )
|
||||
MAIL_HOST=$MAIL_HOST
|
||||
MAIL_PORT=$MAIL_PORT
|
||||
MAIL_USERNAME=$MAIL_USERNAME
|
||||
@@ -288,6 +360,8 @@ jobs:
|
||||
INTERNAL_DIRECTUS_URL=http://directus:8055
|
||||
GATEKEEPER_PASSWORD=$GATEKEEPER_PASSWORD
|
||||
|
||||
TARGET=$TARGET
|
||||
SENTRY_ENVIRONMENT=$TARGET
|
||||
IMAGE_TAG=$IMAGE_TAG
|
||||
TRAEFIK_HOST=$TRAEFIK_HOST
|
||||
ENV_FILE=$ENV_FILE
|
||||
@@ -307,7 +381,7 @@ jobs:
|
||||
docker compose -p "$PROJECT_NAME" --env-file "$ENV_FILE" pull
|
||||
echo "→ Starting containers..."
|
||||
docker compose -p "$PROJECT_NAME" --env-file "$ENV_FILE" up -d --remove-orphans
|
||||
docker system prune -f --filter "until=168h"
|
||||
docker system prune -f --filter "until=24h"
|
||||
echo "→ Waiting 15s for warmup..."
|
||||
sleep 15
|
||||
echo "→ Container status:"
|
||||
@@ -330,17 +404,20 @@ jobs:
|
||||
needs.deploy.result == 'success' &&
|
||||
github.event.inputs.skip_long_checks != 'true'
|
||||
runs-on: docker
|
||||
outputs:
|
||||
report_url: ${{ steps.save.outputs.report_url }}
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
# outputs:
|
||||
# report_url: ${{ steps.save.outputs.report_url }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
@@ -412,24 +489,18 @@ jobs:
|
||||
CHROME_PATH: /usr/bin/chromium
|
||||
run: npm run pagespeed:test
|
||||
|
||||
- name: 💾 Save Report URL
|
||||
id: save
|
||||
if: always()
|
||||
run: |
|
||||
if [ -f pagespeed-report-url.txt ]; then
|
||||
URL=$(cat pagespeed-report-url.txt)
|
||||
echo "report_url=$URL" >> $GITHUB_OUTPUT
|
||||
echo "✅ Report URL found: $URL"
|
||||
fi
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# JOB 6: Notifications
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
notifications:
|
||||
name: 🔔 Notifications
|
||||
needs: [prepare, qa, build, deploy, pagespeed]
|
||||
needs: [prepare, qa, build-app, build-gatekeeper, deploy, pagespeed]
|
||||
if: always()
|
||||
runs-on: docker
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: 📊 Deployment Summary
|
||||
run: |
|
||||
@@ -446,18 +517,19 @@ jobs:
|
||||
- name: 🔔 Gotify - Success
|
||||
if: needs.deploy.result == 'success'
|
||||
run: |
|
||||
REPORT_MSG=""
|
||||
if [ -n "${{ needs.pagespeed.outputs.report_url }}" ]; then
|
||||
REPORT_MSG="\n\n⚡ **PageSpeed Report:**\n${{ needs.pagespeed.outputs.report_url }}"
|
||||
fi
|
||||
|
||||
curl -s -k -X POST "${{ secrets.GOTIFY_URL }}/message?token=${{ secrets.GOTIFY_TOKEN }}" \
|
||||
-F "title=${{ needs.prepare.outputs.gotify_title }}" \
|
||||
-F "message=Erfolgreich deployt auf **${{ needs.prepare.outputs.target }}**\n\nVersion: **${{ needs.prepare.outputs.image_tag }}**\nCommit: ${{ needs.prepare.outputs.short_sha }} (${{ needs.prepare.outputs.commit_msg }})\nVon: ${{ github.actor }}\nRun: ${{ github.run_id }}${REPORT_MSG}" \
|
||||
-F "message=Erfolgreich deployt auf **${{ needs.prepare.outputs.target }}**\n\nVersion: **${{ needs.prepare.outputs.image_tag }}**\nCommit: ${{ needs.prepare.outputs.short_sha }} (${{ needs.prepare.outputs.commit_msg }})\nVon: ${{ github.actor }}\nRun: ${{ github.run_id }}" \
|
||||
-F "priority=4" || true
|
||||
|
||||
- name: 🔔 Gotify - Failure
|
||||
if: needs.deploy.result == 'failure' || needs.build.result == 'failure' || needs.qa.result == 'failure'
|
||||
if: |
|
||||
needs.prepare.result == 'failure' ||
|
||||
needs.qa.result == 'failure' ||
|
||||
needs.build-app.result == 'failure' ||
|
||||
needs.build-gatekeeper.result == 'failure' ||
|
||||
needs.deploy.result == 'failure' ||
|
||||
needs.pagespeed.result == 'failure'
|
||||
run: |
|
||||
curl -s -k -X POST "${{ secrets.GOTIFY_URL }}/message?token=${{ secrets.GOTIFY_TOKEN }}" \
|
||||
-F "title=❌ Deployment FEHLGESCHLAGEN – ${{ needs.prepare.outputs.target || 'unknown' }}" \
|
||||
|
||||
@@ -27,11 +27,13 @@ ENV NEXT_TELEMETRY_DISABLED=1
|
||||
ARG NEXT_PUBLIC_BASE_URL
|
||||
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
|
||||
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
|
||||
ARG NEXT_PUBLIC_TARGET
|
||||
ARG DIRECTUS_URL
|
||||
|
||||
ENV NEXT_PUBLIC_BASE_URL=$NEXT_PUBLIC_BASE_URL
|
||||
ENV NEXT_PUBLIC_UMAMI_WEBSITE_ID=$NEXT_PUBLIC_UMAMI_WEBSITE_ID
|
||||
ENV NEXT_PUBLIC_UMAMI_SCRIPT_URL=$NEXT_PUBLIC_UMAMI_SCRIPT_URL
|
||||
ENV NEXT_PUBLIC_TARGET=$NEXT_PUBLIC_TARGET
|
||||
ENV DIRECTUS_URL=$DIRECTUS_URL
|
||||
|
||||
# Validate environment variables during build
|
||||
|
||||
@@ -1,54 +1,60 @@
|
||||
"use server";
|
||||
'use server';
|
||||
|
||||
import client, { ensureAuthenticated } from "@/lib/directus";
|
||||
import { createItem } from "@directus/sdk";
|
||||
import { sendEmail } from "@/lib/mail/mailer";
|
||||
import ContactEmail from "@/components/emails/ContactEmail";
|
||||
import React from "react";
|
||||
import { getServerAppServices } from "@/lib/services/create-services.server";
|
||||
import client, { ensureAuthenticated } from '@/lib/directus';
|
||||
import { createItem } from '@directus/sdk';
|
||||
import { sendEmail } from '@/lib/mail/mailer';
|
||||
import ContactEmail from '@/components/emails/ContactEmail';
|
||||
import React from 'react';
|
||||
import { getServerAppServices } from '@/lib/services/create-services.server';
|
||||
|
||||
export async function sendContactFormAction(formData: FormData) {
|
||||
const services = getServerAppServices();
|
||||
const logger = services.logger.child({ action: 'sendContactFormAction' });
|
||||
const name = formData.get("name") as string;
|
||||
const email = formData.get("email") as string;
|
||||
const message = formData.get("message") as string;
|
||||
const productName = formData.get("productName") as string | null;
|
||||
const name = formData.get('name') as string;
|
||||
const email = formData.get('email') as string;
|
||||
const message = formData.get('message') as string;
|
||||
const productName = formData.get('productName') as string | null;
|
||||
|
||||
if (!name || !email || !message) {
|
||||
logger.warn('Missing required fields in contact form', { name: !!name, email: !!email, message: !!message });
|
||||
return { success: false, error: "Missing required fields" };
|
||||
logger.warn('Missing required fields in contact form', {
|
||||
name: !!name,
|
||||
email: !!email,
|
||||
message: !!message,
|
||||
});
|
||||
return { success: false, error: 'Missing required fields' };
|
||||
}
|
||||
|
||||
// 1. Save to Directus
|
||||
try {
|
||||
await ensureAuthenticated();
|
||||
if (productName) {
|
||||
await client.request(createItem('product_requests', {
|
||||
product_name: productName,
|
||||
email,
|
||||
message
|
||||
}));
|
||||
await client.request(
|
||||
createItem('product_requests', {
|
||||
product_name: productName,
|
||||
email,
|
||||
message,
|
||||
}),
|
||||
);
|
||||
logger.info('Product request stored in Directus');
|
||||
} else {
|
||||
await client.request(createItem('contact_submissions', {
|
||||
name,
|
||||
email,
|
||||
message
|
||||
}));
|
||||
await client.request(
|
||||
createItem('contact_submissions', {
|
||||
name,
|
||||
email,
|
||||
message,
|
||||
}),
|
||||
);
|
||||
logger.info('Contact submission stored in Directus');
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to store submission in Directus', { error });
|
||||
// We continue anyway to try sending the email, but maybe we should report this
|
||||
services.errors.captureException(error, { action: 'directus_store_submission' });
|
||||
}
|
||||
|
||||
// 2. Send Email
|
||||
logger.info('Sending contact form email', { email, productName });
|
||||
|
||||
const subject = productName
|
||||
? `Product Inquiry: ${productName}`
|
||||
: "New Contact Form Submission";
|
||||
const subject = productName ? `Product Inquiry: ${productName}` : 'New Contact Form Submission';
|
||||
|
||||
const result = await sendEmail({
|
||||
subject,
|
||||
@@ -63,9 +69,19 @@ export async function sendContactFormAction(formData: FormData) {
|
||||
|
||||
if (result.success) {
|
||||
logger.info('Contact form email sent successfully', { messageId: result.messageId });
|
||||
await services.notifications.notify({
|
||||
title: `📩 ${subject}`,
|
||||
message: `New message from ${name} (${email}):\n\n${message}`,
|
||||
priority: 5,
|
||||
});
|
||||
} else {
|
||||
logger.error('Failed to send contact form email', { error: result.error });
|
||||
services.errors.captureException(result.error, { action: 'sendContactFormAction', email });
|
||||
await services.notifications.notify({
|
||||
title: '🚨 Contact Form Error',
|
||||
message: `Failed to send email for ${name} (${email}). Error: ${JSON.stringify(result.error)}`,
|
||||
priority: 8,
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import { AlertCircle, RefreshCw, Database } from 'lucide-react';
|
||||
import { config } from '../lib/config';
|
||||
|
||||
export default function CMSConnectivityNotice() {
|
||||
const [status, setStatus] = useState<'checking' | 'ok' | 'error'>('checking');
|
||||
@@ -12,14 +13,12 @@ export default function CMSConnectivityNotice() {
|
||||
// Only show if we've detected an issue AND we are in a context where we want to see it
|
||||
const checkCMS = async () => {
|
||||
const isDebug = new URLSearchParams(window.location.search).has('cms_debug');
|
||||
const isLocal =
|
||||
window.location.hostname === 'localhost' || window.location.hostname.includes('127.0.0.1');
|
||||
const isStaging =
|
||||
window.location.hostname.includes('staging') ||
|
||||
window.location.hostname.includes('testing');
|
||||
const isLocal = config.isDevelopment;
|
||||
const isTesting = config.isTesting;
|
||||
|
||||
// Only proceed with check if it's developer context
|
||||
if (!isLocal && !isStaging && !isDebug) return;
|
||||
// Only proceed with check if it's developer context (Local or Testing)
|
||||
// Staging and Production should NEVER see this unless forced with ?cms_debug
|
||||
if (!isLocal && !isTesting && !isDebug) return;
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/health/cms');
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@@ -398,6 +398,24 @@ locale: de
|
||||
"55",
|
||||
"4195"
|
||||
]
|
||||
},
|
||||
{
|
||||
"configuration": "1x1200/35",
|
||||
"cells": [
|
||||
"Al",
|
||||
"RM",
|
||||
"0,95",
|
||||
"48,5",
|
||||
"0,0247",
|
||||
"3,4",
|
||||
"Auf Anfrage",
|
||||
"Auf Anfrage",
|
||||
"113",
|
||||
"2,4",
|
||||
"885",
|
||||
"59",
|
||||
"4800"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -737,6 +755,24 @@ locale: de
|
||||
"60",
|
||||
"4634"
|
||||
]
|
||||
},
|
||||
{
|
||||
"configuration": "1x1200/35",
|
||||
"cells": [
|
||||
"Al",
|
||||
"RM",
|
||||
"1,05",
|
||||
"52,3",
|
||||
"0,0247",
|
||||
"5,5",
|
||||
"Auf Anfrage",
|
||||
"Auf Anfrage",
|
||||
"113",
|
||||
"2,4",
|
||||
"990",
|
||||
"66",
|
||||
"5200"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -1076,6 +1112,24 @@ locale: de
|
||||
"65",
|
||||
"5093"
|
||||
]
|
||||
},
|
||||
{
|
||||
"configuration": "1x1200/35",
|
||||
"cells": [
|
||||
"Al",
|
||||
"RM",
|
||||
"1,15",
|
||||
"57,5",
|
||||
"0,0247",
|
||||
"8,0",
|
||||
"Auf Anfrage",
|
||||
"Auf Anfrage",
|
||||
"113",
|
||||
"2,4",
|
||||
"1065",
|
||||
"71",
|
||||
"5900"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -398,6 +398,24 @@ locale: en
|
||||
"55",
|
||||
"4195"
|
||||
]
|
||||
},
|
||||
{
|
||||
"configuration": "1x1200/35",
|
||||
"cells": [
|
||||
"Al",
|
||||
"RM",
|
||||
"0.95",
|
||||
"48.5",
|
||||
"0.0247",
|
||||
"3.4",
|
||||
"On Request",
|
||||
"On Request",
|
||||
"113",
|
||||
"2.4",
|
||||
"885",
|
||||
"59",
|
||||
"4800"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -737,6 +755,24 @@ locale: en
|
||||
"60",
|
||||
"4634"
|
||||
]
|
||||
},
|
||||
{
|
||||
"configuration": "1x1200/35",
|
||||
"cells": [
|
||||
"Al",
|
||||
"RM",
|
||||
"1.05",
|
||||
"52.3",
|
||||
"0.0247",
|
||||
"5.5",
|
||||
"On Request",
|
||||
"On Request",
|
||||
"113",
|
||||
"2.4",
|
||||
"990",
|
||||
"66",
|
||||
"5200"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -1076,6 +1112,24 @@ locale: en
|
||||
"65",
|
||||
"5093"
|
||||
]
|
||||
},
|
||||
{
|
||||
"configuration": "1x1200/35",
|
||||
"cells": [
|
||||
"Al",
|
||||
"RM",
|
||||
"1.15",
|
||||
"57.5",
|
||||
"0.0247",
|
||||
"8",
|
||||
"On Request",
|
||||
"On Request",
|
||||
"113",
|
||||
"2.4",
|
||||
"1065",
|
||||
"71",
|
||||
"5900"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -9,18 +9,27 @@ services:
|
||||
NODE_ENV: development
|
||||
# Docker Internal Communication
|
||||
DIRECTUS_URL: http://directus:8055
|
||||
ports:
|
||||
- "3000:3000"
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.klz-app-local.rule=Host(`klz.localhost`)"
|
||||
- "traefik.http.routers.klz-app-local.entrypoints=web"
|
||||
- "traefik.http.routers.klz-app-local.service=klz-cables"
|
||||
# Clear all production-related TLS/Middleware settings for the main routers
|
||||
- "traefik.http.routers.klz-cables.entrypoints=web"
|
||||
- "traefik.http.routers.klz-cables.rule=Host(`klz.localhost`)"
|
||||
- "traefik.http.routers.klz-cables.tls=false"
|
||||
- "traefik.http.routers.klz-cables.middlewares="
|
||||
|
||||
- "traefik.http.routers.klz-cables-web.entrypoints=web"
|
||||
- "traefik.http.routers.klz-cables-web.rule=Host(`klz.localhost`)"
|
||||
- "traefik.http.routers.klz-cables-web.middlewares="
|
||||
|
||||
directus:
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.klz-directus-local.rule=Host(`cms.klz.localhost`)"
|
||||
- "traefik.http.routers.klz-directus-local.entrypoints=web"
|
||||
- "traefik.http.routers.klz-directus-local.service=klz-directus"
|
||||
- "traefik.http.routers.klz-cables-directus.entrypoints=web"
|
||||
- "traefik.http.routers.klz-cables-directus.rule=Host(`cms.klz.localhost`)"
|
||||
- "traefik.http.routers.klz-cables-directus.tls=false"
|
||||
- "traefik.http.routers.klz-cables-directus.middlewares="
|
||||
ports:
|
||||
- "8055:8055"
|
||||
environment:
|
||||
|
||||
@@ -75,6 +75,7 @@ services:
|
||||
# Error Tracking
|
||||
SENTRY_DSN: ${SENTRY_DSN}
|
||||
SENTRY_ENVIRONMENT: ${TARGET:-development}
|
||||
LOGGER_LEVEL: ${LOG_LEVEL:-info}
|
||||
volumes:
|
||||
- ./directus/uploads:/directus/uploads
|
||||
- ./directus/extensions:/directus/extensions
|
||||
|
||||
@@ -13,11 +13,15 @@ let memoizedConfig: ReturnType<typeof createConfig> | undefined;
|
||||
function createConfig() {
|
||||
const env = envSchema.parse(getRawEnv());
|
||||
|
||||
const target = env.NEXT_PUBLIC_TARGET || env.TARGET;
|
||||
|
||||
return {
|
||||
env: env.NODE_ENV,
|
||||
isProduction: env.NODE_ENV === 'production',
|
||||
isDevelopment: env.NODE_ENV === 'development',
|
||||
isTest: env.NODE_ENV === 'test',
|
||||
target,
|
||||
isProduction: target === 'production' || !target,
|
||||
isStaging: target === 'staging',
|
||||
isTesting: target === 'testing',
|
||||
isDevelopment: target === 'development',
|
||||
|
||||
baseUrl: env.NEXT_PUBLIC_BASE_URL,
|
||||
|
||||
@@ -65,6 +69,13 @@ function createConfig() {
|
||||
internalUrl: env.INTERNAL_DIRECTUS_URL,
|
||||
proxyPath: '/cms',
|
||||
},
|
||||
notifications: {
|
||||
gotify: {
|
||||
url: env.GOTIFY_URL,
|
||||
token: env.GOTIFY_TOKEN,
|
||||
enabled: Boolean(env.GOTIFY_URL && env.GOTIFY_TOKEN),
|
||||
},
|
||||
},
|
||||
} as const;
|
||||
}
|
||||
|
||||
@@ -87,15 +98,21 @@ export const config = {
|
||||
get env() {
|
||||
return getConfig().env;
|
||||
},
|
||||
get target() {
|
||||
return getConfig().target;
|
||||
},
|
||||
get isProduction() {
|
||||
return getConfig().isProduction;
|
||||
},
|
||||
get isStaging() {
|
||||
return getConfig().isStaging;
|
||||
},
|
||||
get isTesting() {
|
||||
return getConfig().isTesting;
|
||||
},
|
||||
get isDevelopment() {
|
||||
return getConfig().isDevelopment;
|
||||
},
|
||||
get isTest() {
|
||||
return getConfig().isTest;
|
||||
},
|
||||
get baseUrl() {
|
||||
return getConfig().baseUrl;
|
||||
},
|
||||
@@ -117,6 +134,9 @@ export const config = {
|
||||
get directus() {
|
||||
return getConfig().directus;
|
||||
},
|
||||
get notifications() {
|
||||
return getConfig().notifications;
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -161,5 +181,12 @@ export function getMaskedConfig() {
|
||||
password: mask(c.directus.password),
|
||||
token: mask(c.directus.token),
|
||||
},
|
||||
notifications: {
|
||||
gotify: {
|
||||
url: c.notifications.gotify.url,
|
||||
token: mask(c.notifications.gotify.token),
|
||||
enabled: c.notifications.gotify.enabled,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { createDirectus, rest, authentication, readItems, readCollections } from '@directus/sdk';
|
||||
import { config } from './config';
|
||||
import { getServerAppServices } from './services/create-services.server';
|
||||
|
||||
const { url, adminEmail, password, token, proxyPath, internalUrl } = config.directus;
|
||||
|
||||
@@ -8,6 +9,21 @@ const effectiveUrl = typeof window === 'undefined' && internalUrl ? internalUrl
|
||||
|
||||
const client = createDirectus(effectiveUrl).with(rest()).with(authentication());
|
||||
|
||||
/**
|
||||
* Helper to determine if we should show detailed errors
|
||||
*/
|
||||
const shouldShowDevErrors = config.isTesting || config.isDevelopment;
|
||||
|
||||
/**
|
||||
* Genericizes error messages for production/staging
|
||||
*/
|
||||
function formatError(error: any) {
|
||||
if (shouldShowDevErrors) {
|
||||
return error.errors?.[0]?.message || error.message || 'An unexpected error occurred.';
|
||||
}
|
||||
return 'A system error occurred. Our team has been notified.';
|
||||
}
|
||||
|
||||
export async function ensureAuthenticated() {
|
||||
if (token) {
|
||||
client.setToken(token);
|
||||
@@ -17,6 +33,9 @@ export async function ensureAuthenticated() {
|
||||
try {
|
||||
await client.login(adminEmail, password);
|
||||
} catch (e) {
|
||||
if (typeof window === 'undefined') {
|
||||
getServerAppServices().errors.captureException(e, { part: 'directus_auth' });
|
||||
}
|
||||
console.error('Failed to authenticate with Directus:', e);
|
||||
}
|
||||
}
|
||||
@@ -61,6 +80,9 @@ export async function getProducts(locale: string = 'de') {
|
||||
);
|
||||
return items.map((item) => mapDirectusProduct(item, locale));
|
||||
} catch (error) {
|
||||
if (typeof window === 'undefined') {
|
||||
getServerAppServices().errors.captureException(error, { part: 'directus_get_products' });
|
||||
}
|
||||
console.error('Error fetching products:', error);
|
||||
return [];
|
||||
}
|
||||
@@ -86,6 +108,12 @@ export async function getProductBySlug(slug: string, locale: string = 'de') {
|
||||
if (!items || items.length === 0) return null;
|
||||
return mapDirectusProduct(items[0], locale);
|
||||
} catch (error) {
|
||||
if (typeof window === 'undefined') {
|
||||
getServerAppServices().errors.captureException(error, {
|
||||
part: 'directus_get_product_by_slug',
|
||||
slug,
|
||||
});
|
||||
}
|
||||
console.error(`Error fetching product ${slug}:`, error);
|
||||
return null;
|
||||
}
|
||||
@@ -98,20 +126,27 @@ export async function checkHealth() {
|
||||
await ensureAuthenticated();
|
||||
await client.request(readCollections());
|
||||
} catch (e: any) {
|
||||
if (typeof window === 'undefined') {
|
||||
getServerAppServices().errors.captureException(e, { part: 'directus_health_auth' });
|
||||
}
|
||||
console.error('Directus authentication failed during health check:', e);
|
||||
return {
|
||||
status: 'error',
|
||||
message:
|
||||
'Authentication failed. Check your DIRECTUS_ADMIN_EMAIL and DIRECTUS_ADMIN_PASSWORD.',
|
||||
message: shouldShowDevErrors
|
||||
? 'Authentication failed. Check your DIRECTUS_ADMIN_EMAIL and DIRECTUS_ADMIN_PASSWORD.'
|
||||
: 'CMS is currently unavailable due to an internal authentication error.',
|
||||
code: 'AUTH_FAILED',
|
||||
details: e.message,
|
||||
details: shouldShowDevErrors ? e.message : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
// 2. Schema check (does the products table exist?)
|
||||
// 2. Schema check (does the contact_submissions table exist?)
|
||||
try {
|
||||
await client.request(readItems('products', { limit: 1 }));
|
||||
await client.request(readItems('contact_submissions', { limit: 1 }));
|
||||
} catch (e: any) {
|
||||
if (typeof window === 'undefined') {
|
||||
getServerAppServices().errors.captureException(e, { part: 'directus_health_schema' });
|
||||
}
|
||||
if (
|
||||
e.message?.includes('does not exist') ||
|
||||
e.code === 'INVALID_PAYLOAD' ||
|
||||
@@ -119,23 +154,30 @@ export async function checkHealth() {
|
||||
) {
|
||||
return {
|
||||
status: 'error',
|
||||
message: 'The "products" collection is missing or inaccessible. Please sync your data.',
|
||||
message: shouldShowDevErrors
|
||||
? `The "contact_submissions" collection is missing or inaccessible. Error: ${e.message || 'Unknown'}`
|
||||
: 'Required data structures are currently unavailable.',
|
||||
code: 'SCHEMA_MISSING',
|
||||
};
|
||||
}
|
||||
return {
|
||||
status: 'error',
|
||||
message: `Schema error: ${e.message}`,
|
||||
message: shouldShowDevErrors
|
||||
? `Schema error: ${e.errors?.[0]?.message || e.message || 'Unknown error'}`
|
||||
: 'The data schema is currently misconfigured.',
|
||||
code: 'SCHEMA_ERROR',
|
||||
};
|
||||
}
|
||||
|
||||
return { status: 'ok', message: 'Directus is reachable and responding.' };
|
||||
} catch (error: any) {
|
||||
if (typeof window === 'undefined') {
|
||||
getServerAppServices().errors.captureException(error, { part: 'directus_health_critical' });
|
||||
}
|
||||
console.error('Directus health check failed with unexpected error:', error);
|
||||
return {
|
||||
status: 'error',
|
||||
message: error.message || 'An unexpected error occurred while connecting to the CMS.',
|
||||
message: formatError(error),
|
||||
code: error.code || 'UNKNOWN',
|
||||
};
|
||||
}
|
||||
|
||||
11
lib/env.ts
11
lib/env.ts
@@ -11,6 +11,7 @@ const preprocessEmptyString = (val: unknown) => (val === '' ? undefined : val);
|
||||
export const envSchema = z.object({
|
||||
NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
|
||||
NEXT_PUBLIC_BASE_URL: z.preprocess(preprocessEmptyString, z.string().url()),
|
||||
NEXT_PUBLIC_TARGET: z.enum(['development', 'testing', 'staging', 'production']).optional(),
|
||||
|
||||
// Analytics
|
||||
NEXT_PUBLIC_UMAMI_WEBSITE_ID: z.preprocess(preprocessEmptyString, z.string().optional()),
|
||||
@@ -45,6 +46,12 @@ export const envSchema = z.object({
|
||||
DIRECTUS_ADMIN_PASSWORD: z.preprocess(preprocessEmptyString, z.string().optional()),
|
||||
DIRECTUS_API_TOKEN: z.preprocess(preprocessEmptyString, z.string().optional()),
|
||||
INTERNAL_DIRECTUS_URL: z.preprocess(preprocessEmptyString, z.string().url().optional()),
|
||||
|
||||
// Deploy Target
|
||||
TARGET: z.enum(['development', 'testing', 'staging', 'production']).optional(),
|
||||
// Gotify
|
||||
GOTIFY_URL: z.preprocess(preprocessEmptyString, z.string().url().optional()),
|
||||
GOTIFY_TOKEN: z.preprocess(preprocessEmptyString, z.string().optional()),
|
||||
});
|
||||
|
||||
export type Env = z.infer<typeof envSchema>;
|
||||
@@ -57,6 +64,7 @@ export function getRawEnv() {
|
||||
return {
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
NEXT_PUBLIC_BASE_URL: process.env.NEXT_PUBLIC_BASE_URL,
|
||||
NEXT_PUBLIC_TARGET: process.env.NEXT_PUBLIC_TARGET,
|
||||
NEXT_PUBLIC_UMAMI_WEBSITE_ID: process.env.NEXT_PUBLIC_UMAMI_WEBSITE_ID,
|
||||
NEXT_PUBLIC_UMAMI_SCRIPT_URL: process.env.NEXT_PUBLIC_UMAMI_SCRIPT_URL,
|
||||
SENTRY_DSN: process.env.SENTRY_DSN,
|
||||
@@ -72,5 +80,8 @@ export function getRawEnv() {
|
||||
DIRECTUS_ADMIN_PASSWORD: process.env.DIRECTUS_ADMIN_PASSWORD,
|
||||
DIRECTUS_API_TOKEN: process.env.DIRECTUS_API_TOKEN,
|
||||
INTERNAL_DIRECTUS_URL: process.env.INTERNAL_DIRECTUS_URL,
|
||||
TARGET: process.env.TARGET,
|
||||
GOTIFY_URL: process.env.GOTIFY_URL,
|
||||
GOTIFY_TOKEN: process.env.GOTIFY_TOKEN,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import type { AnalyticsService } from './analytics/analytics-service';
|
||||
import type { CacheService } from './cache/cache-service';
|
||||
import type { ErrorReportingService } from './errors/error-reporting-service';
|
||||
import type { LoggerService } from './logging/logger-service';
|
||||
import type { NotificationService } from './notifications/notification-service';
|
||||
|
||||
// Simple constructor-based DI container.
|
||||
export class AppServices {
|
||||
@@ -9,6 +10,7 @@ export class AppServices {
|
||||
public readonly analytics: AnalyticsService,
|
||||
public readonly errors: ErrorReportingService,
|
||||
public readonly cache: CacheService,
|
||||
public readonly logger: LoggerService
|
||||
public readonly logger: LoggerService,
|
||||
public readonly notifications: NotificationService,
|
||||
) {}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,10 @@ import { UmamiAnalyticsService } from './analytics/umami-analytics-service';
|
||||
import { MemoryCacheService } from './cache/memory-cache-service';
|
||||
import { GlitchtipErrorReportingService } from './errors/glitchtip-error-reporting-service';
|
||||
import { NoopErrorReportingService } from './errors/noop-error-reporting-service';
|
||||
import {
|
||||
GotifyNotificationService,
|
||||
NoopNotificationService,
|
||||
} from './notifications/gotify-notification-service';
|
||||
import { PinoLoggerService } from './logging/pino-logger-service';
|
||||
import { config, getMaskedConfig } from '../config';
|
||||
|
||||
@@ -13,7 +17,7 @@ export function getServerAppServices(): AppServices {
|
||||
|
||||
// Create logger first to log initialization
|
||||
const logger = new PinoLoggerService('server');
|
||||
|
||||
|
||||
logger.info('Initializing server application services', {
|
||||
environment: getMaskedConfig(),
|
||||
timestamp: new Date().toISOString(),
|
||||
@@ -23,6 +27,7 @@ export function getServerAppServices(): AppServices {
|
||||
umamiEnabled: config.analytics.umami.enabled,
|
||||
sentryEnabled: config.errors.glitchtip.enabled,
|
||||
mailEnabled: Boolean(config.mail.host && config.mail.user),
|
||||
gotifyEnabled: config.notifications.gotify.enabled,
|
||||
});
|
||||
|
||||
const analytics = config.analytics.umami.enabled
|
||||
@@ -35,12 +40,28 @@ export function getServerAppServices(): AppServices {
|
||||
logger.info('Noop analytics service initialized (analytics disabled)');
|
||||
}
|
||||
|
||||
const notifications = config.notifications.gotify.enabled
|
||||
? new GotifyNotificationService({
|
||||
url: config.notifications.gotify.url!,
|
||||
token: config.notifications.gotify.token!,
|
||||
enabled: true,
|
||||
})
|
||||
: new NoopNotificationService();
|
||||
|
||||
if (config.notifications.gotify.enabled) {
|
||||
logger.info('Gotify notification service initialized');
|
||||
} else {
|
||||
logger.info('Noop notification service initialized (notifications disabled)');
|
||||
}
|
||||
|
||||
const errors = config.errors.glitchtip.enabled
|
||||
? new GlitchtipErrorReportingService({ enabled: true })
|
||||
? new GlitchtipErrorReportingService({ enabled: true }, notifications)
|
||||
: new NoopErrorReportingService();
|
||||
|
||||
if (config.errors.glitchtip.enabled) {
|
||||
logger.info('GlitchTip error reporting service initialized');
|
||||
logger.info('GlitchTip error reporting service initialized', {
|
||||
dsnPresent: Boolean(config.errors.glitchtip.dsn),
|
||||
});
|
||||
} else {
|
||||
logger.info('Noop error reporting service initialized (error reporting disabled)');
|
||||
}
|
||||
@@ -53,10 +74,9 @@ export function getServerAppServices(): AppServices {
|
||||
level: config.logging.level,
|
||||
});
|
||||
|
||||
singleton = new AppServices(analytics, errors, cache, logger);
|
||||
|
||||
singleton = new AppServices(analytics, errors, cache, logger, notifications);
|
||||
|
||||
logger.info('All application services initialized successfully');
|
||||
|
||||
|
||||
return singleton;
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import { GlitchtipErrorReportingService } from './errors/glitchtip-error-reporti
|
||||
import { NoopErrorReportingService } from './errors/noop-error-reporting-service';
|
||||
import { NoopLoggerService } from './logging/noop-logger-service';
|
||||
import { PinoLoggerService } from './logging/pino-logger-service';
|
||||
import { NoopNotificationService } from './notifications/gotify-notification-service';
|
||||
import { config, getMaskedConfig } from '../config';
|
||||
|
||||
/**
|
||||
@@ -71,9 +72,7 @@ export function getAppServices(): AppServices {
|
||||
|
||||
// Create logger first to log initialization
|
||||
const logger =
|
||||
typeof window === 'undefined'
|
||||
? new PinoLoggerService('server')
|
||||
: new NoopLoggerService();
|
||||
typeof window === 'undefined' ? new PinoLoggerService('server') : new NoopLoggerService();
|
||||
|
||||
// Log initialization
|
||||
if (typeof window === 'undefined') {
|
||||
@@ -121,7 +120,9 @@ export function getAppServices(): AppServices {
|
||||
: new NoopErrorReportingService();
|
||||
|
||||
if (sentryEnabled) {
|
||||
logger.info(`GlitchTip error reporting service initialized (${typeof window === 'undefined' ? 'server' : 'client'})`);
|
||||
logger.info(
|
||||
`GlitchTip error reporting service initialized (${typeof window === 'undefined' ? 'server' : 'client'})`,
|
||||
);
|
||||
} else {
|
||||
logger.info('Noop error reporting service initialized (error reporting disabled)');
|
||||
}
|
||||
@@ -138,9 +139,10 @@ export function getAppServices(): AppServices {
|
||||
});
|
||||
|
||||
// Create and cache the singleton
|
||||
singleton = new AppServices(analytics, errors, cache, logger);
|
||||
|
||||
const notifications = new NoopNotificationService();
|
||||
singleton = new AppServices(analytics, errors, cache, logger, notifications);
|
||||
|
||||
logger.info('All application services initialized successfully');
|
||||
|
||||
|
||||
return singleton;
|
||||
}
|
||||
|
||||
@@ -7,10 +7,15 @@ export type ErrorReportingUser = {
|
||||
export type ErrorReportingLevel = 'fatal' | 'error' | 'warning' | 'info' | 'debug' | 'log';
|
||||
|
||||
export interface ErrorReportingService {
|
||||
captureException(error: unknown, context?: Record<string, unknown>): string | undefined;
|
||||
captureMessage(message: string, level?: ErrorReportingLevel): string | undefined;
|
||||
captureException(
|
||||
error: unknown,
|
||||
context?: Record<string, unknown>,
|
||||
): Promise<string | undefined> | string | undefined;
|
||||
captureMessage(
|
||||
message: string,
|
||||
level?: ErrorReportingLevel,
|
||||
): Promise<string | undefined> | string | undefined;
|
||||
setUser(user: ErrorReportingUser | null): void;
|
||||
setTag(key: string, value: string): void;
|
||||
withScope<T>(fn: () => T, context?: Record<string, unknown>): T;
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import type {
|
||||
ErrorReportingService,
|
||||
ErrorReportingUser,
|
||||
} from './error-reporting-service';
|
||||
import type { NotificationService } from '../notifications/notification-service';
|
||||
|
||||
type SentryLike = typeof Sentry;
|
||||
|
||||
@@ -15,12 +16,29 @@ export type GlitchtipErrorReportingServiceOptions = {
|
||||
export class GlitchtipErrorReportingService implements ErrorReportingService {
|
||||
constructor(
|
||||
private readonly options: GlitchtipErrorReportingServiceOptions,
|
||||
private readonly sentry: SentryLike = Sentry
|
||||
private readonly notifications?: NotificationService,
|
||||
private readonly sentry: SentryLike = Sentry,
|
||||
) {}
|
||||
|
||||
captureException(error: unknown, context?: Record<string, unknown>) {
|
||||
async captureException(error: unknown, context?: Record<string, unknown>) {
|
||||
if (!this.options.enabled) return undefined;
|
||||
return this.sentry.captureException(error, context as any) as any;
|
||||
const result = this.sentry.captureException(error, context as any) as any;
|
||||
|
||||
// Send to Gotify if it's considered critical or if we just want all exceptions there
|
||||
// For now, let's send all exceptions to Gotify as requested "notify me via gotify about critical error messages"
|
||||
// We'll treat all captureException calls as potentially critical or at least noteworthy
|
||||
if (this.notifications) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
const contextStr = context ? `\nContext: ${JSON.stringify(context, null, 2)}` : '';
|
||||
|
||||
await this.notifications.notify({
|
||||
title: '🔥 Critical Error Captured',
|
||||
message: `Error: ${errorMessage}${contextStr}`,
|
||||
priority: 7,
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
captureMessage(message: string, level: ErrorReportingLevel = 'error') {
|
||||
|
||||
@@ -1,11 +1,15 @@
|
||||
import type { ErrorReportingLevel, ErrorReportingService, ErrorReportingUser } from './error-reporting-service';
|
||||
import type {
|
||||
ErrorReportingLevel,
|
||||
ErrorReportingService,
|
||||
ErrorReportingUser,
|
||||
} from './error-reporting-service';
|
||||
|
||||
export class NoopErrorReportingService implements ErrorReportingService {
|
||||
captureException(_error: unknown, _context?: Record<string, unknown>) {
|
||||
async captureException(_error: unknown, _context?: Record<string, unknown>) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
captureMessage(_message: string, _level?: ErrorReportingLevel) {
|
||||
async captureMessage(_message: string, _level?: ErrorReportingLevel) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
@@ -12,20 +12,19 @@ export class PinoLoggerService implements LoggerService {
|
||||
// In Next.js, especially in the Edge runtime or during instrumentation,
|
||||
// pino transports (which use worker threads) can cause issues.
|
||||
// We disable transport in production and during instrumentation.
|
||||
const useTransport = !config.isProduction && typeof window === 'undefined';
|
||||
const useTransport = config.isDevelopment && typeof window === 'undefined';
|
||||
|
||||
this.logger = pino({
|
||||
name: name || 'app',
|
||||
level: config.logging.level,
|
||||
transport:
|
||||
useTransport
|
||||
? {
|
||||
target: 'pino-pretty',
|
||||
options: {
|
||||
colorize: true,
|
||||
},
|
||||
}
|
||||
: undefined,
|
||||
transport: useTransport
|
||||
? {
|
||||
target: 'pino-pretty',
|
||||
options: {
|
||||
colorize: true,
|
||||
},
|
||||
}
|
||||
: undefined,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
49
lib/services/notifications/gotify-notification-service.ts
Normal file
49
lib/services/notifications/gotify-notification-service.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { NotificationOptions, NotificationService } from './notification-service';
|
||||
|
||||
export interface GotifyConfig {
|
||||
url: string;
|
||||
token: string;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
export class GotifyNotificationService implements NotificationService {
|
||||
constructor(private config: GotifyConfig) {}
|
||||
|
||||
async notify(options: NotificationOptions): Promise<void> {
|
||||
if (!this.config.enabled) return;
|
||||
|
||||
try {
|
||||
const { title, message, priority = 4 } = options;
|
||||
const url = new URL('message', this.config.url);
|
||||
url.searchParams.set('token', this.config.token);
|
||||
|
||||
const response = await fetch(url.toString(), {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
title,
|
||||
message,
|
||||
priority,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
console.error('Gotify notification failed:', {
|
||||
status: response.status,
|
||||
error: errorText,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Gotify notification error:', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class NoopNotificationService implements NotificationService {
|
||||
async notify(): Promise<void> {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
9
lib/services/notifications/notification-service.ts
Normal file
9
lib/services/notifications/notification-service.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
/** Payload for a single outbound notification. */
export interface NotificationOptions {
  /** Short headline shown by the notification client. */
  title: string;
  /** Body text of the notification. */
  message: string;
  /** Optional priority hint (higher = more urgent); backends may apply a default. */
  priority?: number;
}

/** Abstraction over a push-notification backend (e.g. Gotify) or a no-op. */
export interface NotificationService {
  /** Deliver one notification; implementations should not throw on delivery failure. */
  notify(options: NotificationOptions): Promise<void>;
}
|
||||
@@ -1,22 +1,19 @@
|
||||
module.exports = {
|
||||
ci: {
|
||||
collect: {
|
||||
numberOfRuns: 1,
|
||||
settings: {
|
||||
preset: 'desktop',
|
||||
onlyCategories: ['performance', 'accessibility', 'best-practices', 'seo'],
|
||||
},
|
||||
},
|
||||
assert: {
|
||||
assertions: {
|
||||
'categories:performance': ['warn', { minScore: 0.9 }],
|
||||
'categories:accessibility': ['warn', { minScore: 0.9 }],
|
||||
'categories:best-practices': ['warn', { minScore: 0.9 }],
|
||||
'categories:seo': ['warn', { minScore: 0.9 }],
|
||||
},
|
||||
},
|
||||
upload: {
|
||||
target: 'temporary-public-storage',
|
||||
},
|
||||
ci: {
|
||||
collect: {
|
||||
numberOfRuns: 1,
|
||||
settings: {
|
||||
preset: 'desktop',
|
||||
onlyCategories: ['performance', 'accessibility', 'best-practices', 'seo'],
|
||||
},
|
||||
},
|
||||
assert: {
|
||||
assertions: {
|
||||
'categories:performance': ['warn', { minScore: 0.9 }],
|
||||
'categories:accessibility': ['warn', { minScore: 0.9 }],
|
||||
'categories:best-practices': ['warn', { minScore: 0.9 }],
|
||||
'categories:seo': ['warn', { minScore: 0.9 }],
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -191,7 +191,14 @@
|
||||
"emailPlaceholder": "ihre@email.de",
|
||||
"message": "Nachricht",
|
||||
"messagePlaceholder": "Wie können wir Ihnen helfen?",
|
||||
"submit": "Nachricht senden"
|
||||
"submit": "Nachricht senden",
|
||||
"submitting": "Wird gesendet...",
|
||||
"successTitle": "Nachricht gesendet!",
|
||||
"successDesc": "Vielen Dank für Ihre Nachricht. Wir werden uns so schnell wie möglich bei Ihnen melden.",
|
||||
"sendAnother": "Weitere Nachricht senden",
|
||||
"errorTitle": "Senden fehlgeschlagen!",
|
||||
"error": "Etwas ist schief gelaufen. Bitte überprüfen Sie Ihre Eingaben und versuchen Sie es erneut.",
|
||||
"tryAgain": "Erneut versuchen"
|
||||
}
|
||||
},
|
||||
"Products": {
|
||||
|
||||
@@ -191,7 +191,14 @@
|
||||
"emailPlaceholder": "your@email.com",
|
||||
"message": "Message",
|
||||
"messagePlaceholder": "How can we help you?",
|
||||
"submit": "Send Message"
|
||||
"submit": "Send Message",
|
||||
"submitting": "Sending...",
|
||||
"successTitle": "Message Sent!",
|
||||
"successDesc": "Thank you for your message. We will get back to you as soon as possible.",
|
||||
"sendAnother": "Send another message",
|
||||
"errorTitle": "Submission Failed!",
|
||||
"error": "Something went wrong. Please check your input and try again.",
|
||||
"tryAgain": "Try Again"
|
||||
}
|
||||
},
|
||||
"Products": {
|
||||
|
||||
@@ -65,7 +65,7 @@
|
||||
"name": "klz-cables-nextjs",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "docker network create infra 2>/dev/null || true && echo '\\n🚀 Development Environment Starting...\\n\\n📱 App: http://klz.localhost\\n🗄️ CMS: http://cms.klz.localhost/admin\\n🚦 Traefik: http://localhost:8080\\n\\n(Press Ctrl+C to stop)\\n' && docker-compose down --remove-orphans && docker-compose up",
|
||||
"dev": "docker network create infra 2>/dev/null || true && echo '\\n🚀 Development Environment Starting...\\n\\n📱 App: http://klz.localhost\\n🗄️ CMS: http://cms.klz.localhost/admin\\n🚦 Traefik: http://localhost:8080\\n\\n(Press Ctrl+C to stop)\\n' && docker-compose down --remove-orphans && docker-compose up app directus directus-db",
|
||||
"dev:local": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
@@ -73,7 +73,7 @@
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "vitest run --passWithNoTests",
|
||||
"test:og": "vitest run tests/og-image.test.ts",
|
||||
"bootstrap:cms": "DIRECTUS_URL=http://localhost:8055 npx tsx --env-file=.env scripts/setup-directus-branding.ts",
|
||||
"directus:bootstrap": "DIRECTUS_URL=http://localhost:8055 npx tsx --env-file=.env scripts/setup-directus-branding.ts",
|
||||
"pdf:datasheets": "tsx ./scripts/generate-pdf-datasheets.ts",
|
||||
"pdf:datasheets:legacy": "tsx ./scripts/generate-pdf-datasheets-pdf-lib.ts",
|
||||
"directus:push:staging": "./scripts/sync-directus.sh push staging",
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@@ -79,33 +79,67 @@ async function main() {
|
||||
const chromePath = process.env.CHROME_PATH || process.env.PUPPETEER_EXECUTABLE_PATH;
|
||||
const chromePathArg = chromePath ? `--collect.chromePath="${chromePath}"` : '';
|
||||
|
||||
// Clean up old reports
|
||||
if (fs.existsSync('.lighthouseci')) {
|
||||
fs.rmSync('.lighthouseci', { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// Using a more robust way to execute and capture output
|
||||
const lhciCommand = `npx lhci collect ${urlArgs} ${chromePathArg} --collect.settings.chromeFlags='--no-sandbox --disable-setuid-sandbox' --collect.settings.extraHeaders='${extraHeaders}' && npx lhci assert && npx lhci upload`;
|
||||
// We remove 'npx lhci upload' to keep everything local and avoid Google-hosted reports
|
||||
const lhciCommand = `npx lhci collect ${urlArgs} ${chromePathArg} --collect.settings.chromeFlags='--no-sandbox --disable-setuid-sandbox' --collect.settings.extraHeaders='${extraHeaders}' && npx lhci assert`;
|
||||
|
||||
console.log(`💻 Executing LHCI...`);
|
||||
|
||||
try {
|
||||
const output = execSync(lhciCommand, {
|
||||
execSync(lhciCommand, {
|
||||
encoding: 'utf8',
|
||||
stdio: ['inherit', 'pipe', 'inherit'], // Pipe stdout so we can parse it
|
||||
stdio: 'inherit',
|
||||
});
|
||||
} catch (err: any) {
|
||||
console.warn('⚠️ LHCI assertion finished with warnings or errors.');
|
||||
// We continue to show the table even if assertions failed
|
||||
}
|
||||
|
||||
// 3. Summarize Results (Local & Independent)
|
||||
const manifestPath = path.join(process.cwd(), '.lighthouseci', 'manifest.json');
|
||||
if (fs.existsSync(manifestPath)) {
|
||||
const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
|
||||
console.log(`\n📊 PageSpeed Summary (FOSS - Local Report):\n`);
|
||||
|
||||
const summaryTable = manifest.map((entry: any) => {
|
||||
const s = entry.summary;
|
||||
return {
|
||||
URL: entry.url.replace(targetUrl, ''),
|
||||
Perf: Math.round(s.performance * 100),
|
||||
Acc: Math.round(s.accessibility * 100),
|
||||
BP: Math.round(s['best-practices'] * 100),
|
||||
SEO: Math.round(s.seo * 100),
|
||||
};
|
||||
});
|
||||
|
||||
console.log(output);
|
||||
console.table(summaryTable);
|
||||
|
||||
// Extract report URL from LHCI output
|
||||
const reportMatch = output.match(
|
||||
/Sent to (https:\/\/storage\.googleapis\.com\/lighthouse-infrastructure\.appspot\.com\/reports\/[^\s]+)/,
|
||||
);
|
||||
if (reportMatch && reportMatch[1]) {
|
||||
const reportUrl = reportMatch[1];
|
||||
console.log(`\n📊 Report URL: ${reportUrl}`);
|
||||
fs.writeFileSync('pagespeed-report-url.txt', reportUrl);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error('❌ LHCI execution failed.');
|
||||
if (err.stdout) console.log(err.stdout);
|
||||
if (err.stderr) console.error(err.stderr);
|
||||
throw err;
|
||||
// Calculate Average
|
||||
const avg = {
|
||||
Perf: Math.round(
|
||||
summaryTable.reduce((acc: any, curr: any) => acc + curr.Perf, 0) / summaryTable.length,
|
||||
),
|
||||
Acc: Math.round(
|
||||
summaryTable.reduce((acc: any, curr: any) => acc + curr.Acc, 0) / summaryTable.length,
|
||||
),
|
||||
BP: Math.round(
|
||||
summaryTable.reduce((acc: any, curr: any) => acc + curr.BP, 0) / summaryTable.length,
|
||||
),
|
||||
SEO: Math.round(
|
||||
summaryTable.reduce((acc: any, curr: any) => acc + curr.SEO, 0) / summaryTable.length,
|
||||
),
|
||||
};
|
||||
|
||||
console.log(`\n📈 Average Scores:`);
|
||||
console.log(` Performance: ${avg.Perf > 90 ? '✅' : '⚠️'} ${avg.Perf}`);
|
||||
console.log(` Accessibility: ${avg.Acc > 90 ? '✅' : '⚠️'} ${avg.Acc}`);
|
||||
console.log(` Best Practices: ${avg.BP > 90 ? '✅' : '⚠️'} ${avg.BP}`);
|
||||
console.log(` SEO: ${avg.SEO > 90 ? '✅' : '⚠️'} ${avg.SEO}`);
|
||||
}
|
||||
|
||||
console.log(`\n✨ PageSpeed tests completed successfully!`);
|
||||
|
||||
59
scripts/update_ampacity.py
Normal file
59
scripts/update_ampacity.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import openpyxl
|
||||
|
||||
def update_excel_ampacity(file_path, headers_row_idx, ampacity_cols_identifiers, target_cross_section="1x1200/35"):
    """Blank out ampacity values for one cross-section in an Excel sheet.

    Every data row whose cross-section cell equals *target_cross_section*
    gets the string "On Request" written into each ampacity column.  A
    column may be addressed either by its header text or directly by a
    0-indexed integer position.
    """
    print(f"Updating {file_path}...")
    workbook = openpyxl.load_workbook(file_path)
    sheet = workbook.active

    # openpyxl rows are 1-indexed; sheet[n] yields the cells of row n.
    header_values = [cell.value for cell in sheet[headers_row_idx]]

    # Resolve each identifier to a 0-indexed position within a row tuple.
    resolved_cols = []
    for ident in ampacity_cols_identifiers:
        if isinstance(ident, int):
            resolved_cols.append(ident)
            continue
        try:
            # list.index returns the 0-indexed position of the header text.
            resolved_cols.append(header_values.index(ident))
        except ValueError:
            print(f"Warning: Could not find column '{ident}' in {file_path}")

    # Locate the cross-section column by header, defaulting to position 8.
    try:
        cross_section_col = header_values.index("Number of cores and cross-section")
    except ValueError:
        cross_section_col = 8

    rows_updated = 0
    # iter_rows is 1-indexed; start just below the header row.
    for row in sheet.iter_rows(min_row=headers_row_idx + 1):
        # row is a tuple of cells, so tuple indexing below is 0-indexed.
        if str(row[cross_section_col].value).strip() != target_cross_section:
            continue
        for col in resolved_cols:
            row[col].value = "On Request"
        rows_updated += 1

    workbook.save(file_path)
    print(f"Updated {rows_updated} rows in {file_path}")
|
||||
|
||||
# File 1: medium-voltage-KM.xlsx
# Ampacity columns are addressed by their header text; headers are in row 1.
update_excel_ampacity(
    'data/excel/medium-voltage-KM.xlsx',
    1,  # Headers are in first row (1-indexed)
    [
        'Current ratings in air, trefoil*',
        'Current ratings in air, flat*',
        'Current ratings in ground, trefoil*',
        'Current ratings in ground, flat*'
    ]
)

# File 2: medium-voltage-KM 170126.xlsx
# Here the ampacity columns are given as 0-indexed integer positions instead
# of header names.
update_excel_ampacity(
    'data/excel/medium-voltage-KM 170126.xlsx',
    1,  # Indices 39 and 41 were from a 0-indexed JSON representation
    [39, 41]
)
|
||||
87
scripts/update_excel.py
Normal file
87
scripts/update_excel.py
Normal file
@@ -0,0 +1,87 @@
|
||||
import openpyxl
|
||||
|
||||
# Workbook that receives the additional 1x1200RM/35 rows.
excel_path = 'data/excel/medium-voltage-KM.xlsx'
wb = openpyxl.load_workbook(excel_path)
ws = wb.active

# Technical data for 1x1200RM/35
# One entry per rated-voltage variant.  Keys must match the sheet's header
# row exactly, because create_row() below maps them onto column positions
# via headers.index().
# NOTE(review): units (kV, mm, kg, µF/km, mH/km, ...) are not stated in this
# script — confirm against the sheet's own unit row before editing values.
new_rows_data = [
    {
        "Rated voltage": "6/10",
        "Test voltage": 21,
        "Nominal insulation thickness": 3.4,
        "Diameter over insulation (approx.)": 48.5,
        "Minimum sheath thickness": 2.1,
        "Outer diameter (approx.)": 59,
        "Bending radius (min.)": 885,
        "Weight (approx.)": 4800,
        "Capacitance (approx.)": 0.95,
        "Inductance, trefoil (approx.)": 0.24,
        "Inductance in air, flat (approx.) 1": 0.40,
        "Inductance in ground, flat (approx.) 1": 0.42,
    },
    {
        "Rated voltage": "12/20",
        "Test voltage": 42,
        "Nominal insulation thickness": 5.5,
        "Diameter over insulation (approx.)": 52.3,
        "Minimum sheath thickness": 2.1,
        "Outer diameter (approx.)": 66,
        "Bending radius (min.)": 990,
        "Weight (approx.)": 5200,
        "Capacitance (approx.)": 1.05,
        "Inductance, trefoil (approx.)": 0.23,
        "Inductance in air, flat (approx.) 1": 0.43,
        "Inductance in ground, flat (approx.) 1": 0.45,
    },
    {
        "Rated voltage": "18/30",
        "Test voltage": 63,
        "Nominal insulation thickness": 8.0,
        "Diameter over insulation (approx.)": 57.5,
        "Minimum sheath thickness": 2.4,
        "Outer diameter (approx.)": 71,
        "Bending radius (min.)": 1065,
        "Weight (approx.)": 5900,
        "Capacitance (approx.)": 1.15,
        "Inductance, trefoil (approx.)": 0.22,
        "Inductance in air, flat (approx.) 1": 0.45,
        "Inductance in ground, flat (approx.) 1": 0.47,
    }
]
|
||||
|
||||
# Find a template row for NA2XS(F)2Y whose shared columns the new rows copy.
template_row = None
headers = [cell.value for cell in ws[1]]

# Data rows start at row 3 (row 1 = headers; row 2 presumably units — confirm).
for row in ws.iter_rows(min_row=3, values_only=True):
    if row[0] == 'NA2XS(F)2Y':
        template_row = list(row)
        break

if not template_row:
    print("Error: Could not find template row for NA2XS(F)2Y")
    # raise SystemExit instead of exit(): exit() is injected by the `site`
    # module and is not guaranteed to exist in every interpreter/run mode.
    raise SystemExit(1)
|
||||
|
||||
# Build a new sheet row from the template plus per-voltage overrides.
def create_row(template, updates, headers):
    """Return a copy of *template* rewritten as a 1x1200/35 row.

    The cross-section cell is forced to "1x1200/35"; every key in *updates*
    whose text matches a header name overwrites the value in that column.
    Keys with no matching header are silently ignored.
    """
    row = list(template)
    # Change "Number of cores and cross-section"
    row[headers.index("Number of cores and cross-section")] = "1x1200/35"

    # Apply the per-voltage overrides keyed by header text.
    for column_name, new_value in updates.items():
        if column_name in headers:
            row[headers.index(column_name)] = new_value
    return row
|
||||
|
||||
# Append one new sheet row per voltage variant, then persist the workbook.
for data in new_rows_data:
    new_row_values = create_row(template_row, data, headers)
    ws.append(new_row_values)
    print(f"Added row for {data['Rated voltage']} kV")

# Saves in place, overwriting the original file.
wb.save(excel_path)
print("Excel file updated successfully.")
|
||||
120
scripts/update_excel_v2.py
Normal file
120
scripts/update_excel_v2.py
Normal file
@@ -0,0 +1,120 @@
|
||||
import openpyxl
|
||||
|
||||
# Workbook that receives the additional 1x1200RM/35 rows.
excel_path = 'data/excel/medium-voltage-KM 170126.xlsx'
wb = openpyxl.load_workbook(excel_path)
ws = wb.active

# Technical data for 1x1200RM/35
# Indices based on Row 2 (Units) and Row 1
# Index 0: Part Number
# Index 8: Querschnitt
# Index 9: Rated voltage
# Index 10: Test voltage
# Index 23: LD mm
# Index 24: ID mm
# Index 25: DI mm
# Index 26: MWD mm
# Index 27: AD mm
# Index 28: BR
# Index 29: G kg
# Index 30: RI Ohm
# Index 31: Cap
# Index 32: Inductance trefoil
# Index 35: BK
# Index 39: SBL 30
# Index 41: SBE 20

# One entry per rated-voltage variant; the short keys below are mapped onto
# the 0-indexed column positions listed above by create_row() further down.
new_rows_data = [
    {
        "voltage": "6/10",
        "test_v": 21,
        "ld": 41.5,
        "id": 3.4,
        "di": 48.5,
        "mwd": 2.1,
        "ad": 59,
        "br": 885,
        "g": 4800,
        "ri": 0.0247,
        "cap": 0.95,
        "ind": 0.24,
        "bk": 113,
        "sbl": 1300,
        "sbe": 933
    },
    {
        "voltage": "12/20",
        "test_v": 42,
        "ld": 41.5,
        "id": 5.5,
        "di": 52.3,
        "mwd": 2.1,
        "ad": 66,
        "br": 990,
        "g": 5200,
        "ri": 0.0247,
        "cap": 1.05,
        "ind": 0.23,
        "bk": 113,
        "sbl": 1200,
        "sbe": 900
    },
    {
        "voltage": "18/30",
        "test_v": 63,
        "ld": 41.5,
        "id": 8.0,
        "di": 57.5,
        "mwd": 2.4,
        "ad": 71,
        "br": 1065,
        "g": 5900,
        "ri": 0.0247,
        "cap": 1.15,
        "ind": 0.22,
        "bk": 113,
        "sbl": 1300,
        "sbe": 950
    }
]
|
||||
|
||||
# Find a template row for NA2XS(F)2Y at 6/10 kV (index 9 = rated voltage);
# its shared columns are copied into every new row.
template_row = None
for row in ws.iter_rows(min_row=3, values_only=True):
    if row[0] == 'NA2XS(F)2Y' and row[9] == '6/10':
        template_row = list(row)
        break

if not template_row:
    print("Error: Could not find template row for NA2XS(F)2Y")
    # raise SystemExit instead of exit(): exit() is injected by the `site`
    # module and is not guaranteed to exist in every interpreter/run mode.
    raise SystemExit(1)
|
||||
|
||||
# Function to update template with new values.
# Column positions are 0-indexed tuple offsets (see the index map in the
# comments near new_rows_data); keeping the offset/field pairing in one dict
# replaces 15 separate positional assignments and makes drift obvious.
_COLUMN_MAP = {
    9: "voltage",
    10: "test_v",
    23: "ld",
    24: "id",
    25: "di",
    26: "mwd",
    27: "ad",
    28: "br",
    29: "g",
    30: "ri",
    31: "cap",
    32: "ind",
    35: "bk",
    39: "sbl",
    41: "sbe",
}

def create_row(template, data):
    """Return a copy of *template* filled with the 1x1200/35 values in *data*.

    Index 8 (cross-section) is hard-coded to "1x1200/35"; every other written
    index comes from _COLUMN_MAP.  Raises KeyError if *data* is missing one of
    the mapped keys (same behavior as the previous explicit assignments).
    """
    new_row = template[:]
    new_row[8] = "1x1200/35"
    for idx, key in _COLUMN_MAP.items():
        new_row[idx] = data[key]
    return new_row
|
||||
|
||||
# Append one new sheet row per voltage variant, then persist the workbook.
for data in new_rows_data:
    new_row_values = create_row(template_row, data)
    ws.append(new_row_values)
    print(f"Added row for {data['voltage']} kV")

# Saves in place, overwriting the original file.
wb.save(excel_path)
print("Excel file updated successfully.")
|
||||
Reference in New Issue
Block a user