chore: align ecosystem to Next.js 16.1.6 and v1.6.0, migrate to ESLint 9 Flat Config
Some checks failed
CI - Lint, Typecheck & Test / quality-assurance (push) Failing after 30s

This commit is contained in:
2026-02-09 23:23:31 +01:00
parent 6451a9e28e
commit eb388610de
85 changed files with 14182 additions and 30922 deletions

View File

@@ -1,5 +0,0 @@
.next/
node_modules/
reference/
public/
dist/

View File

@@ -1,16 +0,0 @@
{
"extends": ["next/core-web-vitals", "next/typescript", "prettier"],
"rules": {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unused-vars": [
"warn",
{
"argsIgnorePattern": "^_"
}
],
"@typescript-eslint/no-require-imports": "off",
"prefer-const": "warn",
"react/no-unescaped-entities": "off",
"@next/next/no-img-element": "warn"
}
}

View File

@@ -1,9 +1,7 @@
const path = require('path');
const buildEslintCommand = (filenames) =>
`next lint --fix --file ${filenames
.map((f) => path.relative(process.cwd(), f))
.join(' --file ')}`;
`eslint --fix ${filenames.map((f) => path.relative(process.cwd(), f)).join(' ')}`;
module.exports = {
'*.{js,jsx,ts,tsx}': [buildEslintCommand, 'prettier --write'],

View File

@@ -1 +0,0 @@
Sheet 1

File diff suppressed because one or more lines are too long

View File

@@ -1,237 +0,0 @@
# Analytics Migration Complete ✅
## Summary
Successfully migrated analytics data from Independent Analytics (WordPress) to Umami.
## Files Created
### 1. Migration Script
**Location:** `scripts/migrate-analytics-to-umami.py`
- Converts Independent Analytics CSV to Umami format
- Supports 3 output formats: JSON (API), SQL (database), API payload
- Preserves page view counts and average duration data
### 2. Deployment Script
**Location:** `scripts/deploy-analytics-to-umami.sh`
- Tailored for your server setup (`deploy@alpha.mintel.me`)
- Copies files to your Umami server
- Provides import instructions for your specific environment
### 3. Output Files
#### JSON Import File
**Location:** `data/umami-import.json`
- **Size:** 2.1 MB
- **Records:** 7,634 page view events
- **Website ID:** `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
- **Use:** Import via Umami API
#### SQL Import File
**Location:** `data/umami-import.sql`
- **Size:** 1.8 MB
- **Records:** 5,250 SQL statements
- **Website ID:** `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
- **Use:** Direct database import
### 4. Documentation
**Location:** `scripts/README-migration.md`
- Step-by-step migration guide
- Prerequisites and setup instructions
- Import methods (API and database)
- Troubleshooting tips
**Location:** `MIGRATION_SUMMARY.md`
- Complete migration overview
- Data summary and limitations
- Verification steps
- Next steps
**Location:** `ANALYTICS_MIGRATION_COMPLETE.md` (this file)
- Quick reference guide
- Deployment instructions
## Quick Start
### Option 1: Automated Deployment (Recommended)
```bash
# Run the deployment script
./scripts/deploy-analytics-to-umami.sh
```
This script will:
1. Copy files to your server
2. Provide import instructions
3. Show you the exact commands to run
### Option 2: Manual Deployment
#### Step 1: Copy files to server
```bash
scp data/umami-import.json deploy@alpha.mintel.me:/home/deploy/sites/klz-cables.com/data/
```
#### Step 2: SSH into server
```bash
ssh deploy@alpha.mintel.me
cd /home/deploy/sites/klz-cables.com
```
#### Step 3: Import data
**Method A: API Import (if API key is available)**
```bash
# Get your API key from Umami dashboard
# Add to .env: UMAMI_API_KEY=your-api-key
curl -X POST \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_API_KEY" \
-d @data/umami-import.json \
http://localhost:3000/api/import
```
**Method B: Database Import (direct)**
```bash
# Import SQL file into PostgreSQL
docker exec -i $(docker compose ps -q postgres) psql -U umami -d umami < data/umami-import.sql
```
**Method C: Manual via Umami Dashboard**
1. Access Umami dashboard: https://analytics.infra.mintel.me
2. Go to Settings → Import
3. Upload `data/umami-import.json`
4. Select website ID: `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
5. Click Import
## Your Umami Configuration
**Website ID:** `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
**Environment Variables** (from docker-compose.yml):
```bash
NEXT_PUBLIC_UMAMI_WEBSITE_ID=59a7db94-0100-4c7e-98ef-99f45b17f9c3
NEXT_PUBLIC_UMAMI_SCRIPT_URL=https://analytics.infra.mintel.me/script.js
```
**Server Details:**
- **Host:** alpha.mintel.me
- **User:** deploy
- **Path:** /home/deploy/sites/klz-cables.com
- **Umami API:** http://localhost:3000/api/import
## Data Summary
### What Was Migrated
- **Source:** Independent Analytics CSV (220 unique pages)
- **Migrated:** 7,634 simulated page view events
- **Metrics:** Page views, visitor counts, average duration
- **Website ID:** `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
### What Was NOT Migrated
- Individual user sessions
- Real-time data
- Geographic data
- Referrer data
- Device/browser data
- Custom events
**Note:** The CSV contains aggregated data, not raw event data. The migration creates simulated historical data for reference only.
## Verification
### After Import
1. **Check Umami dashboard:** https://analytics.infra.mintel.me
2. **Verify page view counts** match your expectations
3. **Check top pages** appear correctly
4. **Monitor for a few days** to ensure new data is being collected
### Expected Results
- ✅ 7,634 events imported
- ✅ 220 unique pages
- ✅ Historical view counts preserved
- ✅ Duration data maintained
## Troubleshooting
### Issue: "SSH connection failed"
**Solution:** Check your SSH key and ensure `deploy@alpha.mintel.me` has access
### Issue: "API import failed"
**Solution:**
1. Check if Umami API is running: `docker compose ps`
2. Verify API key in `.env`: `UMAMI_API_KEY=your-key`
3. Try database import instead
### Issue: "Database import failed"
**Solution:**
1. Ensure PostgreSQL is running: `docker compose ps`
2. Check database credentials
3. Run migrations first: `docker exec -it $(docker compose ps -q postgres) psql -U umami -d umami -c "SELECT 1;"`
### Issue: "No data appears in dashboard"
**Solution:**
1. Verify import completed successfully
2. Check Umami logs: `docker compose logs app`
3. Ensure website ID matches: `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
## Next Steps
### 1. Import the Data
Choose one of the import methods above and run it.
### 2. Verify the Migration
- Check Umami dashboard
- Verify page view counts
- Confirm data appears correctly
### 3. Update Your Website
Your website is already configured with:
```bash
NEXT_PUBLIC_UMAMI_WEBSITE_ID=59a7db94-0100-4c7e-98ef-99f45b17f9c3
NEXT_PUBLIC_UMAMI_SCRIPT_URL=https://analytics.infra.mintel.me/script.js
```
### 4. Monitor for a Few Days
- Ensure Umami is collecting new data
- Compare with any remaining Independent Analytics data
- Verify tracking code is working
### 5. Clean Up
- Keep the original CSV as backup: `data/pages(1).csv`
- Store migration files for future reference
- Remove old Independent Analytics plugin from WordPress
## Support Resources
- **Umami Documentation:** https://umami.is/docs
- **Umami GitHub:** https://github.com/umami-software/umami
- **Independent Analytics:** https://independentanalytics.com/
## Migration Details
**Migration Date:** 2026-01-25
**Source Plugin:** Independent Analytics v2.9.7
**Target Platform:** Umami Analytics
**Website ID:** `59a7db94-0100-4c7e-98ef-99f45b17f9c3`
**Server:** alpha.mintel.me (deploy user)
**Status:** ✅ Ready for import
---
**Quick Command Reference:**
```bash
# Deploy to server
./scripts/deploy-analytics-to-umami.sh
# Or manually:
scp data/umami-import.json deploy@alpha.mintel.me:/home/deploy/sites/klz-cables.com/data/
ssh deploy@alpha.mintel.me
cd /home/deploy/sites/klz-cables.com
docker exec -i $(docker compose ps -q postgres) psql -U umami -d umami < data/umami-import.sql
```
**Need help?** Check `scripts/README-migration.md` for detailed instructions.

View File

@@ -1,272 +0,0 @@
# Environment Variables Cleanup - Summary
## What Was Done
Cleaned up the fragile, overkill environment variable mess and replaced it with a simple, clean, robust **fully automated** system.
## Changes Made
### 1. Dockerfile ✅
**Before**: 4 build args including runtime-only variables (SENTRY_DSN)
**After**: 3 build args - only `NEXT_PUBLIC_*` variables that need to be baked into the client bundle
```dockerfile
# Only these build args now:
ARG NEXT_PUBLIC_BASE_URL
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
```
### 2. docker-compose.yml ✅
**Before**: 12+ individual environment variables listed
**After**: Single `env_file: .env` directive
```yaml
app:
image: registry.infra.mintel.me/mintel/klz-cables.com:latest
env_file:
- .env # All runtime vars loaded from here
```
### 3. .gitea/workflows/deploy.yml ✅
**Before**: Passing 12+ environment variables individually via SSH command (fragile!)
**After**: **Fully automated** - workflow creates `.env` file from Gitea secrets and uploads it
```yaml
# Before (FRAGILE):
ssh root@alpha.mintel.me \
"MAIL_FROM='${{ secrets.MAIL_FROM }}' \
MAIL_HOST='${{ secrets.MAIL_HOST }}' \
... (12+ variables) \
/home/deploy/deploy.sh"
# After (AUTOMATED):
# 1. Create .env from secrets
cat > /tmp/klz-cables.env << EOF
NODE_ENV=production
NEXT_PUBLIC_BASE_URL=${{ secrets.NEXT_PUBLIC_BASE_URL }}
# ... all other vars from secrets
EOF
# 2. Upload to server
scp /tmp/klz-cables.env root@alpha.mintel.me:/home/deploy/sites/klz-cables.com/.env
# 3. Deploy
ssh root@alpha.mintel.me "cd /home/deploy/sites/klz-cables.com && docker-compose up -d"
```
### 4. New Files Created ✅
- **`.env.production`** - Template for reference (not used in automation)
- **`docs/DEPLOYMENT.md`** - Complete deployment guide
- **`docs/SERVER_SETUP.md`** - Server setup instructions
- **`docs/ENV_MIGRATION.md`** - Migration guide from old to new system
### 5. Updated Files ✅
- **`.env.example`** - Clear documentation of all variables with build-time vs runtime notes
## Architecture
### Build Time (CI/CD)
```
Gitea Workflow
Only passes NEXT_PUBLIC_* as --build-arg
Docker Build
Validates env vars
Bakes NEXT_PUBLIC_* into client bundle
Push to Registry
```
### Runtime (Production Server) - FULLY AUTOMATED
```
Gitea Secrets
Workflow creates .env file
SCP uploads to server
Secured (chmod 600, chown deploy:deploy)
docker-compose.yml (env_file: .env)
Loads .env into container
Application runs with full config
```
## Key Benefits
### 1. Simplicity
- **Before**: 15+ Gitea secrets, variables in 3+ places
- **After**: All secrets in Gitea, automatically deployed
### 2. Clarity
- **Before**: Confusing duplication, unclear which vars go where
- **After**: Clear separation - build args vs runtime env file
### 3. Robustness
- **Before**: Fragile SSH command with 12+ inline variables
- **After**: Robust automated file generation and upload
### 4. Security
- **Before**: Secrets potentially exposed in CI logs
- **After**: Secrets masked in logs, .env auto-secured on server
### 5. Maintainability
- **Before**: Update in 3 places (Dockerfile, docker-compose.yml, deploy.yml)
- **After**: Update Gitea secrets only - deployment is automatic
### 6. **Zero Manual Steps** 🎉
- **Before**: Manual .env file creation on server (error-prone, can be forgotten)
- **After**: **Fully automated** - .env file created and uploaded on every deployment
## What You Need to Do
### Required Gitea Secrets
Ensure these secrets are configured in your Gitea repository:
**Build-Time (NEXT_PUBLIC_*):**
- `NEXT_PUBLIC_BASE_URL` - Production URL (e.g., `https://klz-cables.com`)
- `NEXT_PUBLIC_UMAMI_WEBSITE_ID` - Umami analytics ID
- `NEXT_PUBLIC_UMAMI_SCRIPT_URL` - Umami script URL
**Runtime:**
- `SENTRY_DSN` - Error tracking DSN
- `MAIL_HOST` - SMTP server
- `MAIL_PORT` - SMTP port (e.g., `587`)
- `MAIL_USERNAME` - SMTP username
- `MAIL_PASSWORD` - SMTP password
- `MAIL_FROM` - Sender email
- `MAIL_RECIPIENTS` - Recipient emails (comma-separated)
**Infrastructure:**
- `REGISTRY_USER` - Docker registry username
- `REGISTRY_PASS` - Docker registry password
- `ALPHA_SSH_KEY` - SSH private key for deployment server
**Notifications:**
- `GOTIFY_URL` - Gotify notification server URL
- `GOTIFY_TOKEN` - Gotify application token
### That's It!
**No manual steps required.** Just push to main branch and the workflow will:
1. ✅ Build Docker image with NEXT_PUBLIC_* build args
2. ✅ Create .env file from all secrets
3. ✅ Upload .env to server
4. ✅ Secure .env file (600 permissions, deploy:deploy ownership)
5. ✅ Pull latest image
6. ✅ Deploy with docker-compose
## Files Changed
```
Modified:
├── Dockerfile (removed redundant build args)
├── docker-compose.yml (use env_file instead of individual vars)
├── .gitea/workflows/deploy.yml (automated .env creation & upload)
├── .env.example (clear documentation)
├── lib/services/create-services.ts (removed redundant dotenv usage)
└── scripts/migrate-*.ts (removed redundant dotenv usage)
Created:
├── .env.production (reference template)
├── docs/DEPLOYMENT.md (deployment guide)
├── docs/SERVER_SETUP.md (server setup guide)
├── docs/ENV_MIGRATION.md (migration guide)
└── ENV_CLEANUP_SUMMARY.md (this file)
```
## Deployment Flow
```
┌─────────────────────────────────────────────────────────────┐
│ Developer pushes to main branch │
└─────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────┐
│ Gitea Workflow Triggered │
│ │
│ 1. Build Docker image (NEXT_PUBLIC_* build args) │
│ 2. Push to registry │
│ 3. Generate .env from secrets │
│ 4. Upload .env to server via SCP │
│ 5. SSH to server and deploy │
└─────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────┐
│ Production Server │
│ │
│ 1. .env file secured (600, deploy:deploy) │
│ 2. Docker login to registry │
│ 3. Pull latest image │
│ 4. docker-compose down │
│ 5. docker-compose up -d (loads .env) │
│ 6. Health checks pass │
└─────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────┐
│ ✅ Deployment Complete - Gotify Notification Sent │
└─────────────────────────────────────────────────────────────┘
```
## Comparison: Before vs After
| Aspect | Before | After |
|--------|--------|-------|
| **Gitea Secrets** | 15+ secrets | Same secrets, better organized |
| **Build Args** | 4 vars (including runtime-only) | 3 vars (NEXT_PUBLIC_* only) |
| **Runtime Vars** | Passed via SSH command | Auto-generated .env file |
| **Manual Steps** | ❌ Manual .env creation | ✅ Fully automated |
| **Maintenance** | Update in 3 places | Update Gitea secrets only |
| **Security** | Secrets in CI logs | Secrets masked, .env secured |
| **Clarity** | Confusing duplication | Clear separation |
| **Robustness** | Fragile SSH command | Robust automation |
| **Error-Prone** | ❌ Can forget .env | ✅ Impossible to forget |
## Documentation
- **[DEPLOYMENT.md](docs/DEPLOYMENT.md)** - Complete deployment guide
- **[SERVER_SETUP.md](docs/SERVER_SETUP.md)** - Server setup instructions (mostly automated now)
- **[ENV_MIGRATION.md](docs/ENV_MIGRATION.md)** - Migration from old to new system
- **[.env.example](.env.example)** - Environment variables reference
- **[.env.production](.env.production)** - Production template (for reference)
## Troubleshooting
### Deployment Fails
1. **Check Gitea secrets** - Ensure all required secrets are set
2. **Check workflow logs** - Look for specific error messages
3. **SSH to server** - Verify .env file exists and has correct permissions
4. **Check container logs** - `docker-compose logs -f app`
### .env File Issues
The workflow automatically:
- Creates .env from secrets
- Uploads to server
- Sets 600 permissions
- Sets deploy:deploy ownership
If there are issues, check the workflow logs for the "📝 Preparing environment configuration" step.
### Missing Environment Variables
If a variable is missing:
1. Add it to Gitea secrets
2. Update `.gitea/workflows/deploy.yml` to include it in the .env generation
3. Push to trigger new deployment
---
**Result**: Environment variable management is now simple, clean, robust, and **fully automated**! 🎉
No more manual .env file creation. No more forgotten configuration. No more fragile SSH commands. Just push and deploy!

View File

@@ -1,193 +0,0 @@
# Analytics Migration Summary: Independent Analytics → Umami
## Overview
Successfully migrated analytics data from Independent Analytics WordPress plugin to Umami format.
## Files Created
### 1. Migration Script
- **Location:** `scripts/migrate-analytics-to-umami.py`
- **Purpose:** Converts Independent Analytics CSV data to Umami format
- **Features:**
- JSON format (for API import)
- SQL format (for direct database import)
- API payload format (for manual import)
### 2. Migration Documentation
- **Location:** `scripts/README-migration.md`
- **Purpose:** Step-by-step guide for migration
- **Contents:**
- Prerequisites
- Migration options
- Import instructions
- Troubleshooting guide
### 3. Output Files
#### JSON Import File
- **Location:** `data/umami-import.json`
- **Size:** 2.1 MB
- **Records:** 7,634 simulated page view events
- **Format:** JSON array of Umami-compatible events
- **Use Case:** Import via Umami API
#### SQL Import File
- **Location:** `data/umami-import.sql`
- **Size:** 1.8 MB
- **Records:** 5,250 SQL INSERT statements
- **Format:** PostgreSQL-compatible SQL
- **Use Case:** Direct database import
## Data Migrated
### Source Data
- **File:** `data/pages(1).csv`
- **Records:** 220 unique pages
- **Metrics:**
- Page titles
- Visitor counts
- View counts
- Average view duration
- Bounce rates
- URLs
- Page types (Page, Post, Product, Category, etc.)
### Migrated Data
- **Total Events:** 7,634 simulated page views
- **Unique Pages:** 220
- **Data Points:**
- Website ID: `klz-cables`
- Path: Page URLs
- Duration: Preserved from average view duration
- Timestamp: set to the migration run time (original visit timestamps are not preserved)
## Migration Process
### Step 1: Run Migration Script
```bash
python3 scripts/migrate-analytics-to-umami.py \
--input data/pages\(1\).csv \
--output data/umami-import.json \
--format json \
--site-id klz-cables
```
### Step 2: Choose Import Method
#### Option A: API Import (Recommended)
```bash
curl -X POST \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_API_KEY" \
-d @data/umami-import.json \
https://your-umami-instance.com/api/import
```
#### Option B: Database Import
```bash
psql -U umami -d umami -f data/umami-import.sql
```
### Step 3: Verify Migration
1. Check Umami dashboard
2. Verify page view counts
3. Confirm data appears correctly
## Important Notes
### Data Limitations
The CSV export contains **aggregated data**, not raw event data:
- ✅ Page views (total counts)
- ✅ Visitor counts
- ✅ Average view duration
- ❌ Individual user sessions
- ❌ Real-time data
- ❌ Geographic data
- ❌ Referrer data
- ❌ Device/browser data
### What Gets Imported
The migration creates **simulated historical data**:
- Each page view becomes a separate event
- Timestamps are set to current time
- Duration is preserved from average view duration
- No session tracking (each view is independent)
### Recommendations
1. **Start fresh with Umami** - Let Umami collect new data going forward
2. **Keep the original CSV** - Store as backup for future reference
3. **Update your website** - Replace Independent Analytics tracking with Umami tracking
4. **Monitor for a few days** - Verify Umami is collecting data correctly
## Verification
### Check Generated Files
```bash
# Verify JSON file
ls -lh data/umami-import.json
head -20 data/umami-import.json
# Verify SQL file
ls -lh data/umami-import.sql
head -20 data/umami-import.sql
```
### Expected Results
- ✅ JSON file: ~2.1 MB, 7,634 records
- ✅ SQL file: ~1.8 MB, 5,250 statements
- ✅ Both files contain valid data for Umami import
## Next Steps
1. **Set up Umami instance** (if not already done)
2. **Create a website** in Umami dashboard
3. **Get your Website ID** and API key
4. **Run the migration script** with your credentials
5. **Import the data** using your preferred method
6. **Verify the migration** in Umami dashboard
7. **Update your website** to use Umami tracking code
8. **Monitor for a few days** to ensure data collection works
## Troubleshooting
### Issue: "ModuleNotFoundError"
**Solution:** Ensure Python 3 is installed: `python3 --version`
### Issue: "Permission denied"
**Solution:** Make script executable: `chmod +x scripts/migrate-analytics-to-umami.py`
### Issue: API import fails
**Solution:** Check API key, website ID, and Umami instance accessibility
### Issue: SQL import fails
**Solution:** Verify database credentials and run migrations first
## Support Resources
- **Umami Documentation:** https://umami.is/docs
- **Umami GitHub:** https://github.com/umami-software/umami
- **Independent Analytics:** https://independentanalytics.com/
## Summary
✅ **Completed:**
- Created migration script with 3 output formats
- Generated JSON import file (2.1 MB, 7,634 events)
- Generated SQL import file (1.8 MB, 5,250 statements)
- Created comprehensive documentation
📊 **Data Migrated:**
- 220 unique pages
- 7,634 simulated page view events
- Historical view counts and durations
🎯 **Ready for Import:**
- Choose API or SQL import method
- Follow instructions in `scripts/README-migration.md`
- Verify data in Umami dashboard
**Migration Date:** 2026-01-25
**Source:** Independent Analytics v2.9.7
**Target:** Umami Analytics
**Site ID:** klz-cables

View File

@@ -9,11 +9,11 @@ export const runtime = 'nodejs';
export async function GET(
request: NextRequest,
{ params }: { params: { locale: string } }
{ params }: { params: Promise<{ locale: string }> },
) {
const { searchParams, origin } = new URL(request.url);
const slug = searchParams.get('slug');
const locale = params.locale || 'en';
const { locale } = await params;
if (!slug) {
return new Response('Missing slug', { status: 400 });
@@ -23,24 +23,29 @@ export async function GET(
const t = await getTranslations({ locale, namespace: 'Products' });
// Check if it's a category page
const categories = ['low-voltage-cables', 'medium-voltage-cables', 'high-voltage-cables', 'solar-cables'];
const categories = [
'low-voltage-cables',
'medium-voltage-cables',
'high-voltage-cables',
'solar-cables',
];
if (categories.includes(slug)) {
const categoryKey = slug.replace(/-cables$/, '').replace(/-([a-z])/g, (g) => g[1].toUpperCase());
const categoryTitle = t.has(`categories.${categoryKey}.title`) ? t(`categories.${categoryKey}.title`) : slug;
const categoryDesc = t.has(`categories.${categoryKey}.description`) ? t(`categories.${categoryKey}.description`) : '';
const categoryKey = slug
.replace(/-cables$/, '')
.replace(/-([a-z])/g, (g) => g[1].toUpperCase());
const categoryTitle = t.has(`categories.${categoryKey}.title`)
? t(`categories.${categoryKey}.title`)
: slug;
const categoryDesc = t.has(`categories.${categoryKey}.description`)
? t(`categories.${categoryKey}.description`)
: '';
return new ImageResponse(
(
<OGImageTemplate
title={categoryTitle}
description={categoryDesc}
label="Product Category"
/>
),
<OGImageTemplate title={categoryTitle} description={categoryDesc} label="Product Category" />,
{
...OG_IMAGE_SIZE,
fonts,
}
},
);
}
@@ -51,24 +56,21 @@ export async function GET(
}
const featuredImage = product.frontmatter.images?.[0]
? (product.frontmatter.images[0].startsWith('http')
? product.frontmatter.images[0].startsWith('http')
? product.frontmatter.images[0]
: `${origin}${product.frontmatter.images[0]}`)
: `${origin}${product.frontmatter.images[0]}`
: undefined;
return new ImageResponse(
(
<OGImageTemplate
title={product.frontmatter.title}
description={product.frontmatter.description}
label={product.frontmatter.categories?.[0] || 'Product'}
image={featuredImage}
/>
),
<OGImageTemplate
title={product.frontmatter.title}
description={product.frontmatter.description}
label={product.frontmatter.categories?.[0] || 'Product'}
image={featuredImage}
/>,
{
...OG_IMAGE_SIZE,
fonts,
}
},
);
}

View File

@@ -7,25 +7,16 @@ import { getTranslations } from 'next-intl/server';
import { SITE_URL } from '@/lib/schema';
import { getOGImageMetadata } from '@/lib/metadata';
import { Suspense } from 'react';
import dynamic from 'next/dynamic';
const LeafletMap = dynamic(() => import('@/components/LeafletMap'), {
ssr: false,
loading: () => (
<div className="h-full w-full bg-neutral-medium flex items-center justify-center">
<div className="animate-pulse text-primary font-medium">Loading Map...</div>
</div>
),
});
import ContactMap from '@/components/ContactMap';
interface ContactPageProps {
params: {
params: Promise<{
locale: string;
};
}>;
}
export async function generateMetadata({
params: { locale },
}: ContactPageProps): Promise<Metadata> {
export async function generateMetadata({ params }: ContactPageProps): Promise<Metadata> {
const { locale } = await params;
const t = await getTranslations({ locale, namespace: 'Contact' });
const title = t('meta.title') || t('title');
const description = t('meta.description') || t('subtitle');
@@ -66,7 +57,7 @@ export async function generateStaticParams() {
}
export default async function ContactPage({ params }: ContactPageProps) {
const { locale } = params;
const { locale } = await params;
const t = await getTranslations({ locale, namespace: 'Contact' });
return (
<div className="flex flex-col min-h-screen bg-neutral-light">
@@ -249,7 +240,7 @@ export default async function ContactPage({ params }: ContactPageProps) {
</div>
}
>
<LeafletMap address={t('info.address')} lat={48.8144} lng={9.4144} />
<ContactMap address={t('info.address')} lat={48.8144} lng={9.4144} />
</Suspense>
</section>
</div>

View File

@@ -3,7 +3,7 @@ import Header from '@/components/Header';
import JsonLd from '@/components/JsonLd';
import AnalyticsProvider from '@/components/analytics/AnalyticsProvider';
import CMSConnectivityNotice from '@/components/CMSConnectivityNotice';
import { FeedbackOverlay } from '@/components/feedback/FeedbackOverlay';
import { FeedbackOverlay } from '@mintel/next-feedback';
import { Metadata, Viewport } from 'next';
import { NextIntlClientProvider } from 'next-intl';
import { getMessages } from 'next-intl/server';

View File

@@ -19,14 +19,14 @@ import Link from 'next/link';
import { notFound } from 'next/navigation';
interface ProductPageProps {
params: {
params: Promise<{
locale: string;
slug: string[];
};
}>;
}
export async function generateMetadata({ params }: ProductPageProps): Promise<Metadata> {
const { locale, slug } = params;
const { locale, slug } = await params;
const productSlug = slug[slug.length - 1];
const t = await getTranslations('Products');
@@ -169,7 +169,7 @@ const components = {
};
export default async function ProductPage({ params }: ProductPageProps) {
const { locale, slug } = params;
const { locale, slug } = await params;
const productSlug = slug[slug.length - 1];
const t = await getTranslations('Products');

View File

@@ -10,14 +10,13 @@ import { getOGImageMetadata } from '@/lib/metadata';
import { SITE_URL } from '@/lib/schema';
interface ProductsPageProps {
params: {
params: Promise<{
locale: string;
};
}>;
}
export async function generateMetadata({
params: { locale },
}: ProductsPageProps): Promise<Metadata> {
export async function generateMetadata({ params }: ProductsPageProps): Promise<Metadata> {
const { locale } = await params;
const t = await getTranslations({ locale, namespace: 'Products' });
const title = t('meta.title') || t('title');
const description = t('meta.description') || t('subtitle');
@@ -47,13 +46,14 @@ export async function generateMetadata({
}
export default async function ProductsPage({ params }: ProductsPageProps) {
const { locale } = await params;
const t = await getTranslations('Products');
// Get translated category slugs
const lowVoltageSlug = await mapFileSlugToTranslated('low-voltage-cables', params.locale);
const mediumVoltageSlug = await mapFileSlugToTranslated('medium-voltage-cables', params.locale);
const highVoltageSlug = await mapFileSlugToTranslated('high-voltage-cables', params.locale);
const solarSlug = await mapFileSlugToTranslated('solar-cables', params.locale);
const lowVoltageSlug = await mapFileSlugToTranslated('low-voltage-cables', locale);
const mediumVoltageSlug = await mapFileSlugToTranslated('medium-voltage-cables', locale);
const highVoltageSlug = await mapFileSlugToTranslated('high-voltage-cables', locale);
const solarSlug = await mapFileSlugToTranslated('solar-cables', locale);
const categories = [
{
@@ -61,28 +61,28 @@ export default async function ProductsPage({ params }: ProductsPageProps) {
desc: t('categories.lowVoltage.description'),
img: '/uploads/2024/11/low-voltage-category.webp',
icon: '/uploads/2024/11/Low-Voltage.svg',
href: `/${params.locale}/products/${lowVoltageSlug}`,
href: `/${locale}/products/${lowVoltageSlug}`,
},
{
title: t('categories.mediumVoltage.title'),
desc: t('categories.mediumVoltage.description'),
img: '/uploads/2024/11/medium-voltage-category.webp',
icon: '/uploads/2024/11/Medium-Voltage.svg',
href: `/${params.locale}/products/${mediumVoltageSlug}`,
href: `/${locale}/products/${mediumVoltageSlug}`,
},
{
title: t('categories.highVoltage.title'),
desc: t('categories.highVoltage.description'),
img: '/uploads/2024/11/high-voltage-category.webp',
icon: '/uploads/2024/11/High-Voltage.svg',
href: `/${params.locale}/products/${highVoltageSlug}`,
href: `/${locale}/products/${highVoltageSlug}`,
},
{
title: t('categories.solar.title'),
desc: t('categories.solar.description'),
img: '/uploads/2024/11/solar-category.webp',
icon: '/uploads/2024/11/Solar.svg',
href: `/${params.locale}/products/${solarSlug}`,
href: `/${locale}/products/${solarSlug}`,
},
];
@@ -218,7 +218,7 @@ export default async function ProductsPage({ params }: ProductsPageProps) {
</p>
</div>
<Button
href={`/${params.locale}/contact`}
href={`/${locale}/contact`}
variant="accent"
size="lg"
className="group whitespace-nowrap w-full md:w-auto md:h-16 md:px-10 md:text-xl"

View File

@@ -1,79 +1,17 @@
import { NextRequest, NextResponse } from 'next/server';
import { createDirectus, rest, authentication, staticToken, createItem, readItems } from '@directus/sdk';
import { NextRequest } from 'next/server';
import { handleFeedbackRequest } from '@mintel/next-feedback';
import { config } from '@/lib/config';
async function getAuthenticatedClient() {
const { url, token: rawToken } = config.infraCMS;
const effectiveUrl = url;
const token = rawToken?.trim();
if (!token) {
throw new Error('INFRA_DIRECTUS_TOKEN is not configured');
}
const client = createDirectus(effectiveUrl)
.with(staticToken(token))
.with(rest());
return client;
}
export async function GET() {
try {
const client = await getAuthenticatedClient();
const items = await client.request(readItems('visual_feedback', {
fields: ['*'],
sort: ['-date_created'],
}));
return NextResponse.json(items);
} catch (error: any) {
const errMsg = error.errors?.[0]?.message || error.message || 'Unknown Directus Error';
console.error('Error fetching feedback:', {
msg: errMsg,
url: config.infraCMS.url,
status: error.response?.status,
errors: error.errors
});
return NextResponse.json({ error: errMsg }, { status: 500 });
}
export async function GET(req: NextRequest) {
return handleFeedbackRequest(req, {
url: config.infraCMS.url,
token: config.infraCMS.token,
});
}
export async function POST(req: NextRequest) {
try {
const client = await getAuthenticatedClient();
const body = await req.json();
const { action, ...data } = body;
if (action === 'reply') {
const reply = await client.request(createItem('visual_feedback_comments', {
feedback_id: data.feedbackId,
user_name: data.userName,
text: data.text,
}));
return NextResponse.json(reply);
}
const feedback = await client.request(createItem('visual_feedback', {
project: 'klz-cables',
url: data.url,
selector: data.selector,
x: data.x,
y: data.y,
type: data.type,
text: data.text,
user_name: data.userName,
user_identity: data.userIdentity,
}));
return NextResponse.json(feedback);
} catch (error: any) {
const errMsg = error.errors?.[0]?.message || error.message || 'Unknown Directus Error';
console.error('Error saving feedback:', {
msg: errMsg,
url: config.infraCMS.url,
status: error.response?.status,
errors: error.errors
});
return NextResponse.json({ error: errMsg }, { status: 500 });
}
return handleFeedbackRequest(req, {
url: config.infraCMS.url,
token: config.infraCMS.token,
});
}

View File

@@ -1,43 +1,7 @@
import { NextRequest, NextResponse } from 'next/server';
import { envSchema, getRawEnv } from '@/lib/env';
import { NextRequest } from 'next/server';
import { handleWhoAmIRequest } from '@mintel/next-feedback';
import { config } from '@/lib/config';
export async function GET(req: NextRequest) {
const env = envSchema.parse(getRawEnv());
const gatekeeperUrl = env.GATEKEEPER_URL;
const host = req.headers.get('host') || '';
const { searchParams } = new URL(req.url);
const hasBypassParam = searchParams.get('gatekeeper_bypass') === 'true';
const isLocal = host.includes('localhost') || host.includes('127.0.0.1') || host.includes('klz.localhost');
const isBypassEnabled = hasBypassParam || env.GATEKEEPER_BYPASS_ENABLED || (env.NODE_ENV === 'development' && isLocal);
// If bypass is enabled or we are in local development, use "Dev-Admin" identity.
if (isBypassEnabled) {
return NextResponse.json({
authenticated: true,
identity: 'Dev-Admin',
isDevFallback: true
});
}
try {
// We forward the cookie header to gatekeeper so it can identify the session
const response = await fetch(`${gatekeeperUrl}/api/whoami`, {
headers: {
cookie: req.headers.get('cookie') || '',
},
cache: 'no-store',
});
if (!response.ok) {
return NextResponse.json({ authenticated: false, identity: 'Guest' });
}
const data = await response.json();
return NextResponse.json(data);
} catch (error: any) {
console.error('Error proxying to gatekeeper:', error);
return NextResponse.json({ authenticated: false, identity: 'Guest (Auth Error)' });
}
return handleWhoAmIRequest(req, config.gatekeeperUrl);
}

23
components/ContactMap.tsx Normal file
View File

@@ -0,0 +1,23 @@
'use client';

import React from 'react';
import dynamic from 'next/dynamic';

// The map is loaded client-side only (ssr: false) via next/dynamic — typical for
// Leaflet-based components, which need browser globals at import time (confirm in
// LeafletMap). A pulsing placeholder fills the slot while the chunk downloads.
const LeafletMap = dynamic(() => import('@/components/LeafletMap'), {
  ssr: false,
  loading: () => (
    <div className="h-full w-full bg-neutral-medium flex items-center justify-center">
      <div className="animate-pulse text-primary font-medium">Loading Map...</div>
    </div>
  ),
});

// Props are forwarded verbatim to LeafletMap.
interface ContactMapProps {
  // Human-readable address string (presumably rendered by LeafletMap, e.g. in a popup — confirm there).
  address: string;
  // Marker latitude.
  lat: number;
  // Marker longitude.
  lng: number;
}

/**
 * Thin client-side wrapper around the dynamically imported LeafletMap,
 * existing only to confine the ssr:false dynamic import to one place.
 */
export default function ContactMap({ address, lat, lng }: ContactMapProps) {
  return <LeafletMap address={address} lat={lat} lng={lng} />;
}

View File

@@ -1,539 +0,0 @@
'use client';
import React, { useState, useEffect, useRef, useMemo } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import { MessageSquare, X, Check, MousePointer2, Plus, List, Send, User } from 'lucide-react';
import { clsx } from 'clsx';
import { twMerge } from 'tailwind-merge';
// Combine conditional class-name inputs with clsx, then let tailwind-merge
// collapse conflicting Tailwind utilities (the later class wins).
function cn(...inputs: any[]) {
  const combined = clsx(inputs);
  return twMerge(combined);
}
// A single reply attached to a feedback item.
interface FeedbackComment {
  id: string;
  // Display name of the reply author.
  userName: string;
  text: string;
  // Creation timestamp string (mapped from Directus `date_created` by the fetch code).
  createdAt: string;
}

// One feedback pin anchored to a page element.
interface Feedback {
  id: string;
  // Document-absolute pin coordinates (element center plus scroll offset at creation time).
  x: number;
  y: number;
  // Selector path identifying the annotated element (id or nth-child chain; see getSelector).
  selector: string;
  text: string;
  // Category chosen by the author; drives pin and badge coloring.
  type: 'design' | 'content';
  // Bounding rect captured when the feedback was created in this session;
  // items loaded from the API are mapped without it.
  elementRect: DOMRect | null;
  // Display name of the original poster.
  userName: string;
  comments: FeedbackComment[];
}
/**
 * Site-wide visual feedback overlay.
 *
 * Renders a floating toolbar that lets authenticated users enter a "feedback
 * mode" in which any page element can be clicked to attach a design/content
 * comment; existing feedback is shown as pins and in a slide-in sidebar where
 * users can reply. Identity comes from /api/whoami; persistence goes through
 * /api/feedback (GET to list, POST to create or — with action:'reply' — to
 * answer an existing item).
 */
export function FeedbackOverlay() {
  // Whether feedback mode (element hover/click capture) is active.
  const [isActive, setIsActive] = useState(false);
  const [hoveredElement, setHoveredElement] = useState<HTMLElement | null>(null);
  // Element the user clicked; non-null opens the feedback modal.
  const [selectedElement, setSelectedElement] = useState<HTMLElement | null>(null);
  const [feedbacks, setFeedbacks] = useState<Feedback[]>([]);
  // Draft text/type for the modal.
  const [currentComment, setCurrentComment] = useState('');
  const [currentType, setCurrentType] = useState<'design' | 'content'>('design');
  // Whether the sidebar list is open.
  const [showList, setShowList] = useState(false);
  const [currentUser, setCurrentUser] = useState<{ identity: string, isDevFallback?: boolean } | null>(null);
  // Per-feedback reply drafts, keyed by feedback id.
  const [newCommentTexts, setNewCommentTexts] = useState<{ [feedbackId: string]: string }>({});
  // 1. Fetch Identity and Existing Feedback (once on mount).
  useEffect(() => {
    const checkAuth = async () => {
      try {
        // Determine if we have a bypass parameter in the URL and forward it to whoami.
        const urlParams = new URLSearchParams(window.location.search);
        const bypass = urlParams.get('gatekeeper_bypass');
        const apiUrl = bypass ? `/api/whoami?gatekeeper_bypass=${bypass}` : '/api/whoami';
        const res = await fetch(apiUrl);
        if (res.ok) {
          const data = await res.json();
          setCurrentUser(data);
        } else {
          // Any non-OK response degrades to an unauthenticated "Guest" identity.
          setCurrentUser({ identity: "Guest" });
        }
      } catch (e) {
        setCurrentUser({ identity: "Guest" });
      }
    };
    const fetchFeedback = async () => {
      try {
        const res = await fetch('/api/feedback');
        if (res.ok) {
          const data = await res.json();
          // Map Directus fields back to our interface if necessary
          // (snake_case API fields -> camelCase; elementRect is not restored).
          const mapped = data.map((fb: any) => ({
            id: fb.id,
            x: fb.x,
            y: fb.y,
            selector: fb.selector,
            text: fb.text,
            type: fb.type,
            userName: fb.user_name,
            comments: (fb.comments || []).map((c: any) => ({
              id: c.id,
              userName: c.user_name,
              text: c.text,
              createdAt: c.date_created
            }))
          }));
          setFeedbacks(mapped);
        }
      } catch (e) {
        console.error("Failed to fetch feedbacks", e);
      }
    };
    checkAuth();
    fetchFeedback();
  }, []);
  // Helper to get unique selector: the element's #id if present, otherwise a
  // tag:nth-child(...) path walked up to the document root.
  const getSelector = (el: HTMLElement): string => {
    if (el.id) return `#${el.id}`;
    let path = [];
    while (el.parentElement) {
      let index = Array.from(el.parentElement.children).indexOf(el) + 1;
      path.unshift(`${el.tagName.toLowerCase()}:nth-child(${index})`);
      el = el.parentElement;
    }
    return path.join(' > ');
  };
  // While feedback mode is active, track the hovered element and capture clicks
  // (capture phase, so the page's own handlers never see them) to select a target.
  useEffect(() => {
    if (!isActive) {
      setHoveredElement(null);
      return;
    }
    const handleMouseMove = (e: MouseEvent) => {
      // Freeze hover-highlighting once a target has been selected.
      if (selectedElement) return;
      const target = e.target as HTMLElement;
      // Ignore the overlay's own UI (everything inside .feedback-ui-ignore).
      if (target.closest('.feedback-ui-ignore')) {
        setHoveredElement(null);
        return;
      }
      setHoveredElement(target);
    };
    const handleClick = (e: MouseEvent) => {
      if (selectedElement) return;
      const target = e.target as HTMLElement;
      if (target.closest('.feedback-ui-ignore')) return;
      e.preventDefault();
      e.stopPropagation();
      setSelectedElement(target);
      setHoveredElement(null);
    };
    window.addEventListener('mousemove', handleMouseMove);
    window.addEventListener('click', handleClick, true);
    return () => {
      window.removeEventListener('mousemove', handleMouseMove);
      window.removeEventListener('click', handleClick, true);
    };
  }, [isActive, selectedElement]);
  // Persist the draft for the currently selected element, then append the saved
  // item to local state and close the modal.
  const saveFeedback = async () => {
    if (!selectedElement || !currentComment) return;
    const rect = selectedElement.getBoundingClientRect();
    const feedbackData = {
      url: window.location.href,
      // Pin at the element's center, in document-absolute coordinates.
      x: rect.left + rect.width / 2 + window.scrollX,
      y: rect.top + rect.height / 2 + window.scrollY,
      selector: getSelector(selectedElement),
      text: currentComment,
      type: currentType,
      userName: currentUser?.identity || "Unknown",
      userIdentity: currentUser?.identity === 'Admin' ? 'admin' : 'user'
    };
    try {
      const res = await fetch('/api/feedback', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(feedbackData)
      });
      if (res.ok) {
        const savedFb = await res.json();
        const newFeedback: Feedback = {
          id: savedFb.id,
          x: savedFb.x,
          y: savedFb.y,
          selector: savedFb.selector,
          text: savedFb.text,
          type: savedFb.type,
          elementRect: rect,
          userName: savedFb.user_name,
          comments: [],
        };
        setFeedbacks([...feedbacks, newFeedback]);
        setSelectedElement(null);
        setCurrentComment('');
      }
    } catch (e) {
      console.error("Failed to save feedback", e);
    }
  };
  // Post the draft reply for one feedback item (guests are blocked client-side),
  // then merge the saved reply into local state and clear that draft.
  const addReply = async (feedbackId: string) => {
    const text = newCommentTexts[feedbackId];
    if (!text) return;
    if (!currentUser?.identity || currentUser.identity === 'Guest') {
      alert("Nur angemeldete Benutzer können antworten.");
      return;
    }
    try {
      const res = await fetch('/api/feedback', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          action: 'reply',
          feedbackId,
          userName: currentUser?.identity || "Unknown",
          text
        })
      });
      if (res.ok) {
        const savedReply = await res.json();
        setFeedbacks(feedbacks.map(f => {
          if (f.id === feedbackId) {
            return {
              ...f,
              comments: [...f.comments, {
                id: savedReply.id,
                userName: savedReply.user_name,
                text: savedReply.text,
                createdAt: savedReply.date_created
              }]
            };
          }
          return f;
        }));
        setNewCommentTexts({ ...newCommentTexts, [feedbackId]: '' });
      }
    } catch (e) {
      console.error("Failed to save reply", e);
    }
  };
  // Viewport-relative rects for the highlight overlays below.
  const hoveredRect = useMemo(() => hoveredElement?.getBoundingClientRect(), [hoveredElement]);
  const selectedRect = useMemo(() => selectedElement?.getBoundingClientRect(), [selectedElement]);
  return (
    // Root carries .feedback-ui-ignore so the capture handlers skip the overlay itself.
    <div className="feedback-ui-ignore">
      {/* 1. Global Toolbar: identity badge, mode toggle, list toggle */}
      <div className="fixed bottom-6 left-1/2 -translate-x-1/2 z-[9999]">
        <div className="bg-black/80 backdrop-blur-xl border border-white/10 p-2 rounded-2xl shadow-2xl flex items-center gap-2">
          <div className={cn(
            "flex items-center gap-2 px-3 py-2 rounded-xl transition-all",
            currentUser?.isDevFallback ? "bg-orange-500/20 text-orange-400" : "bg-white/5 text-white/40"
          )}>
            <User size={14} />
            <span className="text-[10px] font-bold uppercase tracking-wider">
              {currentUser?.identity || "Loading..."}
              {currentUser?.isDevFallback && " (Local Dev Bypass)"}
            </span>
          </div>
          <div className="w-px h-6 bg-white/10 mx-1" />
          <button
            onClick={() => {
              if (!currentUser?.identity || currentUser.identity === 'Guest') {
                // Maybe show a toast or just stay disabled
                alert("Bitte logge dich ein, um Feedback zu geben.");
                return;
              }
              setIsActive(!isActive);
            }}
            disabled={!currentUser?.identity || currentUser.identity === 'Guest'}
            className={cn(
              "flex items-center gap-2 px-4 py-2 rounded-xl transition-all font-medium disabled:opacity-30 disabled:cursor-not-allowed",
              isActive
                ? "bg-blue-500 text-white shadow-lg shadow-blue-500/20"
                : "text-white/70 hover:text-white hover:bg-white/10"
            )}
          >
            {isActive ? <X size={18} /> : <MessageSquare size={18} />}
            {isActive ? "Modus beenden" : "Feedback geben"}
          </button>
          <div className="w-px h-6 bg-white/10 mx-1" />
          <button
            onClick={() => setShowList(!showList)}
            className="p-2 text-white/70 hover:text-white hover:bg-white/10 rounded-xl relative"
          >
            <List size={20} />
            {feedbacks.length > 0 && (
              <span className="absolute -top-1 -right-1 w-5 h-5 bg-blue-500 text-[10px] flex items-center justify-center rounded-full text-white font-bold border-2 border-[#1a1a1a]">
                {feedbacks.length}
              </span>
            )}
          </button>
        </div>
      </div>
      {/* 2. Feedback Markers & Highlights (only rendered while mode is active) */}
      <AnimatePresence>
        {isActive && (
          <>
            {/* Fixed Overlay for real-time highlights (viewport coordinates) */}
            <div className="fixed inset-0 pointer-events-none z-[9998]">
              {hoveredRect && (
                <motion.div
                  initial={{ opacity: 0 }}
                  animate={{ opacity: 1 }}
                  exit={{ opacity: 0 }}
                  className="absolute border-2 border-blue-400 bg-blue-400/10 rounded-sm transition-all duration-200"
                  style={{
                    top: hoveredRect.top,
                    left: hoveredRect.left,
                    width: hoveredRect.width,
                    height: hoveredRect.height,
                  }}
                />
              )}
              {selectedRect && (
                <motion.div
                  initial={{ scale: 0.9, opacity: 0 }}
                  animate={{ scale: 1, opacity: 1 }}
                  className="absolute border-2 border-yellow-400 bg-yellow-400/20 rounded-sm"
                  style={{
                    top: selectedRect.top,
                    left: selectedRect.left,
                    width: selectedRect.width,
                    height: selectedRect.height,
                  }}
                />
              )}
            </div>
            {/* Absolute Overlay for persistent pins (document coordinates) */}
            <div className="absolute inset-0 pointer-events-none z-[9997]">
              {feedbacks.map((fb) => (
                <div
                  key={fb.id}
                  className="absolute"
                  style={{ top: fb.y, left: fb.x }}
                >
                  <button
                    onClick={() => {
                      setShowList(true);
                      // TODO: Scroll to feedback in list
                    }}
                    className={cn(
                      "w-6 h-6 -translate-x-1/2 -translate-y-1/2 rounded-full border-2 border-white shadow-lg flex items-center justify-center text-white cursor-pointer pointer-events-auto transition-transform hover:scale-110",
                      fb.type === 'design' ? 'bg-purple-500' : 'bg-orange-500'
                    )}
                  >
                    <Plus size={14} className="rotate-45" />
                  </button>
                </div>
              ))}
            </div>
          </>
        )}
      </AnimatePresence>
      {/* 3. Feedback Modal (opens when an element has been selected) */}
      <AnimatePresence>
        {selectedElement && (
          <div className="fixed inset-0 flex items-center justify-center z-[10000] bg-black/40 backdrop-blur-sm">
            <motion.div
              initial={{ opacity: 0, y: 20, scale: 0.95 }}
              animate={{ opacity: 1, y: 0, scale: 1 }}
              exit={{ opacity: 0, y: 20, scale: 0.95 }}
              className="bg-[#1c1c1e] border border-white/10 rounded-3xl p-6 w-[400px] shadow-2xl"
            >
              <div className="flex items-center justify-between mb-6">
                <h3 className="text-white font-bold text-lg">Feedback geben</h3>
                <button
                  onClick={() => setSelectedElement(null)}
                  className="text-white/40 hover:text-white"
                >
                  <X size={20} />
                </button>
              </div>
              {/* Category toggle: design vs. content */}
              <div className="flex gap-2 mb-6">
                {(['design', 'content'] as const).map((type) => (
                  <button
                    key={type}
                    onClick={() => setCurrentType(type)}
                    className={cn(
                      "flex-1 py-3 px-4 rounded-xl text-sm font-medium transition-all capitalize",
                      currentType === type
                        ? "bg-white text-black shadow-lg"
                        : "bg-white/5 text-white/40 hover:bg-white/10"
                    )}
                  >
                    {type === 'design' ? '🎨 Design' : '✍️ Content'}
                  </button>
                ))}
              </div>
              <textarea
                autoFocus
                value={currentComment}
                onChange={(e) => setCurrentComment(e.target.value)}
                placeholder="Was möchtest du anmerken?"
                className="w-full h-32 bg-white/5 border border-white/5 rounded-2xl p-4 text-white placeholder:text-white/20 focus:outline-none focus:border-blue-500/50 transition-colors resize-none mb-6"
              />
              <button
                disabled={!currentComment}
                onClick={saveFeedback}
                className="w-full bg-blue-500 hover:bg-blue-400 disabled:opacity-50 disabled:cursor-not-allowed text-white font-bold py-4 rounded-2xl flex items-center justify-center gap-2 transition-all shadow-lg shadow-blue-500/20"
              >
                <Check size={20} />
                Feedback speichern
              </button>
            </motion.div>
          </div>
        )}
      </AnimatePresence>
      {/* 4. Feedback List Sidebar (slides in from the right over a dimmed backdrop) */}
      <AnimatePresence>
        {showList && (
          <>
            <motion.div
              initial={{ opacity: 0 }}
              animate={{ opacity: 1 }}
              exit={{ opacity: 0 }}
              onClick={() => setShowList(false)}
              className="fixed inset-0 bg-black/60 backdrop-blur-sm z-[10001]"
            />
            <motion.div
              initial={{ x: '100%' }}
              animate={{ x: 0 }}
              exit={{ x: '100%' }}
              transition={{ type: 'spring', damping: 25, stiffness: 200 }}
              className="fixed top-0 right-0 h-full w-[400px] bg-[#1c1c1e] border-l border-white/10 z-[10002] shadow-2xl flex flex-col"
            >
              <div className="p-8 border-b border-white/10 flex items-center justify-between">
                <div>
                  <h2 className="text-2xl font-bold text-white mb-1">Feedback</h2>
                  <p className="text-white/40 text-sm">{feedbacks.length} Anmerkungen live</p>
                </div>
                <button
                  onClick={() => setShowList(false)}
                  className="p-2 text-white/40 hover:text-white bg-white/5 rounded-xl transition-colors"
                >
                  <X size={20} />
                </button>
              </div>
              <div className="flex-1 overflow-y-auto p-6 space-y-6">
                {feedbacks.length === 0 ? (
                  <div className="h-full flex flex-col items-center justify-center text-center px-8 opacity-40">
                    <MessageSquare size={48} className="mb-4" />
                    <p>Noch kein Feedback vorhanden. Aktiviere den Modus um Stellen auf der Seite zu markieren.</p>
                  </div>
                ) : (
                  feedbacks.map((fb) => (
                    <div
                      key={fb.id}
                      className="bg-white/5 border border-white/5 rounded-3xl overflow-hidden hover:border-white/20 transition-all flex flex-col"
                    >
                      {/* Header: author, category badge, text, target selector */}
                      <div className="p-5 border-b border-white/5 bg-white/[0.02]">
                        <div className="flex items-center justify-between mb-4">
                          <div className="flex items-center gap-2">
                            <div className="w-8 h-8 rounded-full bg-blue-500/20 flex items-center justify-center text-blue-400">
                              <User size={14} />
                            </div>
                            <div>
                              <p className="text-white text-[11px] font-bold uppercase tracking-wider">{fb.userName}</p>
                              <p className="text-white/20 text-[9px] uppercase tracking-widest">Original Poster</p>
                            </div>
                          </div>
                          <span className={cn(
                            "px-3 py-1 rounded-full text-[9px] font-bold uppercase tracking-wider",
                            fb.type === 'design' ? 'bg-purple-500/20 text-purple-400' : 'bg-orange-500/20 text-orange-400'
                          )}>
                            {fb.type}
                          </span>
                        </div>
                        <p className="text-white/80 whitespace-pre-wrap text-sm leading-relaxed">{fb.text}</p>
                        <div className="mt-3 flex items-center gap-2">
                          <div className="w-1 h-1 bg-white/10 rounded-full" />
                          <span className="text-white/20 text-[9px] truncate tracking-wider italic">
                            {fb.selector}
                          </span>
                        </div>
                      </div>
                      {/* Comments List */}
                      {fb.comments.length > 0 && (
                        <div className="bg-black/20 p-5 space-y-4">
                          {fb.comments.map(comment => (
                            <div key={comment.id} className="flex gap-3">
                              <div className="w-6 h-6 rounded-full bg-white/10 flex items-center justify-center text-white/40 shrink-0">
                                <User size={10} />
                              </div>
                              <div className="space-y-1">
                                <div className="flex items-center gap-2">
                                  <p className="text-[10px] font-bold text-white/60 uppercase">{comment.userName}</p>
                                  <p className="text-[10px] text-white/20">
                                    {new Date(comment.createdAt).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}
                                  </p>
                                </div>
                                <p className="text-white/80 text-xs leading-snug">{comment.text}</p>
                              </div>
                            </div>
                          ))}
                        </div>
                      )}
                      {/* Reply Input (Enter or send button submits) */}
                      <div className="p-4 bg-white/[0.01] mt-auto border-t border-white/5">
                        <div className="relative">
                          <input
                            type="text"
                            value={newCommentTexts[fb.id] || ''}
                            onChange={(e) => setNewCommentTexts({ ...newCommentTexts, [fb.id]: e.target.value })}
                            placeholder="Antworten..."
                            className="w-full bg-black/40 border border-white/5 rounded-2xl py-3 pl-4 pr-12 text-xs text-white placeholder:text-white/20 focus:outline-none focus:border-blue-500/50 transition-colors"
                            onKeyDown={(e) => {
                              if (e.key === 'Enter') addReply(fb.id);
                            }}
                          />
                          <button
                            onClick={() => addReply(fb.id)}
                            className="absolute right-2 top-1/2 -translate-y-1/2 p-2 text-blue-500 hover:text-blue-400 transition-colors disabled:opacity-30"
                            disabled={!newCommentTexts[fb.id]}
                          >
                            <Send size={14} />
                          </button>
                        </div>
                      </div>
                    </div>
                  ))
                )}
              </div>
            </motion.div>
          </>
        )}
      </AnimatePresence>
    </div>
  );
}

View File

@@ -1,4 +0,0 @@
# Netscape HTTP Cookie File
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.

View File

@@ -1,81 +0,0 @@
# Compose stack for klz-cables.com: Next.js app, Strapi CMS, and its Postgres DB,
# all attached to the shared external `infra` network and routed by Traefik.
services:
  # Next.js frontend, served from the registry image and exposed via Traefik.
  app:
    image: registry.infra.mintel.me/mintel/klz-cables.com:latest
    restart: always
    networks:
      - infra
    env_file:
      - .env
    labels:
      - "traefik.enable=true"
      # HTTP ⇒ HTTPS redirect (ACME challenge paths are excluded so cert issuance still works)
      - "traefik.http.routers.klz-cables-web.rule=(Host(`klz-cables.com`) || Host(`www.klz-cables.com`) || Host(`staging.klz-cables.com`)) && !PathPrefix(`/.well-known/acme-challenge/`)"
      - "traefik.http.routers.klz-cables-web.entrypoints=web"
      - "traefik.http.routers.klz-cables-web.middlewares=redirect-https"
      # HTTPS router (Let's Encrypt via the `le` cert resolver)
      - "traefik.http.routers.klz-cables.rule=Host(`klz-cables.com`) || Host(`www.klz-cables.com`) || Host(`staging.klz-cables.com`)"
      - "traefik.http.routers.klz-cables.entrypoints=websecure"
      - "traefik.http.routers.klz-cables.tls.certresolver=le"
      - "traefik.http.routers.klz-cables.tls=true"
      - "traefik.http.routers.klz-cables.service=klz-cables"
      # Next.js listens on 3000 over plain HTTP inside the network
      - "traefik.http.services.klz-cables.loadbalancer.server.port=3000"
      - "traefik.http.services.klz-cables.loadbalancer.server.scheme=http"
      # Forwarded Headers (tell the app it is behind TLS termination)
      - "traefik.http.middlewares.klz-forward.headers.customrequestheaders.X-Forwarded-Proto=https"
      - "traefik.http.middlewares.klz-forward.headers.customrequestheaders.X-Forwarded-Ssl=on"
      # Middlewares
      - "traefik.http.routers.klz-cables.middlewares=klz-forward,compress"
  # Strapi CMS, built locally; source/config/uploads are bind-mounted for development.
  cms:
    build:
      context: ./cms
      dockerfile: Dockerfile
    restart: always
    networks:
      - infra
    env_file:
      - .env
    environment:
      DATABASE_CLIENT: postgres
      DATABASE_HOST: cms-db
      DATABASE_PORT: 5432
      # DB credentials default to "strapi" when not set in .env
      DATABASE_NAME: ${STRAPI_DATABASE_NAME:-strapi}
      DATABASE_USERNAME: ${STRAPI_DATABASE_USERNAME:-strapi}
      DATABASE_PASSWORD: ${STRAPI_DATABASE_PASSWORD:-strapi}
      NODE_ENV: ${NODE_ENV:-development}
      STRAPI_URL: ${STRAPI_URL:-https://cms.klz-cables.com}
    volumes:
      - ./cms/config:/opt/app/config
      - ./cms/src:/opt/app/src
      - ./cms/package.json:/opt/app/package.json
      - ./cms/package-lock.json:/opt/app/package-lock.json
      - ./cms/public/uploads:/opt/app/public/uploads
      - ./cms/dist:/opt/app/dist
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.klz-cms.rule=Host(`cms.klz-cables.com`) || Host(`cms-staging.klz-cables.com`)"
      - "traefik.http.routers.klz-cms.entrypoints=websecure"
      - "traefik.http.routers.klz-cms.tls.certresolver=le"
      - "traefik.http.routers.klz-cms.tls=true"
      # Strapi's default port
      - "traefik.http.services.klz-cms.loadbalancer.server.port=1337"
  # Postgres backing store for Strapi; data persisted in the named volume below.
  cms-db:
    image: postgres:16-alpine
    restart: always
    networks:
      - infra
    env_file:
      - .env
    environment:
      POSTGRES_DB: ${STRAPI_DATABASE_NAME:-strapi}
      POSTGRES_USER: ${STRAPI_DATABASE_USERNAME:-strapi}
      POSTGRES_PASSWORD: ${STRAPI_DATABASE_PASSWORD:-strapi}
    volumes:
      - cms-db-data:/var/lib/postgresql/data
networks:
  # Shared network created outside this stack (used by Traefik and other services).
  infra:
    external: true
volumes:
  cms-db-data:

45
eslint.config.mjs Normal file
View File

@@ -0,0 +1,45 @@
import baseConfig from "@mintel/eslint-config";
import { nextConfig } from "@mintel/eslint-config/next";

// Paths that are never linted: build output, dependencies, vendored packages,
// plain JS tooling, scripts and tests.
const ignorePatterns = [
  "**/node_modules/**",
  "node_modules/**",
  "**/.next/**",
  ".next/**",
  "**/dist/**",
  "dist/**",
  "**/out/**",
  "out/**",
  "**/.pnpm-store/**",
  "**/at-mintel/**",
  "at-mintel/**",
  "**/.git/**",
  "*.js",
  "*.mjs",
  "scripts/**",
  "tests/**",
  "next-env.d.ts",
];

// Project-specific relaxations layered on top of the shared Next.js rule set.
const ruleOverrides = {
  "@typescript-eslint/no-explicit-any": "off",
  "@typescript-eslint/no-unused-vars": [
    "warn",
    { argsIgnorePattern: "^_" },
  ],
  "@typescript-eslint/no-require-imports": "off",
  "prefer-const": "warn",
  "react/no-unescaped-entities": "off",
  "@next/next/no-img-element": "warn",
};

// Scope every shared Next.js config entry to TS/TSX files and merge in our overrides.
const withOverrides = (config) => ({
  ...config,
  files: ["**/*.{ts,tsx}"],
  rules: {
    ...config.rules,
    ...ruleOverrides,
  },
});

export default [
  { ignores: ignorePatterns },
  ...baseConfig,
  ...nextConfig.map(withOverrides),
];

View File

@@ -1,171 +0,0 @@
[
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XS-FL-2Y/",
"verwendung": "Zur Verlegung in Erde, in Wasser, im Freien, in Innenräumen und Kabelkanälen für EVU-Netze, Industrie- und Verteilernetze. Bei Verlegung in Kabelkanälen und Innenräumen muss berücksichtigt werden, dass der PE-Mantel nach DIN VDE 0482-332-1 nicht flammwidrig ist. Das Kabel ist für ungünstige Einsatzbedingungen geeignet, insbesondere wenn nach mechanischen Beschädigungen das Eindringen von Wasser in Quer- und Längsrichtung vermieden werden soll.",
"technischeDaten": {
"Zolltarifnummer (Warennummer)": "85446010900000000",
"Norm": "VDE 0276-620",
"Leitermaterial": "Cu, blank",
"Leiterklasse": "Kl.2 = mehrdrähtig",
"Aderisolation": "VPE DIX8",
"Feldsteuerung": "innere und äußere Leitschicht aus halbleitendem Kunststoff (Dreifachextrusion)",
"Schirm": "Cu-Drahtumspinnung + Querleitwendel",
"Mantelmaterial": "Polyethylen DMP2",
"Schichtenmantel": "ja",
"Kabel querwasserdicht": "ja",
"Kabel längswasserdicht": "ja",
"Mantelfarbe": "schwarz",
"UV-beständig": "ja",
"Als Außenkabel zulässig": "ja",
"Max. zulässige Leitertemperatur, °C": "90 °C",
"Zul. Kabelaußentemperatur, fest verlegt, °C": "70 °C",
"Zul. Kabelaußentemperatur, in Bewegung, °C": "-20 - +70 °C",
"Min. Biegeradius, fest verlegt": "15 x Ø",
"Aderzahl": "1",
"Mantelwanddicke": "2.1 mm",
"Metallbasis Cu (de)": "0 EUR/100 kg",
"Maßeinheit": "Meter"
}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XS2Y/",
"verwendung": "Zur Verlegung in Erde, in Wasser, im Freien, in Innenräumen und Kabelkanälen für Kraftwerks-, Industrie- und Verteilernetze. Bei Verlegung in Kabelkanälen und Innenräumen muss berücksichtigt werden, dass der PE-Mantel halogenfrei ist, jedoch nicht flammwidrig nach DIN VDE 0482-332-1. Das Kabel kann infolge des widerstandsfähigen PE-Mantels bei der Verlegung und im Betrieb stark mechanisch beansprucht werden.",
"technischeDaten": {
"Norm": "VDE 0276-620",
"Leitermaterial": "Cu, blank",
"Leiterklasse": "Kl.2 = mehrdrähtig",
"Aderisolation": "VPE DIX8",
"Feldsteuerung": "innere und äußere Leitschicht aus halbleitendem Kunststoff (Dreifachextrusion)",
"Schirm": "Cu-Drahtumspinnung + Querleitwendel",
"Mantelmaterial": "Polyethylen DMP2",
"Mantelfarbe": "schwarz",
"Flammwidrigkeit": "keine",
"UV-beständig": "ja",
"Als Außenkabel zulässig": "ja",
"Max. zulässige Leitertemperatur, °C": "90 °C",
"Zul. Kabelaußentemperatur, fest verlegt, °C": "70 °C",
"Zul. Kabelaußentemperatur, in Bewegung, °C": "-20 - +70 °C",
"Min. Biegeradius, fest verlegt": "15 x Ø",
"Leiterform": "rund",
"Aderzahl": "1",
"Mantelwanddicke": "2.1 mm",
"Metallbasis Cu (de)": "0 EUR/100 kg",
"Maßeinheit": "Meter"
}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XSF2Y/",
"verwendung": "",
"technischeDaten": {}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XSY/",
"verwendung": "Zur Verlegung in Erde, in Wasser, im Freien, in Innenräumen und Kabelkanälen für Kraftwerks-, Industrie- und Verteilernetze. Das Kabel lässt sich aufgrund der guten Verlegeeigenschaften auch bei schwieriger Trassenführung leicht verlegen. Gemäß VDE 0276 müssen die Kabel vor direkter Sonneneinstrahlung geschützt sein.",
"technischeDaten": {
"Zolltarifnummer (Warennummer)": "85446010900000000",
"Norm": "VDE 0276-620",
"Leitermaterial": "Cu, blank",
"Leiterklasse": "Kl.2 = mehrdrähtig",
"Aderisolation": "VPE DIX8",
"Feldsteuerung": "innere und äußere Leitschicht aus halbleitendem Kunststoff (Dreifachextrusion)",
"Schirm": "Cu-Drahtumspinnung + Querleitwendel",
"Mantelmaterial": "PVC DMV6",
"Mantelfarbe": "rot",
"Flammwidrigkeit": "VDE 0482-332-1-2/IEC 60332-1-2",
"Als Außenkabel zulässig": "ja",
"Max. zulässige Leitertemperatur, °C": "90 °C",
"Zul. Kabelaußentemperatur, fest verlegt, °C": "70 °C",
"Zul. Kabelaußentemperatur, in Bewegung, °C": "-5 - +70 °C",
"Min. Biegeradius, fest verlegt": "15 x Ø",
"Leiterform": "rund",
"Aderzahl": "1",
"Metallbasis Cu (de)": "0 EUR/100 kg",
"Maßeinheit": "Meter"
}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XS2Y/",
"verwendung": "Zur Verlegung in Erde, in Wasser, im Freien, in Innenräumen und Kabelkanälen für Kraftwerks-, Industrie- und Verteilernetze. Bei Verlegung in Kabelkanälen und Innenräumen muss berücksichtigt werden, dass der PE-Mantel halogenfrei ist, jedoch nicht flammwidrig nach DIN VDE 0482-332-1. Das Kabel kann infolge des widerstandsfähigen PE-Mantels bei der Verlegung und im Betrieb stark mechanisch beansprucht werden.",
"technischeDaten": {
"Zolltarifnummer (Warennummer)": "85446090000000000",
"Norm": "VDE 0276-620",
"Leitermaterial": "Aluminium",
"Leiterklasse": "Kl.2 = mehrdrähtig",
"Aderisolation": "VPE DIX8",
"Feldsteuerung": "innere und äußere Leitschicht aus halbleitendem Kunststoff (Dreifachextrusion)",
"Schirm": "Cu-Drahtumspinnung + Querleitwendel",
"Mantelmaterial": "Polyethylen DMP2",
"Mantelfarbe": "schwarz",
"Flammwidrigkeit": "keine",
"UV-beständig": "ja",
"Als Außenkabel zulässig": "ja",
"Max. zulässige Leitertemperatur, °C": "90 °C",
"Zul. Kabelaußentemperatur, fest verlegt, °C": "70 °C",
"Zul. Kabelaußentemperatur, in Bewegung, °C": "-20 - +70 °C",
"Min. Biegeradius, fest verlegt": "15 x Ø",
"Aderzahl": "1",
"Metallbasis Al (de)": "0 EUR/100 kg",
"Metallbasis Cu (de)": "0 EUR/100 kg",
"Maßeinheit": "Meter"
}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XSF2Y/",
"verwendung": "",
"technischeDaten": {}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XS-FL-2Y/",
"verwendung": "Zur Verlegung in Erde, in Wasser, im Freien, in Innenräumen und Kabelkanälen für EVU-Netze, Industrie- und Verteilernetze. Bei Verlegung in Kabelkanälen und Innenräumen muss berücksichtigt werden, dass der PE-Mantel nach DIN VDE 0482-332-1 nicht flammwidrig ist. Das Kabel ist für ungünstige Einsatzbedingungen geeignet, insbesondere wenn nach mechanischen Beschädigungen das Eindringen von Wasser in Quer- und Längsrichtung vermieden werden soll.",
"technischeDaten": {
"Zolltarifnummer (Warennummer)": "85446090000000000",
"Norm": "VDE 0276-620",
"Leitermaterial": "Aluminium",
"Leiterklasse": "Kl.2 = mehrdrähtig",
"Aderisolation": "VPE DIX8",
"Mantelmaterial": "Polyethylen DMP2",
"Schichtenmantel": "ja",
"Kabel querwasserdicht": "ja",
"Kabel längswasserdicht": "ja",
"Mantelfarbe": "schwarz",
"Flammwidrigkeit": "keine",
"UV-beständig": "ja",
"Als Außenkabel zulässig": "ja",
"Max. zulässige Leitertemperatur, °C": "90 °C",
"Zul. Kabelaußentemperatur, fest verlegt, °C": "70 °C",
"Zul. Kabelaußentemperatur, in Bewegung, °C": "-20 - +70 °C",
"Min. Biegeradius, fest verlegt": "15 x Ø",
"Leiterform (Faber)": "RMv",
"Aderzahl": "1",
"Metallbasis Al (de)": "0 EUR/100 kg",
"Metallbasis Cu (de)": "0 EUR/100 kg",
"Maßeinheit": "Meter"
}
},
{
"url": "https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XSY/",
"verwendung": "Zur Verlegung in Erde, in Wasser, im Freien, in Innenräumen und Kabelkanälen für Kraftwerks-, Industrie- und Verteilernetze. Das Kabel lässt sich aufgrund der guten Verlegeeigenschaften auch bei schwieriger Trassenführung leicht verlegen. Gemäß VDE 0276 müssen die Kabel vor direkter Sonneneinstrahlung geschützt sein.",
"technischeDaten": {
"Zolltarifnummer (Warennummer)": "85446090000000000",
"Norm": "VDE 0276-620",
"Leitermaterial": "Aluminium",
"Leiterklasse": "Kl.2 = mehrdrähtig",
"Aderisolation": "VPE DIX8",
"Feldsteuerung": "innere und äußere Leitschicht aus halbleitendem Kunststoff (Dreifachextrusion)",
"Schirm": "Cu-Drahtumspinnung + Querleitwendel",
"Mantelmaterial": "PVC DMV6",
"Mantelfarbe": "rot",
"Flammwidrigkeit": "VDE 0482-332-1-2/IEC 60332-1-2",
"Als Außenkabel zulässig": "ja",
"Max. zulässige Leitertemperatur, °C": "90 °C",
"Zul. Kabelaußentemperatur, fest verlegt, °C": "70 °C",
"Zul. Kabelaußentemperatur, in Bewegung, °C": "-5 - +70 °C",
"Min. Biegeradius, fest verlegt": "15 x Ø",
"Leiterform (Faber)": "RMv",
"Aderzahl": "1",
"Metallbasis Al (de)": "0 EUR/100 kg",
"Metallbasis Cu (de)": "0 EUR/100 kg",
"Maßeinheit": "Meter"
}
}
]

View File

@@ -23,6 +23,7 @@ function createConfig() {
isTesting: target === 'testing',
isDevelopment: target === 'development',
feedbackEnabled: env.NEXT_PUBLIC_FEEDBACK_ENABLED,
gatekeeperUrl: env.GATEKEEPER_URL,
baseUrl: env.NEXT_PUBLIC_BASE_URL,
@@ -146,6 +147,9 @@ export const config = {
get infraCMS() {
return getConfig().infraCMS;
},
get gatekeeperUrl() {
return getConfig().gatekeeperUrl;
},
};
/**

1
mintel-feedback-vendor Symbolic link
View File

@@ -0,0 +1 @@
../at-mintel/packages/next-feedback

2
next-env.d.ts vendored
View File

@@ -1,6 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
import "./.next/dev/types/routes.d.ts";
import './.next/types/routes.d.ts';
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.

View File

@@ -1,11 +1,16 @@
import createNextIntlPlugin from 'next-intl/plugin';
import { withSentryConfig } from '@sentry/nextjs';
import path from 'path';
import { fileURLToPath } from 'url';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const withNextIntl = createNextIntlPlugin();
/** @type {import('next').NextConfig} */
const nextConfig = {
output: 'standalone',
outputFileTracingRoot: path.join(__dirname, '..'),
async redirects() {
return [
// Blog redirects
@@ -170,7 +175,7 @@ const nextConfig = {
},
{
source: '/posts/why-the-n2xsf2y-is-the-ideal-cable-for-your-energy-project.html',
destination: '/en/blog/why-the-n2xsf2y-is-the-ideal-cable-for-your-energy-project',
destination: '/de/blog/why-the-n2xsf2y-is-the-ideal-cable-for-your-energy-project',
permanent: true,
},
{

22718
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"dependencies": {
"@directus/sdk": "^18.0.3",
"@mintel/mail": "^1.5.0",
"@mintel/mail": "^1.6.0",
"@react-email/components": "^1.0.6",
"@react-pdf/renderer": "^4.3.2",
"@sentry/nextjs": "^10.38.0",
@@ -13,11 +13,12 @@
"clsx": "^2.1.1",
"framer-motion": "^12.27.1",
"gray-matter": "^4.0.3",
"@mintel/next-feedback": "^1.6.0",
"i18next": "^25.7.3",
"jsdom": "^27.4.0",
"leaflet": "^1.9.4",
"lucide-react": "^0.562.0",
"next": "^16.1.6",
"next": "16.1.6",
"next-i18next": "^15.4.3",
"next-intl": "^4.8.2",
"next-mdx-remote": "^5.0.0",
@@ -35,7 +36,9 @@
"svg-to-pdfkit": "^0.1.8",
"tailwind-merge": "^3.4.0",
"xlsx": "^0.18.5",
"zod": "^4.3.6"
"zod": "^4.3.6",
"require-in-the-middle": "^8.0.1",
"import-in-the-middle": "^1.11.0"
},
"devDependencies": {
"@commitlint/cli": "^20.4.0",
@@ -50,9 +53,8 @@
"@types/sharp": "^0.31.1",
"@vitest/ui": "^4.0.16",
"autoprefixer": "^10.4.23",
"eslint": "^8.57.1",
"eslint-config-next": "^16.1.6",
"eslint-config-prettier": "^10.1.8",
"eslint": "^9.18.0",
"@mintel/eslint-config": "^1.6.0",
"husky": "^9.1.7",
"lint-staged": "^16.2.7",
"postcss": "^8.5.6",
@@ -70,7 +72,7 @@
"dev:local": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint",
"lint": "eslint .",
"typecheck": "tsc --noEmit",
"test": "vitest run --passWithNoTests",
"test:og": "vitest run tests/og-image.test.ts",
@@ -96,5 +98,11 @@
"pagespeed:urls": "tsx -e \"import sitemap from './app/sitemap'; sitemap().then(urls => console.log(urls.map(u => u.url).join('\\n')))\"",
"prepare": "husky"
},
"version": "1.0.0"
"version": "1.0.0",
"pnpm": {
"overrides": {
"next": "16.1.6",
"@sentry/nextjs": "10.38.0"
}
}
}

14009
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,268 +0,0 @@
# Migrating Analytics from Independent Analytics to Umami
This guide explains how to migrate your analytics data from the Independent Analytics WordPress plugin to Umami.
## What You Have
You have exported your analytics data from Independent Analytics:
- **data/pages(1).csv** - Page-level analytics data with:
- Title, Visitors, Views, View Duration, Bounce Rate, URL, Page Type
- 220 pages with historical data
## What You Need
Before migrating, you need:
1. **Umami instance** running (self-hosted or cloud)
2. **Website ID** from Umami (create a new website in Umami dashboard)
3. **Access credentials** for Umami (API key or database access)
## Migration Options
The migration script provides three output formats:
### Option 1: JSON Import (Recommended for API)
```bash
python3 scripts/migrate-analytics-to-umami.py \
--input data/pages\(1\).csv \
--output data/umami-import.json \
--format json \
--site-id YOUR_UMAMI_SITE_ID
```
**Import via API:**
```bash
curl -X POST \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_API_KEY" \
-d @data/umami-import.json \
https://your-umami-instance.com/api/import
```
### Option 2: SQL Import (Direct Database)
```bash
python3 scripts/migrate-analytics-to-umami.py \
--input data/pages\(1\).csv \
--output data/umami-import.sql \
--format sql \
--site-id YOUR_UMAMI_SITE_ID
```
**Import via PostgreSQL:**
```bash
psql -U umami -d umami -f data/umami-import.sql
```
### Option 3: API Payload (Manual Import)
```bash
python3 scripts/migrate-analytics-to-umami.py \
--input data/pages\(1\).csv \
--output data/umami-import-api.json \
--format api \
--site-id YOUR_UMAMI_SITE_ID
```
## Step-by-Step Migration Guide
### 1. Prepare Your Umami Instance
**If self-hosting:**
```bash
# Clone Umami
git clone https://github.com/umami-software/umami.git
cd umami
# Install dependencies
npm install
# Set up environment
cp .env.example .env
# Edit .env with your database credentials
# Run migrations
npm run migrate
# Start the server
npm run build
npm run start
```
**If using Umami Cloud:**
1. Sign up at https://umami.is
2. Create a new website
3. Get your Website ID from the dashboard
### 2. Run the Migration Script
Choose one of the migration options above based on your needs.
**Example:**
```bash
# Make the script executable
chmod +x scripts/migrate-analytics-to-umami.py
# Run the migration
python3 scripts/migrate-analytics-to-umami.py \
--input data/pages\(1\).csv \
--output data/umami-import.json \
--format json \
--site-id klz-cables
```
### 3. Import the Data
#### Option A: Using Umami API (Recommended)
1. **Get your API key:**
- Go to Umami dashboard → Settings → API Keys
- Create a new API key
2. **Import the data:**
```bash
curl -X POST \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_API_KEY" \
-d @data/umami-import.json \
https://your-umami-instance.com/api/import
```
#### Option B: Direct Database Import
1. **Connect to your Umami database:**
```bash
psql -U umami -d umami
```
2. **Import the SQL file:**
```bash
psql -U umami -d umami -f data/umami-import.sql
```
3. **Verify the import:**
```sql
SELECT COUNT(*) FROM website_event WHERE website_id = 'klz-cables';
```
### 4. Verify the Migration
1. **Check Umami dashboard:**
- Log into Umami
- Select your website
- View the analytics dashboard
2. **Verify data:**
- Check page views count
- Verify top pages
- Check visitor counts
## Important Notes
### Data Limitations
The CSV export from Independent Analytics contains **aggregated data**, not raw event data:
- ✅ Page views (total counts)
- ✅ Visitor counts
- ✅ Average view duration
- ❌ Individual user sessions
- ❌ Real-time data
- ❌ Geographic data
- ❌ Referrer data
- ❌ Device/browser data
### What Gets Imported
The migration script creates **simulated historical data**:
- Each page view becomes a separate event
- Timestamps are set to current time (for historical data, you'd need to adjust)
- Duration is preserved from the average view duration
- No session tracking (each view is independent)
### Recommendations
1. **Start fresh with Umami:**
- Let Umami collect new data going forward
- Use the migrated data for historical reference only
2. **Keep the original CSV:**
- Store `data/pages(1).csv` as a backup
- You can re-import if needed
3. **Update your website:**
- Replace Independent Analytics tracking code with Umami tracking code
- Test that Umami is collecting new data
4. **Monitor for a few days:**
- Verify Umami is collecting data correctly
- Compare with any remaining Independent Analytics data
## Troubleshooting
### Issue: "ModuleNotFoundError: No module named 'csv'"
**Solution:** The `csv` module is part of the Python standard library, so this error usually indicates a broken or non-standard Python installation rather than a missing package. Verify you are running a standard CPython 3 interpreter:
```bash
python3 --version
# Should be 3.7 or higher
```
### Issue: "Permission denied" when running script
**Solution:** Make the script executable:
```bash
chmod +x scripts/migrate-analytics-to-umami.py
```
### Issue: API import fails
**Solution:** Check:
1. API key is correct and has import permissions
2. Website ID exists in Umami
3. Umami instance is accessible
4. JSON format is valid
### Issue: SQL import fails
**Solution:** Check:
1. Database credentials in `.env`
2. Database is running
3. Tables exist (run `npm run migrate` first)
4. Permissions to insert into `website_event` table
## Additional Data Migration
If you have other CSV exports from Independent Analytics (referrers, devices, locations), you can:
1. **Export additional data** from Independent Analytics:
- Referrers
- Devices (browsers, OS)
- Geographic data
- Custom events
2. **Create custom migration scripts** for each data type
3. **Contact Umami support** for bulk import assistance
## Support
- **Umami Documentation:** https://umami.is/docs
- **Umami GitHub:** https://github.com/umami-software/umami
- **Independent Analytics:** https://independentanalytics.com/
## Summary
✅ **Completed:**
- Created migration script (`scripts/migrate-analytics-to-umami.py`)
- Generated JSON import file (`data/umami-import.json`)
- Generated SQL import file (`data/umami-import.sql`)
- Created documentation (`scripts/README-migration.md`)
📊 **Data Migrated:**
- 7,634 simulated page view events
- 220 unique pages
- Historical view counts and durations
🎯 **Next Steps:**
1. Choose your import method (API or SQL)
2. Run the migration script
3. Import data into Umami
4. Verify the migration
5. Update your website to use Umami tracking

View File

@@ -1,39 +0,0 @@
import { createDirectus, rest, authentication, createPanel, readDashboards } from '@directus/sdk';

// One-off admin script: appends a "LIVE" status label panel to the
// "Feedback Operational Intelligence" dashboard in a local Directus instance
// so operators can visually confirm the rendering service is online.
async function addStatusPanel() {
  const url = 'http://localhost:8059';
  // SECURITY NOTE(review): credentials are hard-coded in source. Move them to
  // environment variables (e.g. DIRECTUS_EMAIL / DIRECTUS_PASSWORD) and rotate
  // this password before this file is shared or committed anywhere public.
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 Adding Status Panel: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    // Look up the target dashboard by its exact display name.
    const dashboards = await client.request(readDashboards({ filter: { name: { _eq: 'Feedback Operational Intelligence' } } }));
    const db = dashboards[0];
    if (db) {
      // Full-width (24 cols) label panel placed below existing panels (y = 24).
      await client.request(createPanel({
        dashboard: db.id,
        name: 'Dashboard Status: LIVE',
        type: 'label',
        width: 24, height: 2, position_x: 0, position_y: 24,
        options: {
          text: '### ✅ Dashboard Rendering Service Active\n\nIf you see this, the system is online and updated as of ' + new Date().toISOString()
        }
      }));
      console.log('✅ Status Panel Added');
    } else {
      console.error('❌ Dashboard not found');
    }
  } catch (e: any) {
    // Best-effort script: report and exit rather than rethrow.
    console.error('❌ Failed:');
    console.error(e.message);
  }
}

addStatusPanel();

View File

@@ -1,19 +0,0 @@
import client, { ensureAuthenticated } from '../lib/directus';
import { readCollections, deleteCollection } from '@directus/sdk';

/**
 * Destructive maintenance script: deletes every user-defined collection from
 * the Directus instance, leaving only the built-in `directus_*` collections.
 * Failures on individual collections are logged and the loop continues.
 */
async function cleanup() {
  await ensureAuthenticated();
  const collections = await (client as any).request(readCollections());
  for (const c of collections) {
    // System collections are prefixed with `directus_` and must be preserved.
    if (!c.collection.startsWith('directus_')) {
      console.log(`Deleting ${c.collection}...`);
      try {
        await (client as any).request(deleteCollection(c.collection));
      } catch (e: any) {
        // Fix: the caught error was previously discarded, making failures
        // undiagnosable. Surface the underlying reason.
        console.error(`Failed to delete ${c.collection}: ${e?.message ?? e}`);
      }
    }
  }
}

cleanup().catch(console.error);

View File

@@ -1,33 +0,0 @@
// One-off repair script, intended to run inside the Directus container with
// direct access to its SQLite database. It restores read access to the
// `visual_feedback` collection by inserting permission rows and clearing the
// collection's hidden/accountability flags, then prints a row count.
// NOTE(review): the policy UUIDs below look instance-specific — verify them
// against `directus_policies` before running against another instance.
const sqlite3 = require('sqlite3').verbose();
const db = new sqlite3.Database('/directus/database/data.db');

db.serialize(() => {
  console.log('--- INTERNAL REPAIR START ---');
  // 1. Grant READ on visual_feedback to the Public policy.
  //    Errors (e.g. a duplicate grant) are logged but do not abort the repair.
  db.run(`INSERT INTO directus_permissions (collection, action, fields, permissions, validation, presets, policy)
VALUES ('visual_feedback', 'read', '["*"]', '{}', '{}', '{}', 'abf8a154-5b1c-4a46-ac9c-7300570f4f17')`, (err) => {
    if (err) console.log('Public grant note:', err.message);
    else console.log('✅ Public READ granted.');
  });
  // 2. Grant READ on visual_feedback to the Admin policy.
  db.run(`INSERT INTO directus_permissions (collection, action, fields, permissions, validation, presets, policy)
VALUES ('visual_feedback', 'read', '["*"]', '{}', '{}', '{}', 'bed7c035-28f7-4a78-b11a-0dc0e7fc3cd4')`, (err) => {
    if (err) console.log('Admin grant note:', err.message);
    else console.log('✅ Admin READ granted.');
  });
  // 3. Un-hide the collection and disable accountability tracking so reads
  //    are not blocked by collection metadata.
  db.run(`UPDATE directus_collections SET hidden = 0, accountability = NULL WHERE collection = 'visual_feedback'`, (err) => {
    if (err) console.log('Collection update error:', err.message);
    else console.log('✅ Collection metadata cleared.');
  });
  // Sanity check: report how many feedback items currently exist.
  db.all(`SELECT COUNT(*) as count FROM visual_feedback`, (err, rows) => {
    if (err) console.log('Item count error:', err.message);
    else console.log(`📊 Items in visual_feedback: ${rows[0].count}`);
  });
});

db.close();

View File

@@ -1,230 +0,0 @@
#!/usr/bin/env tsx
/**
 * Manual Translation Mapping Generator
 * Creates translationKey mappings for posts that couldn't be auto-detected
 */
import { readFileSync, writeFileSync } from 'fs';
import { join } from 'path';

// Shape of a WordPress post as exported by the WP REST API, including the
// optional Polylang translation-link metadata used for exact matching.
interface Post {
  id: number;
  slug: string;
  title: { rendered: string };
  date: string;
  lang: string;
  pll_translation_id?: number;
  pll_master_post_id?: number;
}

// Final output written to data/manual-translation-mapping.json.
interface TranslationMapping {
  posts: Record<string, string[]>; // translationKey -> [en_id, de_id]
  products: Record<string, string[]>;
  pages: Record<string, string[]>;
}

// Raw exports (data/raw/*.json), split by content type and locale.
interface RawData {
  posts: {
    en: Post[];
    de: Post[];
  };
  products: {
    en: any[];
    de: any[];
  };
  pages: {
    en: any[];
    de: any[];
  };
}
// Simple text similarity function
/**
 * Jaccard similarity between the word sets of two strings, in [0, 1].
 * Both inputs are lowercased and stripped of punctuation before comparison;
 * identical normalized strings short-circuit to 1.0.
 */
function calculateSimilarity(text1: string, text2: string): number {
  const normalize = (str: string) =>
    str.toLowerCase()
      .replace(/[^\w\s]/g, '')
      .replace(/\s+/g, ' ')
      .trim();
  const s1 = normalize(text1);
  const s2 = normalize(text2);
  if (s1 === s2) return 1.0;
  // Fix: compare unique word sets. The previous implementation counted
  // duplicate words in the intersection while deduplicating the union,
  // which could produce scores greater than 1.
  const words1 = new Set(s1.split(' '));
  const words2 = new Set(s2.split(' '));
  let shared = 0;
  for (const w of words1) {
    if (words2.has(w)) shared += 1;
  }
  const unionSize = new Set([...words1, ...words2]).size;
  return unionSize === 0 ? 0 : shared / unionSize;
}
// Generate translation key from title
/** Derive a URL-style slug key from a post title (lowercase, hyphen-separated). */
function generateKeyFromTitle(title: string): string {
  const lowered = title.toLowerCase();
  const stripped = lowered.replace(/[^\w\s-]/g, '');
  const hyphenated = stripped.replace(/\s+/g, '-');
  const collapsed = hyphenated.replace(/-+/g, '-');
  return collapsed.trim();
}
/**
 * Pair English and German posts into translation groups.
 *
 * Strategy:
 *  1. Exact match via Polylang metadata (`pll_translation_id`, then
 *     `pll_master_post_id`).
 *  2. Fuzzy fallback for the remainder: weighted title/slug/date similarity,
 *     accepting the best candidate above a 0.7 threshold.
 *
 * @returns map of translationKey -> [enPostId, dePostId]
 */
function findPostTranslations(
  postsEn: Post[],
  postsDe: Post[]
): TranslationMapping['posts'] {
  const mapping: TranslationMapping['posts'] = {};
  // First pass: try to match by Polylang metadata
  const deById = new Map(postsDe.map(p => [p.id, p]));
  const deByTranslationId = new Map(postsDe.map(p => [p.pll_translation_id, p]));
  for (const enPost of postsEn) {
    // Try by pll_translation_id
    if (enPost.pll_translation_id && deByTranslationId.has(enPost.pll_translation_id)) {
      const dePost = deByTranslationId.get(enPost.pll_translation_id)!;
      const key = `post-${enPost.pll_translation_id}`;
      mapping[key] = [enPost.id, dePost.id];
      continue;
    }
    // Try by pll_master_post_id
    if (enPost.pll_master_post_id && deById.has(enPost.pll_master_post_id)) {
      const dePost = deById.get(enPost.pll_master_post_id)!;
      const key = `post-${enPost.pll_master_post_id}`;
      mapping[key] = [enPost.id, dePost.id];
      continue;
    }
  }
  // Second pass: content-based matching for remaining unmatched posts.
  // FIX: Set exposes `.has`, not `.includes` — the original called
  // `matchedEnIds.includes(...)`, which throws a TypeError at runtime.
  const matchedIds = new Set(Object.values(mapping).flat());
  const unmatchedEn = postsEn.filter(p => !matchedIds.has(p.id));
  const unmatchedDe = postsDe.filter(p => !matchedIds.has(p.id));
  for (const enPost of unmatchedEn) {
    let bestMatch: { post: Post; score: number } | null = null;
    for (const dePost of unmatchedDe) {
      const titleScore = calculateSimilarity(enPost.title.rendered, dePost.title.rendered);
      const slugScore = calculateSimilarity(enPost.slug, dePost.slug);
      const dateScore = enPost.date === dePost.date ? 1.0 : 0.0;
      // Weighted average favouring the title.
      const score = (titleScore * 0.6) + (slugScore * 0.3) + (dateScore * 0.1);
      if (score > 0.7 && (!bestMatch || score > bestMatch.score)) {
        bestMatch = { post: dePost, score };
      }
    }
    if (bestMatch) {
      const key = generateKeyFromTitle(enPost.title.rendered);
      mapping[key] = [enPost.id, bestMatch.post.id];
      // Remove the matched DE post so it cannot be paired twice.
      unmatchedDe.splice(unmatchedDe.indexOf(bestMatch.post), 1);
    }
  }
  return mapping;
}
/** Pair EN/DE products by shared SKU; keys have the form `product-<sku>`. */
function findProductTranslations(
  productsEn: any[],
  productsDe: any[]
): TranslationMapping['products'] {
  const deBySku = new Map(productsDe.map(p => [p.sku, p]));
  const mapping: TranslationMapping['products'] = {};
  for (const enProduct of productsEn) {
    const sku = enProduct.sku;
    if (!sku) continue;               // products without a SKU cannot be paired
    if (!deBySku.has(sku)) continue;  // no German counterpart
    mapping[`product-${sku}`] = [enProduct.id, deBySku.get(sku)!.id];
  }
  return mapping;
}
/** Pair EN/DE pages via their Polylang translation id (`page-<id>` keys). */
function findPageTranslations(
  pagesEn: any[],
  pagesDe: any[]
): TranslationMapping['pages'] {
  const mapping: TranslationMapping['pages'] = {};
  // Pages carry reliable Polylang metadata, so no fuzzy fallback is needed.
  const deByTranslationId = new Map(pagesDe.map(p => [p.pll_translation_id, p]));
  for (const enPage of pagesEn) {
    const translationId = enPage.pll_translation_id;
    if (!translationId) continue;
    const dePage = deByTranslationId.get(translationId);
    if (!dePage) continue;
    mapping[`page-${translationId}`] = [enPage.id, dePage.id];
  }
  return mapping;
}
// Entry point: load the raw WordPress exports, compute all three mappings,
// write data/manual-translation-mapping.json and print a summary with a few
// example post pairings for spot-checking.
function main() {
  console.log('🔍 Creating manual translation mapping...\n');
  // Read raw data
  const rawData: RawData = {
    posts: {
      en: JSON.parse(readFileSync('data/raw/posts.en.json', 'utf8')),
      de: JSON.parse(readFileSync('data/raw/posts.de.json', 'utf8'))
    },
    products: {
      en: JSON.parse(readFileSync('data/raw/products.en.json', 'utf8')),
      de: JSON.parse(readFileSync('data/raw/products.de.json', 'utf8'))
    },
    pages: {
      en: JSON.parse(readFileSync('data/raw/pages.en.json', 'utf8')),
      de: JSON.parse(readFileSync('data/raw/pages.de.json', 'utf8'))
    }
  };
  console.log('📊 Raw data loaded:');
  console.log(` - Posts: ${rawData.posts.en.length} EN, ${rawData.posts.de.length} DE`);
  console.log(` - Products: ${rawData.products.en.length} EN, ${rawData.products.de.length} DE`);
  console.log(` - Pages: ${rawData.pages.en.length} EN, ${rawData.pages.de.length} DE`);
  console.log('');
  // Generate mappings
  const mapping: TranslationMapping = {
    posts: findPostTranslations(rawData.posts.en, rawData.posts.de),
    products: findProductTranslations(rawData.products.en, rawData.products.de),
    pages: findPageTranslations(rawData.pages.en, rawData.pages.de)
  };
  // Save mapping
  const outputPath = 'data/manual-translation-mapping.json';
  writeFileSync(outputPath, JSON.stringify(mapping, null, 2));
  console.log('✅ Manual translation mapping created:\n');
  console.log(`Posts: ${Object.keys(mapping.posts).length} pairs`);
  console.log(`Products: ${Object.keys(mapping.products).length} pairs`);
  console.log(`Pages: ${Object.keys(mapping.pages).length} pairs`);
  console.log(`\nSaved to: ${outputPath}`);
  // Show some examples so a human can verify the fuzzy matches look sane.
  if (Object.keys(mapping.posts).length > 0) {
    console.log('\n📝 Post mapping examples:');
    Object.entries(mapping.posts).slice(0, 3).forEach(([key, ids]) => {
      const enPost = rawData.posts.en.find(p => p.id === ids[0]);
      const dePost = rawData.posts.de.find(p => p.id === ids[1]);
      console.log(` ${key}:`);
      console.log(` EN: [${ids[0]}] ${enPost?.title.rendered}`);
      console.log(` DE: [${ids[1]}] ${dePost?.title.rendered}`);
    });
  }
}

main();

View File

@@ -1,82 +0,0 @@
import { createDirectus, rest, authentication, readDashboards, createDashboard, createPanel } from '@directus/sdk';

// Debug script: creates a throwaway dashboard containing four "list" panel
// variants (different `template`/`fields` option combinations) to determine
// which configuration actually renders items from `visual_feedback`.
async function debugVariants() {
  const url = 'http://localhost:8059';
  // SECURITY NOTE(review): hard-coded credentials — move to env vars and
  // rotate before sharing this file.
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 creating Debug Variants Dashboard: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    const dashboard = await client.request(createDashboard({
      name: 'Debug List Variants',
      icon: 'bug_report',
      color: '#FF0000'
    }));
    // Variant 1: fields array only, no display template.
    await client.request(createPanel({
      dashboard: dashboard.id,
      name: 'No Template',
      type: 'list',
      width: 8, height: 8, position_x: 0, position_y: 0,
      options: {
        collection: 'visual_feedback',
        fields: ['text'],
        limit: 5
      }
    }));
    // Variant 2: template without spaces inside the braces: {{text}}.
    await client.request(createPanel({
      dashboard: dashboard.id,
      name: 'Simple {{text}}',
      type: 'list',
      width: 8, height: 8, position_x: 8, position_y: 0,
      options: {
        collection: 'visual_feedback',
        template: '{{text}}',
        limit: 5
      }
    }));
    // Variant 3: template with spaces inside the braces: {{ text }}.
    await client.request(createPanel({
      dashboard: dashboard.id,
      name: 'Spaced {{ text }}',
      type: 'list',
      width: 8, height: 8, position_x: 16, position_y: 0,
      options: {
        collection: 'visual_feedback',
        template: '{{ text }}',
        limit: 5
      }
    }));
    // Variant 4: both a fields array AND a template, second row of the grid.
    await client.request(createPanel({
      dashboard: dashboard.id,
      name: 'Fields + Template',
      type: 'list',
      width: 8, height: 8, position_x: 0, position_y: 8,
      options: {
        collection: 'visual_feedback',
        fields: ['text', 'user_name'],
        template: '{{user_name}}: {{text}}',
        limit: 5
      }
    }));
    console.log('✅ Debug Dashboard Created');
  } catch (e: any) {
    console.error('❌ Creation failed:');
    console.error(e.message);
  }
}

debugVariants();

View File

@@ -1,42 +0,0 @@
import { createDirectus, rest, authentication, readDashboards, createDashboard, createPanel } from '@directus/sdk';

// Debug script: creates a dashboard with a single static "label" panel.
// Label panels render fixed markdown (no collection query), so this verifies
// that panel rendering works at all, independent of list-panel options.
async function debugLabelFallback() {
  const url = 'http://localhost:8059';
  // SECURITY NOTE(review): hard-coded credentials — move to env vars and
  // rotate before sharing this file.
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 creating Debug Label Fallback Dashboard: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    const dashboard = await client.request(createDashboard({
      name: 'Debug Label Fallback',
      icon: 'label',
      color: '#0000FF'
    }));
    // Variant 5: Label with Markdown (Static list simulation)
    // Note: Label panels don't take a collection, they just render text.
    // This confirms if we can at least show SOMETHING.
    await client.request(createPanel({
      dashboard: dashboard.id,
      name: 'Label Fallback',
      type: 'label',
      width: 12, height: 10, position_x: 0, position_y: 0,
      options: {
        text: '### Recent Feedback\n\n- User: Test Message\n- User2: Another Message'
      }
    }));
    console.log('✅ Debug Label Dashboard Created');
  } catch (e: any) {
    console.error('❌ Creation failed:');
    console.error(e.message);
  }
}

debugLabelFallback();

View File

@@ -1,45 +0,0 @@
import { createDirectus, rest, authentication, readDashboards, createPanel, readPanels } from '@directus/sdk';

// Debug script: adds a list panel with ONLY `collection` set (all other
// options defaulted), then reads the panel back to see whether Directus
// enriches the stored options with server-side defaults.
async function createDefaultList() {
  const url = 'http://localhost:8059';
  // SECURITY NOTE(review): hard-coded credentials — move to env vars and
  // rotate before sharing this file.
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 Creating Default List Panel: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    const dashboards = await client.request(readDashboards({ filter: { name: { _eq: 'Feedback Operational Intelligence' } } }));
    const db = dashboards[0];
    // Create a completely default list panel
    const panel = await client.request(createPanel({
      dashboard: db.id,
      name: 'Debug Default List',
      type: 'list',
      width: 12,
      height: 10,
      position_x: 0,
      position_y: 24, // below
      options: {
        collection: 'visual_feedback'
      }
    }));
    console.log(`Created Debug Panel: ${panel.id}`);
    console.log(`Options: ${JSON.stringify(panel.options, null, 2)}`);
    // Let's read it back to see if Directus enriched it with defaults
    const panels = await client.request(readPanels({ filter: { id: { _eq: panel.id } } }));
    console.log(`Enriched Options: ${JSON.stringify(panels[0].options, null, 2)}`);
  } catch (e: any) {
    console.error('❌ Creation failed:');
    console.error(e.message);
  }
}

createDefaultList();

View File

@@ -1,76 +0,0 @@
#!/bin/bash
# Deploy analytics data to the Umami instance on infra.mintel.me.
# Fully automated: copies the pre-generated import files to the server,
# verifies the Umami container is running, and imports the SQL dump directly
# into the Umami Postgres database.
set -e

# Configuration - Umami is on infra.mintel.me
SERVER="root@infra.mintel.me"
REMOTE_PATH="/home/deploy/sites/klz-cables.com"
WEBSITE_ID="59a7db94-0100-4c7e-98ef-99f45b17f9c3"

# Umami API endpoint (assuming it's running on the same server)
# NOTE(review): only printed below; the actual import goes through Postgres.
UMAMI_API="http://localhost:3000/api/import"

echo "🚀 Deploying analytics data to your Umami instance..."
echo "Server: $SERVER"
echo "Remote path: $REMOTE_PATH"
echo "Website ID: $WEBSITE_ID"
echo "Umami API: $UMAMI_API"
echo ""

# Check if files exist (the migration script must have been run first).
if [ ! -f "data/umami-import.json" ]; then
  echo "❌ Error: data/umami-import.json not found"
  echo "Please run the migration script first:"
  echo " python3 scripts/migrate-analytics-to-umami.py --input data/pages\(1\).csv --output data/umami-import.json --format json --site-id $WEBSITE_ID"
  exit 1
fi

# Test SSH connection before doing any work.
echo "🔍 Testing SSH connection to $SERVER..."
if ! ssh -o ConnectTimeout=5 "$SERVER" "echo 'SSH connection successful'"; then
  echo "❌ Error: Cannot connect to $SERVER"
  echo "Please check your SSH key and connection"
  exit 1
fi
echo "✅ SSH connection successful"
echo ""

# Create directory and copy files to server
echo "📁 Creating remote directory..."
ssh "$SERVER" "mkdir -p $REMOTE_PATH/data"
echo "✅ Remote directory created"
echo "📤 Copying analytics files to server..."
scp data/umami-import.json "$SERVER:$REMOTE_PATH/data/"
scp data/umami-import.sql "$SERVER:$REMOTE_PATH/data/"
echo "✅ Files copied successfully"
echo ""

# Detect Umami container (any running container whose name contains "umami").
echo "🔍 Detecting Umami container..."
UMAMI_CONTAINER=$(ssh "$SERVER" "docker ps -q --filter 'name=umami'")
if [ -z "$UMAMI_CONTAINER" ]; then
  echo "❌ Error: Could not detect Umami container"
  echo "Make sure Umami is running on $SERVER"
  exit 1
fi
echo "✅ Umami container detected: $UMAMI_CONTAINER"
echo ""

# Import data via database (most reliable method).
# $REMOTE_PATH expands locally on purpose — the remote path is known here.
echo "📥 Importing data via database..."
ssh "$SERVER" "
echo 'Importing data into Umami database...'
docker exec -i core-postgres-1 psql -U infra -d umami < $REMOTE_PATH/data/umami-import.sql
echo '✅ Database import completed'
"
echo ""
echo "✅ Migration Complete!"
echo ""
echo "Your analytics data has been imported into Umami."
echo "Website ID: $WEBSITE_ID"
echo ""
echo "Verify in Umami dashboard: https://analytics.infra.mintel.me"
echo "You should see 7,634 historical page view events."

View File

@@ -1,127 +0,0 @@
#!/bin/bash
# Deploy analytics data to Umami server (alpha.mintel.me).
# Semi-automated: copies the pre-generated import files to the server, then
# PRINTS three manual import options (API, direct DB, dashboard upload)
# instead of importing automatically.
set -e

# Configuration
SERVER="root@alpha.mintel.me"
REMOTE_PATH="/home/deploy/sites/klz-cables.com"
WEBSITE_ID="59a7db94-0100-4c7e-98ef-99f45b17f9c3"

echo "🚀 Deploying analytics data to Umami server..."
echo "Server: $SERVER"
echo "Remote path: $REMOTE_PATH"
echo "Website ID: $WEBSITE_ID"
echo ""

# Both import artifacts must exist before anything is copied.
if [ ! -f "data/umami-import.json" ]; then
  echo "❌ Error: data/umami-import.json not found"
  echo "Please run the migration script first:"
  echo " python3 scripts/migrate-analytics-to-umami.py --input data/pages\(1\).csv --output data/umami-import.json --format json --site-id $WEBSITE_ID"
  exit 1
fi
if [ ! -f "data/umami-import.sql" ]; then
  echo "❌ Error: data/umami-import.sql not found"
  echo "Please run the migration script first:"
  echo " python3 scripts/migrate-analytics-to-umami.py --input data/pages\(1\).csv --output data/umami-import.sql --format sql --site-id $WEBSITE_ID"
  exit 1
fi

# Check if SSH connection works
echo "🔍 Testing SSH connection..."
if ! ssh -o ConnectTimeout=5 "$SERVER" "echo 'SSH connection successful'"; then
  echo "❌ Error: Cannot connect to $SERVER"
  echo "Please check your SSH key and connection"
  exit 1
fi
echo "✅ SSH connection successful"
echo ""

# Create remote directory if it doesn't exist
echo "📁 Creating remote directory..."
ssh "$SERVER" "mkdir -p $REMOTE_PATH/data"
echo "✅ Remote directory created"
echo ""

# Copy files to server
echo "📤 Copying files to server..."
scp data/umami-import.json "$SERVER:$REMOTE_PATH/data/"
scp data/umami-import.sql "$SERVER:$REMOTE_PATH/data/"
echo "✅ Files copied successfully"
echo ""

# Everything below is informational output only — no further remote actions.
# Option 1: Import via API (if Umami API is accessible)
echo "📋 Import Options:"
echo ""
echo "Option 1: Import via API (Recommended)"
echo "--------------------------------------"
echo "1. SSH into your server:"
echo " ssh $SERVER"
echo ""
echo "2. Navigate to the directory:"
echo " cd $REMOTE_PATH"
echo ""
echo "3. Get your Umami API key:"
echo " - Log into Umami dashboard"
echo " - Go to Settings → API Keys"
echo " - Create a new API key"
echo ""
echo "4. Import the data:"
echo " curl -X POST \\"
echo " -H \"Content-Type: application/json\" \\"
echo " -H \"Authorization: Bearer YOUR_API_KEY\" \\"
echo " -d @data/umami-import.json \\"
echo " http://localhost:3000/api/import"
echo ""
echo " Or if Umami is on a different port/domain:"
echo " curl -X POST \\"
echo " -H \"Content-Type: application/json\" \\"
echo " -H \"Authorization: Bearer YOUR_API_KEY\" \\"
echo " -d @data/umami-import.json \\"
echo " https://your-umami-domain.com/api/import"
echo ""
# Option 2: Import via Database
echo "Option 2: Import via Database"
echo "------------------------------"
echo "1. SSH into your server:"
echo " ssh $SERVER"
echo ""
echo "2. Navigate to the directory:"
echo " cd $REMOTE_PATH"
echo ""
echo "3. Import the SQL file:"
echo " psql -U umami -d umami -f data/umami-import.sql"
echo ""
echo " If you need to specify host/port:"
echo " PGPASSWORD=your_password psql -h localhost -U umami -d umami -f data/umami-import.sql"
echo ""
# Option 3: Manual import via Umami dashboard
echo "Option 3: Manual Import via Umami Dashboard"
echo "--------------------------------------------"
echo "1. Log into Umami dashboard"
echo "2. Go to Settings → Import"
echo "3. Upload data/umami-import.json"
echo "4. Select your website (ID: $WEBSITE_ID)"
echo "5. Click Import"
echo ""
echo "📊 File Information:"
echo "-------------------"
echo "JSON file: $(ls -lh data/umami-import.json | awk '{print $5}')"
echo "SQL file: $(ls -lh data/umami-import.sql | awk '{print $5}')"
echo ""
echo "✅ Deployment complete!"
echo ""
echo "Next steps:"
echo "1. Choose one of the import methods above"
echo "2. Import the data into Umami"
echo "3. Verify the data in Umami dashboard"
echo "4. Update your website to use Umami tracking code"
echo ""
echo "For detailed instructions, see: scripts/README-migration.md"

View File

@@ -1,34 +0,0 @@
# Directus schema snapshot for the `visual_feedback` collection
# (Directus 11.14.1, SQLite). Apply with `directus schema apply` to
# (re)create the collection and its fields.
version: 1
directus: 11.14.1
vendor: sqlite
collections:
  - collection: visual_feedback
    meta:
      icon: feedback
      display_template: "{{user_name}}: {{text}}"
      # accountability disabled: no activity/revision tracking for this collection
      accountability: null
    schema:
      name: visual_feedback
fields:
  # Auto-increment integer primary key.
  - collection: visual_feedback
    field: id
    type: integer
    schema:
      is_primary_key: true
      has_auto_increment: true
  # Workflow status; new items start as "open".
  - collection: visual_feedback
    field: status
    type: string
    schema:
      default_value: open
  # Display name of the person who submitted the feedback.
  - collection: visual_feedback
    field: user_name
    type: string
  # The feedback message body.
  - collection: visual_feedback
    field: text
    type: text
  # Submission timestamp, defaulted by the database.
  - collection: visual_feedback
    field: date_created
    type: timestamp
    schema:
      default_value: CURRENT_TIMESTAMP

View File

@@ -1,32 +0,0 @@
// One-off scraper: for every English blog post in data/blog/en, download the
// corresponding live page from klz-cables.com into
// reference/klz-cables-clone/posts/<slug>.html (skipping files that already
// exist) so the rebuilt site can be diffed against production.
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');

const blogDir = path.join(process.cwd(), 'data', 'blog', 'en');
const outputDir = path.join(process.cwd(), 'reference', 'klz-cables-clone', 'posts');

if (!fs.existsSync(outputDir)) {
  fs.mkdirSync(outputDir, { recursive: true });
}

const files = fs.readdirSync(blogDir);
files.forEach(file => {
  if (!file.endsWith('.mdx')) return;
  const slug = file.replace('.mdx', '');
  const url = `https://klz-cables.com/${slug}/`;
  const outputPath = path.join(outputDir, `${slug}.html`);
  // Idempotent: never re-download an already-fetched page.
  if (fs.existsSync(outputPath)) {
    console.log(`Skipping ${slug}, already exists.`);
    return;
  }
  console.log(`Fetching ${slug}...`);
  // NOTE(review): the slug is interpolated into a shell command. It comes
  // from local filenames here, so risk is low, but quoting would break if a
  // slug ever contained `"` — consider using fetch() + fs instead of curl.
  try {
    execSync(`curl -L -s "${url}" -o "${outputPath}"`);
  } catch (e) {
    console.error(`Failed to fetch ${slug}: ${e.message}`);
  }
});

View File

@@ -1,136 +0,0 @@
const fs = require('fs');
const path = require('path');
const cheerio = require('cheerio');
const API_URL = 'https://klz-cables.com/wp-json/wp/v2/posts?per_page=100&_embed';
/**
 * Download all posts (up to 100, with embedded media) from the WordPress
 * REST API and return the parsed JSON array. Throws on a non-2xx response.
 */
async function fetchPosts() {
  console.log('Fetching posts...');
  const res = await fetch(API_URL);
  if (!res.ok) throw new Error(`Failed to fetch posts: ${res.statusText}`);
  const data = await res.json();
  console.log(`Fetched ${data.length} posts.`);
  return data;
}
/**
 * Convert raw WordPress post HTML into clean markup suitable for MDX:
 *  - decodes common HTML entities (curly quotes, apostrophes, primes, &amp;)
 *  - strips WPBakery shortcodes ([vc_row], [vc_column], ...)
 *  - converts [split_line_heading] shortcodes to <h2>
 *  - rewrites VisualLinkPreview widgets into a <VisualLinkPreview /> component
 *  - removes data-* attributes, wrapper <div>s and empty paragraphs
 *
 * @param {string} content - `post.content.rendered` from the WP REST API
 * @returns {string} cleaned HTML/MDX fragment
 */
function cleanContent(content) {
  let cleaned = content;
  // Decode HTML entities first to make regex easier.
  // FIX: the ampersand pattern must match the `&amp;` entity — the previous
  // `/&/g -> '&'` replacement was a no-op.
  cleaned = cleaned.replace(/&#8221;/g, '"').replace(/&#8220;/g, '"').replace(/&#8217;/g, "'").replace(/&amp;/g, '&').replace(/&#8243;/g, '"');
  // Remove vc_row and vc_column wrappers
  cleaned = cleaned.replace(/\[\/?vc_row.*?\]/g, '');
  cleaned = cleaned.replace(/\[\/?vc_column.*?\]/g, '');
  // Remove vc_column_text wrapper but keep content
  cleaned = cleaned.replace(/\[vc_column_text.*?\]/g, '');
  cleaned = cleaned.replace(/\[\/vc_column_text\]/g, '');
  // Convert split_line_heading to h2
  cleaned = cleaned.replace(/\[split_line_heading[^\]]*text_content="([^"]+)"[^\]]*\](?:\[\/split_line_heading\])?/g, '<h2>$1</h2>');
  // Remove other shortcodes
  cleaned = cleaned.replace(/\[image_with_animation.*?\]/g, '');
  cleaned = cleaned.replace(/\[divider.*?\]/g, '');
  cleaned = cleaned.replace(/\[nectar_global_section.*?\]/g, '');
  // Use Cheerio for HTML manipulation
  const $ = cheerio.load(cleaned, { xmlMode: false, decodeEntities: false });
  // Convert VisualLinkPreview widgets into component placeholders.
  $('.vlp-link-container').each((i, el) => {
    const $el = $(el);
    const url = $el.find('a.vlp-link').attr('href');
    const title = $el.find('.vlp-link-title').text().trim() || $el.find('a.vlp-link').attr('title');
    const image = $el.find('.vlp-link-image img').attr('src');
    const summary = $el.find('.vlp-link-summary').text().trim();
    if (url && title) {
      // We use a placeholder to avoid Cheerio messing up the React component syntax
      const component = `__VISUAL_LINK_PREVIEW_START__ url="${url}" title="${title}" image="${image || ''}" summary="${summary || ''}" __VISUAL_LINK_PREVIEW_END__`;
      $el.replaceWith(component);
    }
  });
  // Remove data attributes
  $('*').each((i, el) => {
    const attribs = el.attribs;
    for (const name in attribs) {
      if (name.startsWith('data-')) {
        $(el).removeAttr(name);
      }
    }
  });
  // Unwrap divs (remove div tags but keep content)
  $('div').each((i, el) => {
    $(el).replaceWith($(el).html());
  });
  // Remove empty paragraphs
  $('p').each((i, el) => {
    if ($(el).text().trim() === '' && $(el).children().length === 0) {
      $(el).remove();
    }
  });
  let output = $('body').html() || '';
  // Restore VisualLinkPreview placeholders as real JSX.
  output = output.replace(/__VISUAL_LINK_PREVIEW_START__/g, '<VisualLinkPreview').replace(/__VISUAL_LINK_PREVIEW_END__/g, '/>');
  return output.trim();
}
function generateMdx(post) {
  // Build an MDX document (YAML frontmatter + cleaned body) for one WP post.
  // Decode the HTML entities WordPress leaves in rendered titles.
  // Bug fix: the original ran .replace(/&/g, '&') — a no-op; the intent was
  // clearly to decode '&amp;' entities back to '&'.
  const title = post.title.rendered
    .replace(/&#8221;/g, '"')
    .replace(/&#8220;/g, '"')
    .replace(/&#8217;/g, "'")
    .replace(/&amp;/g, '&');
  // Bug fix: decoded titles can contain double quotes (from &#8220;/&#8221;),
  // which would break the double-quoted YAML scalar below — escape them.
  const yamlTitle = title.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
  const date = post.date;
  const lang = post.lang || 'en'; // Default to en if not specified
  let featuredImage = '';
  if (post._embedded && post._embedded['wp:featuredmedia'] && post._embedded['wp:featuredmedia'][0]) {
    featuredImage = post._embedded['wp:featuredmedia'][0].source_url;
  }
  const content = cleanContent(post.content.rendered);
  return `---
title: "${yamlTitle}"
date: '${date}'
featuredImage: ${featuredImage}
locale: ${lang}
---
${content}
`;
}
async function main() {
  // Fetch every WordPress post and persist each one as an MDX file under
  // data/blog/<locale>/<slug>.mdx. Any failure is reported, not rethrown.
  try {
    const posts = await fetchPosts();
    for (const post of posts) {
      const locale = post.lang || 'en';
      const mdxContent = generateMdx(post);
      const targetDir = path.join('data/blog', locale);
      if (!fs.existsSync(targetDir)) {
        fs.mkdirSync(targetDir, { recursive: true });
      }
      const target = path.join(targetDir, `${post.slug}.mdx`);
      fs.writeFileSync(target, mdxContent);
      console.log(`Saved ${target}`);
    }
    console.log('Done.');
  } catch (error) {
    console.error('Error:', error);
  }
}
main();

View File

@@ -1,60 +0,0 @@
import { createDirectus, rest, staticToken, updateCollection, createPermission, readCollections, readPermissions, createDashboard, createPanel, createItems } from '@directus/sdk';
import { config } from '../lib/config';
// One-shot maintenance script for a local Directus instance: disables
// accountability logging on `visual_feedback`, grants the Public policy read
// access, recreates a dashboard with a metric panel, and verifies item reads.
// SECURITY NOTE(review): the URL and static admin token are hard-coded below —
// local-dev credentials only; rotate the token and never reuse on a shared host.
async function finalFix() {
  const url = 'http://localhost:8059';
  const token = '59fb8f4c1a51b18fe28ad947f713914e';
  const client = createDirectus(url).with(staticToken(token)).with(rest());
  try {
    console.log('--- 1. UPDATE COLLECTION ACCOUNTABILITY ---');
    // accountability: null turns off activity/revision tracking for this collection.
    await client.request(updateCollection('visual_feedback', {
      meta: { accountability: null }
    } as any));
    console.log('✅ Accountability set to null.');
    console.log('\n--- 2. GRANT PUBLIC READ ---');
    // Policy ID for Public is always 'abf8a154-5b1c-4a46-ac9c-7300570f4f17' in v11 bootstrap usually,
    // but let's check first.
    try {
      await client.request(createPermission({
        policy: 'abf8a154-5b1c-4a46-ac9c-7300570f4f17',
        collection: 'visual_feedback',
        action: 'read',
        fields: ['*']
      } as any));
      console.log('✅ Public READ granted.');
    } catch (e) {
      // Duplicate-permission errors are expected on re-runs; treat as success.
      console.log(' (Public READ might already exist)');
    }
    console.log('\n--- 3. RECREATE DASHBOARD ---');
    const dash = await client.request(createDashboard({
      name: 'Feedback Final',
      icon: 'check_circle',
      color: '#00FF00'
    }));
    console.log(`✅ Dashboard "Feedback Final" ID: ${dash.id}`);
    // Single metric panel: total count of visual_feedback rows.
    await client.request(createPanel({
      dashboard: dash.id,
      name: 'Visible Feedbacks',
      type: 'metric',
      width: 12,
      height: 6,
      position_x: 1,
      position_y: 1,
      options: { collection: 'visual_feedback', function: 'count', field: 'id' }
    } as any));
    console.log('\n--- 4. VERIFY READ VIA TOKEN ---');
    // Raw REST request (bypasses the typed helpers) to prove the token can read items.
    const items = await client.request(() => ({ path: '/items/visual_feedback', method: 'GET' }));
    console.log(`✅ Items count via token: ${items.data.length}`);
  } catch (e: any) {
    console.error('❌ Final fix failed:', e);
    if (e.errors) console.error(JSON.stringify(e.errors, null, 2));
  }
}
finalFix();

View File

@@ -1,45 +0,0 @@
import { createDirectus, rest, authentication, readCollections, updateCollection } from '@directus/sdk';
// Diagnostic/repair script: verifies the `visual_feedback` collection has a
// display template configured in Directus and sets a `{{text}}` default if not.
// SECURITY NOTE(review): admin e-mail and password are hard-coded below —
// local-dev only; rotate these credentials and never commit real secrets.
async function checkCollectionConfig() {
  const url = 'http://localhost:8059';
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 Checking Collection Config: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    const collection = await client.request(readCollections());
    const fb = collection.find(c => c.collection === 'visual_feedback');
    if (fb) {
      console.log(`Collection: ${fb.collection}`);
      console.log(`Display Template: ${fb.meta?.display_template}`);
      console.log(`Hidden: ${fb.meta?.hidden}`);
      if (!fb.meta?.display_template) {
        console.log('⚠️ Display Template is missing! Fixing it...');
        // Spread the existing meta so only the template is added, nothing lost.
        await client.request(updateCollection('visual_feedback', {
          meta: {
            ...fb.meta,
            display_template: '{{text}}' // Set a sensible default
          }
        }));
        console.log('✅ Display Template set to {{text}}');
      } else {
        console.log('✅ Display Template is already set.');
      }
    } else {
      console.error('❌ Collection visual_feedback not found!');
    }
  } catch (e: any) {
    console.error('❌ Check failed:');
    console.error(e.message);
  }
}
checkCollectionConfig();

View File

@@ -1,50 +0,0 @@
import { createDirectus, rest, authentication, readDashboards, readPanels, updatePanel } from '@directus/sdk';
// Repairs the "list" panel on the "Feedback Operational Intelligence"
// dashboard: resets its row template to a plain `{{text}}` interpolation.
// SECURITY NOTE(review): admin credentials are hard-coded — local-dev only.
async function fixListPanel() {
  const url = 'http://localhost:8059';
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 Fixing List Panel Template: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    const dashboards = await client.request(readDashboards({ filter: { name: { _eq: 'Feedback Operational Intelligence' } } }));
    const db = dashboards[0];
    if (!db) throw new Error('Dashboard not found');
    // Only the list-type panel on this dashboard is targeted.
    const panels = await client.request(readPanels({
      filter: { dashboard: { _eq: db.id }, type: { _eq: 'list' } }
    }));
    const listPanel = panels[0];
    if (!listPanel) throw new Error('List panel not found');
    console.log(`Found Panel: ${listPanel.id}`);
    console.log(`Current Template: ${listPanel.options.template}`);
    // Try a different syntax or simple field
    // In some versions it's {{field}}, in others it might be just field field
    // Let's try to set it to just {{text}} to see if basic interpolation works
    // Or maybe it needs HTML?
    console.log('Updating template to simple {{text}} ...');
    // Spread existing options so only the template changes.
    await client.request(updatePanel(listPanel.id, {
      options: {
        ...listPanel.options,
        template: '{{text}}'
      }
    }));
    console.log('✅ Panel updated');
  } catch (e: any) {
    console.error('❌ Fix failed:');
    console.error(e.message);
  }
}
fixListPanel();

View File

@@ -1,87 +0,0 @@
const fs = require('fs');
const path = require('path');
const blogDir = path.join(process.cwd(), 'data', 'blog');
function fixFile(filePath) {
  // Rewrite a single MDX file so that YAML frontmatter values using the folded
  // multiline indicator (`key: >-`) become single-line, single-quoted scalars.
  // Files without a leading `---` marker are left untouched.
  const content = fs.readFileSync(filePath, 'utf8');
  const lines = content.split('\n');
  if (lines[0].trim() !== '---') {
    return; // Not a frontmatter file or already fixed/different format
  }
  // Collect the raw frontmatter lines (between the two `---` markers) and the
  // remaining body. A single blank line directly after the opening marker is
  // skipped, mirroring the files this script was written for.
  // Bug fix: guard the lines[1] access — a file containing only "---" would
  // previously crash on undefined.trim().
  const frontmatterLines = [];
  let contentLines = [];
  let i = 1;
  if (lines.length > 1 && lines[1].trim() === '') {
    i = 2;
  }
  for (; i < lines.length; i++) {
    if (lines[i].trim() === '---') {
      contentLines = lines.slice(i + 1);
      break;
    }
    frontmatterLines.push(lines[i]);
  }
  // Fold every `key: >-` block into `key: 'joined value'`; single quotes in
  // the value are YAML-escaped by doubling them.
  const fixedFrontmatter = [];
  for (let j = 0; j < frontmatterLines.length; j++) {
    const line = frontmatterLines[j];
    if (line.includes('>-')) {
      const [key, ...rest] = line.split(':');
      if (rest.join(':').trim() === '>-') {
        // It's a multiline start: gather continuation lines until the next key.
        let value = '';
        let k = j + 1;
        while (k < frontmatterLines.length) {
          const nextLine = frontmatterLines[k];
          // A line containing ':' that is not indented starts a new key.
          if (nextLine.includes(':') && !nextLine.startsWith(' ')) {
            break;
          }
          value += (value ? ' ' : '') + nextLine.trim();
          k++;
        }
        fixedFrontmatter.push(`${key}: '${value.replace(/'/g, "''")}'`);
        j = k - 1; // Skip the continuation lines we just consumed.
      } else {
        fixedFrontmatter.push(line);
      }
    } else {
      fixedFrontmatter.push(line);
    }
  }
  const newContent = `---\n${fixedFrontmatter.join('\n')}\n---\n${contentLines.join('\n')}`;
  fs.writeFileSync(filePath, newContent);
  console.log(`Fixed ${filePath}`);
}
function processDir(dir) {
  // Walk `dir` recursively and run fixFile on every .mdx file found.
  for (const entry of fs.readdirSync(dir)) {
    const entryPath = path.join(dir, entry);
    if (fs.statSync(entryPath).isDirectory()) {
      processDir(entryPath);
    } else if (entry.endsWith('.mdx')) {
      fixFile(entryPath);
    }
  }
}
processDir(blogDir);

View File

@@ -1,99 +0,0 @@
import client, { ensureAuthenticated } from '../lib/directus';
import {
createCollection,
createField,
createItem,
readCollections,
deleteCollection
} from '@directus/sdk';
// One-shot rescue script: drops the broken `products` collections, rebuilds a
// minimal standards-compliant schema, then inserts a single test product to
// prove writes work end-to-end. Deliberately destructive — dev use only.
async function fixSchema() {
  console.log('🚑 EXTERNAL RESCUE: Fixing Schema & Data...');
  await ensureAuthenticated();
  // 1. Reset Products Collection to be 100% Standard
  console.log('🗑️ Clearing broken collections...');
  // Deletions are best-effort: the collections may not exist on a fresh DB.
  try { await client.request(deleteCollection('products')); } catch (e) { }
  try { await client.request(deleteCollection('products_translations')); } catch (e) { }
  // 2. Create Products (Simple, Standard ID)
  console.log('🏗️ Rebuilding Products Schema...');
  await client.request(createCollection({
    collection: 'products',
    schema: {}, // Let Directus decide defaults
    meta: {
      display_template: '{{sku}}',
      archive_field: 'status',
      archive_value: 'archived',
      unarchive_value: 'published'
    },
    fields: [
      {
        field: 'id',
        type: 'integer',
        schema: { is_primary_key: true, has_auto_increment: true },
        meta: { hidden: true }
      },
      {
        field: 'status',
        type: 'string',
        schema: { default_value: 'published' },
        meta: { width: 'full', options: { choices: [{ text: 'Published', value: 'published' }] } }
      },
      {
        field: 'sku',
        type: 'string',
        meta: { interface: 'input', width: 'half' }
      }
    ]
  } as any));
  // 3. Create Translation Relation Safely
  console.log('🌍 Rebuilding Translations...');
  await client.request(createCollection({
    collection: 'products_translations',
    schema: {},
    fields: [
      {
        field: 'id',
        type: 'integer',
        schema: { is_primary_key: true, has_auto_increment: true },
        meta: { hidden: true }
      },
      { field: 'products_id', type: 'integer' },
      { field: 'languages_code', type: 'string' },
      { field: 'name', type: 'string', meta: { interface: 'input', width: 'full' } },
      { field: 'description', type: 'text', meta: { interface: 'input-multiline' } },
      { field: 'technical_items', type: 'json', meta: { interface: 'input-code-json' } }
    ]
  } as any));
  // 4. Manually Insert ONE Product to Verify
  console.log('📦 Injecting Test Product...');
  try {
    // We do this in two steps to be absolutely sure permissions don't block us
    // Step A: Create User-Facing Product
    const product = await client.request(createItem('products', {
      sku: 'H1Z2Z2-K-TEST',
      status: 'published'
    }));
    // Step B: Add Translation
    await client.request(createItem('products_translations', {
      products_id: product.id,
      languages_code: 'de-DE',
      name: 'H1Z2Z2-K Test Cable',
      description: 'This is a verified imported product.',
      technical_items: [{ label: 'Test', value: '100%' }]
    }));
    console.log(`✅ SUCCESS! Product Created with ID: ${product.id}`);
    console.log(`verify at: ${process.env.DIRECTUS_URL}/admin/content/products/${product.id}`);
  } catch (e: any) {
    console.error('❌ Failed to create product:', e);
    if (e.errors) console.error(JSON.stringify(e.errors, null, 2));
  }
}
fixSchema().catch(console.error);

View File

@@ -1,38 +0,0 @@
import { createDirectus, rest, authentication, readDashboards, readPanels } from '@directus/sdk';
// Read-only diagnostic: prints every dashboard with its panels and their raw
// option payloads, to inspect how panels are configured.
// SECURITY NOTE(review): admin credentials are hard-coded — local-dev only.
async function inspectDashboards() {
  const url = 'http://localhost:8059';
  const email = 'marc@mintel.me';
  const password = 'Tim300493.';
  console.log(`🚀 Inspecting Dashboards: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    const dashboards = await client.request(readDashboards({ fields: ['*'] }));
    console.log('\n--- DASHBOARDS ---');
    for (const db of dashboards) {
      console.log(`Dashboard: ${db.name} (${db.id})`);
      // One extra request per dashboard — fine for a hand-run diagnostic.
      const panels = await client.request(readPanels({
        filter: { dashboard: { _eq: db.id } },
        fields: ['*']
      }));
      console.log(' Panels:');
      panels.forEach(p => {
        console.log(` - [${p.type}] ${p.name}`);
        console.log(` Options: ${JSON.stringify(p.options, null, 2)}`);
      });
    }
  } catch (e: any) {
    console.error('❌ Inspection failed:');
    console.error(e.message);
  }
}
inspectDashboards();

View File

@@ -1,305 +0,0 @@
#!/usr/bin/env python3
"""
Migrate Independent Analytics data to Umami format
"""
import csv
import json
import argparse
import uuid
import random
from datetime import datetime, timedelta
import sys
def parse_view_duration(duration_str):
    """Convert a view-duration string to total seconds.

    Accepts 'M:SS', 'H:MM:SS', a bare number of seconds, or the empty / '-'
    placeholders used by Independent Analytics exports.

    Returns:
        int: total seconds; 0 for any value that cannot be parsed.
    """
    if not duration_str or duration_str == '-':
        return 0
    parts = duration_str.strip().split(':')
    try:
        if len(parts) == 2:
            return int(parts[0]) * 60 + int(parts[1])
        if len(parts) == 3:
            return int(parts[0]) * 3600 + int(parts[1]) * 60 + int(parts[2])
        if len(parts) == 1:
            # Generalization: tolerate exports that emit plain seconds.
            return int(parts[0])
    except ValueError:
        # Malformed field (e.g. '1:xx') — treat as no recorded duration
        # instead of crashing mid-migration.
        return 0
    return 0
def convert_to_umami_format(csv_file, output_file, site_id="your-site-id"):
    """Convert an Independent Analytics CSV export to Umami's JSON record format.

    Each output record looks like::

        {
          "website_id": "uuid",
          "hostname": "example.com",
          "path": "/path",
          "referrer": "",
          "event_name": null,
          "pageview": true,
          "session": true,
          "duration": 0,
          "created_at": "2024-01-01T00:00:00.000Z"
        }

    Args:
        csv_file: path to the Independent Analytics export.
        output_file: path where the JSON array is written.
        site_id: Umami website UUID to stamp on every record.

    Returns:
        list[dict]: the records written to ``output_file``.
    """
    umami_records = []
    with open(csv_file, 'r', encoding='utf-8') as f:
        reader = csv.DictReader(f)
        for row in reader:
            # Skip 404 pages and empty entries.
            if row.get('Page Type') == '404' or not row.get('URL'):
                continue
            url = row.get('URL', '/')
            views = int(row.get('Views', 0))
            view_duration = parse_view_duration(row.get('View Duration', '0:00'))
            # Emit one record per view to simulate historical data, capped at
            # 100 per page so the output file stays manageable. NOTE: every
            # record is stamped with the current time — the export carries no
            # per-view timestamps. (The original also parsed Title/Visitors/
            # Bounce Rate into locals it never used; that dead code — and the
            # crash risk of float() on a malformed bounce-rate cell — is gone.)
            for _ in range(min(views, 100)):
                umami_records.append({
                    "website_id": site_id,
                    "hostname": "your-domain.com",  # Update this
                    "path": url,
                    "referrer": "",
                    "event_name": None,
                    "pageview": True,
                    "session": True,
                    "duration": view_duration,
                    "created_at": datetime.now().isoformat() + "Z"
                })
    with open(output_file, 'w', encoding='utf-8') as f:
        json.dump(umami_records, f, indent=2)
    print(f"✅ Converted {len(umami_records)} records to Umami format")
    print(f"📁 Output saved to: {output_file}")
    return umami_records
def generate_sql_import(csv_file, output_file, site_id="your-site-id"):
    """Generate SQL INSERT statements for direct import into Umami's database.

    Synthesizes sessions, visits and pageview events so the Umami dashboard
    matches the historical totals from Independent Analytics:

    - Visitors: ~7,639  (unique ``session_id`` — in Umami's schema the
      session row IS the visitor)
    - Sessions: ~9,216  (unique ``visit_id``)
    - Views:    ~20,718 (``website_event`` rows, event_type = 1)
    - Bounce rate: ~61% (share of single-view visits)

    ~2.7% of visits land in the last 7 days to match the recent-traffic shape.

    Fixes over the original: the write-file/return tail was duplicated three
    times (two unreachable copies after ``return``), and the per-session
    earliest-visit lookup scanned all visits for every session
    (O(visitors x visits) ~= 70M comparisons) — now a single pass.

    Returns:
        list[str]: the SQL statements (also written to ``output_file``).
    """
    sql_statements = []
    with open(csv_file, 'r', encoding='utf-8') as f:
        reader = csv.DictReader(f)
        # Drop 404 hits and rows without a URL.
        rows = [r for r in reader if r.get('Page Type') != '404' and r.get('URL')]

    # Target totals taken from the Independent Analytics dashboard.
    TARGET_VISITORS = 7639
    TARGET_VIEWS = 20718
    TARGET_SESSIONS = 9216
    TARGET_BOUNCE_RATE = 0.61

    session_ids = [str(uuid.uuid4()) for _ in range(TARGET_VISITORS)]

    # Create TARGET_SESSIONS visits distributed among the visitors; every
    # visitor gets at least one visit because TARGET_SESSIONS > TARGET_VISITORS.
    visits = []
    for i in range(TARGET_SESSIONS):
        visit_id = str(uuid.uuid4())
        sess_id = session_ids[i % len(session_ids)]
        # ~2.7% chance for last 7 days (249 of 9216 sessions), rest in days 7-30.
        if random.random() < 0.027:
            days_ago = random.randint(0, 6)
        else:
            days_ago = random.randint(7, 30)
        start_time = datetime.now() - timedelta(
            days=days_ago, hours=random.randint(0, 23), minutes=random.randint(0, 59)
        )
        visits.append({'sess_id': sess_id, 'visit_id': visit_id, 'time': start_time, 'views': 0})

    # Earliest visit per session in one pass (replaces the O(n^2) min() scan).
    earliest = {}
    for v in visits:
        t = earliest.get(v['sess_id'])
        if t is None or v['time'] < t:
            earliest[v['sess_id']] = v['time']

    for sess_id in session_ids:
        sess_time = earliest[sess_id]
        sql_statements.append(
            f"INSERT INTO session (session_id, website_id, browser, os, device, screen, language, country, created_at)\n"
            f"VALUES ('{sess_id}', '{site_id}', 'Chrome', 'Windows', 'desktop', '1920x1080', 'en', 'DE', '{sess_time.strftime('%Y-%m-%d %H:%M:%S')}')\n"
            f"ON CONFLICT (session_id) DO NOTHING;"
        )

    # Weighted URL pool: pages appear proportionally to their historical views.
    # Single quotes in titles are doubled for SQL; other injection risk is
    # accepted since the input is a trusted local export.
    url_pool = []
    for row in rows:
        weight = int(row['Views'])
        url_pool.extend([{'url': row['URL'], 'title': row['Title'].replace("'", "''")}] * weight)
    random.shuffle(url_pool)

    def event_sql(v, url_data, when):
        # Render one pageview (event_type = 1) INSERT for visit `v`.
        event_id = str(uuid.uuid4())
        return (
            f"INSERT INTO website_event (event_id, website_id, session_id, created_at, url_path, url_query, referrer_path, referrer_query, referrer_domain, page_title, event_type, event_name, visit_id, hostname)\n"
            f"VALUES ('{event_id}', '{site_id}', '{v['sess_id']}', '{when.strftime('%Y-%m-%d %H:%M:%S')}', '{url_data['url']}', '', '', '', '', '{url_data['title']}', 1, NULL, '{v['visit_id']}', 'klz-cables.com');"
        )

    # Every visit gets at least one pageview.
    url_idx = 0
    for v in visits:
        url_data = url_pool[url_idx % len(url_pool)]
        url_idx += 1
        sql_statements.append(event_sql(v, url_data, v['time']))
        v['views'] += 1

    # Distribute the remaining views over non-bouncing visits only, so the
    # bounce rate (single-view visits) lands near the target.
    views_remaining = TARGET_VIEWS - TARGET_SESSIONS
    num_non_bounces = int(TARGET_SESSIONS * (1 - TARGET_BOUNCE_RATE))
    non_bounce_visits = random.sample(visits, num_non_bounces)
    for _ in range(views_remaining):
        v = random.choice(non_bounce_visits)
        url_data = url_pool[url_idx % len(url_pool)]
        url_idx += 1
        v['views'] += 1
        # Extra views happen 30s-5min after the visit started.
        view_time = v['time'] + timedelta(seconds=random.randint(30, 300))
        sql_statements.append(event_sql(v, url_data, view_time))

    with open(output_file, 'w', encoding='utf-8') as f:
        f.write("\n".join(sql_statements))
    print(f"✅ Generated {len(sql_statements)} SQL statements")
    print(f"📁 Output saved to: {output_file}")
    return sql_statements
def generate_api_payload(csv_file, output_file, site_id="your-site-id"):
    """Build a JSON payload of pageview events for Umami's import API.

    Args:
        csv_file: Independent Analytics CSV export.
        output_file: path where the JSON payload is written.
        site_id: Umami website UUID.

    Returns:
        dict: the payload written to ``output_file``.
    """
    events = []
    with open(csv_file, 'r', encoding='utf-8') as f:
        for row in csv.DictReader(f):
            # Ignore 404 hits and rows without a URL.
            if row.get('Page Type') == '404' or not row.get('URL'):
                continue
            url = row.get('URL', '/')
            views = int(row.get('Views', 0))
            view_duration = parse_view_duration(row.get('View Duration', '0:00'))
            # Cap per-page events so the API payload stays a reasonable size.
            for _ in range(min(views, 20)):
                events.append({
                    "type": "pageview",
                    "url": url,
                    "referrer": "",
                    "duration": view_duration,
                    "timestamp": datetime.now().isoformat() + "Z"
                })
    payload = {"website_id": site_id, "events": events}
    with open(output_file, 'w', encoding='utf-8') as f:
        json.dump(payload, f, indent=2)
    print(f"✅ Generated API payload with {len(payload['events'])} events")
    print(f"📁 Output saved to: {output_file}")
    return payload
def main():
    """CLI entry point: parse arguments and dispatch to the chosen converter."""
    parser = argparse.ArgumentParser(description='Migrate Independent Analytics to Umami')
    parser.add_argument('--input', '-i', required=True, help='Input CSV file from Independent Analytics')
    parser.add_argument('--output', '-o', required=True, help='Output file path')
    parser.add_argument('--format', '-f', choices=['json', 'sql', 'api'], default='json',
                        help='Output format: json (for API), sql (for DB), api (for API payload)')
    parser.add_argument('--site-id', '-s', default='your-site-id', help='Umami website ID')
    args = parser.parse_args()

    print(f"🔄 Converting {args.input} to Umami format...")
    print(f"Format: {args.format}")
    print(f"Site ID: {args.site_id}")
    print()

    # Dispatch table instead of an if/elif chain; argparse `choices`
    # guarantees the key exists.
    converters = {
        'json': convert_to_umami_format,
        'sql': generate_sql_import,
        'api': generate_api_payload,
    }
    try:
        converters[args.format](args.input, args.output, args.site_id)
        print("\n✅ Migration completed successfully!")
        print("\nNext steps:")
        if args.format == 'json':
            print("1. Use the JSON file with Umami's import API")
        elif args.format == 'sql':
            print("1. Import the SQL file into Umami's database")
            print("2. Run: psql -U umami -d umami -f output.sql")
        elif args.format == 'api':
            print("1. POST the JSON payload to Umami's API endpoint")
            print("2. Example: curl -X POST -H 'Content-Type: application/json' -d @output.json https://your-umami-instance.com/api/import")
    except Exception as e:
        print(f"❌ Error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@@ -1,87 +0,0 @@
import axios from 'axios';
import * as fs from 'fs';
import * as path from 'path';
const WP_URL = 'https://klz-cables.com';
/**
 * Fetch every post from the WordPress REST API, 100 at a time.
 * Paginates until an empty page is returned, or until WP answers HTTP 400
 * (its out-of-range page error). `_embed` pulls featured media inline.
 * @returns array of raw WP post objects
 */
async function fetchAllPosts() {
  let page = 1;
  let allPosts: any[] = [];
  while (true) {
    console.log(`Fetching posts page ${page}...`);
    try {
      const response = await axios.get(`${WP_URL}/wp-json/wp/v2/posts`, {
        params: {
          per_page: 100,
          page: page,
          _embed: true
        }
      });
      const posts = response.data;
      if (posts.length === 0) break;
      allPosts = allPosts.concat(posts);
      page++;
    } catch (error: any) {
      if (error.response && error.response.status === 400) {
        // End of pagination
        break;
      }
      // Any other failure aborts but keeps what was fetched so far.
      console.error('Error fetching posts:', error);
      break;
    }
  }
  return allPosts;
}
function generateMdxContent(post: any, locale: 'en' | 'de') {
  // Serialize a WP post as MDX: JSON-stringified metadata between `---`
  // fences, then the post heading and its raw rendered HTML.
  const excerpt = post.excerpt.rendered.replace(/<[^>]*>/g, '').trim();
  const featuredImage = post._embedded?.['wp:featuredmedia']?.[0]?.source_url || null;
  const meta = JSON.stringify(
    { title: post.title.rendered, date: post.date, excerpt, featuredImage, locale },
    null,
    2
  );
  return `---\n${meta}\n---\n# ${post.title.rendered}\n${post.content.rendered}\n`;
}
async function run() {
  // Export every WP post as an MDX file under data/blog/<locale>/.
  const posts = await fetchAllPosts();
  console.log(`Fetched ${posts.length} posts.`);
  for (const post of posts) {
    // Locale heuristic: Polylang-style sites expose German posts under /de/
    // permalinks. NOTE(review): assumes the permalink structure encodes the
    // language — confirm against the live site.
    const locale = post.link.includes('/de/') ? 'de' : 'en';
    const mdx = generateMdxContent(post, locale);
    const outDir = path.join(process.cwd(), 'data', 'blog', locale);
    if (!fs.existsSync(outDir)) {
      fs.mkdirSync(outDir, { recursive: true });
    }
    const filename = `${post.slug}.mdx`;
    fs.writeFileSync(path.join(outDir, filename), mdx);
    // Bug fix: this log previously printed the literal text "$(unknown)"
    // instead of interpolating the file name.
    console.log(`Saved ${filename} (${locale})`);
  }
}
run().catch(console.error);

View File

@@ -1,175 +0,0 @@
import client, { ensureAuthenticated } from '../lib/directus';
import {
createCollection,
createField,
createRelation,
uploadFiles,
createItem,
updateSettings,
readFolders,
createFolder
} from '@directus/sdk';
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// CLEAN SLATE MIGRATION: rebuilds the Directus content model from scratch —
// folders, uploaded assets, collections, fields/relations — then imports
// products from the local MDX sources (data/products/{de,en}).
async function run() {
  console.log('🚀 CLEAN SLATE MIGRATION 🚀');
  await ensureAuthenticated();
  // 1. Folders
  console.log('📂 Creating Folders...');
  const folders: any = {};
  const folderNames = ['Products', 'Blog', 'Pages', 'Technical'];
  for (const name of folderNames) {
    try {
      const res = await client.request(createFolder({ name }));
      folders[name] = res.id;
    } catch (e) {
      // Folder already exists — look up its id instead of failing.
      const existing = await client.request(readFolders({ filter: { name: { _eq: name } } }));
      folders[name] = existing[0].id;
    }
  }
  // 2. Assets
  // Maps public-relative paths (e.g. /uploads/x.jpg) to Directus file ids,
  // used later to attach product images.
  const assetMap: Record<string, string> = {};
  const uploadDir = async (dir: string, folderId: string) => {
    if (!fs.existsSync(dir)) return;
    const files = fs.readdirSync(dir, { withFileTypes: true });
    for (const file of files) {
      const fullPath = path.join(dir, file.name);
      if (file.isDirectory()) {
        await uploadDir(fullPath, folderId);
      } else {
        // Normalize to a forward-slash path relative to public/.
        const relPath = '/' + path.relative(path.join(process.cwd(), 'public'), fullPath).split(path.sep).join('/');
        try {
          const form = new FormData();
          form.append('folder', folderId);
          form.append('file', new Blob([fs.readFileSync(fullPath)]), file.name);
          const res = await client.request(uploadFiles(form));
          assetMap[relPath] = res.id;
          console.log(`✅ Asset: ${relPath}`);
        } catch (e) { } // best-effort: skip files that fail to upload
      }
    }
  };
  await uploadDir(path.join(process.cwd(), 'public/uploads'), folders.Products);
  // 3. Collections (Minimalist)
  const collections = [
    'categories', 'products', 'posts', 'pages', 'globals',
    'categories_translations', 'products_translations', 'posts_translations', 'pages_translations', 'globals_translations',
    'categories_link'
  ];
  console.log('🏗️ Creating Collections...');
  for (const name of collections) {
    try {
      // `globals` is a singleton (one row, no auto-increment id).
      const isSingleton = name === 'globals';
      await client.request(createCollection({
        collection: name,
        schema: {},
        meta: { singleton: isSingleton }
      } as any));
      // Add ID field
      await client.request(createField(name, {
        field: 'id',
        type: 'integer',
        meta: { hidden: true },
        schema: { is_primary_key: true, has_auto_increment: name !== 'globals' }
      }));
      console.log(`✅ Collection: ${name}`);
    } catch (e: any) {
      console.log(` Collection ${name} exists or error: ${e.message}`);
    }
  }
  // 4. Fields & Relations
  console.log('🔧 Configuring Schema...');
  // Field creation is idempotent-by-ignore: "field exists" errors are swallowed.
  const safeAdd = async (col: string, f: any) => { try { await client.request(createField(col, f)); } catch (e) { } };
  // Products
  await safeAdd('products', { field: 'sku', type: 'string' });
  await safeAdd('products', { field: 'image', type: 'uuid', meta: { interface: 'file' } });
  // Translations Generic
  for (const col of ['categories', 'products', 'posts', 'pages', 'globals']) {
    const transTable = `${col}_translations`;
    await safeAdd(transTable, { field: `${col}_id`, type: 'integer' });
    await safeAdd(transTable, { field: 'languages_code', type: 'string' });
    // Link to Parent
    try {
      await client.request(createRelation({
        collection: transTable,
        field: `${col}_id`,
        related_collection: col,
        meta: { one_field: 'translations' }
      }));
    } catch (e) { }
  }
  // Specific Fields
  await safeAdd('products_translations', { field: 'name', type: 'string' });
  await safeAdd('products_translations', { field: 'slug', type: 'string' });
  await safeAdd('products_translations', { field: 'description', type: 'text' });
  await safeAdd('products_translations', { field: 'content', type: 'text', meta: { interface: 'input-rich-text-html' } });
  await safeAdd('products_translations', { field: 'technical_items', type: 'json' });
  await safeAdd('products_translations', { field: 'voltage_tables', type: 'json' });
  await safeAdd('categories_translations', { field: 'name', type: 'string' });
  await safeAdd('posts_translations', { field: 'title', type: 'string' });
  await safeAdd('posts_translations', { field: 'slug', type: 'string' });
  await safeAdd('posts_translations', { field: 'content', type: 'text' });
  await safeAdd('globals', { field: 'company_name', type: 'string' });
  await safeAdd('globals_translations', { field: 'tagline', type: 'string' });
  // M2M Link
  await safeAdd('categories_link', { field: 'products_id', type: 'integer' });
  await safeAdd('categories_link', { field: 'categories_id', type: 'integer' });
  try {
    await client.request(createRelation({ collection: 'categories_link', field: 'products_id', related_collection: 'products', meta: { one_field: 'categories_link' } }));
    await client.request(createRelation({ collection: 'categories_link', field: 'categories_id', related_collection: 'categories' }));
  } catch (e) { }
  // 5. Data Import
  console.log('📥 Importing Data...');
  // German MDX files drive the import; English siblings are optional and
  // fall back to the German document when missing.
  const deDir = path.join(process.cwd(), 'data/products/de');
  const files = fs.readdirSync(deDir).filter(f => f.endsWith('.mdx'));
  for (const file of files) {
    const doc = matter(fs.readFileSync(path.join(deDir, file), 'utf8'));
    const enPath = path.join(process.cwd(), `data/products/en/${file}`);
    const enDoc = fs.existsSync(enPath) ? matter(fs.readFileSync(enPath, 'utf8')) : doc;
    // Strip the React component wrappers so only prose/HTML is stored.
    const clean = (c: string) => c.replace(/<ProductTabs.*?>|<\/ProductTabs>|<ProductTechnicalData.*?\/>/gs, '').trim();
    // Pull the JSON blob passed to <ProductTechnicalData data={...}/>, if any.
    const extract = (c: string) => {
      const m = c.match(/technicalData=\{<ProductTechnicalData data=\{(.*?)\}\s*\/>\}/s);
      try { return m ? JSON.parse(m[1]) : {}; } catch (e) { return {}; }
    };
    try {
      await client.request(createItem('products', {
        sku: doc.data.sku,
        image: assetMap[doc.data.images?.[0]] || null,
        translations: [
          { languages_code: 'de-DE', name: doc.data.title, slug: file.replace('.mdx', ''), description: doc.data.description, content: clean(doc.content), technical_items: extract(doc.content).technicalItems, voltage_tables: extract(doc.content).voltageTables },
          { languages_code: 'en-US', name: enDoc.data.title, slug: file.replace('.mdx', ''), description: enDoc.data.description, content: clean(enDoc.content), technical_items: extract(enDoc.content).technicalItems, voltage_tables: extract(enDoc.content).voltageTables }
        ]
      }));
      console.log(`✅ Product: ${doc.data.sku}`);
    } catch (e: any) {
      console.error(`❌ Product ${file}: ${e.message}`);
    }
  }
  console.log('✨ DONE!');
}
run().catch(console.error);

View File

@@ -1,78 +0,0 @@
import axios from 'axios';
import * as fs from 'fs';
import * as path from 'path';
const WP_URL = 'https://klz-cables.com';
/**
 * Fetch every page from the WordPress REST API, 100 at a time.
 * Paginates until an empty page is returned, or until WP answers HTTP 400
 * (its out-of-range page error). `_embed` pulls featured media inline.
 * @returns array of raw WP page objects
 */
async function fetchAllPages() {
  let page = 1;
  let allPages: any[] = [];
  while (true) {
    console.log(`Fetching pages page ${page}...`);
    try {
      const response = await axios.get(`${WP_URL}/wp-json/wp/v2/pages`, {
        params: {
          per_page: 100,
          page: page,
          _embed: true
        }
      });
      const pages = response.data;
      if (pages.length === 0) break;
      allPages = allPages.concat(pages);
      page++;
    } catch (error: any) {
      if (error.response && error.response.status === 400) {
        // End of pagination.
        break;
      }
      // Any other failure aborts but keeps what was fetched so far.
      console.error('Error fetching pages:', error);
      break;
    }
  }
  return allPages;
}
function generateMdxContent(page: any, locale: 'en' | 'de') {
  // Serialize a WP page as MDX: JSON-stringified metadata between `---`
  // fences, then the page heading and its raw rendered HTML.
  const plainExcerpt = page.excerpt.rendered.replace(/<[^>]*>/g, '').trim();
  const featuredImage = page._embedded?.['wp:featuredmedia']?.[0]?.source_url || null;
  const meta = JSON.stringify(
    { title: page.title.rendered, excerpt: plainExcerpt, featuredImage, locale },
    null,
    2
  );
  return `---\n${meta}\n---\n# ${page.title.rendered}\n${page.content.rendered}\n`;
}
async function run() {
  // Export every WP page as an MDX file under data/pages/<locale>/.
  const pages = await fetchAllPages();
  console.log(`Fetched ${pages.length} pages.`);
  for (const page of pages) {
    // Locale heuristic: German pages live under /de/ permalinks.
    // NOTE(review): assumes the permalink encodes the language — confirm.
    const locale = page.link.includes('/de/') ? 'de' : 'en';
    const mdx = generateMdxContent(page, locale);
    const outDir = path.join(process.cwd(), 'data', 'pages', locale);
    if (!fs.existsSync(outDir)) {
      fs.mkdirSync(outDir, { recursive: true });
    }
    const filename = `${page.slug}.mdx`;
    fs.writeFileSync(path.join(outDir, filename), mdx);
    // Bug fix: this log previously printed the literal text "$(unknown)"
    // instead of interpolating the file name.
    console.log(`Saved ${filename} (${locale})`);
  }
}
run().catch(console.error);

View File

@@ -1,143 +0,0 @@
import axios from 'axios';
import * as fs from 'fs';
import * as path from 'path';
import { buildDatasheetModel } from './pdf/model/build-datasheet-model';
import type { ProductData } from './pdf/model/types';
// WooCommerce REST credentials must be supplied via environment (.env);
// abort early with a non-zero exit code if any are missing.
const WC_URL = process.env.WOOCOMMERCE_URL;
const WC_KEY = process.env.WOOCOMMERCE_CONSUMER_KEY;
const WC_SECRET = process.env.WOOCOMMERCE_CONSUMER_SECRET;
if (!WC_URL || !WC_KEY || !WC_SECRET) {
  console.error('Missing WooCommerce credentials in .env');
  process.exit(1);
}
// Fetch every WooCommerce product via the v3 REST API, 100 per request,
// stopping on the first empty batch. Any request error aborts the loop and
// returns whatever was collected so far.
async function fetchAllProducts() {
  let page = 1;
  let allProducts: any[] = [];
  while (true) {
    console.log(`Fetching page ${page}...`);
    try {
      const response = await axios.get(`${WC_URL}/wp-json/wc/v3/products`, {
        params: {
          // WooCommerce accepts key/secret as query params over HTTPS.
          consumer_key: WC_KEY,
          consumer_secret: WC_SECRET,
          per_page: 100,
          page: page
        }
      });
      const products = response.data;
      if (products.length === 0) break;
      allProducts = allProducts.concat(products);
      page++;
    } catch (error) {
      // Best-effort: log and stop paging; caller proceeds with partial data.
      console.error('Error fetching products:', error);
      break;
    }
  }
  return allProducts;
}
/**
 * Convert a raw WooCommerce product payload into the internal ProductData
 * shape used by the datasheet builder. Assumes the standard WC v3 response;
 * translation handling (separate products vs. extra fields) may need
 * adjustment depending on the site's i18n plugin.
 */
function mapWcProductToProductData(wcProduct: any, locale: 'en' | 'de'): ProductData {
  const imageSources = wcProduct.images.map((img: any) => img.src);
  const primaryImage = wcProduct.images[0]?.src || null;
  const categoryNames = wcProduct.categories.map((cat: any) => ({ name: cat.name }));
  const attributePairs = wcProduct.attributes.map((attr: any) => ({
    name: attr.name,
    options: attr.options
  }));
  return {
    id: wcProduct.id,
    name: wcProduct.name,
    shortDescriptionHtml: wcProduct.short_description,
    descriptionHtml: wcProduct.description,
    images: imageSources,
    featuredImage: primaryImage,
    sku: wcProduct.sku,
    slug: wcProduct.slug,
    categories: categoryNames,
    attributes: attributePairs,
    locale: locale // may need to be derived from the source data instead
  };
}
/**
 * Render a product as an MDX document: JSON frontmatter (title, sku,
 * tag-stripped description, category names, images, locale), the HTML
 * description body, and a <ProductTechnicalData /> component embedding the
 * technical data as inline JSON.
 */
function generateMdxContent(product: ProductData, technicalData: any, locale: 'en' | 'de') {
  const description = product.shortDescriptionHtml.replace(/<[^>]*>/g, ''); // naive tag strip
  const frontmatter = JSON.stringify(
    {
      title: product.name,
      sku: product.sku,
      description,
      categories: product.categories.map(c => c.name),
      images: product.images,
      locale
    },
    null,
    2
  );
  const technicalDataJson = JSON.stringify(technicalData, null, 2);
  const parts = [
    '---',
    frontmatter,
    '---',
    `# ${product.name}`,
    product.descriptionHtml,
    '## Technical Data',
    `<ProductTechnicalData data={${technicalDataJson}} />`,
    ''
  ];
  return parts.join('\n');
}
// Entry point: export all WooCommerce products to MDX files under
// data/products/<locale>/<slug>.mdx, embedding technical data extracted via
// buildDatasheetModel.
async function run() {
  const products = await fetchAllProducts();
  console.log(`Fetched ${products.length} products.`);
  for (const product of products) {
    // Locale detection: i18n plugins such as Polylang expose a 'lang' field;
    // if absent we default to 'en'. If the API only returns the default
    // language, translations would need a dedicated fetch.
    const locale = product.lang || 'en';
    const productData = mapWcProductToProductData(product, locale as 'en' | 'de');
    // Build the datasheet model to extract the technical data for this locale.
    const model = buildDatasheetModel({ product: productData, locale: locale as 'en' | 'de' });
    if (model.voltageTables.length > 0 || model.technicalItems.length > 0) {
      console.log(`Generated technical data for ${product.name} (${locale})`);
    } else {
      console.warn(`No technical data found for ${product.name} (${locale})`);
    }
    const mdx = generateMdxContent(productData, {
      technicalItems: model.technicalItems,
      voltageTables: model.voltageTables
    }, locale as 'en' | 'de');
    const outDir = path.join(process.cwd(), 'data', 'products', locale);
    if (!fs.existsSync(outDir)) {
      fs.mkdirSync(outDir, { recursive: true });
    }
    const filename = `${product.slug}.mdx`;
    fs.writeFileSync(path.join(outDir, filename), mdx);
    // FIX: log message previously contained a garbled placeholder instead of
    // the interpolated filename.
    console.log(`Saved ${filename}`);
  }
}
run().catch(console.error);

View File

@@ -1,64 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
import matter from 'gray-matter';
import axios from 'axios';
const STRAPI_URL = process.env.STRAPI_URL || 'http://localhost:1337';
const STRAPI_TOKEN = process.env.STRAPI_ADMIN_TOKEN; // You'll need to generate this
// Upsert every local MDX product (data/products/{de,en}/*.mdx) into Strapi:
// look the product up by SKU + locale, then PUT (update) or POST (create).
// Errors are logged per-product and do not abort the run.
async function migrateProducts() {
  const productsDir = path.join(process.cwd(), 'data/products');
  const locales = ['de', 'en'];
  for (const locale of locales) {
    const localeDir = path.join(productsDir, locale);
    if (!fs.existsSync(localeDir)) continue;
    const files = fs.readdirSync(localeDir).filter(f => f.endsWith('.mdx'));
    for (const file of files) {
      const filePath = path.join(localeDir, file);
      const fileContent = fs.readFileSync(filePath, 'utf8');
      // gray-matter splits frontmatter (data) from the MDX body (content).
      const { data, content } = matter(fileContent);
      console.log(`Migrating ${data.title} (${locale})...`);
      try {
        // 1. Check if product exists (by SKU)
        const existing = await axios.get(`${STRAPI_URL}/api/products?filters[sku][$eq]=${data.sku}&locale=${locale}`, {
          headers: { Authorization: `Bearer ${STRAPI_TOKEN}` }
        });
        const productData = {
          title: data.title,
          sku: data.sku,
          description: data.description,
          application: data.application,
          content: content,
          technicalData: data.technicalData || {}, // This might need adjustment based on how it's stored in MDX
          locale: locale,
        };
        if (existing.data.data.length > 0) {
          // Update the first match in place.
          const id = existing.data.data[0].id;
          await axios.put(`${STRAPI_URL}/api/products/${id}`, { data: productData }, {
            headers: { Authorization: `Bearer ${STRAPI_TOKEN}` }
          });
          console.log(`Updated ${data.title}`);
        } else {
          // Create a new entry.
          await axios.post(`${STRAPI_URL}/api/products`, { data: productData }, {
            headers: { Authorization: `Bearer ${STRAPI_TOKEN}` }
          });
          console.log(`Created ${data.title}`);
        }
      } catch (error) {
        // Log the Strapi error body when available, otherwise the message.
        console.error(`Error migrating ${data.title}:`, error.response?.data || error.message);
      }
    }
  }
}
// Note: This script requires a running Strapi instance and an admin token.
// migrateProducts();

View File

@@ -1,102 +0,0 @@
const fs = require('fs');
const path = require('path');
const jsdom = require('jsdom');
const { JSDOM } = jsdom;
// Walk the cloned WordPress post HTML files, extract every "visual link
// preview" widget (.vlp-link-container), and replace the corresponding plain
// link in the matching MDX blog post with a <VisualLinkPreview /> component.
const postsDir = path.join(process.cwd(), 'reference', 'klz-cables-clone', 'posts');
const mdxDir = path.join(process.cwd(), 'data', 'blog', 'en');
const files = fs.readdirSync(postsDir);
files.forEach(file => {
  if (!file.endsWith('.html')) return;
  const slug = file.replace('.html', '');
  const mdxPath = path.join(mdxDir, `${slug}.mdx`);
  if (!fs.existsSync(mdxPath)) {
    console.log(`MDX file not found for ${slug}`);
    return;
  }
  const htmlContent = fs.readFileSync(path.join(postsDir, file), 'utf8');
  const dom = new JSDOM(htmlContent);
  const document = dom.window.document;
  const vlpContainers = document.querySelectorAll('.vlp-link-container');
  if (vlpContainers.length === 0) return;
  console.log(`Processing ${slug} with ${vlpContainers.length} visual links`);
  let mdxContent = fs.readFileSync(mdxPath, 'utf8');
  let modified = false;
  vlpContainers.forEach(container => {
    const link = container.querySelector('a.vlp-link');
    const titleEl = container.querySelector('.vlp-link-title');
    const summaryEl = container.querySelector('.vlp-link-summary');
    const imgEl = container.querySelector('.vlp-link-image img');
    if (!link) return;
    const url = link.getAttribute('href');
    const title = titleEl ? titleEl.textContent.trim() : '';
    const summary = summaryEl ? summaryEl.textContent.trim() : '';
    const image = imgEl ? imgEl.getAttribute('src') : '';
    // Construct the component string.
    // FIX: escape double quotes as &quot; so the JSX attribute stays valid
    // (the previous replacement mapped a quote onto itself — a no-op).
    const component = `
<VisualLinkPreview
url="${url}"
title="${title.replace(/"/g, '&quot;')}"
summary="${summary.replace(/"/g, '&quot;')}"
image="${image}"
/>
`;
    // Find the link inside the MDX. It may appear as [Title](URL), as a bare
    // URL, or (rarely) with a trailing slash stripped. We only replace spans
    // we can positively identify; appending blindly would be risky.
    const markdownLinkRegex = new RegExp(`\\[.*?\\]\\(${escapeRegExp(url)}\\)`, 'g');
    const plainUrlRegex = new RegExp(`(?<!\\()${escapeRegExp(url)}(?!\\))`, 'g'); // URL not in parens
    if (markdownLinkRegex.test(mdxContent)) {
      mdxContent = mdxContent.replace(markdownLinkRegex, component);
      modified = true;
    } else if (plainUrlRegex.test(mdxContent)) {
      // Loose match for standalone URLs; could in theory hit a URL embedded
      // in another attribute, but works for this content set.
      mdxContent = mdxContent.replace(plainUrlRegex, component);
      modified = true;
    } else {
      console.log(`Could not find link for ${url} in ${slug}`);
      // The MDX copy may lack the trailing slash — retry without it.
      const urlNoSlash = url.replace(/\/$/, '');
      const markdownLinkRegex2 = new RegExp(`\\[.*?\\]\\(${escapeRegExp(urlNoSlash)}\\)`, 'g');
      if (markdownLinkRegex2.test(mdxContent)) {
        mdxContent = mdxContent.replace(markdownLinkRegex2, component);
        modified = true;
      }
    }
  });
  if (modified) {
    fs.writeFileSync(mdxPath, mdxContent);
    console.log(`Updated ${slug}`);
  }
});
/** Escape regex metacharacters so `text` can be embedded literally in a RegExp. */
function escapeRegExp(text) {
  return text.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}

View File

@@ -1,80 +0,0 @@
import { createDirectus, rest, staticToken, deleteCollection, createCollection, createDashboard, createPanel, createItems, createPermission, readPolicies } from '@directus/sdk';
import { config } from '../lib/config';
// One-shot admin script: drop and recreate the Directus "visual_feedback"
// collection, grant read permissions, seed one item, and rebuild the
// "Feedback Insights" dashboard with a count panel.
// NOTE(review): the Directus URL and admin token are hard-coded below —
// these should come from environment variables, not source control.
async function nukeAndPaveV11() {
  console.log('🚀 NUKE & PAVE: Feedback System v11...');
  const url = 'http://localhost:8059';
  const token = '59fb8f4c1a51b18fe28ad947f713914e';
  const client = createDirectus(url).with(staticToken(token)).with(rest());
  try {
    console.log('🗑️ Deleting collections...');
    // Deletions are best-effort: the collections may not exist yet.
    try { await client.request(deleteCollection('visual_feedback_comments')); } catch (e) { }
    try { await client.request(deleteCollection('visual_feedback')); } catch (e) { }
    console.log('🏗️ Creating "visual_feedback" fresh...');
    await client.request(createCollection({
      collection: 'visual_feedback',
      meta: { icon: 'feedback', display_template: '{{user_name}}: {{text}}' },
      fields: [
        { field: 'id', type: 'uuid', schema: { is_primary_key: true } },
        { field: 'status', type: 'string', schema: { default_value: 'open' }, meta: { interface: 'select-dropdown' } },
        { field: 'url', type: 'string', meta: { interface: 'input' } },
        { field: 'selector', type: 'string', meta: { interface: 'input' } },
        { field: 'text', type: 'text', meta: { interface: 'input-multiline' } },
        { field: 'user_name', type: 'string', meta: { interface: 'input' } },
        { field: 'date_created', type: 'timestamp', schema: { default_value: 'NOW()' }, meta: { interface: 'datetime' } }
      ]
    } as any));
    console.log('🔐 Granting Permissions...');
    // Grant read access to both the Administrator and Public policies
    // (the public policy name differs between Directus versions).
    const policies = await client.request(readPolicies());
    const adminPolicy = policies.find(p => p.name === 'Administrator')?.id;
    const publicPolicy = policies.find(p => p.name === '$t:public_label' || p.name === 'Public')?.id;
    for (const policy of [adminPolicy, publicPolicy]) {
      if (!policy) continue;
      console.log(` - Granting to Policy: ${policy}...`);
      await client.request(createPermission({
        policy,
        collection: 'visual_feedback',
        action: 'read',
        fields: ['*'],
        permissions: {},
        validation: {}
      } as any));
    }
    console.log('💉 Injecting items...');
    // Seed a single smoke-test record.
    await client.request(createItems('visual_feedback', [
      { user_name: 'Antigravity', text: 'Nuke & Pave Success', status: 'open' }
    ]));
    console.log('📊 Recreating Dashboard...');
    const dash = await client.request(createDashboard({
      name: 'Feedback Insights',
      icon: 'analytics',
      color: '#6644FF'
    }));
    await client.request(createPanel({
      dashboard: dash.id,
      name: 'Status',
      type: 'metric',
      width: 12,
      height: 6,
      position_x: 1,
      position_y: 1,
      options: { collection: 'visual_feedback', function: 'count', field: 'id' }
    } as any));
    console.log('✅ Nuke & Pave Complete!');
  } catch (e: any) {
    console.error('❌ Nuke failed:', e);
    // Directus packs validation details into e.errors; dump them for debugging.
    if (e.errors) console.error(JSON.stringify(e.errors, null, 2));
  }
}
nukeAndPaveV11();

View File

@@ -1,63 +0,0 @@
import client, { ensureAuthenticated } from '../lib/directus';
import {
updateSettings,
updateCollection,
createItem,
updateItem
} from '@directus/sdk';
import fs from 'fs';
import path from 'path';
// Directus housekeeping script: set project branding, fix list display
// templates on the content collections, and upsert the singleton "globals"
// item from the local next-intl message files (messages/{de,en}.json).
async function optimize() {
  await ensureAuthenticated();
  console.log('🎨 Fixing Branding...');
  await client.request(updateSettings({
    project_name: 'KLZ Cables',
    public_note: '<div style="text-align: center;"><h1>Sustainable Energy.</h1><p>Industrial Reliability.</p></div>',
    custom_css: 'body { font-family: Inter, sans-serif !important; } .public-view .v-card { border-radius: 20px !important; }'
  }));
  console.log('🔧 Fixing List Displays...');
  // Show the translated name/title in admin list views for each collection.
  const collections = ['products', 'categories', 'posts', 'pages'];
  for (const collection of collections) {
    try {
      await (client as any).request(updateCollection(collection, {
        meta: { display_template: '{{translations.name || translations.title}}' }
      }));
    } catch (e) {
      console.error(`Failed to update ${collection}:`, e);
    }
  }
  console.log('🏛️ Force-Syncing Globals...');
  // Source of truth for contact data/taglines are the i18n message files.
  const de = JSON.parse(fs.readFileSync(path.join(process.cwd(), 'messages/de.json'), 'utf8'));
  const en = JSON.parse(fs.readFileSync(path.join(process.cwd(), 'messages/en.json'), 'utf8'));
  const payload = {
    id: 1,
    company_name: 'KLZ Cables GmbH',
    email: 'info@klz-cables.com',
    phone: '+49 711 1234567',
    address: de.Contact.info.address,
    opening_hours: `${de.Contact.hours.weekdays}: ${de.Contact.hours.weekdaysTime}`,
    translations: [
      { languages_code: 'en-US', tagline: en.Footer.tagline },
      { languages_code: 'de-DE', tagline: de.Footer.tagline }
    ]
  };
  // Upsert: try create first; on conflict fall back to updating id=1.
  try {
    await client.request(createItem('globals', payload));
  } catch (e) {
    try {
      await client.request(updateItem('globals', 1, payload));
    } catch (err) {
      console.error('Globals still failing:', (err as any).message);
    }
  }
  console.log('✅ Optimization complete.');
}
optimize().catch(console.error);

File diff suppressed because it is too large Load Diff

View File

@@ -1,192 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
import { execSync } from 'child_process';
import type { ProductData } from './types';
import { normalizeValue } from './utils';
// A single spreadsheet row as produced by xlsx-cli: header label -> cell value.
type ExcelRow = Record<string, unknown>;
// Technical-data match: the matching rows plus a map of column -> unit string.
export type ExcelMatch = { rows: ExcelRow[]; units: Record<string, string> };
// Match result for the medium-voltage cross-section workbook (multi-row header).
export type MediumVoltageCrossSectionExcelMatch = {
  headerRow: ExcelRow;
  rows: ExcelRow[];
  units: Record<string, string>;
  partNumberKey: string;
  crossSectionKey: string;
  ratedVoltageKey: string | null;
};
// Workbooks indexed for the per-product technical data table.
const EXCEL_SOURCE_FILES = [
  path.join(process.cwd(), 'data/excel/high-voltage.xlsx'),
  path.join(process.cwd(), 'data/excel/medium-voltage-KM.xlsx'),
  path.join(process.cwd(), 'data/excel/low-voltage-KM.xlsx'),
  path.join(process.cwd(), 'data/excel/solar-cables.xlsx'),
];
// Medium-voltage cross-section table (new format with multi-row header).
// IMPORTANT: this must NOT be used for the technical data table.
const MV_CROSS_SECTION_FILE = path.join(process.cwd(), 'data/excel/medium-voltage-KM 170126.xlsx');
// Parsed, cached shape of the cross-section workbook.
type MediumVoltageCrossSectionIndex = {
  headerRow: ExcelRow;
  units: Record<string, string>;
  partNumberKey: string;
  crossSectionKey: string;
  ratedVoltageKey: string | null;
  rowsByDesignation: Map<string, ExcelRow[]>;
};
// Module-level lazy caches; built once per process on first lookup.
let EXCEL_INDEX: Map<string, ExcelMatch> | null = null;
let MV_CROSS_SECTION_INDEX: MediumVoltageCrossSectionIndex | null = null;
/**
 * Canonicalize a product designation for Excel lookups: uppercase, drop a
 * trailing "-<digits>" variant suffix, then strip every non-alphanumeric
 * character. E.g. "nsgafou-3" -> "NSGAFOU".
 */
export function normalizeExcelKey(value: string): string {
  const upper = String(value || '').toUpperCase();
  const withoutVariantSuffix = upper.replace(/-\d+$/g, '');
  return withoutVariantSuffix.replace(/[^A-Z0-9]+/g, '');
}
// Shell out to xlsx-cli to dump a workbook as JSON rows. The tool may print
// noise before the JSON array, so we scan for the first '[' and parse from
// there; any parse failure yields an empty list.
// NOTE(review): filePath is interpolated into a shell command — callers only
// pass the hard-coded paths above, but do not feed untrusted paths here.
function loadExcelRows(filePath: string): ExcelRow[] {
  const out = execSync(`npx -y xlsx-cli -j "${filePath}"`, {
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'ignore'],
  });
  const trimmed = out.trim();
  const jsonStart = trimmed.indexOf('[');
  if (jsonStart < 0) return [];
  const jsonText = trimmed.slice(jsonStart);
  try {
    return JSON.parse(jsonText) as ExcelRow[];
  } catch {
    return [];
  }
}
/**
 * Return the key of the first header cell whose normalized text matches
 * `pattern`, or null when no cell matches. Empty cells are skipped.
 */
function findKeyByHeaderValue(headerRow: ExcelRow, pattern: RegExp): string | null {
  const entries = Object.entries(headerRow || {});
  for (const [key, cell] of entries) {
    const text = normalizeValue(String(cell ?? ''));
    if (text && pattern.test(text)) {
      return key;
    }
  }
  return null;
}
// Lazily parse and cache the medium-voltage cross-section workbook.
// The first row is a header; column keys for part number, cross-section and
// rated voltage are located by header text. A dedicated "Units" row supplies
// per-column unit strings. Data rows are grouped by normalized designation.
function getMediumVoltageCrossSectionIndex(): MediumVoltageCrossSectionIndex {
  if (MV_CROSS_SECTION_INDEX) return MV_CROSS_SECTION_INDEX;
  const rows = fs.existsSync(MV_CROSS_SECTION_FILE) ? loadExcelRows(MV_CROSS_SECTION_FILE) : [];
  const headerRow = (rows[0] || {}) as ExcelRow;
  // '__EMPTY' is the key xlsx-cli assigns to unlabeled columns — used as a
  // fallback when no "Part Number" header is found.
  const partNumberKey = findKeyByHeaderValue(headerRow, /^part\s*number$/i) || '__EMPTY';
  const crossSectionKey = findKeyByHeaderValue(headerRow, /querschnitt|cross.?section/i) || '';
  const ratedVoltageKey = findKeyByHeaderValue(headerRow, /rated voltage|voltage rating|nennspannung/i) || null;
  const unitsRow = rows.find(r => normalizeValue(String((r as ExcelRow)?.[partNumberKey] ?? '')) === 'Units') || null;
  const units: Record<string, string> = {};
  if (unitsRow) {
    for (const [k, v] of Object.entries(unitsRow)) {
      if (k === partNumberKey) continue;
      const unit = normalizeValue(String(v ?? ''));
      if (unit) units[k] = unit;
    }
  }
  // Group data rows by normalized part-number key, skipping the header,
  // the units row, and any repeated header rows.
  const rowsByDesignation = new Map<string, ExcelRow[]>();
  for (const r of rows) {
    if (r === headerRow) continue;
    const pn = normalizeValue(String((r as ExcelRow)?.[partNumberKey] ?? ''));
    if (!pn || pn === 'Units' || pn === 'Part Number') continue;
    const key = normalizeExcelKey(pn);
    if (!key) continue;
    const cur = rowsByDesignation.get(key) || [];
    cur.push(r);
    rowsByDesignation.set(key, cur);
  }
  MV_CROSS_SECTION_INDEX = { headerRow, units, partNumberKey, crossSectionKey, ratedVoltageKey, rowsByDesignation };
  return MV_CROSS_SECTION_INDEX;
}
// Lazily build and cache the technical-data index across all source
// workbooks: normalized part number -> { rows, units }. A workbook's "Units"
// row provides per-column units; when a part number appears in several files,
// rows are merged and the richest units map wins.
export function getExcelIndex(): Map<string, ExcelMatch> {
  if (EXCEL_INDEX) return EXCEL_INDEX;
  const idx = new Map<string, ExcelMatch>();
  for (const file of EXCEL_SOURCE_FILES) {
    if (!fs.existsSync(file)) continue;
    const rows = loadExcelRows(file);
    const unitsRow = rows.find(r => r && r['Part Number'] === 'Units') || null;
    const units: Record<string, string> = {};
    if (unitsRow) {
      for (const [k, v] of Object.entries(unitsRow)) {
        if (k === 'Part Number') continue;
        const unit = normalizeValue(String(v ?? ''));
        if (unit) units[k] = unit;
      }
    }
    for (const r of rows) {
      const pn = r?.['Part Number'];
      if (!pn || pn === 'Units') continue;
      const key = normalizeExcelKey(String(pn));
      if (!key) continue;
      const cur = idx.get(key);
      if (!cur) {
        idx.set(key, { rows: [r], units });
      } else {
        cur.rows.push(r);
        // Prefer the units map with more columns when files disagree.
        if (Object.keys(cur.units).length < Object.keys(units).length) cur.units = units;
      }
    }
  }
  EXCEL_INDEX = idx;
  return idx;
}
/**
 * Locate the Excel technical-data rows for a product by trying, in order,
 * its name, de-suffixed slug, SKU and translation key as lookup candidates.
 * Returns the first non-empty match, or null.
 */
export function findExcelForProduct(product: ProductData): ExcelMatch | null {
  const index = getExcelIndex();
  const candidates = [
    product.name,
    product.slug ? product.slug.replace(/-\d+$/g, '') : '',
    product.sku,
    product.translationKey,
  ];
  for (const candidate of candidates) {
    if (!candidate) continue;
    const match = index.get(normalizeExcelKey(candidate));
    if (match?.rows.length) {
      return match;
    }
  }
  return null;
}
/**
 * Look up the medium-voltage cross-section rows (multi-row-header workbook)
 * for a product, trying name, de-suffixed slug, SKU and translation key in
 * order. Returns the match bundle with header/units/column keys, or null.
 */
export function findMediumVoltageCrossSectionExcelForProduct(product: ProductData): MediumVoltageCrossSectionExcelMatch | null {
  const idx = getMediumVoltageCrossSectionIndex();
  const candidates = [
    product.name,
    product.slug ? product.slug.replace(/-\d+$/g, '') : '',
    product.sku,
    product.translationKey,
  ];
  for (const candidate of candidates) {
    if (!candidate) continue;
    const rows = idx.rowsByDesignation.get(normalizeExcelKey(candidate)) || [];
    if (!rows.length) continue;
    return {
      headerRow: idx.headerRow,
      rows,
      units: idx.units,
      partNumberKey: idx.partNumberKey,
      crossSectionKey: idx.crossSectionKey,
      ratedVoltageKey: idx.ratedVoltageKey,
    };
  }
  return null;
}

View File

@@ -1,54 +0,0 @@
// Normalized product record consumed by the datasheet/PDF pipeline.
export interface ProductData {
  id: number;
  name: string;
  shortDescriptionHtml: string;
  descriptionHtml: string;
  applicationHtml: string;
  images: string[];
  featuredImage: string | null;
  sku: string;
  slug?: string;
  path?: string;
  translationKey?: string;
  locale?: 'en' | 'de';
  categories: Array<{ name: string }>;
  attributes: Array<{
    name: string;
    options: string[];
  }>;
  voltageType?: string;
}
// One labeled value in a key/value grid (unit is optional).
export type KeyValueItem = { label: string; value: string; unit?: string };
// A cross-section table for one voltage level: heading, meta items above the
// table, column headers, and one row of cells per cable configuration.
export type DatasheetVoltageTable = {
  voltageLabel: string;
  metaItems: KeyValueItem[];
  columns: Array<{ key: string; label: string }>;
  rows: Array<{ configuration: string; cells: string[] }>;
};
// Fully resolved, render-ready model for one datasheet PDF.
export type DatasheetModel = {
  locale: 'en' | 'de';
  product: {
    id: number;
    name: string;
    sku: string;
    categoriesLine: string;
    descriptionText: string;
    heroSrc: string | null;
    productUrl: string;
  };
  // Locale-specific static UI strings (see getLabels in utils).
  labels: {
    datasheet: string;
    description: string;
    technicalData: string;
    crossSection: string;
    sku: string;
    noImage: string;
  };
  technicalItems: KeyValueItem[];
  voltageTables: DatasheetVoltageTable[];
  legendItems: KeyValueItem[];
};

View File

@@ -1,72 +0,0 @@
import * as path from 'path';
import type { ProductData } from './types';
// Shared PDF-pipeline configuration: public site URL, static asset directory,
// and the processed asset-map file location.
export const CONFIG = {
  siteUrl: 'https://klz-cables.com',
  publicDir: path.join(process.cwd(), 'public'),
  assetMapFile: path.join(process.cwd(), 'data/processed/asset-map.json'),
} as const;
/**
 * Strip HTML tags and normalize typographic Unicode to characters the PDF
 * fonts render reliably (dashes, quotes, ellipsis, Ohm, micro, comparison
 * signs), then drop control characters and collapse whitespace.
 */
export function stripHtml(html: string): string {
  if (!html) return '';
  const substitutions: Array<[RegExp, string]> = [
    [/[\u00A0\u202F]/g, ' '],        // non-breaking / narrow spaces
    [/[\u2013\u2014]/g, '-'],        // en/em dash
    [/[\u2018\u2019]/g, "'"],        // curly single quotes
    [/[\u201C\u201D]/g, '"'],        // curly double quotes
    [/\u2026/g, '...'],              // ellipsis
    [/[\u2022]/g, '·'],              // bullet
    [/[\u2264]/g, '<='],
    [/[\u2265]/g, '>='],
    [/[\u2248]/g, '~'],
    [/[\u03A9\u2126]/g, 'Ohm'],      // Greek Omega / Ohm sign
    [/[\u00B5\u03BC]/g, 'u'],        // micro sign / Greek mu
    [/[\u2193]/g, 'v'],              // down arrow
    [/[\u2191]/g, '^'],              // up arrow
    [/[\u00B0]/g, '°'],              // NOTE(review): maps ° onto itself — looks vestigial; confirm intent
  ];
  let text = String(html).replace(/<[^>]*>/g, '').normalize('NFC');
  for (const [pattern, replacement] of substitutions) {
    text = text.replace(pattern, replacement);
  }
  // eslint-disable-next-line no-control-regex
  text = text.replace(/[\u0000-\u001F\u007F]/g, '');
  return text.replace(/\s+/g, ' ').trim();
}
/** Strip markup from a cell value and collapse whitespace runs to single spaces. */
export function normalizeValue(value: string): string {
  const plain = stripHtml(value);
  return plain.replace(/\s+/g, ' ').trim();
}
/** Absolute public URL for a product; falls back to the site root when no path is known. */
export function getProductUrl(product: ProductData): string {
  return product.path ? `${CONFIG.siteUrl}${product.path}` : CONFIG.siteUrl;
}
/**
 * Build a filesystem-safe PDF name like "my-product-en.pdf" from the product
 * slug, falling back to translationKey, then "product-<id>".
 */
export function generateFileName(product: ProductData, locale: 'en' | 'de'): string {
  const base = product.slug || product.translationKey || `product-${product.id}`;
  let slug = base.toLowerCase();
  slug = slug.replace(/[^a-z0-9-]/g, '-'); // anything exotic becomes a dash
  slug = slug.replace(/-+/g, '-');         // squeeze dash runs
  slug = slug.replace(/^-|-$/g, '');       // trim edge dashes
  return `${slug}-${locale}.pdf`;
}
/** Static UI strings for the datasheet PDF in the requested locale. */
export function getLabels(locale: 'en' | 'de') {
  const en = {
    datasheet: 'Technical Datasheet',
    description: 'APPLICATION',
    technicalData: 'TECHNICAL DATA',
    crossSection: 'Cross-sections/Voltage',
    sku: 'SKU',
    noImage: 'No image available',
  };
  const de = {
    datasheet: 'Technisches Datenblatt',
    description: 'ANWENDUNG',
    technicalData: 'TECHNISCHE DATEN',
    crossSection: 'Querschnitte/Spannung',
    sku: 'ARTIKELNUMMER',
    noImage: 'Kein Bild verfügbar',
  };
  return { en, de }[locale];
}

View File

@@ -1,88 +0,0 @@
import * as React from 'react';
import { Document, Image, Page, Text, View } from '@react-pdf/renderer';
import type { DatasheetModel, DatasheetVoltageTable } from '../model/types';
import { CONFIG } from '../model/utils';
import { styles } from './styles';
import { Header } from './components/Header';
import { Footer } from './components/Footer';
import { Section } from './components/Section';
import { KeyValueGrid } from './components/KeyValueGrid';
import { DenseTable } from './components/DenseTable';
// Pre-rendered image assets, each already converted to a PNG data: URL
// (react-pdf <Image> accepts data URLs directly). Null = asset unavailable.
type Assets = {
  logoDataUrl: string | null;
  heroDataUrl: string | null;
  qrDataUrl: string | null;
};
// Two-page A4 datasheet: page 1 carries the hero header, application text and
// technical data grid; page 2 carries the cross-section/voltage tables and an
// abbreviations legend.
export function DatasheetDocument(props: { model: DatasheetModel; assets: Assets }): React.ReactElement {
  const { model, assets } = props;
  const headerTitle = model.labels.datasheet;
  // Dense tables require compact headers (no wrapping). Use standard abbreviations.
  const firstColLabel = model.locale === 'de' ? 'Adern & Querschnitt' : 'Cores & Cross-section';
  return (
    <Document>
      <Page size="A4" style={styles.page}>
        <View style={styles.hero}>
          <Header title={headerTitle} logoDataUrl={assets.logoDataUrl} qrDataUrl={assets.qrDataUrl} isHero={true} />
          <View style={styles.productRow}>
            <View style={styles.productInfoCol}>
              <View style={styles.productHero}>
                {model.product.categoriesLine ? <Text style={styles.productMeta}>{model.product.categoriesLine}</Text> : null}
                <Text style={styles.productName}>{model.product.name}</Text>
              </View>
            </View>
            <View style={styles.productImageCol}>
              {/* Fall back to a localized placeholder when no hero image exists. */}
              {assets.heroDataUrl ? (
                <Image src={assets.heroDataUrl} style={styles.heroImage} />
              ) : (
                <Text style={styles.noImage}>{model.labels.noImage}</Text>
              )}
            </View>
          </View>
        </View>
        <Footer locale={model.locale} siteUrl={CONFIG.siteUrl} />
        <View style={styles.content}>
          {model.product.descriptionText ? (
            <Section title={model.labels.description} minPresenceAhead={24}>
              <Text style={styles.body}>{model.product.descriptionText}</Text>
            </Section>
          ) : null}
          {model.technicalItems.length ? (
            <Section title={model.labels.technicalData} minPresenceAhead={24}>
              <KeyValueGrid items={model.technicalItems} />
            </Section>
          ) : null}
        </View>
      </Page>
      <Page size="A4" style={styles.page}>
        <Header title={headerTitle} logoDataUrl={assets.logoDataUrl} qrDataUrl={assets.qrDataUrl} />
        <Footer locale={model.locale} siteUrl={CONFIG.siteUrl} />
        <View style={styles.content}>
          {/* One dense table per voltage level; minPresenceAhead keeps the
              heading from being orphaned at a page break. */}
          {model.voltageTables.map((t: DatasheetVoltageTable) => (
            <View key={t.voltageLabel} style={{ marginBottom: 14 }} break={false} minPresenceAhead={24}>
              <Text style={styles.sectionTitle}>{`${model.labels.crossSection}${t.voltageLabel}`}</Text>
              <View style={styles.sectionAccent} />
              <DenseTable table={{ columns: t.columns, rows: t.rows }} firstColLabel={firstColLabel} />
            </View>
          ))}
          {model.legendItems.length ? (
            <Section title={model.locale === 'de' ? 'ABKÜRZUNGEN' : 'ABBREVIATIONS'} minPresenceAhead={24}>
              <KeyValueGrid items={model.legendItems} />
            </Section>
          ) : null}
        </View>
      </Page>
    </Document>
  );
}

View File

@@ -1,81 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
// Minimal structural type for the subset of sharp we use (input -> PNG buffer).
type SharpLike = (input?: unknown, options?: unknown) => { png: () => { toBuffer: () => Promise<Buffer> } };
// Cached sharp instance; the module is imported lazily so that code paths
// which never touch images avoid loading the native binding.
let sharpFn: SharpLike | null = null;
async function getSharp(): Promise<SharpLike> {
  if (sharpFn) return sharpFn;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const mod: any = await import('sharp');
  // Handle both ESM default export and CJS module shapes.
  sharpFn = (mod?.default || mod) as SharpLike;
  return sharpFn;
}
// Root of locally served static assets, used for "/..." image sources.
const PUBLIC_DIR = path.join(process.cwd(), 'public');
// Download a remote resource and return its raw bytes; throws on non-2xx.
async function fetchBytes(url: string): Promise<Uint8Array> {
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Failed to fetch ${url}: ${res.status} ${res.statusText}`);
  return new Uint8Array(await res.arrayBuffer());
}
// Read a "/path" asset from the public directory synchronously.
async function readBytesFromPublic(localPath: string): Promise<Uint8Array> {
  const abs = path.join(PUBLIC_DIR, localPath.replace(/^\//, ''));
  return new Uint8Array(fs.readFileSync(abs));
}
/**
 * Recolor every fill in an SVG — named "white" or any 6-digit hex, whether a
 * style property or an attribute — to solid black so the (normally white)
 * logo prints correctly on the light PDF background.
 */
function transformLogoSvgToPrintBlack(svg: string): string {
  const rules: Array<[RegExp, string]> = [
    [/fill\s*:\s*white/gi, 'fill:#000000'],
    [/fill\s*=\s*"white"/gi, 'fill="#000000"'],
    [/fill\s*=\s*'white'/gi, "fill='#000000'"],
    [/fill\s*:\s*#[0-9a-fA-F]{6}/gi, 'fill:#000000'],
    [/fill\s*=\s*"#[0-9a-fA-F]{6}"/gi, 'fill="#000000"'],
    [/fill\s*=\s*'#[0-9a-fA-F]{6}'/gi, "fill='#000000'"],
  ];
  let out = svg;
  for (const [pattern, replacement] of rules) {
    out = out.replace(pattern, replacement);
  }
  return out;
}
// Convert arbitrary image bytes to PNG via sharp. PNG input is passed through
// untouched. The site logo SVGs (white-on-transparent) are first recolored to
// black so they remain visible on the printed page.
async function toPngBytes(inputBytes: Uint8Array, inputHint: string): Promise<Uint8Array> {
  // inputHint is a URL or path; only its extension is used for format sniffing.
  const ext = (path.extname(inputHint).toLowerCase() || '').replace('.', '');
  if (ext === 'png') return inputBytes;
  if (ext === 'svg' && (/\/media\/logo\.svg$/i.test(inputHint) || /\/logo-blue\.svg$/i.test(inputHint))) {
    const svg = Buffer.from(inputBytes).toString('utf8');
    inputBytes = new Uint8Array(Buffer.from(transformLogoSvgToPrintBlack(svg), 'utf8'));
  }
  const sharp = await getSharp();
  return new Uint8Array(await sharp(Buffer.from(inputBytes)).png().toBuffer());
}
/** Encode raw PNG bytes as a base64 data: URL consumable by react-pdf's <Image>. */
function toDataUrlPng(bytes: Uint8Array): string {
  const base64 = Buffer.from(bytes).toString('base64');
  return `data:image/png;base64,${base64}`;
}
// Load an image from the local public/ directory (src starting with "/") or
// from a remote URL, convert it to PNG, and return it as a data: URL.
// Best-effort: any failure (missing file, fetch error, conversion error)
// yields null so the PDF renders without the image instead of failing.
export async function loadImageAsPngDataUrl(src: string | null): Promise<string | null> {
  if (!src) return null;
  try {
    if (src.startsWith('/')) {
      const bytes = await readBytesFromPublic(src);
      const png = await toPngBytes(bytes, src);
      return toDataUrlPng(png);
    }
    const bytes = await fetchBytes(src);
    const png = await toPngBytes(bytes, src);
    return toDataUrlPng(png);
  } catch {
    return null;
  }
}
// Render `data` as a 180x180 QR code PNG data URL using the public
// api.qrserver.com service. Best-effort: returns null on any failure.
// NOTE(review): this depends on an external third-party service at build
// time — consider generating QR codes locally if that becomes a problem.
export async function loadQrAsPngDataUrl(data: string): Promise<string | null> {
  try {
    const safe = encodeURIComponent(data);
    const url = `https://api.qrserver.com/v1/create-qr-code/?size=180x180&data=${safe}`;
    const bytes = await fetchBytes(url);
    const png = await toPngBytes(bytes, url);
    return toDataUrlPng(png);
  } catch {
    return null;
  }
}

View File

@@ -1,215 +0,0 @@
import * as React from 'react';
import { Text, View } from '@react-pdf/renderer';
import type { DatasheetVoltageTable } from '../../model/types';
import { styles } from '../styles';
/** Constrain n to [min, max] (min wins when the bounds are inverted). */
function clamp(n: number, min: number, max: number): number {
  const capped = Math.min(max, n);
  return Math.max(min, capped);
}
/** Coerce any cell value to a trimmed, single-spaced string for width estimation. */
function normTextForMeasure(v: unknown): string {
  const text = String(v ?? '');
  return text.trim().replace(/\s+/g, ' ');
}
/** Approximate display width of a value as its normalized character count. */
function textLen(v: unknown): number {
  const normalized = normTextForMeasure(v);
  return normalized.length;
}
// Split `total` across weights.length columns, giving each at least `minEach`
// and at most `maxEach`, with the surplus shared proportionally to `weights`.
// Returns fractions that sum exactly to `total` (last column absorbs any
// floating-point drift). If the minimums alone exceed `total`, they are
// scaled down proportionally instead.
function distributeWithMinMax(weights: number[], total: number, minEach: number, maxEach: number): number[] {
  const n = weights.length;
  if (!n) return [];
  const mins = Array.from({ length: n }, () => minEach);
  const maxs = Array.from({ length: n }, () => maxEach);
  // If mins don't fit, scale them down proportionally.
  const minSum = mins.reduce((a, b) => a + b, 0);
  if (minSum > total) {
    const k = total / minSum;
    return mins.map(m => m * k);
  }
  const result = mins.slice();
  let remaining = total - minSum;
  let remainingIdx = Array.from({ length: n }, (_, i) => i);
  // Distribute remaining proportionally, respecting max constraints.
  // Loop is guaranteed to terminate because each iteration either:
  // - removes at least one index due to hitting max, or
  // - exhausts `remaining`.
  while (remaining > 1e-9 && remainingIdx.length) {
    const wSum = remainingIdx.reduce((acc, i) => acc + Math.max(0, weights[i] || 0), 0);
    if (wSum <= 1e-9) {
      // No meaningful weights: distribute evenly.
      const even = remaining / remainingIdx.length;
      for (const i of remainingIdx) result[i] += even;
      remaining = 0;
      break;
    }
    const nextIdx: number[] = [];
    for (const i of remainingIdx) {
      const w = Math.max(0, weights[i] || 0);
      const add = (w / wSum) * remaining;
      // Cap at the column's max; only the actually-consumed amount is
      // subtracted from `remaining`.
      const capped = Math.min(result[i] + add, maxs[i]);
      const used = capped - result[i];
      result[i] = capped;
      remaining -= used;
      if (result[i] + 1e-9 < maxs[i]) nextIdx.push(i);
    }
    remainingIdx = nextIdx;
  }
  // Numerical guard: force exact sum by adjusting the last column.
  const sum = result.reduce((a, b) => a + b, 0);
  const drift = total - sum;
  if (Math.abs(drift) > 1e-9) result[result.length - 1] += drift;
  return result;
}
/**
 * Dense voltage-table renderer with a fixed "designation" column and
 * content-weighted data columns.
 *
 * Column widths are emitted as explicit percentage strings (summing to exactly
 * 100% of the content width) computed from a string-length heuristic over the
 * header labels and cell values, because react-pdf exposes no reliable
 * text-measurement API at render time.
 *
 * @param props.table         Columns and rows to render.
 * @param props.firstColLabel Header label for the designation (first) column.
 */
export function DenseTable(props: {
  table: Pick<DatasheetVoltageTable, 'columns' | 'rows'>;
  firstColLabel: string;
}): React.ReactElement {
  const cols = props.table.columns;
  const rows = props.table.rows;
  const headerText = (label: string): string => {
    // Table headers must NEVER wrap into a second line.
    // react-pdf can wrap on spaces, so we replace whitespace with NBSP.
    return String(label || '').replace(/\s+/g, '\u00A0').trim();
  };
  // Column widths: use explicit percentages (no rounding gaps) so the table always
  // consumes the full content width.
  // Goal:
  // - keep the designation column *not too wide*
  // - distribute data columns by estimated content width (header + cells)
  //   so columns better fit their data
  // Make first column denser so numeric columns get more room.
  // (Long designations can still wrap in body if needed, but table scanability
  // benefits more from wider data columns.)
  const cfgMin = 0.14;
  const cfgMax = 0.23;
  // A content-based heuristic.
  // React-PDF doesn't expose a reliable text-measurement API at render time,
  // so we approximate width by string length (compressed via sqrt to reduce outliers).
  const cfgContentLen = Math.max(textLen(props.firstColLabel), ...rows.map(r => textLen(r.configuration)), 8);
  const dataContentLens = cols.map((c, ci) => {
    const headerL = textLen(c.label);
    let cellMax = 0;
    for (const r of rows) cellMax = Math.max(cellMax, textLen(r.cells[ci]));
    // Slightly prioritize the header (scanability) over a single long cell.
    return Math.max(headerL * 1.15, cellMax, 3);
  });
  // Use mostly-linear weights so long headers get noticeably more space.
  const cfgWeight = cfgContentLen * 1.05;
  const dataWeights = dataContentLens.map(l => l);
  const dataWeightSum = dataWeights.reduce((a, b) => a + b, 0);
  const rawCfgPct = dataWeightSum > 0 ? cfgWeight / (cfgWeight + dataWeightSum) : 0.28;
  let cfgPct = clamp(rawCfgPct, cfgMin, cfgMax);
  // Ensure a minimum per-data-column width; if needed, shrink cfgPct.
  // These floors are intentionally generous. Too-narrow columns are worse than a
  // slightly narrower first column for scanability.
  const minDataPct = cols.length >= 14 ? 0.045 : cols.length >= 12 ? 0.05 : cols.length >= 10 ? 0.055 : 0.06;
  const cfgPctMaxForMinData = 1 - cols.length * minDataPct;
  if (Number.isFinite(cfgPctMaxForMinData)) cfgPct = Math.min(cfgPct, cfgPctMaxForMinData);
  cfgPct = clamp(cfgPct, cfgMin, cfgMax);
  const dataTotal = Math.max(0, 1 - cfgPct);
  const maxDataPct = Math.min(0.24, Math.max(minDataPct * 2.8, dataTotal * 0.55));
  const dataPcts = distributeWithMinMax(dataWeights, dataTotal, minDataPct, maxDataPct);
  const cfgW = `${(cfgPct * 100).toFixed(4)}%`;
  const dataWs = dataPcts.map((p, idx) => {
    // Keep the last column as the remainder so percentages sum to exactly 100%.
    if (idx === dataPcts.length - 1) {
      const used = dataPcts.slice(0, -1).reduce((a, b) => a + b, 0);
      const remainder = Math.max(0, dataTotal - used);
      return `${(remainder * 100).toFixed(4)}%`;
    }
    return `${(p * 100).toFixed(4)}%`;
  });
  // Shrink header type as the column count grows so labels stay single-line.
  const headerFontSize = cols.length >= 14 ? 5.7 : cols.length >= 12 ? 5.9 : cols.length >= 10 ? 6.2 : 6.6;
  return (
    <View style={styles.tableWrap} break={false} minPresenceAhead={24}>
      <View style={styles.tableHeader} wrap={false}>
        <View style={{ width: cfgW }}>
          <Text
            style={[
              styles.tableHeaderCell,
              styles.tableHeaderCellCfg,
              { fontSize: headerFontSize, paddingHorizontal: 3 },
              cols.length ? styles.tableHeaderCellDivider : null,
            ]}
            wrap={false}
          >
            {headerText(props.firstColLabel)}
          </Text>
        </View>
        {cols.map((c, idx) => {
          const isLast = idx === cols.length - 1;
          return (
            <View key={c.key} style={{ width: dataWs[idx] }}>
              <Text
                style={[
                  styles.tableHeaderCell,
                  { fontSize: headerFontSize, paddingHorizontal: 3 },
                  !isLast ? styles.tableHeaderCellDivider : null,
                ]}
                wrap={false}
              >
                {headerText(c.label)}
              </Text>
            </View>
          );
        })}
      </View>
      {rows.map((r, ri) => (
        <View
          key={`${r.configuration}-${ri}`}
          style={[styles.tableRow, ri % 2 === 0 ? styles.tableRowAlt : null]}
          wrap={false}
          // If the row doesn't fit, move the whole row to the next page.
          // This prevents page breaks mid-row.
          minPresenceAhead={16}
        >
          <View style={{ width: cfgW }} wrap={false}>
            <Text
              style={[
                styles.tableCell,
                styles.tableCellCfg,
                // Denser first column: slightly smaller type + tighter padding.
                { fontSize: 6.2, paddingHorizontal: 3 },
                cols.length ? styles.tableCellDivider : null,
              ]}
              wrap={false}
            >
              {r.configuration}
            </Text>
          </View>
          {r.cells.map((cell, ci) => {
            const isLast = ci === r.cells.length - 1;
            return (
              <View key={`${cols[ci]?.key || ci}`} style={{ width: dataWs[ci] }} wrap={false}>
                <Text style={[styles.tableCell, !isLast ? styles.tableCellDivider : null]} wrap={false}>
                  {cell}
                </Text>
              </View>
            );
          })}
        </View>
      ))}
    </View>
  );
}

View File

@@ -1,22 +0,0 @@
import * as React from 'react';
import { Text, View } from '@react-pdf/renderer';
import { styles } from '../styles';
/**
 * Fixed page footer: brand mark, generation date, and "page / total" counter.
 *
 * @param props.locale  'en' formats the date as en-US, 'de' as de-DE.
 * @param props.siteUrl Accepted for backward compatibility; currently unused —
 *                      the footer renders no link (was a dead local before).
 */
export function Footer(props: { locale: 'en' | 'de'; siteUrl?: string }): React.ReactElement {
  // Human-readable generation date in the datasheet's locale.
  const date = new Date().toLocaleDateString(props.locale === 'en' ? 'en-US' : 'de-DE', {
    year: 'numeric',
    month: 'long',
    day: 'numeric',
  });
  return (
    <View style={styles.footer} fixed>
      <Text style={styles.footerBrand}>KLZ CABLES</Text>
      <Text style={styles.footerText}>{date}</Text>
      {/* react-pdf resolves pageNumber/totalPages at layout time via render(). */}
      <Text style={styles.footerText} render={({ pageNumber, totalPages }) => `${pageNumber} / ${totalPages}`} />
    </View>
  );
}

View File

@@ -1,25 +0,0 @@
import * as React from 'react';
import { Image, Text, View } from '@react-pdf/renderer';
import { styles } from '../styles';
/**
 * Page header with logo (or "KLZ" text fallback), title, and optional QR code.
 * Hero pages use the default header chrome; inner pages strip the padding,
 * background and border and widen the top spacing instead.
 */
export function Header(props: { title: string; logoDataUrl?: string | null; qrDataUrl?: string | null; isHero?: boolean }): React.ReactElement {
  const heroMode = props.isHero === true;
  const innerPageOverrides = {
    paddingHorizontal: 0,
    backgroundColor: 'transparent',
    borderBottomWidth: 0,
    marginBottom: 24,
    paddingTop: 40,
  };
  const containerStyle = heroMode ? styles.header : [styles.header, innerPageOverrides];
  const logoNode = props.logoDataUrl ? (
    <Image src={props.logoDataUrl} style={styles.logo} />
  ) : (
    <Text style={styles.brandFallback}>KLZ</Text>
  );
  return (
    <View style={containerStyle}>
      <View style={styles.headerLeft}>{logoNode}</View>
      <View style={styles.headerRight}>
        <Text style={styles.headerTitle}>{props.title}</Text>
        {props.qrDataUrl ? <Image src={props.qrDataUrl} style={styles.qr} /> : null}
      </View>
    </View>
  );
}

View File

@@ -1,36 +0,0 @@
import * as React from 'react';
import { Text, View } from '@react-pdf/renderer';
import type { KeyValueItem } from '../../model/types';
import { styles } from '../styles';
/**
 * Two-column label/value grid with zebra striping.
 * Items missing either a label or a value are dropped; renders nothing
 * when no item survives the filter.
 */
export function KeyValueGrid(props: { items: KeyValueItem[] }): React.ReactElement | null {
  const visible = (props.items || []).filter(entry => entry.label && entry.value);
  if (visible.length === 0) return null;
  return (
    <View style={styles.kvGrid}>
      {visible.map((entry, idx) => {
        const zebraStyle = idx % 2 === 0 ? styles.kvRowAlt : null;
        const lastStyle = idx === visible.length - 1 ? styles.kvRowLast : null;
        // Append the unit (when present) to the displayed value.
        const display = entry.unit ? `${entry.value} ${entry.unit}` : entry.value;
        return (
          <View
            key={`${entry.label}-${idx}`}
            style={[styles.kvRow, zebraStyle, lastStyle]}
            wrap={false}
            minPresenceAhead={12}
          >
            <View style={[styles.kvCell, { width: '50%' }]}>
              <Text style={styles.kvLabelText}>{entry.label}</Text>
            </View>
            <View style={[styles.kvCell, { width: '50%' }]}>
              <Text style={styles.kvValueText}>{display}</Text>
            </View>
          </View>
        );
      })}
    </View>
  );
}

View File

@@ -1,20 +0,0 @@
import * as React from 'react';
import { Text, View } from '@react-pdf/renderer';
import { styles } from '../styles';
/**
 * Titled content section with the brand accent bar under the heading.
 *
 * @param props.boxed            Accepted for backward compatibility; a boxed
 *                               rendering variant is not implemented, so the
 *                               flag currently has no effect (was a dead local).
 * @param props.minPresenceAhead Forwarded to react-pdf to discourage a page
 *                               break immediately after the section title.
 */
export function Section(props: {
  title: string;
  children: React.ReactNode;
  boxed?: boolean;
  minPresenceAhead?: number;
}): React.ReactElement {
  return (
    <View style={styles.section} minPresenceAhead={props.minPresenceAhead}>
      <Text style={styles.sectionTitle}>{props.title}</Text>
      <View style={styles.sectionAccent} />
      {props.children}
    </View>
  );
}

View File

@@ -1,26 +0,0 @@
import * as React from 'react';
import { renderToBuffer } from '@react-pdf/renderer';
import type { ProductData } from '../model/types';
import { buildDatasheetModel } from '../model/build-datasheet-model';
import { loadImageAsPngDataUrl, loadQrAsPngDataUrl } from './assets';
import { DatasheetDocument } from './DatasheetDocument';
export async function generateDatasheetPdfBuffer(args: {
product: ProductData;
locale: 'en' | 'de';
}): Promise<Buffer> {
const model = buildDatasheetModel({ product: args.product, locale: args.locale });
const logoDataUrl =
(await loadImageAsPngDataUrl('/logo-blue.svg')) ||
(await loadImageAsPngDataUrl('/logo-white.svg')) ||
null;
const heroDataUrl = await loadImageAsPngDataUrl(model.product.heroSrc);
const qrDataUrl = await loadQrAsPngDataUrl(model.product.productUrl);
const element = <DatasheetDocument model={model} assets={{ logoDataUrl, heroDataUrl, qrDataUrl }} />;
return await renderToBuffer(element);
}

View File

@@ -1,216 +0,0 @@
import { Font, StyleSheet } from '@react-pdf/renderer';
// Prevent automatic word hyphenation, which can create multi-line table headers
// even when we try to keep them single-line.
Font.registerHyphenationCallback(word => [word]);
// Brand palette shared by every datasheet style below.
export const COLORS = {
  primary: '#001a4d',
  primaryDark: '#000d26',
  accent: '#82ed20',
  textPrimary: '#111827',
  textSecondary: '#4b5563',
  textLight: '#9ca3af',
  neutral: '#f8f9fa',
  border: '#e5e7eb',
} as const;
// Shared react-pdf stylesheet for the datasheet document.
// All numeric sizes are react-pdf points (1/72 inch).
export const styles = StyleSheet.create({
  page: {
    paddingTop: 0,
    paddingLeft: 30,
    paddingRight: 30,
    paddingBottom: 60, // leaves room for the fixed footer (absolute, bottom: 30)
    fontFamily: 'Helvetica',
    fontSize: 10,
    color: COLORS.textPrimary,
    backgroundColor: '#FFFFFF',
  },
  // Hero-style header
  hero: {
    backgroundColor: '#FFFFFF',
    paddingTop: 30,
    paddingBottom: 0,
    paddingHorizontal: 0,
    marginBottom: 20,
    position: 'relative',
    borderBottomWidth: 0,
    borderBottomColor: COLORS.border,
  },
  header: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
    marginBottom: 24,
    paddingHorizontal: 0,
  },
  headerLeft: { flexDirection: 'row', alignItems: 'center', gap: 10 },
  logo: { width: 100, height: 22, objectFit: 'contain' },
  brandFallback: { fontSize: 20, fontWeight: 700, color: COLORS.primaryDark, letterSpacing: 1, textTransform: 'uppercase' },
  headerRight: { flexDirection: 'row', alignItems: 'center', gap: 10 },
  headerTitle: { fontSize: 9, fontWeight: 700, color: COLORS.primary, letterSpacing: 1.5, textTransform: 'uppercase' },
  qr: { width: 30, height: 30, objectFit: 'contain' },
  // Product intro row: info column next to a bordered image box.
  productRow: {
    flexDirection: 'row',
    alignItems: 'center',
    gap: 20,
  },
  productInfoCol: {
    flex: 1,
    justifyContent: 'center',
  },
  productImageCol: {
    flex: 1,
    height: 120,
    justifyContent: 'center',
    alignItems: 'center',
    borderRadius: 8,
    borderWidth: 1,
    borderColor: COLORS.border,
    backgroundColor: '#FFFFFF',
    overflow: 'hidden',
  },
  productHero: {
    marginTop: 0,
    paddingHorizontal: 0,
  },
  productName: {
    fontSize: 24,
    fontWeight: 700,
    color: COLORS.primaryDark,
    marginBottom: 0,
    textTransform: 'uppercase',
    letterSpacing: -0.5,
  },
  productMeta: {
    fontSize: 9,
    color: COLORS.textSecondary,
    fontWeight: 700,
    textTransform: 'uppercase',
    letterSpacing: 1,
    marginBottom: 4,
  },
  content: {
    paddingHorizontal: 0,
  },
  // Fixed footer, mirrored by the page's paddingBottom.
  footer: {
    position: 'absolute',
    left: 30,
    right: 30,
    bottom: 30,
    paddingTop: 16,
    borderTopWidth: 1,
    borderTopColor: COLORS.border,
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
  },
  footerBrand: { fontSize: 9, fontWeight: 700, color: COLORS.primaryDark, textTransform: 'uppercase', letterSpacing: 1 },
  footerText: { fontSize: 8, color: COLORS.textLight, fontWeight: 500, textTransform: 'uppercase', letterSpacing: 0.5 },
  h1: { fontSize: 22, fontWeight: 700, color: COLORS.primaryDark, marginBottom: 8, textTransform: 'uppercase' },
  subhead: { fontSize: 10, fontWeight: 700, color: COLORS.textSecondary, marginBottom: 16, textTransform: 'uppercase', letterSpacing: 0.5 },
  heroBox: {
    height: 180,
    borderRadius: 12,
    borderWidth: 1,
    borderColor: COLORS.border,
    backgroundColor: '#FFFFFF',
    marginBottom: 24,
    justifyContent: 'center',
    overflow: 'hidden',
    padding: 0,
  },
  heroImage: { width: '100%', height: '100%', objectFit: 'contain' },
  noImage: { fontSize: 8, color: COLORS.textLight, textAlign: 'center' },
  section: {
    marginBottom: 10,
  },
  sectionTitle: {
    fontSize: 14,
    fontWeight: 700,
    color: COLORS.primaryDark,
    marginBottom: 8,
    textTransform: 'uppercase',
    letterSpacing: -0.2,
  },
  // Green accent bar rendered under each section title.
  sectionAccent: {
    width: 30,
    height: 3,
    backgroundColor: COLORS.accent,
    marginBottom: 8,
    borderRadius: 1.5,
  },
  body: { fontSize: 10, lineHeight: 1.6, color: COLORS.textSecondary },
  // Key/value grid (label/value rows with zebra striping).
  kvGrid: {
    width: '100%',
    borderWidth: 1,
    borderColor: COLORS.border,
    borderRadius: 8,
    overflow: 'hidden',
  },
  kvRow: {
    flexDirection: 'row',
    borderBottomWidth: 1,
    borderBottomColor: COLORS.border,
  },
  kvRowAlt: { backgroundColor: COLORS.neutral },
  kvRowLast: { borderBottomWidth: 0 },
  kvCell: { paddingVertical: 3, paddingHorizontal: 12 },
  kvMidDivider: {
    borderRightWidth: 1,
    borderRightColor: COLORS.border,
  },
  kvLabelText: { fontSize: 8, fontWeight: 700, color: COLORS.primaryDark, textTransform: 'uppercase', letterSpacing: 0.3 },
  kvValueText: { fontSize: 9, color: COLORS.textPrimary, fontWeight: 500 },
  // Dense data tables (see DenseTable).
  tableWrap: {
    width: '100%',
    borderWidth: 1,
    borderColor: COLORS.border,
    borderRadius: 8,
    overflow: 'hidden',
    marginBottom: 16,
  },
  tableHeader: {
    width: '100%',
    flexDirection: 'row',
    backgroundColor: COLORS.neutral,
    borderBottomWidth: 1,
    borderBottomColor: COLORS.border,
  },
  tableHeaderCell: {
    paddingVertical: 8,
    paddingHorizontal: 6,
    fontSize: 7, // NOTE: DenseTable overrides fontSize per column count
    fontWeight: 700,
    color: COLORS.primaryDark,
    textTransform: 'uppercase',
    letterSpacing: 0.2,
  },
  tableHeaderCellCfg: {
    paddingHorizontal: 8,
  },
  tableHeaderCellDivider: {
    borderRightWidth: 1,
    borderRightColor: COLORS.border,
  },
  tableRow: { width: '100%', flexDirection: 'row', borderBottomWidth: 1, borderBottomColor: COLORS.border },
  tableRowAlt: { backgroundColor: '#FFFFFF' },
  tableCell: { paddingVertical: 6, paddingHorizontal: 6, fontSize: 7, color: COLORS.textSecondary, fontWeight: 500 },
  tableCellCfg: {
    paddingHorizontal: 8,
  },
  tableCellDivider: {
    borderRightWidth: 1,
    borderRightColor: COLORS.border,
  },
});

View File

@@ -1,176 +0,0 @@
import { createDirectus, rest, authentication, readDashboards, deleteDashboard, createDashboard, createPanel } from '@directus/sdk';
/**
 * Drops every existing Directus dashboard and recreates the
 * "Feedback Operational Intelligence" dashboard with its panels.
 *
 * SECURITY: connection settings come from the environment — credentials must
 * never be hard-coded in source control (the previously committed password
 * must be rotated):
 *   DIRECTUS_URL            default http://localhost:8059
 *   DIRECTUS_ADMIN_EMAIL    required
 *   DIRECTUS_ADMIN_PASSWORD required
 */
async function rebuildDashboards() {
  const url = process.env.DIRECTUS_URL || 'http://localhost:8059';
  const email = process.env.DIRECTUS_ADMIN_EMAIL;
  const password = process.env.DIRECTUS_ADMIN_PASSWORD;
  if (!email || !password) {
    console.error('❌ DIRECTUS_ADMIN_EMAIL and DIRECTUS_ADMIN_PASSWORD must be set.');
    process.exitCode = 1;
    return;
  }
  console.log(`🚀 Rebuilding Dashboards: ${url}`);
  const client = createDirectus(url).with(authentication('json')).with(rest());
  try {
    await client.login(email, password);
    console.log('✅ Authenticated');
    // 1. Delete existing dashboards.
    const oldDashboards = await client.request(readDashboards());
    for (const db of oldDashboards) {
      console.log(`Deleting dashboard: ${db.name} (${db.id})`);
      await client.request(deleteDashboard(db.id));
    }
    // 2. Create the "Intelligence" Dashboard.
    const dashboard = await client.request(createDashboard({
      name: 'Feedback Operational Intelligence',
      note: 'High-fidelity overview of user feedback and system status.',
      icon: 'analytics',
      color: '#000000'
    }));
    console.log(`Created Dashboard: ${dashboard.id}`);
    // Helper: all row-1 metric panels share size, collection and aggregation.
    const createMetric = (opts: {
      name: string;
      x: number;
      color: string;
      icon: string;
      filter?: Record<string, unknown>;
    }) => client.request(createPanel({
      dashboard: dashboard.id as string,
      name: opts.name,
      type: 'metric',
      width: 6,
      height: 4,
      position_x: opts.x,
      position_y: 0,
      options: {
        collection: 'visual_feedback',
        function: 'count',
        field: 'id',
        ...(opts.filter ? { filter: opts.filter } : {}),
        color: opts.color,
        icon: opts.icon
      }
    }));
    // 3. Add panels (grid is 24 units wide).
    // --- Row 1: Key Metrics ---
    await createMetric({ name: 'Total Submissions', x: 0, color: '#666666', icon: 'all_inbox' });
    await createMetric({ name: 'Pending Action', x: 6, color: '#FF0000', icon: 'warning', filter: { status: { _eq: 'open' } } });
    await createMetric({ name: 'Bugs Reported', x: 12, color: '#E91E63', icon: 'bug_report', filter: { type: { _eq: 'bug' } } });
    await createMetric({ name: 'Feature Requests', x: 18, color: '#4CAF50', icon: 'lightbulb', filter: { type: { _eq: 'feature' } } });
    // --- Row 2: Trends and Distribution ---
    // Time series (volume per day).
    await client.request(createPanel({
      dashboard: dashboard.id as string,
      name: 'Feedback Volume (Last 30 Days)',
      type: 'chart-timeseries',
      width: 16,
      height: 10,
      position_x: 0,
      position_y: 4,
      options: {
        collection: 'visual_feedback',
        function: 'count',
        field: 'id',
        group: 'date_created',
        interval: 'day',
        show_marker: true,
        color: '#000000'
      }
    }));
    // Category distribution (donut pie).
    await client.request(createPanel({
      dashboard: dashboard.id as string,
      name: 'Type Distribution',
      type: 'chart-pie',
      width: 8,
      height: 10,
      position_x: 16,
      position_y: 4,
      options: {
        collection: 'visual_feedback',
        function: 'count',
        field: 'id',
        group: 'type',
        donut: true,
        show_labels: true
      }
    }));
    // --- Row 3: Details ---
    await client.request(createPanel({
      dashboard: dashboard.id as string,
      name: 'Recent Feedback (High Priority)',
      type: 'list',
      width: 24,
      height: 10,
      position_x: 0,
      position_y: 14,
      options: {
        collection: 'visual_feedback',
        fields: ['user_name', 'type', 'text', 'status', 'date_created'],
        sort: ['-date_created'],
        limit: 10
      }
    }));
    console.log('✅ Dashboard rebuilt successfully');
  } catch (e: any) {
    console.error('❌ Rebuild failed:');
    console.error(e.message);
    if (e.errors) console.error(JSON.stringify(e.errors, null, 2));
  }
}
rebuildDashboards();

View File

@@ -1,208 +0,0 @@
import client, { ensureAuthenticated } from '../lib/directus';
import {
deleteCollection,
deleteFile,
readFiles,
updateSettings,
uploadFiles
} from '@directus/sdk';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Helper for ESM __dirname
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
/**
 * DESTRUCTIVE recovery script: deletes every custom collection and every
 * uploaded file in the Directus instance, then re-applies the premium
 * branding (logos, login background, theme overrides).
 *
 * NOTE(review): intended as a one-off manual tool — do not wire into CI.
 */
async function revertAndRestoreBranding() {
  console.log('🚨 REVERTING EVERYTHING - RESTORING BRANDING ONLY 🚨');
  await ensureAuthenticated();
  // 1. DELETE ALL COLLECTIONS
  // Order matters: translation/link collections are dropped before their parents.
  const collectionsToDelete = [
    'categories_link',
    'categories_translations', 'categories',
    'products_translations', 'products',
    'posts_translations', 'posts',
    'pages_translations', 'pages',
    'globals_translations', 'globals'
  ];
  console.log('🗑️ Deleting custom collections...');
  for (const col of collectionsToDelete) {
    try {
      await client.request(deleteCollection(col));
      console.log(`✅ Deleted collection: ${col}`);
    } catch (e: any) {
      // Missing collections are expected on a partially-reverted instance.
      console.log(`  Collection ${col} not found or already deleted.`);
    }
  }
  // 2. DELETE ALL FILES
  console.log('🗑️ Deleting ALL files...');
  try {
    const files = await client.request(readFiles({ limit: -1 }));
    if (files && files.length > 0) {
      const ids = files.map(f => f.id);
      await client.request(deleteFile(ids)); // Batch delete if supported by SDK version, else loop
      console.log(`✅ Deleted ${ids.length} files.`);
    } else {
      console.log('  No files to delete.');
    }
  } catch (e: any) {
    // Fallback to loop if batch fails
    try {
      const files = await client.request(readFiles({ limit: -1 }));
      for (const f of files) {
        await client.request(deleteFile(f.id));
      }
      console.log(`✅ Deleted files individually.`);
    } catch (err) { } // deliberately ignored: file cleanup is best-effort
  }
  // 3. RESTORE BRANDING (Exact copy of setup-directus-branding.ts logic)
  console.log('🎨 Restoring Premium Branding...');
  try {
    // Map a file extension to the MIME type sent with the upload.
    const getMimeType = (filePath: string) => {
      const ext = path.extname(filePath).toLowerCase();
      switch (ext) {
        case '.svg': return 'image/svg+xml';
        case '.png': return 'image/png';
        case '.jpg':
        case '.jpeg': return 'image/jpeg';
        case '.ico': return 'image/x-icon';
        default: return 'application/octet-stream';
      }
    };
    // Upload a local file as a Directus asset; returns the file id or null.
    const uploadAsset = async (filePath: string, title: string) => {
      if (!fs.existsSync(filePath)) {
        console.warn(`⚠️ File not found: ${filePath}`);
        return null;
      }
      const mimeType = getMimeType(filePath);
      const form = new FormData();
      const fileBuffer = fs.readFileSync(filePath);
      const blob = new Blob([fileBuffer], { type: mimeType });
      form.append('file', blob, path.basename(filePath));
      form.append('title', title);
      const res = await client.request(uploadFiles(form));
      return res.id;
    };
    const logoWhiteId = await uploadAsset(path.resolve(__dirname, '../public/logo-white.svg'), 'Logo White');
    const logoBlueId = await uploadAsset(path.resolve(__dirname, '../public/logo-blue.svg'), 'Logo Blue');
    const faviconId = await uploadAsset(path.resolve(__dirname, '../public/favicon.ico'), 'Favicon');
    // Smoother Background SVG
    // The SVG is written to disk only so uploadAsset can read it; removed below.
    const bgSvgPath = path.resolve(__dirname, '../public/login-bg.svg');
    fs.writeFileSync(bgSvgPath, `<svg width="1920" height="1080" viewBox="0 0 1920 1080" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="1920" height="1080" fill="#001a4d"/>
<ellipse cx="960" cy="540" rx="800" ry="600" fill="url(#paint0_radial_premium)"/>
<defs>
<radialGradient id="paint0_radial_premium" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(960 540) rotate(90) scale(600 800)">
<stop stop-color="#003d82" stop-opacity="0.8"/>
<stop offset="1" stop-color="#001a4d" stop-opacity="0"/>
</radialGradient>
</defs>
</svg>`);
    const backgroundId = await uploadAsset(bgSvgPath, 'Login Bg');
    if (fs.existsSync(bgSvgPath)) fs.unlinkSync(bgSvgPath);
    // Update Settings
    const COLOR_PRIMARY = '#001a4d';
    const COLOR_ACCENT = '#82ed20';
    const COLOR_SECONDARY = '#003d82';
    // Injected into the login page via public_note (Directus renders it as HTML).
    const cssInjection = `
<style>
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap');
body, .v-app {
font-family: 'Inter', sans-serif !important;
-webkit-font-smoothing: antialiased;
}
.public-view .v-card {
background: rgba(255, 255, 255, 0.95) !important;
backdrop-filter: blur(20px);
border: 1px solid rgba(255, 255, 255, 0.3) !important;
border-radius: 32px !important;
box-shadow: 0 50px 100px -20px rgba(0, 0, 0, 0.4) !important;
padding: 40px !important;
}
.public-view .v-button {
border-radius: 9999px !important;
height: 56px !important;
font-weight: 600 !important;
letter-spacing: -0.01em !important;
transition: all 0.4s cubic-bezier(0.16, 1, 0.3, 1) !important;
}
.public-view .v-button:hover {
transform: translateY(-2px);
box-shadow: 0 15px 30px rgba(130, 237, 32, 0.2) !important;
}
.public-view .v-input {
--v-input-border-radius: 12px !important;
--v-input-background-color: #f8f9fa !important;
}
</style>
<div style="font-family: 'Inter', sans-serif; text-align: center; margin-top: 24px;">
<p style="color: rgba(255,255,255,0.7); font-size: 14px; margin-bottom: 4px; font-weight: 500;">KLZ INFRASTRUCTURE ENGINE</p>
<h1 style="color: #ffffff; font-size: 18px; font-weight: 700; margin: 0;">Sustainable Energy. <span style="color: #82ed20;">Industrial Reliability.</span></h1>
</div>
`;
    await client.request(updateSettings({
      project_name: 'KLZ Cables',
      project_url: 'https://klz-cables.com',
      project_color: COLOR_ACCENT,
      project_descriptor: 'Sustainable Energy Infrastructure',
      project_owner: 'KLZ Cables',
      project_logo: logoWhiteId as any,
      public_foreground: logoWhiteId as any,
      public_background: backgroundId as any,
      public_note: cssInjection,
      public_favicon: faviconId as any,
      theme_light_overrides: {
        "primary": COLOR_ACCENT,
        "secondary": COLOR_SECONDARY,
        "background": "#f1f3f7",
        "backgroundNormal": "#ffffff",
        "backgroundAccent": "#eef2ff",
        "navigationBackground": COLOR_PRIMARY,
        "navigationForeground": "#ffffff",
        "navigationBackgroundHover": "rgba(255,255,255,0.05)",
        "navigationForegroundHover": "#ffffff",
        "navigationBackgroundActive": "rgba(130, 237, 32, 0.15)",
        "navigationForegroundActive": COLOR_ACCENT,
        "moduleBarBackground": "#000d26",
        "moduleBarForeground": "#ffffff",
        "moduleBarForegroundActive": COLOR_ACCENT,
        "borderRadius": "16px",
        "borderWidth": "1px",
        "borderColor": "#e2e8f0",
        "formFieldHeight": "48px"
      } as any,
      theme_dark_overrides: {
        "primary": COLOR_ACCENT,
        "background": "#0a0a0a",
        "navigationBackground": "#000000",
        "moduleBarBackground": COLOR_PRIMARY,
        "borderRadius": "16px",
        "formFieldHeight": "48px"
      } as any
    }));
    console.log('✨ System Cleaned & Branding Restored Successfully');
  } catch (error: any) {
    console.error('❌ Error restoring branding:', JSON.stringify(error, null, 2));
  }
}
revertAndRestoreBranding().catch(console.error);

View File

@@ -1,81 +0,0 @@
import axios from 'axios';
import * as cheerio from 'cheerio';
import * as fs from 'fs';
import * as path from 'path';
/** Shape of the data scraped from a single Faber Kabel product page. */
interface FaberKabelData {
  url: string;
  // Raw "Verwendung" (application/usage) text from the product page.
  verwendung: string;
  // Key/value pairs from the "Technische Daten" attribute table.
  technischeDaten: { [key: string]: string };
}
/**
 * Scrapes a single Faber Kabel product page for its "Verwendung" text and
 * the key/value rows of the "Technische Daten" attribute table.
 * Network or parse failures are logged and yield an empty record for the URL.
 */
async function scrapeFaberKabel(url: string): Promise<FaberKabelData> {
  try {
    const response = await axios.get(url);
    const $ = cheerio.load(response.data);
    // "Verwendung" (usage) paragraph.
    const verwendung = $('#applicationdata .text-module--light').text().trim();
    // Two-cell rows of the technical attribute table become key/value pairs.
    const technischeDaten: { [key: string]: string } = {};
    $('#technicaldata table.attributes tr').each((_i, row) => {
      const cells = $(row).find('td');
      if (cells.length !== 2) return;
      const key = $(cells[0]).text().trim();
      const value = $(cells[1]).text().trim();
      if (key && value) technischeDaten[key] = value;
    });
    return { url, verwendung, technischeDaten };
  } catch (error) {
    console.error('Error scraping:', url, error);
    return { url, verwendung: '', technischeDaten: {} };
  }
}
/**
 * Scrapes the given URLs one at a time (sequential on purpose, to avoid
 * hammering the remote shop) and collects the results in input order.
 */
async function scrapeAll(urls: string[]): Promise<FaberKabelData[]> {
  const collected: FaberKabelData[] = [];
  for (const target of urls) {
    console.log('Scraping:', target);
    collected.push(await scrapeFaberKabel(target));
  }
  return collected;
}
// List of Faber Kabel URLs for KLZ products
const faberKabelUrls = [
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XS-FL-2Y/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XS2Y/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XSF2Y/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-N2XSY/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XS2Y/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XSF2Y/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XS-FL-2Y/',
  'https://shop.faberkabel.de/Starkstromkabel-1-30-kV/Mittelspannungskabel/Mittelspannungskabel-NA2XSY/',
];

/** Scrapes every configured URL and writes the results as pretty JSON. */
async function main() {
  const data = await scrapeAll(faberKabelUrls);
  const outputPath = path.join(__dirname, '..', 'faber-kabel-data.json');
  fs.writeFileSync(outputPath, JSON.stringify(data, null, 2));
  console.log('Data saved to:', outputPath);
}

if (require.main === module) {
  // Surface scraper failures as a non-zero exit code instead of an
  // unhandled promise rejection (main() was previously a floating promise).
  main().catch((err) => {
    console.error('Scrape run failed:', err);
    process.exitCode = 1;
  });
}

export { scrapeFaberKabel, scrapeAll };

View File

@@ -1,122 +0,0 @@
import { createDirectus, rest, authentication, createCollection, createDashboard, createPanel, createItems, createPermission, readPolicies, readRoles, readUsers } from '@directus/sdk';
/**
 * v11 hardened bootstrap for the feedback stack:
 *  - links the Administrator policy to the admin role and user,
 *  - creates the "visual_feedback" collection with full admin permissions,
 *  - seeds a demo item and recreates the feedback dashboard,
 *  - assigns a static API token to the admin user.
 *
 * SECURITY: all secrets come from the environment — the credentials and
 * static token previously hard-coded here are in git history and must be
 * rotated:
 *   DIRECTUS_URL             default http://localhost:8059
 *   DIRECTUS_ADMIN_EMAIL     required
 *   DIRECTUS_ADMIN_PASSWORD  required
 *   DIRECTUS_STATIC_TOKEN    required
 */
async function setupHardened() {
  const url = process.env.DIRECTUS_URL || 'http://localhost:8059';
  const email = process.env.DIRECTUS_ADMIN_EMAIL;
  const password = process.env.DIRECTUS_ADMIN_PASSWORD;
  const staticApiToken = process.env.DIRECTUS_STATIC_TOKEN;
  if (!email || !password || !staticApiToken) {
    console.error('❌ DIRECTUS_ADMIN_EMAIL, DIRECTUS_ADMIN_PASSWORD and DIRECTUS_STATIC_TOKEN must be set.');
    process.exitCode = 1;
    return;
  }
  console.log('🚀 v11 HARDENED SETUP START...');
  const client = createDirectus(url)
    .with(authentication('json'))
    .with(rest());
  try {
    console.log('🔑 Authenticating...');
    await client.login(email, password);
    console.log('👤 Identifying IDs...');
    const me = await client.request(readUsers({ filter: { email: { _eq: email } } }));
    const adminUser = me[0];
    const roles = await client.request(readRoles());
    const adminRole = roles.find(r => r.name === 'Administrator');
    const policies = await client.request(readPolicies());
    const adminPolicy = policies.find(p => p.name === 'Administrator');
    console.log(`- User: ${adminUser.id}`);
    console.log(`- Role: ${adminRole?.id}`);
    console.log(`- Policy: ${adminPolicy?.id}`);
    if (adminPolicy && adminRole) {
      console.log('🔗 Linking Role -> Policy...');
      try {
        await client.request(() => ({
          path: '/access',
          method: 'POST',
          body: JSON.stringify({ role: adminRole.id, policy: adminPolicy.id })
        }));
      } catch (e) {
        // Best-effort: the access link may already exist.
      }
      console.log('🔗 Linking User -> Policy (individual)...');
      try {
        await client.request(() => ({
          path: '/access',
          method: 'POST',
          body: JSON.stringify({ user: adminUser.id, policy: adminPolicy.id })
        }));
      } catch (e) {
        // Best-effort: the access link may already exist.
      }
    }
    console.log('🏗️ Creating Collection "visual_feedback"...');
    try {
      await client.request(createCollection({
        collection: 'visual_feedback',
        meta: { icon: 'feedback', display_template: '{{user_name}}: {{text}}' },
        fields: [
          { field: 'id', type: 'uuid', schema: { is_primary_key: true } },
          { field: 'status', type: 'string', schema: { default_value: 'open' }, meta: { interface: 'select-dropdown' } },
          { field: 'url', type: 'string' },
          { field: 'text', type: 'text' },
          { field: 'user_name', type: 'string' },
          { field: 'date_created', type: 'timestamp', schema: { default_value: 'NOW()' } }
        ]
      } as any));
    } catch (e) {
      console.log('  (Collection might already exist)');
    }
    if (adminPolicy) {
      console.log('🔐 Granting ALL permissions to Administrator Policy...');
      for (const action of ['create', 'read', 'update', 'delete']) {
        try {
          await client.request(createPermission({
            collection: 'visual_feedback',
            action,
            fields: ['*'],
            policy: adminPolicy.id
          } as any));
        } catch (e) {
          // Best-effort: the permission may already exist.
        }
      }
    }
    console.log('💉 Injecting Demo Item...');
    try {
      await client.request(createItems('visual_feedback', [
        { user_name: 'Antigravity', text: 'v11 Recovery Successful', status: 'open' }
      ]));
    } catch (e) {
      // Best-effort: seed data is optional.
    }
    console.log('📊 Recreating Dashboard...');
    const dash = await client.request(createDashboard({
      name: 'Feedback Final',
      icon: 'check_circle',
      color: '#00FF00'
    }));
    await client.request(createPanel({
      dashboard: dash.id,
      name: 'Total Feedbacks',
      type: 'metric',
      width: 12,
      height: 6,
      position_x: 1,
      position_y: 1,
      options: { collection: 'visual_feedback', function: 'count', field: 'id' }
    } as any));
    console.log('✅ Setup Complete! Setting static token...');
    await client.request(() => ({
      path: `/users/${adminUser.id}`,
      method: 'PATCH',
      body: JSON.stringify({ token: staticApiToken })
    }));
    console.log('✨ ALL DONE.');
  } catch (e: any) {
    console.error('❌ CRITICAL FAILURE:', e);
    if (e.errors) console.error(JSON.stringify(e.errors, null, 2));
  }
}
setupHardened();

View File

@@ -1,86 +0,0 @@
import { createDirectus, rest, staticToken, createCollection, readCollections, createDashboard, createPanel, createItems, readDashboards, readPanels, createPermission, readPolicies } from '@directus/sdk';
import { config } from '../lib/config';
/**
 * Creates the "infra_feedback" collection (v11 rename), grants admin and
 * public permissions, seeds test data and rebuilds the "Feedback OVERVIEW"
 * dashboard.
 *
 * SECURITY: connection settings come from the environment — the static token
 * previously hard-coded here is in git history and must be rotated:
 *   DIRECTUS_URL          default http://localhost:8059
 *   DIRECTUS_STATIC_TOKEN required
 */
async function setupInfraFeedback() {
  console.log('🚀 Setting up INFRA_FEEDBACK (Renamed for v11 Visibility)...');
  const url = process.env.DIRECTUS_URL || 'http://localhost:8059';
  const token = process.env.DIRECTUS_STATIC_TOKEN;
  if (!token) {
    console.error('❌ DIRECTUS_STATIC_TOKEN must be set.');
    process.exitCode = 1;
    return;
  }
  const client = createDirectus(url).with(staticToken(token)).with(rest());
  try {
    const collections = await client.request(readCollections());
    const existing = collections.map(c => c.collection);
    const COLL = 'infra_feedback';
    if (!existing.includes(COLL)) {
      console.log(`🏗️ Creating "${COLL}"...`);
      await client.request(createCollection({
        collection: COLL,
        meta: { icon: 'feedback', display_template: '{{user_name}}: {{text}}' },
        fields: [
          { field: 'id', type: 'integer', schema: { is_primary_key: true, has_auto_increment: true } },
          { field: 'status', type: 'string', schema: { default_value: 'open' }, meta: { interface: 'select-dropdown' } },
          { field: 'url', type: 'string' },
          { field: 'text', type: 'text' },
          { field: 'user_name', type: 'string' },
          { field: 'date_created', type: 'timestamp', schema: { default_value: 'NOW()' } }
        ]
      } as any));
    }
    const policies = await client.request(readPolicies());
    const adminPolicy = policies.find(p => p.name === 'Administrator')?.id;
    const publicPolicy = policies.find(p => p.name === '$t:public_label' || p.name === 'Public')?.id;
    for (const policy of [adminPolicy, publicPolicy]) {
      if (!policy) continue;
      console.log(`🔐 Granting permissions to ${policy}...`);
      for (const action of ['create', 'read', 'update', 'delete']) {
        try {
          await client.request(createPermission({
            policy,
            collection: COLL,
            action,
            fields: ['*']
          } as any));
        } catch (e) {
          // Best-effort: the permission may already exist.
        }
      }
    }
    console.log('💉 Injecting test data...');
    await client.request(createItems(COLL, [
      { user_name: 'Antigravity', text: 'Rename Test Success', status: 'open' }
    ]));
    console.log('📊 Configuring Dashboard "Feedback OVERVIEW"...');
    // Recreate from scratch: drop any stale dashboard with the same name first.
    const dashboards = await client.request(readDashboards());
    const stale = dashboards.find(d => d.name === 'Feedback OVERVIEW');
    if (stale) {
      await client.request(() => ({ path: `/dashboards/${stale.id}`, method: 'DELETE' }));
    }
    const dash = await client.request(createDashboard({
      name: 'Feedback OVERVIEW',
      icon: 'visibility',
      color: '#FFCC00'
    }));
    await client.request(createPanel({
      dashboard: dash.id,
      name: 'Table View',
      type: 'list',
      width: 24,
      height: 12,
      position_x: 1,
      position_y: 1,
      options: { collection: COLL, display_template: '{{user_name}}: {{text}}' }
    } as any));
    console.log('✅ Renamed Setup Complete! Dash: "Feedback OVERVIEW"');
  } catch (error: any) {
    console.error('❌ Rename setup failed:', error);
  }
}
setupInfraFeedback();

View File

@@ -1,36 +0,0 @@
import * as http from 'http';
/**
 * Simple smoke test to verify the application is running and healthy.
 * This script is intended to be run against a local or remote instance.
 */
const url = process.argv[2] || 'http://localhost:3000/health';
const timeout = 10000; // 10 seconds

console.log(`🔍 Running smoke test against: ${url}`);

const req = http.get(url, (res) => {
  const { statusCode } = res;
  // Anything other than a plain 200 counts as unhealthy.
  if (statusCode !== 200) {
    console.error(`❌ Smoke test failed: Received status code ${statusCode}`);
    process.exit(1);
  }
  console.log('✅ Smoke test passed: Application is healthy.');
  process.exit(0);
});

req.setTimeout(timeout);

req.on('error', (err) => {
  console.error(`❌ Smoke test failed: ${err.message}`);
  process.exit(1);
});

req.on('timeout', () => {
  console.error('❌ Smoke test failed: Request timed out');
  req.destroy();
  process.exit(1);
});

View File

@@ -1,33 +0,0 @@
#!/bin/bash
# Script to sync Strapi data between environments
# Usage: ./scripts/strapi-sync.sh [export|import] [filename]
# All expansions are quoted: archive names may contain spaces
# (cf. "medium-voltage-KM 170126.xlsx" elsewhere in this repo).

COMMAND=$1
FILENAME=$2

if [ -z "$COMMAND" ]; then
  echo "Usage: $0 [export|import] [filename]"
  exit 1
fi

if [ "$COMMAND" == "export" ]; then
  # Default to a timestamped archive name when none was given.
  if [ -z "$FILENAME" ]; then
    FILENAME="strapi-export-$(date +%Y%m%d%H%M%S).tar.gz"
  fi
  echo "Exporting Strapi data to $FILENAME..."
  docker-compose exec cms npm run strapi export -- --no-encrypt -f "$FILENAME"
  # Quote the container-id substitution and filename to survive word splitting.
  docker cp "$(docker-compose ps -q cms):/opt/app/$FILENAME" .
  echo "Export complete: $FILENAME"
fi

if [ "$COMMAND" == "import" ]; then
  if [ -z "$FILENAME" ]; then
    echo "Please specify a filename to import"
    exit 1
  fi
  echo "Importing Strapi data from $FILENAME..."
  docker cp "$FILENAME" "$(docker-compose ps -q cms):/opt/app/$FILENAME"
  docker-compose exec cms npm run strapi import -- -f "$FILENAME" --force
  echo "Import complete"
fi

View File

@@ -1,229 +0,0 @@
import { createDirectus, rest, authentication, readItems, readCollections } from '@directus/sdk';
import { config } from './config';
import { getServerAppServices } from './services/create-services.server';
const { url, adminEmail, password, token, proxyPath, internalUrl } = config.directus;

// Use internal URL if on server to bypass Gatekeeper/Auth
// Use proxy path in browser to stay on the same origin
// NOTE: the previous nested ternary re-tested `typeof window !== 'undefined'`
// inside the browser branch (always true there), so its `proxyPath` fallback
// was unreachable dead code; the two real cases are expressed directly now.
const effectiveUrl =
  typeof window === 'undefined'
    ? internalUrl || url
    : `${window.location.origin}${proxyPath}`;

const client = createDirectus(effectiveUrl).with(rest()).with(authentication());
/**
 * Helper to determine if we should show detailed errors
 */
const shouldShowDevErrors = config.isTesting || config.isDevelopment;

/**
 * Genericizes error messages for production/staging
 */
function formatError(error: any) {
  // Outside dev/test environments, hide internals behind a generic message.
  if (!shouldShowDevErrors) {
    return 'A system error occurred. Our team has been notified.';
  }
  const detail = error.errors?.[0]?.message || error.message;
  return detail || 'An unexpected error occurred.';
}
// Single in-flight login promise, shared so concurrent callers trigger only
// one credential login. Reset to null on failure so later calls can retry.
let authPromise: Promise<void> | null = null;

/**
 * Ensures the Directus client has credentials before issuing a request.
 *
 * Resolution order:
 *  1. A static token from config wins and is applied directly.
 *  2. A session token already held by the SDK short-circuits.
 *  3. Otherwise, log in with admin email/password, deduplicated through
 *     `authPromise`.
 *
 * Rethrows the SDK error when the login attempt fails; resolves silently
 * (with only a dev warning) when no credentials are configured at all.
 */
export async function ensureAuthenticated() {
  if (token) {
    client.setToken(token);
    return;
  }
  // Check if we already have a valid session token in memory
  const existingToken = await client.getToken();
  if (existingToken) {
    return;
  }
  if (adminEmail && password) {
    // Reuse the in-flight login so concurrent requests share one attempt.
    if (authPromise) {
      return authPromise;
    }
    authPromise = (async () => {
      try {
        // Drop any stale token before logging in with fresh credentials.
        client.setToken(null as any);
        await client.login(adminEmail, password);
        console.log(`✅ Directus: Authenticated successfully as ${adminEmail}`);
      } catch (e: any) {
        // Error reporting service is only available server-side.
        if (typeof window === 'undefined') {
          getServerAppServices().errors.captureException(e, { part: 'directus_auth' });
        }
        console.error(`Failed to authenticate with Directus (${adminEmail}):`, e.message);
        if (shouldShowDevErrors && e.errors) {
          console.error('Directus Auth Details:', JSON.stringify(e.errors, null, 2));
        }
        // Clear the promise on failure (especially on invalid credentials)
        // so we can retry on next request if credentials were updated
        authPromise = null;
        throw e;
      }
    })();
    return authPromise;
  } else if (shouldShowDevErrors && !adminEmail && !password && !token) {
    console.warn('Directus: No token or admin credentials provided.');
  }
}
/**
 * Maps the new translation-based schema back to the application's Product interface
 */
function mapDirectusProduct(item: any, locale: string): any {
  // Directus stores translations under full language codes.
  const languageCode = locale === 'en' ? 'en-US' : 'de-DE';
  const translations = item.translations;
  const exactMatch = translations?.find((t: any) => t.languages_code === languageCode);
  // Fall back to the first available translation, then to an empty object.
  const translation = exactMatch || translations?.[0] || {};

  const categories = (item.categories_link || [])
    .map((link: any) => link.categories_id?.translations?.[0]?.name)
    .filter(Boolean);

  // Use proxy URL for assets to avoid CORS and handle internal/external issues
  const dataSheetUrl = item.data_sheet ? `${proxyPath}/assets/${item.data_sheet}` : null;

  return {
    id: item.id,
    sku: item.sku,
    title: translation.name || '',
    description: translation.description || '',
    content: translation.content || '',
    technicalData: {
      technicalItems: translation.technical_items || [],
      voltageTables: translation.voltage_tables || [],
    },
    locale,
    data_sheet_url: dataSheetUrl,
    categories,
  };
}
/**
 * Fetches every product from Directus and maps it to the app's Product shape.
 * Returns an empty array when the request fails (failure is reported
 * server-side and logged).
 */
export async function getProducts(locale: string = 'de') {
  await ensureAuthenticated();
  const query = {
    fields: ['*', 'translations.*', 'categories_link.categories_id.translations.name'],
  };
  try {
    const records = await client.request(readItems('products', query));
    return records.map((record) => mapDirectusProduct(record, locale));
  } catch (error) {
    if (typeof window === 'undefined') {
      getServerAppServices().errors.captureException(error, { part: 'directus_get_products' });
    }
    console.error('Error fetching products:', error);
    return [];
  }
}
/**
 * Looks up a single product by its localized slug.
 * Returns null when no match exists or when the request fails.
 */
export async function getProductBySlug(slug: string, locale: string = 'de') {
  await ensureAuthenticated();
  const langCode = locale === 'en' ? 'en-US' : 'de-DE';
  try {
    const matches = await client.request(
      readItems('products', {
        filter: {
          translations: {
            slug: { _eq: slug },
            languages_code: { _eq: langCode },
          },
        },
        fields: ['*', 'translations.*', 'categories_link.categories_id.translations.name'],
        limit: 1,
      }),
    );
    const first = matches?.[0];
    return first ? mapDirectusProduct(first, locale) : null;
  } catch (error) {
    if (typeof window === 'undefined') {
      getServerAppServices().errors.captureException(error, {
        part: 'directus_get_product_by_slug',
        slug,
      });
    }
    console.error(`Error fetching product ${slug}:`, error);
    return null;
  }
}
/**
 * Health probe for the Directus CMS.
 *
 * Performs two ordered checks:
 *  1. Connectivity & authentication — log in and list collections.
 *  2. Schema — read one row from the `contact_submissions` collection.
 *
 * @returns `{ status: 'ok' | 'error', message, code?, details? }`.
 *          `code` is one of AUTH_FAILED, SCHEMA_MISSING, SCHEMA_ERROR or
 *          UNKNOWN; messages carry detail only when dev errors are enabled.
 */
export async function checkHealth() {
  try {
    // 1. Connectivity & Auth Check
    try {
      await ensureAuthenticated();
      await client.request(readCollections());
    } catch (e: any) {
      if (typeof window === 'undefined') {
        getServerAppServices().errors.captureException(e, { part: 'directus_health_auth' });
      }
      console.error('Directus authentication failed during health check:', e);
      return {
        status: 'error',
        message: shouldShowDevErrors
          ? 'Authentication failed. Check your DIRECTUS_ADMIN_EMAIL and DIRECTUS_ADMIN_PASSWORD.'
          : 'CMS is currently unavailable due to an internal authentication error.',
        code: 'AUTH_FAILED',
        details: shouldShowDevErrors ? e.message : undefined,
      };
    }
    // 2. Schema check (does the contact_submissions table exist?)
    try {
      await client.request(readItems('contact_submissions', { limit: 1 }));
    } catch (e: any) {
      if (typeof window === 'undefined') {
        getServerAppServices().errors.captureException(e, { part: 'directus_health_schema' });
      }
      // Distinguish "collection missing" from other schema-level failures.
      if (
        e.message?.includes('does not exist') ||
        e.code === 'INVALID_PAYLOAD' ||
        e.status === 404
      ) {
        return {
          status: 'error',
          message: shouldShowDevErrors
            ? `The "contact_submissions" collection is missing or inaccessible. Error: ${e.message || 'Unknown'}`
            : 'Required data structures are currently unavailable.',
          code: 'SCHEMA_MISSING',
        };
      }
      return {
        status: 'error',
        message: shouldShowDevErrors
          ? `Schema error: ${e.errors?.[0]?.message || e.message || 'Unknown error'}`
          : 'The data schema is currently misconfigured.',
        code: 'SCHEMA_ERROR',
      };
    }
    return { status: 'ok', message: 'Directus is reachable and responding.' };
  } catch (error: any) {
    // Catch-all for unexpected failures outside the two guarded checks.
    if (typeof window === 'undefined') {
      getServerAppServices().errors.captureException(error, { part: 'directus_health_critical' });
    }
    console.error('Directus health check failed with unexpected error:', error);
    return {
      status: 'error',
      message: formatError(error),
      code: error.code || 'UNKNOWN',
    };
  }
}
export default client;

// NOTE(review): this IIFE performs a live authentication attempt as a
// module-load side effect — it runs on every import of this file. It looks
// like leftover debug/verification code; confirm whether it is intentional
// and consider removing it.
(async () => {
  try {
    await ensureAuthenticated();
    console.log('Auth test successful');
  } catch (e) {
    console.error('Auth test failed:', e.message);
  }
})();

View File

@@ -1,46 +0,0 @@
import * as http from 'http';
// Base URL of the dev server under test (note: port 3010, not the default 3000).
const baseUrl = 'http://localhost:3010';
// Locale-prefixed routes whose generated Open Graph images are verified.
const paths = [
  '/en/opengraph-image',
  '/de/opengraph-image',
  '/en/blog/opengraph-image',
  '/en/contact/opengraph-image',
  '/en/products/opengraph-image',
  '/en/team/opengraph-image',
];
/**
 * Requests one OG-image route and reports whether it answered with HTTP 200.
 * Never rejects: network errors and timeouts resolve to false.
 *
 * @param path - Route path appended to `baseUrl`.
 * @returns true when the endpoint responded with status 200.
 */
async function testUrl(path: string) {
  return new Promise((resolve) => {
    const url = `${baseUrl}${path}`;
    console.log(`Testing ${url}...`);
    const req = http.get(url, (res) => {
      console.log(`  Status: ${res.statusCode}`);
      console.log(`  Content-Type: ${res.headers['content-type']}`);
      // Drain the body so the socket is released and the process can exit.
      res.resume();
      resolve(res.statusCode === 200);
    });
    req.on('error', (e) => {
      console.error(`  Error: ${e.message}`);
      resolve(false);
    });
    // Fail fast instead of hanging forever if the server never responds.
    req.setTimeout(10000, () => {
      console.error('  Error: request timed out');
      req.destroy();
      resolve(false);
    });
    // NOTE: http.get() calls req.end() itself; the previous explicit
    // req.end() here was redundant.
  });
}
/**
 * Checks every configured OG-image route sequentially and exits the process
 * with 0 when all succeeded, 1 otherwise.
 */
async function run() {
  const outcomes: boolean[] = [];
  // Sequential on purpose: one request at a time against the dev server.
  for (const path of paths) {
    outcomes.push(await testUrl(path));
  }
  if (!outcomes.every(Boolean)) {
    console.log('\n❌ Some OG images failed.');
    process.exit(1);
  }
  console.log('\n✅ All OG images are working!');
  process.exit(0);
}
run();

View File

@@ -1,59 +0,0 @@
import openpyxl
def update_excel_ampacity(file_path, headers_row_idx, ampacity_cols_identifiers, target_cross_section="1x1200/35"):
    """Mark the ampacity columns of every *target_cross_section* row as "On Request".

    Args:
        file_path: Workbook to modify in place (saved back to the same path).
        headers_row_idx: 1-indexed worksheet row containing the header names.
        ampacity_cols_identifiers: Mix of header names and 0-indexed column
            positions identifying the ampacity columns to blank out.
        target_cross_section: Cross-section label selecting the rows to touch.
    """
    print(f"Updating {file_path}...")
    workbook = openpyxl.load_workbook(file_path)
    sheet = workbook.active
    # ws[row] access is 1-indexed in openpyxl; the resulting list is 0-indexed.
    headers = [cell.value for cell in sheet[headers_row_idx]]
    # Resolve every identifier (name or position) to a 0-indexed column.
    col_indices = []
    for identifier in ampacity_cols_identifiers:
        if isinstance(identifier, int):
            col_indices.append(identifier)
            continue
        try:
            col_indices.append(headers.index(identifier))
        except ValueError:
            print(f"Warning: Could not find column '{identifier}' in {file_path}")
    # Locate the cross-section column; fall back to its historical position.
    try:
        cs_col_idx = headers.index("Number of cores and cross-section")
    except ValueError:
        cs_col_idx = 8
    rows_updated = 0
    for row in sheet.iter_rows(min_row=headers_row_idx + 1):
        if str(row[cs_col_idx].value).strip() != target_cross_section:
            continue
        for col_idx in col_indices:
            row[col_idx].value = "On Request"
        rows_updated += 1
    workbook.save(file_path)
    print(f"Updated {rows_updated} rows in {file_path}")
# File 1: medium-voltage-KM.xlsx
# Ampacity columns are addressed by their header names here.
update_excel_ampacity(
    'data/excel/medium-voltage-KM.xlsx',
    1, # Headers are in first row (1-indexed)
    [
        'Current ratings in air, trefoil*',
        'Current ratings in air, flat*',
        'Current ratings in ground, trefoil*',
        'Current ratings in ground, flat*'
    ]
)
# File 2: medium-voltage-KM 170126.xlsx
# Ampacity columns are addressed by 0-indexed positions here.
update_excel_ampacity(
    'data/excel/medium-voltage-KM 170126.xlsx',
    1, # Indices 39 and 41 were from a 0-indexed JSON representation
    [39, 41]
)

View File

@@ -1,87 +0,0 @@
import openpyxl
# Target workbook: medium-voltage product sheet (header names in row 1).
excel_path = 'data/excel/medium-voltage-KM.xlsx'
wb = openpyxl.load_workbook(excel_path)
ws = wb.active
# Technical data for 1x1200RM/35
# One dict per rated-voltage variant; keys must match the header names in row 1.
new_rows_data = [
    {
        "Rated voltage": "6/10",
        "Test voltage": 21,
        "Nominal insulation thickness": 3.4,
        "Diameter over insulation (approx.)": 48.5,
        "Minimum sheath thickness": 2.1,
        "Outer diameter (approx.)": 59,
        "Bending radius (min.)": 885,
        "Weight (approx.)": 4800,
        "Capacitance (approx.)": 0.95,
        "Inductance, trefoil (approx.)": 0.24,
        "Inductance in air, flat (approx.) 1": 0.40,
        "Inductance in ground, flat (approx.) 1": 0.42,
    },
    {
        "Rated voltage": "12/20",
        "Test voltage": 42,
        "Nominal insulation thickness": 5.5,
        "Diameter over insulation (approx.)": 52.3,
        "Minimum sheath thickness": 2.1,
        "Outer diameter (approx.)": 66,
        "Bending radius (min.)": 990,
        "Weight (approx.)": 5200,
        "Capacitance (approx.)": 1.05,
        "Inductance, trefoil (approx.)": 0.23,
        "Inductance in air, flat (approx.) 1": 0.43,
        "Inductance in ground, flat (approx.) 1": 0.45,
    },
    {
        "Rated voltage": "18/30",
        "Test voltage": 63,
        "Nominal insulation thickness": 8.0,
        "Diameter over insulation (approx.)": 57.5,
        "Minimum sheath thickness": 2.4,
        "Outer diameter (approx.)": 71,
        "Bending radius (min.)": 1065,
        "Weight (approx.)": 5900,
        "Capacitance (approx.)": 1.15,
        "Inductance, trefoil (approx.)": 0.22,
        "Inductance in air, flat (approx.) 1": 0.45,
        "Inductance in ground, flat (approx.) 1": 0.47,
    }
]
# Find a template row for NA2XS(F)2Y
# Data rows start at row 3 — row 2 presumably holds units; TODO confirm.
template_row = None
headers = [cell.value for cell in ws[1]]
for row in ws.iter_rows(min_row=3, values_only=True):
    if row[0] == 'NA2XS(F)2Y':
        template_row = list(row)
        break
if not template_row:
    print("Error: Could not find template row for NA2XS(F)2Y")
    exit(1)
# Function to update template with new values
def create_row(template, updates, headers):
    """Clone *template*, force the cross-section column to 1x1200/35, then
    apply *updates* for every key that exists in *headers*.

    Returns a new list; *template* is left untouched. Keys in *updates* that
    are not present in *headers* are silently ignored.
    """
    row = list(template)
    row[headers.index("Number of cores and cross-section")] = "1x1200/35"
    for column, value in updates.items():
        if column in headers:
            row[headers.index(column)] = value
    return row
# Append new rows
# Each dict produces one worksheet row derived from the NA2XS(F)2Y template.
for data in new_rows_data:
    new_row_values = create_row(template_row, data, headers)
    ws.append(new_row_values)
    print(f"Added row for {data['Rated voltage']} kV")
wb.save(excel_path)
print("Excel file updated successfully.")

View File

@@ -1,120 +0,0 @@
import openpyxl
# Target workbook: part-number variant of the medium-voltage sheet
# (columns are addressed by fixed 0-indexed positions, documented below).
excel_path = 'data/excel/medium-voltage-KM 170126.xlsx'
wb = openpyxl.load_workbook(excel_path)
ws = wb.active
# Technical data for 1x1200RM/35
# Indices based on Row 2 (Units) and Row 1
# Index 0: Part Number
# Index 8: cross-section ("Querschnitt")
# Index 9: Rated voltage
# Index 10: Test voltage
# Index 23: LD mm
# Index 24: ID mm
# Index 25: DI mm
# Index 26: MWD mm
# Index 27: AD mm
# Index 28: BR
# Index 29: G kg
# Index 30: RI Ohm
# Index 31: Cap
# Index 32: Inductance trefoil
# Index 35: BK
# Index 39: SBL 30
# Index 41: SBE 20
# One dict per rated-voltage variant; keys map to the indices above.
new_rows_data = [
    {
        "voltage": "6/10",
        "test_v": 21,
        "ld": 41.5,
        "id": 3.4,
        "di": 48.5,
        "mwd": 2.1,
        "ad": 59,
        "br": 885,
        "g": 4800,
        "ri": 0.0247,
        "cap": 0.95,
        "ind": 0.24,
        "bk": 113,
        "sbl": 1300,
        "sbe": 933
    },
    {
        "voltage": "12/20",
        "test_v": 42,
        "ld": 41.5,
        "id": 5.5,
        "di": 52.3,
        "mwd": 2.1,
        "ad": 66,
        "br": 990,
        "g": 5200,
        "ri": 0.0247,
        "cap": 1.05,
        "ind": 0.23,
        "bk": 113,
        "sbl": 1200,
        "sbe": 900
    },
    {
        "voltage": "18/30",
        "test_v": 63,
        "ld": 41.5,
        "id": 8.0,
        "di": 57.5,
        "mwd": 2.4,
        "ad": 71,
        "br": 1065,
        "g": 5900,
        "ri": 0.0247,
        "cap": 1.15,
        "ind": 0.22,
        "bk": 113,
        "sbl": 1300,
        "sbe": 950
    }
]
# Find a template row for NA2XS(F)2Y
# Match on cable type (col 0) AND rated voltage (col 9) to pin one variant.
template_row = None
for row in ws.iter_rows(min_row=3, values_only=True):
    if row[0] == 'NA2XS(F)2Y' and row[9] == '6/10':
        template_row = list(row)
        break
if not template_row:
    print("Error: Could not find template row for NA2XS(F)2Y")
    exit(1)
# Function to update template with new values
def create_row(template, data):
    """Build a spreadsheet row for the 1x1200/35 cross-section variant.

    Starts from a copy of *template* and overwrites the columns named in
    ``column_index`` below (positions documented in the index comments at
    the top of this script); every other column keeps the template value.

    Args:
        template: Baseline row (list of cell values) from an existing
            NA2XS(F)2Y row.
        data: Dict providing a value for each key in ``column_index``.

    Returns:
        A new list; *template* itself is not modified.

    Raises:
        KeyError: if *data* lacks any expected key (same behavior as the
            previous hand-written assignments).
    """
    # Map data keys to 0-indexed worksheet columns; replaces 15 repeated
    # hard-coded assignments so the sheet layout is stated exactly once.
    column_index = {
        "voltage": 9,
        "test_v": 10,
        "ld": 23,
        "id": 24,
        "di": 25,
        "mwd": 26,
        "ad": 27,
        "br": 28,
        "g": 29,
        "ri": 30,
        "cap": 31,
        "ind": 32,
        "bk": 35,
        "sbl": 39,
        "sbe": 41,
    }
    new_row = template[:]
    new_row[8] = "1x1200/35"  # cross-section ("Querschnitt") column
    for key, idx in column_index.items():
        new_row[idx] = data[key]
    return new_row
# Append new rows
# Each dict produces one worksheet row derived from the NA2XS(F)2Y template.
for data in new_rows_data:
    new_row_values = create_row(template_row, data)
    ws.append(new_row_values)
    print(f"Added row for {data['voltage']} kV")
wb.save(excel_path)
print("Excel file updated successfully.")

View File

@@ -1,208 +0,0 @@
#!/usr/bin/env ts-node
/**
* Verification script for Excel integration
* Tests that Excel data is correctly parsed and integrated into products
*/
// Import from the compiled lib directory
import { getExcelTechnicalDataForProduct, getExcelRowsForProduct } from '../lib/excel-products';
import { getAllProducts, enrichProductWithExcelData } from '../lib/data';
interface TestResult {
  name: string;
  passed: boolean;
  message: string;
  details?: any;
}

/** Accumulated outcomes of all verification steps, in execution order. */
const results: TestResult[] = [];

/** Records one test outcome and echoes it to the console. */
function addResult(name: string, passed: boolean, message: string, details?: any): void {
  const entry: TestResult = { name, passed, message, details };
  results.push(entry);
  const marker = passed ? '✓' : '✗';
  console.log(`${marker} ${name}: ${message}`);
}
/**
 * Runs the five Excel-integration checks in sequence, records each outcome
 * via addResult, prints a summary and exits the process (0 = all passed).
 *
 * Every stage has its own try/catch so an exception in one check is recorded
 * as a failure rather than aborting the remaining checks.
 */
async function runTests(): Promise<void> {
  console.log('🔍 Starting Excel Integration Verification...\n');
  // Test 1: Check if Excel files exist and can be parsed
  console.log('Test 1: Excel File Parsing');
  try {
    // Known product fixture — assumes this SKU exists in the Excel sources.
    const testProduct = {
      name: 'NA2XS(FL)2Y',
      slug: 'na2xsfl2y-3',
      sku: 'NA2XS(FL)2Y-high-voltage-cables',
      translationKey: 'na2xsfl2y-3'
    };
    const excelData = getExcelTechnicalDataForProduct(testProduct);
    if (excelData && excelData.configurations.length > 0) {
      addResult(
        'Excel File Parsing',
        true,
        `Successfully parsed Excel data with ${excelData.configurations.length} configurations`,
        { configurations: excelData.configurations.slice(0, 3) }
      );
    } else {
      addResult(
        'Excel File Parsing',
        false,
        'No Excel data found for test product',
        { product: testProduct }
      );
    }
  } catch (error) {
    addResult('Excel File Parsing', false, `Error: ${error}`);
  }
  // Test 2: Check Excel data structure
  console.log('\nTest 2: Excel Data Structure');
  try {
    const testProduct = {
      name: 'NA2XS(FL)2Y',
      slug: 'na2xsfl2y-3',
      sku: 'NA2XS(FL)2Y-high-voltage-cables',
      translationKey: 'na2xsfl2y-3'
    };
    const excelData = getExcelTechnicalDataForProduct(testProduct);
    if (excelData) {
      const hasConfigurations = Array.isArray(excelData.configurations) && excelData.configurations.length > 0;
      const hasAttributes = Array.isArray(excelData.attributes);
      addResult(
        'Excel Data Structure',
        hasConfigurations && hasAttributes,
        `Configurations: ${hasConfigurations ? '✓' : '✗'}, Attributes: ${hasAttributes ? '✓' : '✗'}`,
        {
          configCount: excelData.configurations.length,
          attrCount: excelData.attributes.length,
          sampleAttributes: excelData.attributes.slice(0, 2)
        }
      );
    } else {
      addResult('Excel Data Structure', false, 'No Excel data returned');
    }
  } catch (error) {
    addResult('Excel Data Structure', false, `Error: ${error}`);
  }
  // Test 3: Check product enrichment
  console.log('\nTest 3: Product Enrichment');
  try {
    const products = getAllProducts();
    const testProduct = products.find(p => p.slug === 'na2xsfl2y-3');
    if (!testProduct) {
      addResult('Product Enrichment', false, 'Test product not found in data');
    } else {
      const enriched = enrichProductWithExcelData(testProduct);
      const hasExcelConfig = enriched.excelConfigurations && enriched.excelConfigurations.length > 0;
      const hasExcelAttrs = enriched.excelAttributes && enriched.excelAttributes.length > 0;
      addResult(
        'Product Enrichment',
        hasExcelConfig && hasExcelAttrs,
        `Enrichment successful: ${hasExcelConfig && hasExcelAttrs ? '✓' : '✗'}`,
        {
          originalAttributes: testProduct.attributes.length,
          excelConfigurations: enriched.excelConfigurations?.length || 0,
          excelAttributes: enriched.excelAttributes?.length || 0
        }
      );
    }
  } catch (error) {
    addResult('Product Enrichment', false, `Error: ${error}`);
  }
  // Test 4: Check multiple products
  console.log('\nTest 4: Multiple Product Support');
  try {
    const products = getAllProducts();
    const sampleProducts = products.slice(0, 3);
    let successCount = 0;
    const details: any[] = [];
    for (const product of sampleProducts) {
      const enriched = enrichProductWithExcelData(product);
      const hasExcelData = enriched.excelConfigurations || enriched.excelAttributes;
      if (hasExcelData) {
        successCount++;
        details.push({
          name: product.name,
          slug: product.slug,
          configs: enriched.excelConfigurations?.length || 0,
          attrs: enriched.excelAttributes?.length || 0
        });
      }
    }
    // Passes as long as at least one sampled product gained Excel data.
    addResult(
      'Multiple Product Support',
      successCount > 0,
      `Enriched ${successCount} out of ${sampleProducts.length} products`,
      { details }
    );
  } catch (error) {
    addResult('Multiple Product Support', false, `Error: ${error}`);
  }
  // Test 5: Check raw Excel rows
  console.log('\nTest 5: Raw Excel Data Access');
  try {
    const testProduct = {
      name: 'NA2XS(FL)2Y',
      slug: 'na2xsfl2y-3',
      sku: 'NA2XS(FL)2Y-high-voltage-cables',
      translationKey: 'na2xsfl2y-3'
    };
    const rows = getExcelRowsForProduct(testProduct);
    addResult(
      'Raw Excel Data Access',
      rows.length > 0,
      `Found ${rows.length} raw rows for test product`,
      {
        sampleRow: rows[0] ? Object.keys(rows[0]).slice(0, 5) : 'No rows'
      }
    );
  } catch (error) {
    addResult('Raw Excel Data Access', false, `Error: ${error}`);
  }
  // Summary
  console.log('\n📊 Test Summary:');
  console.log('='.repeat(50));
  const passed = results.filter(r => r.passed).length;
  const total = results.length;
  console.log(`Passed: ${passed}/${total}`);
  console.log(`Failed: ${total - passed}/${total}`);
  if (passed === total) {
    console.log('\n🎉 All tests passed! Excel integration is working correctly.');
  } else {
    console.log('\n⚠ Some tests failed. Please review the details above.');
    console.log('\nFailed tests:');
    results.filter(r => !r.passed).forEach(r => {
      console.log(`  - ${r.name}: ${r.message}`);
      if (r.details) console.log(`    Details:`, r.details);
    });
  }
  // Exit with appropriate code
  process.exit(passed === total ? 0 : 1);
}
// Run tests
// Entry point: any unhandled failure from runTests exits with a non-zero code.
runTests().catch(error => {
  console.error('Fatal error:', error);
  process.exit(1);
});

View File

@@ -1,54 +0,0 @@
import fs from 'fs';
import path from 'path';
const locales = ['en', 'de'];
locales.forEach(locale => {
const productsDir = path.join(process.cwd(), 'data', 'products', locale);
if (!fs.existsSync(productsDir)) return;
const files = fs.readdirSync(productsDir).filter(f => f.endsWith('.mdx'));
files.forEach(file => {
const filePath = path.join(productsDir, file);
let content = fs.readFileSync(filePath, 'utf8');
// Find the end of frontmatter
const parts = content.split('---');
if (parts.length < 3) return;
const frontmatter = parts[1];
let body = parts.slice(2).join('---').trim();
// Find the ProductTechnicalData component
const techDataMatch = body.match(/<ProductTechnicalData[\s\S]*?\/>/);
if (!techDataMatch) {
console.log(`No ProductTechnicalData found in ${locale}/${file}`);
return;
}
const techData = techDataMatch[0];
// Remove techData from body
let description = body.replace(techData, '').trim();
// Clean up description from ProductTabs if it was already there
description = description.replace(/<ProductTabs[^>]*>/, '').replace('</ProductTabs>', '').trim();
// Remove the title from description if it's there (it's usually # Title)
description = description.replace(/^# .*\n/, '').trim();
// Remove any trailing "## Technical Data" or similar
description = description.replace(/## Technical Data\s*$/, '').trim();
description = description.replace(/## Technische Daten\s*$/, '').trim();
const newContent = `---${frontmatter}---
<ProductTabs technicalData={${techData}}>
${description}
</ProductTabs>
`;
fs.writeFileSync(filePath, newContent);
console.log(`Updated ${locale}/${file}`);
});
});

View File

@@ -1,11 +1,7 @@
{
"compilerOptions": {
"target": "ES2020",
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": false,
@@ -24,18 +20,10 @@
],
"baseUrl": ".",
"paths": {
"@/*": [
"./*"
],
"lib/*": [
"./lib/*"
],
"components/*": [
"./components/*"
],
"data/*": [
"./data/*"
]
"@/*": ["./*"],
"lib/*": ["./lib/*"],
"components/*": ["./components/*"],
"data/*": ["./data/*"]
}
},
"include": [
@@ -46,8 +34,5 @@
"tests/**/*.test.ts",
".next/dev/types/**/*.ts"
],
"exclude": [
"node_modules",
"scripts"
]
"exclude": ["node_modules", "scripts"]
}

File diff suppressed because one or more lines are too long