feat(ci): add deep quality assertions (html, security, links, spelling)
Some checks failed
Build & Deploy / 🔍 Prepare (push) Successful in 18s
Build & Deploy / 🧪 QA (push) Successful in 2m0s
Build & Deploy / 🏗️ Build (push) Successful in 2m49s
Build & Deploy / 🚀 Deploy (push) Successful in 27s
Build & Deploy / 🧪 Smoke Test (push) Successful in 57s
Build & Deploy / ♿ WCAG (push) Successful in 2m29s
Build & Deploy / 🛡️ Quality Gates (push) Failing after 3m42s
Build & Deploy / 📸 Visual Diff (push) Failing after 6m6s
Build & Deploy / ⚡ Lighthouse (push) Successful in 10m55s
Build & Deploy / 🔔 Notify (push) Successful in 3s

This commit is contained in:
2026-02-22 00:29:49 +01:00
parent 36d193f8ec
commit b29e08e954
8 changed files with 1130 additions and 2 deletions

View File

@@ -187,6 +187,7 @@ jobs:
if: github.event.inputs.skip_checks != 'true'
run: |
pnpm lint
pnpm check:spell
pnpm typecheck
pnpm test
@@ -709,11 +710,66 @@ jobs:
path: backstop_data/html_report/
# ──────────────────────────────────────────────────────────────────────────────
# JOB 9: Notifications
# JOB 9: Quality Assertions
# ──────────────────────────────────────────────────────────────────────────────
quality_assertions:
name: 🛡️ Quality Gates
needs: [prepare, deploy, smoke_test]
if: success() && needs.prepare.outputs.target != 'skip'
runs-on: docker
container:
image: catthehacker/ubuntu:act-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup pnpm
uses: pnpm/action-setup@v3
with:
version: 10
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- name: Setup pnpm cache
uses: actions/cache@v4
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: 🔐 Registry Auth
run: |
echo "@mintel:registry=https://${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}" > .npmrc
echo "//${{ vars.REGISTRY_HOST || 'npm.infra.mintel.me' }}/:_authToken=${{ secrets.REGISTRY_PASS }}" >> .npmrc
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: 🌐 HTML DOM Validation
env:
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
run: pnpm check:html
- name: 🔒 Security Headers Scan
env:
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
run: pnpm check:security
- name: 🔗 Lychee Deep Link Crawl
env:
NEXT_PUBLIC_BASE_URL: ${{ needs.prepare.outputs.next_public_url }}
GATEKEEPER_PASSWORD: ${{ secrets.GATEKEEPER_PASSWORD || 'klz2026' }}
run: pnpm check:links
# ──────────────────────────────────────────────────────────────────────────────
# JOB 10: Notifications
# ──────────────────────────────────────────────────────────────────────────────
notifications:
name: 🔔 Notify
needs: [prepare, deploy, smoke_test, lighthouse, wcag, visual_regression]
needs: [prepare, deploy, smoke_test, lighthouse, wcag, visual_regression, quality_assertions]
if: always()
runs-on: docker
container:

13
.htmlvalidate.json Normal file
View File

@@ -0,0 +1,13 @@
{
"extends": ["html-validate:recommended", "html-validate:document"],
"rules": {
"require-sri": "off",
"meta-refresh": "off",
"heading-level": "warn",
"no-trailing-whitespace": "off",
"wcag/h37": "warn",
"no-inline-style": "off",
"svg-focusable": "off",
"attribute-boolean-style": "off"
}
}

97
cspell.json Normal file
View File

@@ -0,0 +1,97 @@
{
"version": "0.2",
"language": "en,de",
"dictionaries": ["de-de", "html", "css", "typescript", "npm"],
"words": [
"Datasheet",
"datasheets",
"Bodemer",
"Mintel",
"Umami",
"Energiezukunft",
"Energiewende",
"Solarparks",
"Energiekabel",
"Kabelinfrastruktur",
"Großprojekte",
"Zertifizierte",
"Erstberatung",
"Vertriebs",
"Windparkbau",
"Kabelherausforderungen",
"Energieprojekt",
"mittelspannungskabel",
"niederspannungskabel",
"hochspannungskabel",
"solarkabel",
"extralight",
"medv",
"Crect",
"Csvg",
"mintel",
"Zurück",
"Übersicht",
"Raiffeisenstraße",
"Remshalden",
"Experte",
"hochwertige",
"Stromkabel",
"Mittelspannungslösungen",
"Zuverlässige",
"Infrastruktur",
"eine",
"grüne",
"Weiterer",
"Artikel",
"Vorheriger",
"Beitrag",
"Nächster",
"Lösungen",
"Bereit",
"Planung",
"Lieferung",
"hochwertiger",
"erwecken",
"Ihre",
"Projekte",
"Leben",
"Strategischer",
"schnelle",
"Nachhaltige",
"Expertenberatung",
"Qualität",
"nach",
"Projekt",
"anfragen",
"Kostenlose",
"Vorhaben",
"kopiert",
"Teilen",
"Inhalt",
"produkte",
"Fokus",
"drei",
"typische",
"fokus",
"Warum",
"ideale",
"Kabel",
"Deutsch",
"Spannung",
"unbekannt"
],
"ignorePaths": [
"node_modules",
".next",
"public",
"pnpm-lock.yaml",
"*.svg",
"*.mp4",
"directus",
"backstop_data",
".gitea",
"out",
"coverage",
"*.json"
]
}

View File

@@ -46,6 +46,7 @@
"devDependencies": {
"@commitlint/cli": "^20.4.0",
"@commitlint/config-conventional": "^20.4.0",
"@cspell/dict-de-de": "^4.1.2",
"@lhci/cli": "^0.15.1",
"@mintel/eslint-config": "1.8.3",
"@mintel/tsconfig": "1.8.3",
@@ -69,8 +70,10 @@
"backstopjs": "^6.3.25",
"cheerio": "^1.2.0",
"critters": "^0.0.25",
"cspell": "^9.6.4",
"eslint": "^9.18.0",
"happy-dom": "^20.6.1",
"html-validate": "^10.8.0",
"husky": "^9.1.7",
"lint-staged": "^16.2.7",
"lucide-react": "^0.563.0",
@@ -100,6 +103,10 @@
"check:mdx": "node scripts/validate-mdx.mjs",
"check:a11y": "pa11y-ci",
"check:wcag": "tsx ./scripts/wcag-sitemap.ts",
"check:html": "tsx ./scripts/check-html.ts",
"check:spell": "cspell \"content/**/*.{md,mdx}\" \"app/**/*.tsx\" \"components/**/*.tsx\"",
"check:security": "tsx ./scripts/check-security.ts",
"check:links": "bash ./scripts/check-links.sh",
"backstop:reference": "backstop reference --config=backstop.config.js --docker",
"backstop:test": "backstop test --config=backstop.config.js --docker",
"backstop:approve": "backstop approve --config=backstop.config.js --docker",

793
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

82
scripts/check-html.ts Normal file
View File

@@ -0,0 +1,82 @@
import axios from 'axios';
import * as cheerio from 'cheerio';
import * as fs from 'fs';
import * as path from 'path';
import { execSync } from 'child_process';

// Target site to validate; CLI argument wins over env var, falls back to the local dev server.
const targetUrl = process.argv[2] || process.env.NEXT_PUBLIC_BASE_URL || 'http://localhost:3000';
// Maximum number of pages to download and validate (keeps CI runtime bounded).
const limit = process.env.PAGESPEED_LIMIT ? parseInt(process.env.PAGESPEED_LIMIT, 10) : 20;
// Session cookie value that lets the crawler through the staging gatekeeper.
const gatekeeperPassword = process.env.GATEKEEPER_PASSWORD || 'klz2026';

/**
 * Fetches the sitemap from `targetUrl`, downloads up to `limit` pages into a
 * temp directory and runs html-validate over the saved HTML.
 *
 * Exits non-zero when the sitemap is empty/unreachable, a page fetch fails,
 * or html-validate reports issues. The temp directory is removed in `finally`
 * (note: `process.exit` inside the try block skips `finally`; the directory is
 * also wiped at the start of the next run, so stale files cannot accumulate).
 */
async function main(): Promise<void> {
  console.log(`\n🚀 Starting HTML Validation for: ${targetUrl}`);
  console.log(`📊 Limit: ${limit} pages\n`);
  // Shared between try and finally so cleanup always targets the same path.
  const outputDir = path.join(process.cwd(), '.htmlvalidate-tmp');
  try {
    const sitemapUrl = `${targetUrl.replace(/\/$/, '')}/sitemap.xml`;
    console.log(`📥 Fetching sitemap from ${sitemapUrl}...`);
    const response = await axios.get(sitemapUrl, {
      headers: { Cookie: `klz_gatekeeper_session=${gatekeeperPassword}` },
      validateStatus: (status) => status < 400,
    });
    const $ = cheerio.load(response.data, { xmlMode: true });
    let urls = $('url loc')
      .map((i, el) => $(el).text())
      .get();
    // Rewrite each URL's origin to the target host: the sitemap may list the
    // production domain while we validate a preview deployment.
    const urlPattern = /https?:\/\/[^\/]+/;
    urls = [...new Set(urls)]
      .filter((u) => u.startsWith('http'))
      .map((u) => u.replace(urlPattern, targetUrl.replace(/\/$/, '')))
      .sort();
    console.log(`✅ Found ${urls.length} URLs in sitemap.`);
    if (urls.length === 0) {
      console.error('❌ No URLs found in sitemap. Is the site up?');
      process.exit(1);
    }
    if (urls.length > limit) {
      console.log(
        `⚠️ Too many pages (${urls.length}). Limiting to ${limit} representative pages.`,
      );
      // Always keep the locale landing pages, then top up with other pages.
      const home = urls.filter((u) => u.endsWith('/de') || u.endsWith('/en') || u === targetUrl);
      const others = urls.filter((u) => !home.includes(u));
      // BUGFIX: clamp the slice count at 0. With `limit - home.length` negative
      // (home pages alone exceed the limit), Array#slice would treat it as an
      // offset from the end and keep almost ALL of `others` instead of none.
      urls = [...home, ...others.slice(0, Math.max(0, limit - home.length))];
    }
    if (fs.existsSync(outputDir)) fs.rmSync(outputDir, { recursive: true, force: true });
    fs.mkdirSync(outputDir, { recursive: true });
    console.log(`📥 Fetching HTML for ${urls.length} pages...`);
    // Sequential on purpose: avoids hammering the freshly deployed preview.
    for (let i = 0; i < urls.length; i++) {
      const u = urls[i];
      const res = await axios.get(u, {
        headers: { Cookie: `klz_gatekeeper_session=${gatekeeperPassword}` },
      });
      fs.writeFileSync(path.join(outputDir, `page-${i}.html`), res.data);
    }
    console.log(`\n💻 Executing html-validate...`);
    try {
      execSync(`npx html-validate .htmlvalidate-tmp/*.html`, { stdio: 'inherit' });
      console.log(`✅ HTML Validation passed perfectly!`);
    } catch {
      // html-validate already printed its report via stdio: 'inherit'.
      console.error(`❌ HTML Validation found issues.`);
      process.exit(1);
    }
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : String(error);
    console.error(`\n❌ Error during HTML Validation:`, message);
    process.exit(1);
  } finally {
    if (fs.existsSync(outputDir)) fs.rmSync(outputDir, { recursive: true, force: true });
  }
}
main();

26
scripts/check-links.sh Normal file
View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Deep link checker: provisions a pinned Lychee binary on first run, then
# scans project content and component sources for dead hardcoded links.
#
# -e: abort on any command failure
# -u: treat unset variables as errors
# -o pipefail: a failing stage inside a pipeline fails the whole pipeline
set -euo pipefail

# Single source of truth for the pinned release (message + download URL).
LYCHEE_VERSION="v0.15.1"

# Auto-provision Lychee Rust Binary if missing
if [ ! -f ./lychee ]; then
  echo "📥 Downloading Lychee Link Checker (${LYCHEE_VERSION})..."
  # -f: fail on HTTP errors instead of saving the error page as the tarball,
  # which would otherwise surface later as a cryptic tar extraction failure.
  curl -fsSLo lychee.tar.gz "https://github.com/lycheeverse/lychee/releases/download/${LYCHEE_VERSION}/lychee-${LYCHEE_VERSION}-x86_64-unknown-linux-gnu.tar.gz"
  tar -xzf lychee.tar.gz lychee
  rm lychee.tar.gz
  chmod +x ./lychee
fi

echo "🚀 Starting Deep Link Assessment (Lychee)..."
# Exclude localhost, mintel.me (internal infrastructure), and common placeholder domains
# Scan markdown files and component files for hardcoded dead links
# --accept: 401/403 count as alive (auth-gated pages still resolve).
./lychee \
  --exclude "localhost" \
  --exclude "127.0.0.1" \
  --exclude "mintel\.me" \
  --exclude "example\.com" \
  --exclude-mail \
  --accept 200,204,401,403 \
  "./content/**/*.mdx" "./content/**/*.md" "./app/**/*.tsx" "./components/**/*.tsx"
echo "✅ All project source links are alive and healthy!"

54
scripts/check-security.ts Normal file
View File

@@ -0,0 +1,54 @@
import axios from 'axios';

// Target site to scan; CLI argument wins over env var, falls back to the local dev server.
const targetUrl = process.argv[2] || process.env.NEXT_PUBLIC_BASE_URL || 'http://localhost:3000';
// Session cookie value that lets the scanner through the staging gatekeeper.
const gatekeeperPassword = process.env.GATEKEEPER_PASSWORD || 'klz2026';
// Security headers every response must carry (lowercase — axios normalizes header keys).
const requiredHeaders = [
  'strict-transport-security',
  'x-frame-options',
  'x-content-type-options',
  'referrer-policy',
  'content-security-policy',
];

/**
 * Issues a single HEAD request against `targetUrl` and verifies that every
 * required security header is present, printing a result table.
 *
 * Missing headers exit 1 in CI but exit 0 locally (so a dev server without
 * production headers does not hard-fail); transport errors always exit 1.
 */
async function main(): Promise<void> {
  console.log(`\n🛡 Starting Security Headers Scan for: ${targetUrl}\n`);
  try {
    const response = await axios.head(targetUrl, {
      headers: { Cookie: `klz_gatekeeper_session=${gatekeeperPassword}` },
      // Any status is fine — we only inspect the response headers.
      validateStatus: () => true,
    });
    const headers = response.headers;
    let allPassed = true;
    const results = requiredHeaders.map((header) => {
      const present = !!headers[header];
      if (!present) allPassed = false;
      // Coerce to string defensively: axios header values are not guaranteed
      // to be plain strings, and we call string methods on them below.
      const raw = present ? String(headers[header]) : 'N/A';
      return {
        Header: header,
        Status: present ? '✅ Present' : '❌ Missing',
        // Truncate long values (e.g. CSP) so the table stays readable.
        Value: raw.length > 50 ? raw.substring(0, 47) + '...' : raw,
      };
    });
    console.table(results);
    if (allPassed) {
      console.log(`\n✅ All required security headers are correctly configured!\n`);
      process.exit(0);
    } else {
      console.log(`\n❌ Missing critical security headers. Please update next.config.mjs!\n`);
      process.exit(process.env.CI ? 1 : 0); // Don't crash local dev hard if missing, but crash CI
    }
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : String(error);
    console.error(`❌ Failed to scan headers: ${message}`);
    process.exit(1);
  }
}
main();