All checks were successful
Monorepo Pipeline / ⚡ Prioritize Release (push) Successful in 3s
Monorepo Pipeline / 🧹 Lint (push) Successful in 1m19s
Monorepo Pipeline / 🧪 Test (push) Successful in 1m5s
Monorepo Pipeline / 🏗️ Build (push) Successful in 1m26s
Monorepo Pipeline / 🚀 Release (push) Has been skipped
Monorepo Pipeline / 🐳 Build Image Processor (push) Has been skipped
Monorepo Pipeline / 🐳 Build Directus (Base) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Gatekeeper (Product) (push) Has been skipped
Monorepo Pipeline / 🐳 Build Build-Base (push) Has been skipped
Monorepo Pipeline / 🐳 Build Production Runtime (push) Has been skipped
164 lines
5.3 KiB
JavaScript
164 lines
5.3 KiB
JavaScript
#!/usr/bin/env node
|
|
// ============================================================================
|
|
// @mintel/concept-engine — CLI Entry Point
|
|
// Simple commander-based CLI for concept generation.
|
|
// ============================================================================
|
|
|
|
import { Command } from "commander";
|
|
import * as path from "node:path";
|
|
import * as fs from "node:fs/promises";
|
|
import { existsSync } from "node:fs";
|
|
import { config as dotenvConfig } from "dotenv";
|
|
import { ConceptPipeline } from "./pipeline.js";
|
|
|
|
// Load .env from monorepo root
|
|
dotenvConfig({ path: path.resolve(process.cwd(), "../../.env") });
|
|
dotenvConfig({ path: path.resolve(process.cwd(), ".env") });
|
|
|
|
const program = new Command();
|
|
|
|
program
|
|
.name("concept")
|
|
.description("AI-powered project concept generator")
|
|
.version("1.0.0");
|
|
|
|
program
|
|
.command("run")
|
|
.description("Run the full concept pipeline")
|
|
.argument("[briefing]", "Briefing text or @path/to/file.txt")
|
|
.option("--url <url>", "Target website URL")
|
|
.option("--comments <comments>", "Additional notes")
|
|
.option("--clear-cache", "Clear crawl cache and re-crawl")
|
|
.option("--output <dir>", "Output directory", "../../out/concepts")
|
|
.option("--crawl-dir <dir>", "Crawl data directory", "../../data/crawls")
|
|
.action(async (briefingArg: string | undefined, options: any) => {
|
|
const openrouterKey =
|
|
process.env.OPENROUTER_API_KEY || process.env.OPENROUTER_KEY;
|
|
if (!openrouterKey) {
|
|
console.error("❌ OPENROUTER_API_KEY not found in environment.");
|
|
process.exit(1);
|
|
}
|
|
|
|
let briefing = briefingArg || "";
|
|
|
|
// Handle @file references
|
|
if (briefing.startsWith("@")) {
|
|
const rawPath = briefing.substring(1);
|
|
const filePath = rawPath.startsWith("/")
|
|
? rawPath
|
|
: path.resolve(process.cwd(), rawPath);
|
|
if (!existsSync(filePath)) {
|
|
console.error(`❌ Briefing file not found: ${filePath}`);
|
|
process.exit(1);
|
|
}
|
|
briefing = await fs.readFile(filePath, "utf8");
|
|
console.log(`📄 Loaded briefing from: ${filePath}`);
|
|
}
|
|
|
|
// Auto-discover URL from briefing
|
|
let url = options.url;
|
|
if (!url && briefing) {
|
|
const urlMatch = briefing.match(/https?:\/\/[^\s]+/);
|
|
if (urlMatch) {
|
|
url = urlMatch[0];
|
|
console.log(`🔗 Discovered URL in briefing: ${url}`);
|
|
}
|
|
}
|
|
|
|
if (!briefing && !url) {
|
|
console.error("❌ Provide a briefing text or --url");
|
|
process.exit(1);
|
|
}
|
|
|
|
const pipeline = new ConceptPipeline(
|
|
{
|
|
openrouterKey,
|
|
zyteApiKey: process.env.ZYTE_API_KEY,
|
|
outputDir: path.resolve(process.cwd(), options.output),
|
|
crawlDir: path.resolve(process.cwd(), options.crawlDir),
|
|
},
|
|
{
|
|
onStepStart: (_id, _name) => {
|
|
// Will be enhanced with Ink spinner later
|
|
},
|
|
onStepComplete: (_id, _result) => {
|
|
// Will be enhanced with Ink UI later
|
|
},
|
|
},
|
|
);
|
|
|
|
try {
|
|
await pipeline.run({
|
|
briefing,
|
|
url,
|
|
comments: options.comments,
|
|
clearCache: options.clearCache,
|
|
});
|
|
|
|
console.log("\n✨ Concept generation complete!");
|
|
} catch (err) {
|
|
console.error(`\n❌ Pipeline failed: ${(err as Error).message}`);
|
|
process.exit(1);
|
|
}
|
|
});
|
|
|
|
program
|
|
.command("analyze")
|
|
.description("Only crawl and analyze a website (no LLM)")
|
|
.argument("<url>", "Website URL to analyze")
|
|
.option("--crawl-dir <dir>", "Crawl data directory", "../../data/crawls")
|
|
.option("--clear-cache", "Clear existing crawl cache")
|
|
.action(async (url: string, options: any) => {
|
|
const { crawlSite } = await import("./scraper.js");
|
|
const { analyzeSite } = await import("./analyzer.js");
|
|
|
|
if (options.clearCache) {
|
|
const { clearCrawlCache } = await import("./scraper.js");
|
|
const domain = new URL(url).hostname;
|
|
await clearCrawlCache(
|
|
path.resolve(process.cwd(), options.crawlDir),
|
|
domain,
|
|
);
|
|
}
|
|
|
|
const pages = await crawlSite(url, {
|
|
zyteApiKey: process.env.ZYTE_API_KEY,
|
|
crawlDir: path.resolve(process.cwd(), options.crawlDir),
|
|
});
|
|
|
|
const domain = new URL(url).hostname;
|
|
const profile = analyzeSite(pages, domain);
|
|
|
|
console.log("\n📊 Site Profile:");
|
|
console.log(` Domain: ${profile.domain}`);
|
|
console.log(` Total Pages: ${profile.totalPages}`);
|
|
console.log(
|
|
` Navigation: ${profile.navigation.map((n) => n.label).join(", ")}`,
|
|
);
|
|
console.log(` Features: ${profile.existingFeatures.join(", ") || "none"}`);
|
|
console.log(` Services: ${profile.services.join(", ") || "none"}`);
|
|
console.log(
|
|
` External Domains: ${profile.externalDomains.join(", ") || "none"}`,
|
|
);
|
|
console.log(` Company: ${profile.companyInfo.name || "unbekannt"}`);
|
|
console.log(` Tax ID: ${profile.companyInfo.taxId || "unbekannt"}`);
|
|
console.log(` Colors: ${profile.colors.join(", ")}`);
|
|
console.log(` Images Found: ${profile.images.length}`);
|
|
console.log(
|
|
` Social: ${
|
|
Object.entries(profile.socialLinks)
|
|
.map(([_k, _v]) => `${_k}`)
|
|
.join(", ") || "none"
|
|
}`,
|
|
);
|
|
|
|
const outputPath = path.join(
|
|
path.resolve(process.cwd(), options.crawlDir),
|
|
domain.replace(/\./g, "-"),
|
|
"_site_profile.json",
|
|
);
|
|
console.log(`\n📦 Full profile saved to: ${outputPath}`);
|
|
});
|
|
|
|
// Parse process.argv and dispatch to the matching subcommand.
program.parse();
|