contract testing
This commit is contained in:
288
scripts/contract-compatibility.ts
Normal file
288
scripts/contract-compatibility.ts
Normal file
@@ -0,0 +1,288 @@
|
||||
#!/usr/bin/env tsx
|
||||
/**
|
||||
* Contract Compatibility Verification Script
|
||||
*
|
||||
* This script verifies that the API contracts are compatible with the website
|
||||
* by running the type generation and then checking for breaking changes.
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import { glob } from 'glob';
|
||||
|
||||
/**
 * Minimal subset of an OpenAPI 3 schema object — only the fields this script
 * inspects when diffing DTO definitions.
 */
interface OpenAPISchema {
  type?: string;                              // JSON-schema primitive type, e.g. 'string'
  format?: string;                            // refinement of type, e.g. 'date-time'
  $ref?: string;                              // reference to another component schema
  items?: OpenAPISchema;                      // element schema when type is 'array'
  properties?: Record<string, OpenAPISchema>; // property schemas when type is 'object'
  required?: string[];                        // names of required properties
  enum?: string[];                            // allowed literal values
  nullable?: boolean;
  description?: string;
}
|
||||
|
||||
/** Shape of the generated openapi.json, limited to what this script reads. */
interface OpenAPISpec {
  openapi: string; // spec version, e.g. '3.0.0'
  info: {
    title: string;
    description: string;
    version: string;
  };
  paths: Record<string, any>; // route definitions — not inspected by this script
  components: {
    schemas: Record<string, OpenAPISchema>; // DTO schemas keyed by schema name
  };
}
|
||||
|
||||
/**
 * One detected difference between the previous and the freshly generated
 * contracts. 'breaking' marks changes that can break existing consumers
 * (e.g. a removed required property or a type change).
 */
interface ContractChange {
  type: 'added' | 'removed' | 'modified' | 'breaking';
  dto: string;        // DTO/schema name the change belongs to
  property?: string;  // set only for property-level changes
  details: string;    // human-readable description used in the report
}
|
||||
|
||||
const colors = {
|
||||
reset: '\x1b[0m',
|
||||
green: '\x1b[32m',
|
||||
red: '\x1b[31m',
|
||||
yellow: '\x1b[33m',
|
||||
blue: '\x1b[34m',
|
||||
dim: '\x1b[2m'
|
||||
};
|
||||
|
||||
async function runContractCompatibilityCheck(): Promise<void> {
|
||||
console.log(`${colors.blue}🔍 Running Contract Compatibility Check...${colors.reset}\n`);
|
||||
|
||||
const apiRoot = path.join(__dirname, '../apps/api');
|
||||
const websiteRoot = path.join(__dirname, '../apps/website');
|
||||
const openapiPath = path.join(apiRoot, 'openapi.json');
|
||||
const generatedTypesDir = path.join(websiteRoot, 'lib/types/generated');
|
||||
const backupDir = path.join(__dirname, '../.backup/contract-types');
|
||||
|
||||
// Step 1: Generate current OpenAPI spec
|
||||
console.log(`${colors.yellow}1. Generating OpenAPI spec...${colors.reset}`);
|
||||
try {
|
||||
execSync('npm run api:generate-spec', { stdio: 'inherit' });
|
||||
} catch (error) {
|
||||
console.error(`${colors.red}❌ Failed to generate OpenAPI spec${colors.reset}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Step 2: Backup current generated types
|
||||
console.log(`${colors.yellow}2. Backing up current generated types...${colors.reset}`);
|
||||
await fs.mkdir(backupDir, { recursive: true });
|
||||
try {
|
||||
const files = await fs.readdir(generatedTypesDir);
|
||||
for (const file of files) {
|
||||
if (file.endsWith('.ts')) {
|
||||
const content = await fs.readFile(path.join(generatedTypesDir, file), 'utf-8');
|
||||
await fs.writeFile(path.join(backupDir, file), content);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`${colors.yellow}⚠️ No existing types to backup${colors.reset}`);
|
||||
}
|
||||
|
||||
// Step 3: Generate new types
|
||||
console.log(`${colors.yellow}3. Generating new types...${colors.reset}`);
|
||||
try {
|
||||
execSync('npm run api:generate-types', { stdio: 'inherit' });
|
||||
} catch (error) {
|
||||
console.error(`${colors.red}❌ Failed to generate types${colors.reset}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Step 4: Compare and detect changes
|
||||
console.log(`${colors.yellow}4. Analyzing contract changes...${colors.reset}`);
|
||||
const changes = await detectContractChanges(backupDir, generatedTypesDir, openapiPath);
|
||||
|
||||
// Step 5: Report results
|
||||
console.log(`${colors.yellow}5. Reporting changes...${colors.reset}\n`);
|
||||
await reportChanges(changes);
|
||||
|
||||
// Step 6: Clean up backup
|
||||
console.log(`${colors.yellow}6. Cleaning up...${colors.reset}`);
|
||||
await fs.rm(backupDir, { recursive: true, force: true });
|
||||
|
||||
console.log(`\n${colors.green}✅ Contract compatibility check completed!${colors.reset}`);
|
||||
}
|
||||
|
||||
async function detectContractChanges(
|
||||
backupDir: string,
|
||||
currentDir: string,
|
||||
openapiPath: string
|
||||
): Promise<ContractChange[]> {
|
||||
const changes: ContractChange[] = [];
|
||||
|
||||
// Read OpenAPI spec
|
||||
const specContent = await fs.readFile(openapiPath, 'utf-8');
|
||||
const spec: OpenAPISpec = JSON.parse(specContent);
|
||||
const schemas = spec.components.schemas;
|
||||
|
||||
// Get current and backup files
|
||||
const currentFiles = await fs.readdir(currentDir);
|
||||
const backupFiles = await fs.readdir(backupDir);
|
||||
|
||||
const currentDTOs = currentFiles.filter(f => f.endsWith('.ts')).map(f => f.replace('.ts', ''));
|
||||
const backupDTOs = backupFiles.filter(f => f.endsWith('.ts')).map(f => f.replace('.ts', ''));
|
||||
|
||||
// Check for removed DTOs
|
||||
for (const backupDTO of backupDTOs) {
|
||||
if (!currentDTOs.includes(backupDTO)) {
|
||||
changes.push({
|
||||
type: 'removed',
|
||||
dto: backupDTO,
|
||||
details: `DTO ${backupDTO} was removed`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check for added and modified DTOs
|
||||
for (const currentDTO of currentDTOs) {
|
||||
const currentPath = path.join(currentDir, `${currentDTO}.ts`);
|
||||
const backupPath = path.join(backupDir, `${currentDTO}.ts`);
|
||||
|
||||
const currentContent = await fs.readFile(currentPath, 'utf-8');
|
||||
const backupExists = backupDTOs.includes(currentDTO);
|
||||
|
||||
if (!backupExists) {
|
||||
changes.push({
|
||||
type: 'added',
|
||||
dto: currentDTO,
|
||||
details: `New DTO ${currentDTO} was added`
|
||||
});
|
||||
} else {
|
||||
const backupContent = await fs.readFile(backupPath, 'utf-8');
|
||||
|
||||
// Check for property changes
|
||||
const schema = schemas[currentDTO];
|
||||
if (schema && schema.properties) {
|
||||
const currentProps = extractProperties(currentContent);
|
||||
const backupProps = extractProperties(backupContent);
|
||||
|
||||
// Check for removed properties
|
||||
for (const [propName, backupProp] of Object.entries(backupProps)) {
|
||||
if (!currentProps[propName]) {
|
||||
const isRequired = schema.required?.includes(propName);
|
||||
changes.push({
|
||||
type: isRequired ? 'breaking' : 'modified',
|
||||
dto: currentDTO,
|
||||
property: propName,
|
||||
details: `Property ${propName} was removed${isRequired ? ' (BREAKING)' : ''}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check for added properties
|
||||
for (const [propName, currentProp] of Object.entries(currentProps)) {
|
||||
if (!backupProps[propName]) {
|
||||
const isRequired = schema.required?.includes(propName);
|
||||
changes.push({
|
||||
type: isRequired ? 'breaking' : 'added',
|
||||
dto: currentDTO,
|
||||
property: propName,
|
||||
details: `Property ${propName} was added${isRequired ? ' (potentially breaking)' : ''}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check for type changes
|
||||
for (const [propName, currentProp] of Object.entries(currentProps)) {
|
||||
if (backupProps[propName]) {
|
||||
const backupProp = backupProps[propName];
|
||||
if (currentProp.type !== backupProp.type) {
|
||||
changes.push({
|
||||
type: 'breaking',
|
||||
dto: currentDTO,
|
||||
property: propName,
|
||||
details: `Property ${propName} type changed from ${backupProp.type} to ${currentProp.type} (BREAKING)`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
|
||||
function extractProperties(content: string): Record<string, { type: string; optional: boolean }> {
|
||||
const properties: Record<string, { type: string; optional: boolean }> = {};
|
||||
|
||||
// Match property lines: propertyName?: type;
|
||||
const propertyRegex = /^\s*(\w+)(\??):\s*([^;]+);/gm;
|
||||
let match;
|
||||
|
||||
while ((match = propertyRegex.exec(content)) !== null) {
|
||||
const [, name, optional, type] = match;
|
||||
properties[name] = {
|
||||
type: type.trim(),
|
||||
optional: !!optional
|
||||
};
|
||||
}
|
||||
|
||||
return properties;
|
||||
}
|
||||
|
||||
async function reportChanges(changes: ContractChange[]): Promise<void> {
|
||||
if (changes.length === 0) {
|
||||
console.log(`${colors.green}✅ No changes detected - contracts are stable${colors.reset}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const breaking = changes.filter(c => c.type === 'breaking');
|
||||
const modified = changes.filter(c => c.type === 'modified');
|
||||
const added = changes.filter(c => c.type === 'added');
|
||||
const removed = changes.filter(c => c.type === 'removed');
|
||||
|
||||
if (breaking.length > 0) {
|
||||
console.log(`${colors.red}🚨 BREAKING CHANGES DETECTED:${colors.reset}`);
|
||||
breaking.forEach(change => {
|
||||
console.log(` ${colors.red}• ${change.dto}${change.property ? '.' + change.property : ''}: ${change.details}${colors.reset}`);
|
||||
});
|
||||
console.log('');
|
||||
}
|
||||
|
||||
if (removed.length > 0) {
|
||||
console.log(`${colors.red}❌ REMOVED:${colors.reset}`);
|
||||
removed.forEach(change => {
|
||||
console.log(` ${colors.red}• ${change.dto}: ${change.details}${colors.reset}`);
|
||||
});
|
||||
console.log('');
|
||||
}
|
||||
|
||||
if (modified.length > 0) {
|
||||
console.log(`${colors.yellow}⚠️ MODIFIED:${colors.reset}`);
|
||||
modified.forEach(change => {
|
||||
console.log(` ${colors.yellow}• ${change.dto}.${change.property}: ${change.details}${colors.reset}`);
|
||||
});
|
||||
console.log('');
|
||||
}
|
||||
|
||||
if (added.length > 0) {
|
||||
console.log(`${colors.green}➕ ADDED:${colors.reset}`);
|
||||
added.forEach(change => {
|
||||
console.log(` ${colors.green}• ${change.dto}${change.property ? '.' + change.property : ''}: ${change.details}${colors.reset}`);
|
||||
});
|
||||
console.log('');
|
||||
}
|
||||
|
||||
const totalChanges = changes.length;
|
||||
console.log(`${colors.blue}📊 Summary: ${totalChanges} total changes (${breaking.length} breaking, ${removed.length} removed, ${modified.length} modified, ${added.length} added)${colors.reset}`);
|
||||
|
||||
if (breaking.length > 0) {
|
||||
console.log(`\n${colors.red}❌ Contract compatibility check FAILED due to breaking changes${colors.reset}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Entry-point guard: run the check only when this file is executed directly
// (e.g. `tsx scripts/contract-compatibility.ts`), not when imported as a module.
// Any unhandled rejection is logged and mapped to a non-zero exit code so CI
// marks the job as failed.
if (require.main === module) {
  runContractCompatibilityCheck().catch(error => {
    console.error(`${colors.red}❌ Error running contract compatibility check:${colors.reset}`, error);
    process.exit(1);
  });
}
|
||||
@@ -64,18 +64,42 @@ async function generateIndividualDtoFiles(openapiPath: string, outputDir: string
|
||||
|
||||
const schemaNames = Object.keys(schemas);
|
||||
|
||||
// Get existing files in output directory
|
||||
let existingFiles: string[] = [];
|
||||
try {
|
||||
existingFiles = await fs.readdir(outputDir);
|
||||
existingFiles = existingFiles.filter(f => f.endsWith('.ts'));
|
||||
} catch (error) {
|
||||
// Directory doesn't exist yet
|
||||
}
|
||||
|
||||
// Generate individual files for each schema
|
||||
const generatedFileNames: string[] = [];
|
||||
for (const schemaName of schemaNames) {
|
||||
const schema = schemas[schemaName];
|
||||
|
||||
// File name should match the schema name exactly
|
||||
const fileName = `${schemaName}.ts`;
|
||||
const filePath = path.join(outputDir, fileName);
|
||||
|
||||
const fileContent = generateDtoFileContent(schemaName, schema, schemas);
|
||||
await fs.writeFile(filePath, fileContent);
|
||||
console.log(` ✅ Generated ${fileName}`);
|
||||
generatedFileNames.push(fileName);
|
||||
}
|
||||
|
||||
// Clean up files that are no longer in the spec
|
||||
const filesToRemove = existingFiles.filter(f => !generatedFileNames.includes(f));
|
||||
for (const file of filesToRemove) {
|
||||
const filePath = path.join(outputDir, file);
|
||||
await fs.unlink(filePath);
|
||||
console.log(` 🗑️ Removed obsolete file: ${file}`);
|
||||
}
|
||||
|
||||
console.log(`✅ Generated ${schemaNames.length} individual DTO files at: ${outputDir}`);
|
||||
if (filesToRemove.length > 0) {
|
||||
console.log(`🧹 Cleaned up ${filesToRemove.length} obsolete files`);
|
||||
}
|
||||
}
|
||||
|
||||
function generateDtoFileContent(schemaName: string, schema: any, allSchemas: Record<string, any>): string {
|
||||
@@ -101,7 +125,7 @@ function generateDtoFileContent(schemaName: string, schema: any, allSchemas: Rec
|
||||
content += '\n';
|
||||
}
|
||||
|
||||
// Generate interface
|
||||
// Generate interface - use the schema name directly
|
||||
content += `export interface ${schemaName} {\n`;
|
||||
|
||||
const properties = schema.properties || {};
|
||||
|
||||
@@ -86,12 +86,22 @@ async function processDTOFile(filePath: string, schemas: Record<string, OpenAPIS
|
||||
const className = classMatch[1];
|
||||
const classBody = classMatch[2];
|
||||
|
||||
console.log(` 📝 Processing ${className}`);
|
||||
// Normalize class name to always use DTO suffix (not Dto)
|
||||
const normalizedName = className.endsWith('Dto') ?
|
||||
className.slice(0, -3) + 'DTO' : className;
|
||||
|
||||
console.log(` 📝 Processing ${className} -> ${normalizedName}`);
|
||||
|
||||
// Check for conflicts
|
||||
if (schemas[normalizedName]) {
|
||||
console.warn(` ⚠️ Conflict: ${normalizedName} already exists. Skipping duplicate from ${filePath}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const schema = extractSchemaFromClassBody(classBody, content);
|
||||
if (schema && Object.keys(schema.properties || {}).length > 0) {
|
||||
schemas[className] = schema;
|
||||
console.log(` ✅ Added ${className} with ${Object.keys(schema.properties || {}).length} properties`);
|
||||
schemas[normalizedName] = schema;
|
||||
console.log(` ✅ Added ${normalizedName} with ${Object.keys(schema.properties || {}).length} properties`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
85
scripts/run-contract-tests.ts
Normal file
85
scripts/run-contract-tests.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env tsx
|
||||
/**
|
||||
* Contract Testing Integration Script
|
||||
*
|
||||
* This script runs all contract tests in the correct order:
|
||||
* 1. API contract validation
|
||||
* 2. Type generation
|
||||
* 3. Website contract consumption tests
|
||||
* 4. Compatibility verification
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
const colors = {
|
||||
reset: '\x1b[0m',
|
||||
green: '\x1b[32m',
|
||||
red: '\x1b[31m',
|
||||
yellow: '\x1b[33m',
|
||||
blue: '\x1b[34m',
|
||||
cyan: '\x1b[36m',
|
||||
dim: '\x1b[2m'
|
||||
};
|
||||
|
||||
async function runContractTests(): Promise<void> {
|
||||
console.log(`${colors.cyan}🚀 Starting Contract Testing Suite${colors.reset}\n`);
|
||||
|
||||
const steps = [
|
||||
{
|
||||
name: 'API Contract Validation',
|
||||
command: 'npm run test:api:contracts',
|
||||
description: 'Validate API DTOs and OpenAPI spec integrity'
|
||||
},
|
||||
{
|
||||
name: 'Generate OpenAPI Spec',
|
||||
command: 'npm run api:generate-spec',
|
||||
description: 'Generate OpenAPI specification from DTOs'
|
||||
},
|
||||
{
|
||||
name: 'Generate TypeScript Types',
|
||||
command: 'npm run api:generate-types',
|
||||
description: 'Generate TypeScript types for website'
|
||||
},
|
||||
{
|
||||
name: 'Contract Compatibility Check',
|
||||
command: 'npm run test:contract:compatibility',
|
||||
description: 'Check for breaking changes in contracts'
|
||||
},
|
||||
{
|
||||
name: 'Website Type Checking',
|
||||
command: 'npm run website:type-check',
|
||||
description: 'Verify website can consume generated types'
|
||||
}
|
||||
];
|
||||
|
||||
for (let i = 0; i < steps.length; i++) {
|
||||
const step = steps[i];
|
||||
console.log(`${colors.yellow}${i + 1}/${steps.length} ${step.name}${colors.reset}`);
|
||||
console.log(`${colors.dim} ${step.description}${colors.reset}`);
|
||||
|
||||
try {
|
||||
execSync(step.command, {
|
||||
stdio: 'inherit',
|
||||
env: { ...process.env, FORCE_COLOR: 'true' }
|
||||
});
|
||||
console.log(`${colors.green} ✅ ${step.name} completed${colors.reset}\n`);
|
||||
} catch (error) {
|
||||
console.log(`${colors.red} ❌ ${step.name} failed${colors.reset}\n`);
|
||||
console.log(`${colors.red}Contract testing suite failed at step: ${step.name}${colors.reset}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`${colors.green}🎉 All contract tests passed!${colors.reset}`);
|
||||
console.log(`${colors.green}✅ Contracts are compatible and validated${colors.reset}`);
|
||||
}
|
||||
|
||||
// Entry-point guard: run the suite only when this file is executed directly
// (e.g. `tsx scripts/run-contract-tests.ts`), not when imported as a module.
// Any unhandled rejection is logged and mapped to a non-zero exit code so CI
// marks the job as failed.
if (require.main === module) {
  runContractTests().catch(error => {
    console.error(`${colors.red}❌ Contract testing suite failed:${colors.reset}`, error);
    process.exit(1);
  });
}
|
||||
258
scripts/test/type-generation.test.ts
Normal file
258
scripts/test/type-generation.test.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
/**
|
||||
* Test suite for type generation script
|
||||
* Validates that the type generation process works correctly
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import { execSync } from 'child_process';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import { glob } from 'glob';
|
||||
|
||||
describe('Type Generation Script', () => {
|
||||
const apiRoot = path.join(__dirname, '../../apps/api');
|
||||
const websiteRoot = path.join(__dirname, '../../apps/website');
|
||||
const openapiPath = path.join(apiRoot, 'openapi.json');
|
||||
const generatedTypesDir = path.join(websiteRoot, 'lib/types/generated');
|
||||
const backupDir = path.join(__dirname, '../../.backup/type-gen-test');
|
||||
|
||||
beforeAll(async () => {
|
||||
// Backup existing generated types
|
||||
await fs.mkdir(backupDir, { recursive: true });
|
||||
try {
|
||||
const files = await fs.readdir(generatedTypesDir);
|
||||
for (const file of files) {
|
||||
if (file.endsWith('.ts')) {
|
||||
const content = await fs.readFile(path.join(generatedTypesDir, file), 'utf-8');
|
||||
await fs.writeFile(path.join(backupDir, file), content);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// No existing files to backup
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Restore backup
|
||||
try {
|
||||
const backupFiles = await fs.readdir(backupDir);
|
||||
for (const file of backupFiles) {
|
||||
if (file.endsWith('.ts')) {
|
||||
const content = await fs.readFile(path.join(backupDir, file), 'utf-8');
|
||||
await fs.writeFile(path.join(generatedTypesDir, file), content);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// No backup to restore
|
||||
}
|
||||
// Clean up backup
|
||||
await fs.rm(backupDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('OpenAPI Spec Generation', () => {
|
||||
it('should generate valid OpenAPI spec', async () => {
|
||||
// Run the spec generation
|
||||
execSync('npm run api:generate-spec', {
|
||||
cwd: path.join(__dirname, '../..'),
|
||||
stdio: 'pipe'
|
||||
});
|
||||
|
||||
// Check that spec exists and is valid JSON
|
||||
const specContent = await fs.readFile(openapiPath, 'utf-8');
|
||||
expect(() => JSON.parse(specContent)).not.toThrow();
|
||||
|
||||
const spec = JSON.parse(specContent);
|
||||
expect(spec.openapi).toMatch(/^3\.\d+\.\d+$/);
|
||||
expect(spec.components).toBeDefined();
|
||||
expect(spec.components.schemas).toBeDefined();
|
||||
});
|
||||
|
||||
it('should not have duplicate schema names with different casing', async () => {
|
||||
const specContent = await fs.readFile(openapiPath, 'utf-8');
|
||||
const spec = JSON.parse(specContent);
|
||||
const schemas = Object.keys(spec.components.schemas);
|
||||
|
||||
// Check for duplicates with different casing
|
||||
const lowerCaseMap = new Map<string, string[]>();
|
||||
schemas.forEach(schema => {
|
||||
const lower = schema.toLowerCase();
|
||||
if (!lowerCaseMap.has(lower)) {
|
||||
lowerCaseMap.set(lower, []);
|
||||
}
|
||||
lowerCaseMap.get(lower)!.push(schema);
|
||||
});
|
||||
|
||||
const duplicates = Array.from(lowerCaseMap.entries())
|
||||
.filter(([_, names]) => names.length > 1);
|
||||
|
||||
expect(duplicates.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should generate spec with consistent naming', async () => {
|
||||
const specContent = await fs.readFile(openapiPath, 'utf-8');
|
||||
const spec = JSON.parse(specContent);
|
||||
const schemas = Object.keys(spec.components.schemas);
|
||||
|
||||
// All schemas should follow DTO naming convention
|
||||
const invalidNames = schemas.filter(name => !name.endsWith('DTO') && !name.endsWith('Dto'));
|
||||
expect(invalidNames.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type Generation', () => {
|
||||
it('should generate TypeScript files for all schemas', async () => {
|
||||
// Generate types
|
||||
execSync('npm run api:generate-types', {
|
||||
cwd: path.join(__dirname, '../..'),
|
||||
stdio: 'pipe'
|
||||
});
|
||||
|
||||
// Read generated files
|
||||
const generatedFiles = await fs.readdir(generatedTypesDir);
|
||||
const generatedDTOs = generatedFiles
|
||||
.filter(f => f.endsWith('.ts'))
|
||||
.map(f => f.replace('.ts', ''));
|
||||
|
||||
// Read OpenAPI spec
|
||||
const specContent = await fs.readFile(openapiPath, 'utf-8');
|
||||
const spec = JSON.parse(specContent);
|
||||
const schemas = Object.keys(spec.components.schemas);
|
||||
|
||||
// Most schemas should have corresponding generated files
|
||||
// (allowing for some duplicates/conflicts that are intentionally skipped)
|
||||
const missingFiles = schemas.filter(schema => !generatedDTOs.includes(schema));
|
||||
|
||||
// Should have at least 95% coverage
|
||||
const coverage = (schemas.length - missingFiles.length) / schemas.length;
|
||||
expect(coverage).toBeGreaterThan(0.95);
|
||||
});
|
||||
|
||||
it('should generate files with correct interface names', async () => {
|
||||
const files = await fs.readdir(generatedTypesDir);
|
||||
const dtos = files.filter(f => f.endsWith('.ts'));
|
||||
|
||||
for (const file of dtos) {
|
||||
const content = await fs.readFile(path.join(generatedTypesDir, file), 'utf-8');
|
||||
const interfaceName = file.replace('.ts', '');
|
||||
|
||||
// File should contain an interface (name might be normalized)
|
||||
expect(content).toMatch(/export interface \w+\s*{/);
|
||||
|
||||
// Should not have duplicate interface names in the same file
|
||||
const interfaceMatches = content.match(/export interface (\w+)/g);
|
||||
expect(interfaceMatches?.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
it('should generate valid TypeScript syntax', async () => {
|
||||
const files = await fs.readdir(generatedTypesDir);
|
||||
const dtos = files.filter(f => f.endsWith('.ts'));
|
||||
|
||||
for (const file of dtos) {
|
||||
const content = await fs.readFile(path.join(generatedTypesDir, file), 'utf-8');
|
||||
|
||||
// Basic syntax checks
|
||||
expect(content).toContain('export interface');
|
||||
expect(content).toContain('{');
|
||||
expect(content).toContain('}');
|
||||
expect(content).toContain('Auto-generated DTO');
|
||||
|
||||
// Should not have syntax errors
|
||||
expect(content).not.toMatch(/interface\s+\w+\s*\{\s*\}/); // Empty interfaces
|
||||
expect(content).not.toContain('undefined;');
|
||||
expect(content).not.toContain('any;');
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle dependencies correctly', async () => {
|
||||
const files = await fs.readdir(generatedTypesDir);
|
||||
const dtos = files.filter(f => f.endsWith('.ts'));
|
||||
|
||||
for (const file of dtos) {
|
||||
const content = await fs.readFile(path.join(generatedTypesDir, file), 'utf-8');
|
||||
const importMatches = content.match(/import type \{ (\w+) \} from '\.\/(\w+)';/g) || [];
|
||||
|
||||
for (const importLine of importMatches) {
|
||||
const match = importLine.match(/import type \{ (\w+) \} from '\.\/(\w+)';/);
|
||||
if (match) {
|
||||
const [, importedType, fromFile] = match;
|
||||
|
||||
// Import type should match the file name
|
||||
expect(importedType).toBe(fromFile);
|
||||
|
||||
// The imported file should exist
|
||||
const importedPath = path.join(generatedTypesDir, `${fromFile}.ts`);
|
||||
const exists = await fs.access(importedPath).then(() => true).catch(() => false);
|
||||
expect(exists).toBe(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('should maintain consistent naming between OpenAPI and generated files', async () => {
|
||||
const specContent = await fs.readFile(openapiPath, 'utf-8');
|
||||
const spec = JSON.parse(specContent);
|
||||
const schemas = Object.keys(spec.components.schemas);
|
||||
|
||||
const generatedFiles = await fs.readdir(generatedTypesDir);
|
||||
const generatedDTOs = generatedFiles
|
||||
.filter(f => f.endsWith('.ts'))
|
||||
.map(f => f.replace('.ts', ''));
|
||||
|
||||
// Check that most schemas have matching files (allowing for some edge cases)
|
||||
const missingFiles = schemas.filter(schema => !generatedDTOs.includes(schema));
|
||||
const coverage = (schemas.length - missingFiles.length) / schemas.length;
|
||||
expect(coverage).toBeGreaterThan(0.95);
|
||||
|
||||
// Check that most files have matching schemas (allowing for normalization)
|
||||
const extraFiles = generatedDTOs.filter(dto => !schemas.includes(dto));
|
||||
const extraCoverage = (generatedDTOs.length - extraFiles.length) / generatedDTOs.length;
|
||||
expect(extraCoverage).toBeGreaterThan(0.95);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration', () => {
|
||||
it('should generate types that can be imported without errors', async () => {
|
||||
// Generate types first
|
||||
execSync('npm run api:generate-types', {
|
||||
cwd: path.join(__dirname, '../..'),
|
||||
stdio: 'pipe'
|
||||
});
|
||||
|
||||
// Try to import a few key DTOs
|
||||
const testDTOs = [
|
||||
'RaceDTO',
|
||||
'DriverDTO',
|
||||
'RequestAvatarGenerationInputDTO',
|
||||
'RequestAvatarGenerationOutputDTO'
|
||||
];
|
||||
|
||||
for (const dto of testDTOs) {
|
||||
const filePath = path.join(generatedTypesDir, `${dto}.ts`);
|
||||
const exists = await fs.access(filePath).then(() => true).catch(() => false);
|
||||
|
||||
if (exists) {
|
||||
const content = await fs.readFile(filePath, 'utf-8');
|
||||
// Should be valid TypeScript that can be parsed
|
||||
expect(content).toContain(`export interface ${dto}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle the full generation workflow', async () => {
|
||||
// Run complete workflow
|
||||
execSync('npm run api:sync-types', {
|
||||
cwd: path.join(__dirname, '../..'),
|
||||
stdio: 'pipe'
|
||||
});
|
||||
|
||||
// Verify both spec and types were generated
|
||||
const specExists = await fs.access(openapiPath).then(() => true).catch(() => false);
|
||||
expect(specExists).toBe(true);
|
||||
|
||||
const files = await fs.readdir(generatedTypesDir);
|
||||
const tsFiles = files.filter(f => f.endsWith('.ts'));
|
||||
expect(tsFiles.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user