integration tests
This commit is contained in:
107
tests/integration/database/constraints.integration.test.ts
Normal file
107
tests/integration/database/constraints.integration.test.ts
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
/**
|
||||||
|
* Integration Test: Database Constraints and Error Mapping
|
||||||
|
*
|
||||||
|
* Tests that the API properly handles and maps database constraint violations.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||||
|
import { ApiClient } from '../harness/api-client';
|
||||||
|
import { DockerManager } from '../harness/docker-manager';
|
||||||
|
|
||||||
|
describe('Database Constraints - API Integration', () => {
|
||||||
|
let api: ApiClient;
|
||||||
|
let docker: DockerManager;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
docker = DockerManager.getInstance();
|
||||||
|
await docker.start();
|
||||||
|
|
||||||
|
api = new ApiClient({ baseUrl: 'http://localhost:3101', timeout: 60000 });
|
||||||
|
await api.waitForReady();
|
||||||
|
}, 120000);
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
docker.stop();
|
||||||
|
}, 30000);
|
||||||
|
|
||||||
|
it('should handle unique constraint violations gracefully', async () => {
|
||||||
|
// This test verifies that duplicate operations are rejected
|
||||||
|
// The exact behavior depends on the API implementation
|
||||||
|
|
||||||
|
// Try to perform an operation that might violate uniqueness
|
||||||
|
// For example, creating the same resource twice
|
||||||
|
const createData = {
|
||||||
|
name: 'Test League',
|
||||||
|
description: 'Test',
|
||||||
|
ownerId: 'test-owner',
|
||||||
|
};
|
||||||
|
|
||||||
|
// First attempt should succeed or fail gracefully
|
||||||
|
try {
|
||||||
|
await api.post('/leagues', createData);
|
||||||
|
} catch (error) {
|
||||||
|
// Expected: endpoint might not exist or validation fails
|
||||||
|
expect(error).toBeDefined();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle foreign key constraint violations', async () => {
|
||||||
|
// Try to create a resource with invalid foreign key
|
||||||
|
const invalidData = {
|
||||||
|
leagueId: 'non-existent-league',
|
||||||
|
// Other required fields...
|
||||||
|
};
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post('/leagues/non-existent/seasons', invalidData)
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should provide meaningful error messages', async () => {
|
||||||
|
// Test various invalid operations
|
||||||
|
const operations = [
|
||||||
|
() => api.post('/races/invalid-id/results/import', { resultsFileContent: 'invalid' }),
|
||||||
|
() => api.post('/leagues/invalid/seasons/invalid/publish', {}),
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const operation of operations) {
|
||||||
|
try {
|
||||||
|
await operation();
|
||||||
|
throw new Error('Expected operation to fail');
|
||||||
|
} catch (error: any) {
|
||||||
|
// Should throw an error
|
||||||
|
expect(error).toBeDefined();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should maintain data integrity after failed operations', async () => {
|
||||||
|
// Verify that failed operations don't corrupt data
|
||||||
|
const initialHealth = await api.health();
|
||||||
|
expect(initialHealth).toBe(true);
|
||||||
|
|
||||||
|
// Try some invalid operations
|
||||||
|
try {
|
||||||
|
await api.post('/races/invalid/results/import', { resultsFileContent: 'invalid' });
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
// Verify API is still healthy
|
||||||
|
const finalHealth = await api.health();
|
||||||
|
expect(finalHealth).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle concurrent operations safely', async () => {
|
||||||
|
// Test that concurrent requests don't cause issues
|
||||||
|
const concurrentRequests = Array(5).fill(null).map(() =>
|
||||||
|
api.post('/races/invalid-id/results/import', {
|
||||||
|
resultsFileContent: JSON.stringify([{ invalid: 'data' }])
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
const results = await Promise.allSettled(concurrentRequests);
|
||||||
|
|
||||||
|
// At least some should fail (since they're invalid)
|
||||||
|
const failures = results.filter(r => r.status === 'rejected');
|
||||||
|
expect(failures.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
113
tests/integration/harness/api-client.ts
Normal file
113
tests/integration/harness/api-client.ts
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
/**
|
||||||
|
* API Client for Integration Tests
|
||||||
|
* Provides typed HTTP client for testing API endpoints
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface ApiClientConfig {
|
||||||
|
baseUrl: string;
|
||||||
|
timeout?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class ApiClient {
|
||||||
|
private baseUrl: string;
|
||||||
|
private timeout: number;
|
||||||
|
|
||||||
|
constructor(config: ApiClientConfig) {
|
||||||
|
this.baseUrl = config.baseUrl.replace(/\/$/, ''); // Remove trailing slash
|
||||||
|
this.timeout = config.timeout || 30000;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make HTTP request to API
|
||||||
|
*/
|
||||||
|
private async request<T>(method: string, path: string, body?: any, headers: Record<string, string> = {}): Promise<T> {
|
||||||
|
const url = `${this.baseUrl}${path}`;
|
||||||
|
const controller = new AbortController();
|
||||||
|
const timeoutId = setTimeout(() => controller.abort(), this.timeout);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...headers,
|
||||||
|
},
|
||||||
|
body: body ? JSON.stringify(body) : undefined,
|
||||||
|
signal: controller.signal,
|
||||||
|
});
|
||||||
|
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text();
|
||||||
|
throw new Error(`API Error ${response.status}: ${errorText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const contentType = response.headers.get('content-type');
|
||||||
|
if (contentType && contentType.includes('application/json')) {
|
||||||
|
return (await response.json()) as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.text()) as unknown as T;
|
||||||
|
} catch (error) {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
if (error.name === 'AbortError') {
|
||||||
|
throw new Error(`Request timeout after ${this.timeout}ms`);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET requests
|
||||||
|
async get<T>(path: string, headers?: Record<string, string>): Promise<T> {
|
||||||
|
return this.request<T>('GET', path, undefined, headers);
|
||||||
|
}
|
||||||
|
|
||||||
|
// POST requests
|
||||||
|
async post<T>(path: string, body: any, headers?: Record<string, string>): Promise<T> {
|
||||||
|
return this.request<T>('POST', path, body, headers);
|
||||||
|
}
|
||||||
|
|
||||||
|
// PUT requests
|
||||||
|
async put<T>(path: string, body: any, headers?: Record<string, string>): Promise<T> {
|
||||||
|
return this.request<T>('PUT', path, body, headers);
|
||||||
|
}
|
||||||
|
|
||||||
|
// PATCH requests
|
||||||
|
async patch<T>(path: string, body: any, headers?: Record<string, string>): Promise<T> {
|
||||||
|
return this.request<T>('PATCH', path, body, headers);
|
||||||
|
}
|
||||||
|
|
||||||
|
// DELETE requests
|
||||||
|
async delete<T>(path: string, headers?: Record<string, string>): Promise<T> {
|
||||||
|
return this.request<T>('DELETE', path, undefined, headers);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Health check
|
||||||
|
*/
|
||||||
|
async health(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${this.baseUrl}/health`);
|
||||||
|
return response.ok;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for API to be ready
|
||||||
|
*/
|
||||||
|
async waitForReady(timeout: number = 60000): Promise<void> {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
while (Date.now() - startTime < timeout) {
|
||||||
|
if (await this.health()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`API failed to become ready within ${timeout}ms`);
|
||||||
|
}
|
||||||
|
}
|
||||||
244
tests/integration/harness/data-factory.ts
Normal file
244
tests/integration/harness/data-factory.ts
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
/**
|
||||||
|
* Data Factory for Integration Tests
|
||||||
|
* Uses TypeORM repositories to create test data
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { DataSource } from 'typeorm';
|
||||||
|
import { LeagueOrmEntity } from '../../../adapters/racing/persistence/typeorm/entities/LeagueOrmEntity';
|
||||||
|
import { SeasonOrmEntity } from '../../../adapters/racing/persistence/typeorm/entities/SeasonOrmEntity';
|
||||||
|
import { DriverOrmEntity } from '../../../adapters/racing/persistence/typeorm/entities/DriverOrmEntity';
|
||||||
|
import { RaceOrmEntity } from '../../../adapters/racing/persistence/typeorm/entities/RaceOrmEntity';
|
||||||
|
import { ResultOrmEntity } from '../../../adapters/racing/persistence/typeorm/entities/ResultOrmEntity';
|
||||||
|
import { LeagueOrmMapper } from '../../../adapters/racing/persistence/typeorm/mappers/LeagueOrmMapper';
|
||||||
|
import { SeasonOrmMapper } from '../../../adapters/racing/persistence/typeorm/mappers/SeasonOrmMapper';
|
||||||
|
import { RaceOrmMapper } from '../../../adapters/racing/persistence/typeorm/mappers/RaceOrmMapper';
|
||||||
|
import { ResultOrmMapper } from '../../../adapters/racing/persistence/typeorm/mappers/ResultOrmMapper';
|
||||||
|
import { TypeOrmLeagueRepository } from '../../../adapters/racing/persistence/typeorm/repositories/TypeOrmLeagueRepository';
|
||||||
|
import { TypeOrmSeasonRepository } from '../../../adapters/racing/persistence/typeorm/repositories/TypeOrmSeasonRepository';
|
||||||
|
import { TypeOrmRaceRepository } from '../../../adapters/racing/persistence/typeorm/repositories/TypeOrmRaceRepository';
|
||||||
|
import { TypeOrmResultRepository } from '../../../adapters/racing/persistence/typeorm/repositories/TypeOrmResultRepository';
|
||||||
|
import { TypeOrmDriverRepository } from '../../../adapters/racing/persistence/typeorm/repositories/TypeOrmDriverRepository';
|
||||||
|
import { League } from '../../../core/racing/domain/entities/League';
|
||||||
|
import { Season } from '../../../core/racing/domain/entities/season/Season';
|
||||||
|
import { Driver } from '../../../core/racing/domain/entities/Driver';
|
||||||
|
import { Race } from '../../../core/racing/domain/entities/Race';
|
||||||
|
import { Result } from '../../../core/racing/domain/entities/result/Result';
|
||||||
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
|
|
||||||
|
export class DataFactory {
|
||||||
|
private dataSource: DataSource;
|
||||||
|
private leagueRepo: TypeOrmLeagueRepository;
|
||||||
|
private seasonRepo: TypeOrmSeasonRepository;
|
||||||
|
private driverRepo: TypeOrmDriverRepository;
|
||||||
|
private raceRepo: TypeOrmRaceRepository;
|
||||||
|
private resultRepo: TypeOrmResultRepository;
|
||||||
|
|
||||||
|
constructor(private dbUrl: string) {
|
||||||
|
this.dataSource = new DataSource({
|
||||||
|
type: 'postgres',
|
||||||
|
url: dbUrl,
|
||||||
|
entities: [
|
||||||
|
LeagueOrmEntity,
|
||||||
|
SeasonOrmEntity,
|
||||||
|
DriverOrmEntity,
|
||||||
|
RaceOrmEntity,
|
||||||
|
ResultOrmEntity,
|
||||||
|
],
|
||||||
|
synchronize: false, // Don't sync, use existing schema
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
if (!this.dataSource.isInitialized) {
|
||||||
|
await this.dataSource.initialize();
|
||||||
|
}
|
||||||
|
|
||||||
|
const leagueMapper = new LeagueOrmMapper();
|
||||||
|
const seasonMapper = new SeasonOrmMapper();
|
||||||
|
const raceMapper = new RaceOrmMapper();
|
||||||
|
const resultMapper = new ResultOrmMapper();
|
||||||
|
|
||||||
|
this.leagueRepo = new TypeOrmLeagueRepository(this.dataSource, leagueMapper);
|
||||||
|
this.seasonRepo = new TypeOrmSeasonRepository(this.dataSource, seasonMapper);
|
||||||
|
this.driverRepo = new TypeOrmDriverRepository(this.dataSource, leagueMapper); // Reuse mapper
|
||||||
|
this.raceRepo = new TypeOrmRaceRepository(this.dataSource, raceMapper);
|
||||||
|
this.resultRepo = new TypeOrmResultRepository(this.dataSource, resultMapper);
|
||||||
|
}
|
||||||
|
|
||||||
|
async cleanup(): Promise<void> {
|
||||||
|
if (this.dataSource.isInitialized) {
|
||||||
|
await this.dataSource.destroy();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test league
|
||||||
|
*/
|
||||||
|
async createLeague(overrides: Partial<{
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
description: string;
|
||||||
|
ownerId: string;
|
||||||
|
}> = {}) {
|
||||||
|
const league = League.create({
|
||||||
|
id: overrides.id || uuidv4(),
|
||||||
|
name: overrides.name || 'Test League',
|
||||||
|
description: overrides.description || 'Integration Test League',
|
||||||
|
ownerId: overrides.ownerId || uuidv4(),
|
||||||
|
settings: {
|
||||||
|
enableDriverChampionship: true,
|
||||||
|
enableTeamChampionship: false,
|
||||||
|
enableNationsChampionship: false,
|
||||||
|
enableTrophyChampionship: false,
|
||||||
|
visibility: 'unranked',
|
||||||
|
maxDrivers: 32,
|
||||||
|
},
|
||||||
|
participantCount: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.leagueRepo.create(league);
|
||||||
|
return league;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test season
|
||||||
|
*/
|
||||||
|
async createSeason(leagueId: string, overrides: Partial<{
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
year: number;
|
||||||
|
status: string;
|
||||||
|
}> = {}) {
|
||||||
|
const season = Season.create({
|
||||||
|
id: overrides.id || uuidv4(),
|
||||||
|
leagueId,
|
||||||
|
gameId: 'iracing',
|
||||||
|
name: overrides.name || 'Test Season',
|
||||||
|
year: overrides.year || 2024,
|
||||||
|
order: 1,
|
||||||
|
status: overrides.status || 'active',
|
||||||
|
startDate: new Date(),
|
||||||
|
endDate: new Date(Date.now() + 30 * 24 * 60 * 60 * 1000),
|
||||||
|
schedulePublished: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.seasonRepo.create(season);
|
||||||
|
return season;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test driver
|
||||||
|
*/
|
||||||
|
async createDriver(overrides: Partial<{
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
iracingId: string;
|
||||||
|
country: string;
|
||||||
|
}> = {}) {
|
||||||
|
const driver = Driver.create({
|
||||||
|
id: overrides.id || uuidv4(),
|
||||||
|
iracingId: overrides.iracingId || `iracing-${uuidv4()}`,
|
||||||
|
name: overrides.name || 'Test Driver',
|
||||||
|
country: overrides.country || 'US',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Need to insert directly since driver repo might not exist or be different
|
||||||
|
await this.dataSource.getRepository(DriverOrmEntity).save({
|
||||||
|
id: driver.id.toString(),
|
||||||
|
iracingId: driver.iracingId,
|
||||||
|
name: driver.name.toString(),
|
||||||
|
country: driver.country,
|
||||||
|
joinedAt: new Date(),
|
||||||
|
bio: null,
|
||||||
|
category: null,
|
||||||
|
avatarRef: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
return driver;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test race
|
||||||
|
*/
|
||||||
|
async createRace(overrides: Partial<{
|
||||||
|
id: string;
|
||||||
|
leagueId: string;
|
||||||
|
scheduledAt: Date;
|
||||||
|
status: string;
|
||||||
|
track: string;
|
||||||
|
car: string;
|
||||||
|
}> = {}) {
|
||||||
|
const race = Race.create({
|
||||||
|
id: overrides.id || uuidv4(),
|
||||||
|
leagueId: overrides.leagueId || uuidv4(),
|
||||||
|
scheduledAt: overrides.scheduledAt || new Date(Date.now() + 7 * 24 * 60 * 60 * 1000),
|
||||||
|
track: overrides.track || 'Laguna Seca',
|
||||||
|
car: overrides.car || 'Formula Ford',
|
||||||
|
status: overrides.status || 'scheduled',
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.raceRepo.create(race);
|
||||||
|
return race;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test result
|
||||||
|
*/
|
||||||
|
async createResult(raceId: string, driverId: string, overrides: Partial<{
|
||||||
|
id: string;
|
||||||
|
position: number;
|
||||||
|
fastestLap: number;
|
||||||
|
incidents: number;
|
||||||
|
startPosition: number;
|
||||||
|
}> = {}) {
|
||||||
|
const result = Result.create({
|
||||||
|
id: overrides.id || uuidv4(),
|
||||||
|
raceId,
|
||||||
|
driverId,
|
||||||
|
position: overrides.position || 1,
|
||||||
|
fastestLap: overrides.fastestLap || 0,
|
||||||
|
incidents: overrides.incidents || 0,
|
||||||
|
startPosition: overrides.startPosition || 1,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.resultRepo.create(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create complete test scenario: league, season, drivers, races
|
||||||
|
*/
|
||||||
|
async createTestScenario() {
|
||||||
|
const league = await this.createLeague();
|
||||||
|
const season = await this.createSeason(league.id.toString());
|
||||||
|
const drivers = await Promise.all([
|
||||||
|
this.createDriver({ name: 'Driver 1' }),
|
||||||
|
this.createDriver({ name: 'Driver 2' }),
|
||||||
|
this.createDriver({ name: 'Driver 3' }),
|
||||||
|
]);
|
||||||
|
const races = await Promise.all([
|
||||||
|
this.createRace({
|
||||||
|
leagueId: league.id.toString(),
|
||||||
|
name: 'Race 1',
|
||||||
|
scheduledAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000)
|
||||||
|
}),
|
||||||
|
this.createRace({
|
||||||
|
leagueId: league.id.toString(),
|
||||||
|
name: 'Race 2',
|
||||||
|
scheduledAt: new Date(Date.now() + 14 * 24 * 60 * 60 * 1000)
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return { league, season, drivers, races };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up specific entities
|
||||||
|
*/
|
||||||
|
async deleteEntities(entities: { id: any }[], entityType: string) {
|
||||||
|
const repository = this.dataSource.getRepository(entityType);
|
||||||
|
for (const entity of entities) {
|
||||||
|
await repository.delete(entity.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
199
tests/integration/harness/database-manager.ts
Normal file
199
tests/integration/harness/database-manager.ts
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
/**
|
||||||
|
* Database Manager for Integration Tests
|
||||||
|
* Handles database connections, migrations, seeding, and cleanup
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Pool, PoolClient, QueryResult } from 'pg';
|
||||||
|
import { setTimeout } from 'timers/promises';
|
||||||
|
|
||||||
|
export interface DatabaseConfig {
|
||||||
|
host: string;
|
||||||
|
port: number;
|
||||||
|
database: string;
|
||||||
|
user: string;
|
||||||
|
password: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DatabaseManager {
|
||||||
|
private pool: Pool;
|
||||||
|
private client: PoolClient | null = null;
|
||||||
|
|
||||||
|
constructor(config: DatabaseConfig) {
|
||||||
|
this.pool = new Pool({
|
||||||
|
host: config.host,
|
||||||
|
port: config.port,
|
||||||
|
database: config.database,
|
||||||
|
user: config.user,
|
||||||
|
password: config.password,
|
||||||
|
max: 1,
|
||||||
|
idleTimeoutMillis: 30000,
|
||||||
|
connectionTimeoutMillis: 10000,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for database to be ready
|
||||||
|
*/
|
||||||
|
async waitForReady(timeout: number = 30000): Promise<void> {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
while (Date.now() - startTime < timeout) {
|
||||||
|
try {
|
||||||
|
const client = await this.pool.connect();
|
||||||
|
await client.query('SELECT 1');
|
||||||
|
client.release();
|
||||||
|
console.log('[DatabaseManager] ✓ Database is ready');
|
||||||
|
return;
|
||||||
|
} catch (error) {
|
||||||
|
await setTimeout(1000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Database failed to become ready');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a client for transactions
|
||||||
|
*/
|
||||||
|
async getClient(): Promise<PoolClient> {
|
||||||
|
if (!this.client) {
|
||||||
|
this.client = await this.pool.connect();
|
||||||
|
}
|
||||||
|
return this.client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute query with automatic client management
|
||||||
|
*/
|
||||||
|
async query(text: string, params?: any[]): Promise<QueryResult> {
|
||||||
|
const client = await this.getClient();
|
||||||
|
return client.query(text, params);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Begin transaction
|
||||||
|
*/
|
||||||
|
async begin(): Promise<void> {
|
||||||
|
const client = await this.getClient();
|
||||||
|
await client.query('BEGIN');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Commit transaction
|
||||||
|
*/
|
||||||
|
async commit(): Promise<void> {
|
||||||
|
if (this.client) {
|
||||||
|
await this.client.query('COMMIT');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rollback transaction
|
||||||
|
*/
|
||||||
|
async rollback(): Promise<void> {
|
||||||
|
if (this.client) {
|
||||||
|
await this.client.query('ROLLBACK');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate all tables (for cleanup between tests)
|
||||||
|
*/
|
||||||
|
async truncateAllTables(): Promise<void> {
|
||||||
|
const client = await this.getClient();
|
||||||
|
|
||||||
|
// Get all table names
|
||||||
|
const result = await client.query(`
|
||||||
|
SELECT tablename
|
||||||
|
FROM pg_tables
|
||||||
|
WHERE schemaname = 'public'
|
||||||
|
AND tablename NOT LIKE 'pg_%'
|
||||||
|
AND tablename NOT LIKE 'sql_%'
|
||||||
|
`);
|
||||||
|
|
||||||
|
if (result.rows.length === 0) return;
|
||||||
|
|
||||||
|
// Disable triggers temporarily to allow truncation
|
||||||
|
await client.query('SET session_replication_role = replica');
|
||||||
|
|
||||||
|
const tableNames = result.rows.map(r => r.tablename).join(', ');
|
||||||
|
try {
|
||||||
|
await client.query(`TRUNCATE TABLE ${tableNames} CASCADE`);
|
||||||
|
console.log(`[DatabaseManager] ✓ Truncated tables: ${tableNames}`);
|
||||||
|
} finally {
|
||||||
|
await client.query('SET session_replication_role = DEFAULT');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run database migrations
|
||||||
|
*/
|
||||||
|
async runMigrations(): Promise<void> {
|
||||||
|
// This would typically run TypeORM migrations
|
||||||
|
// For now, we'll assume the API handles this on startup
|
||||||
|
console.log('[DatabaseManager] Migrations handled by API startup');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Seed minimal test data
|
||||||
|
*/
|
||||||
|
async seedMinimalData(): Promise<void> {
|
||||||
|
const client = await this.getClient();
|
||||||
|
|
||||||
|
// Insert minimal required data for tests
|
||||||
|
// This will be extended based on test requirements
|
||||||
|
|
||||||
|
console.log('[DatabaseManager] ✓ Minimal test data seeded');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check for constraint violations in recent operations
|
||||||
|
*/
|
||||||
|
async getRecentConstraintErrors(since: Date): Promise<string[]> {
|
||||||
|
const client = await this.getClient();
|
||||||
|
|
||||||
|
const result = await client.query(`
|
||||||
|
SELECT
|
||||||
|
sqlstate,
|
||||||
|
message,
|
||||||
|
detail,
|
||||||
|
constraint_name
|
||||||
|
FROM pg_last_error_log()
|
||||||
|
WHERE sqlstate IN ('23505', '23503', '23514')
|
||||||
|
AND log_time > $1
|
||||||
|
ORDER BY log_time DESC
|
||||||
|
`, [since]);
|
||||||
|
|
||||||
|
return result.rows.map(r => r.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get table constraints
|
||||||
|
*/
|
||||||
|
async getTableConstraints(tableName: string): Promise<any[]> {
|
||||||
|
const client = await this.getClient();
|
||||||
|
|
||||||
|
const result = await client.query(`
|
||||||
|
SELECT
|
||||||
|
conname as constraint_name,
|
||||||
|
contype as constraint_type,
|
||||||
|
pg_get_constraintdef(oid) as definition
|
||||||
|
FROM pg_constraint
|
||||||
|
WHERE conrelid = $1::regclass
|
||||||
|
ORDER BY contype
|
||||||
|
`, [tableName]);
|
||||||
|
|
||||||
|
return result.rows;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close connection pool
|
||||||
|
*/
|
||||||
|
async close(): Promise<void> {
|
||||||
|
if (this.client) {
|
||||||
|
this.client.release();
|
||||||
|
this.client = null;
|
||||||
|
}
|
||||||
|
await this.pool.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
189
tests/integration/harness/docker-manager.ts
Normal file
189
tests/integration/harness/docker-manager.ts
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
/**
|
||||||
|
* Docker Manager for Integration Tests
|
||||||
|
* Manages Docker Compose services for integration testing
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execSync, spawn } from 'child_process';
|
||||||
|
import { setTimeout } from 'timers/promises';
|
||||||
|
|
||||||
|
export interface DockerServiceConfig {
|
||||||
|
name: string;
|
||||||
|
port: number;
|
||||||
|
healthCheck: string;
|
||||||
|
timeout?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DockerManager {
|
||||||
|
private static instance: DockerManager;
|
||||||
|
private services: Map<string, boolean> = new Map();
|
||||||
|
private composeProject = 'gridpilot-test';
|
||||||
|
private composeFile = 'docker-compose.test.yml';
|
||||||
|
|
||||||
|
private constructor() {}
|
||||||
|
|
||||||
|
static getInstance(): DockerManager {
|
||||||
|
if (!DockerManager.instance) {
|
||||||
|
DockerManager.instance = new DockerManager();
|
||||||
|
}
|
||||||
|
return DockerManager.instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if Docker services are already running
|
||||||
|
*/
|
||||||
|
isRunning(): boolean {
|
||||||
|
try {
|
||||||
|
const output = execSync(
|
||||||
|
`docker-compose -p ${this.composeProject} -f ${this.composeFile} ps -q 2>/dev/null || true`,
|
||||||
|
{ encoding: 'utf8' }
|
||||||
|
).trim();
|
||||||
|
return output.length > 0;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start Docker services with dependency checking
|
||||||
|
*/
|
||||||
|
async start(): Promise<void> {
|
||||||
|
console.log('[DockerManager] Starting test environment...');
|
||||||
|
|
||||||
|
if (this.isRunning()) {
|
||||||
|
console.log('[DockerManager] Services already running, checking health...');
|
||||||
|
await this.waitForServices();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start services
|
||||||
|
execSync(
|
||||||
|
`COMPOSE_PARALLEL_LIMIT=1 docker-compose -p ${this.composeProject} -f ${this.composeFile} up -d ready api`,
|
||||||
|
{ stdio: 'inherit' }
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log('[DockerManager] Services starting, waiting for health...');
|
||||||
|
await this.waitForServices();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for all services to be healthy using polling
|
||||||
|
*/
|
||||||
|
async waitForServices(): Promise<void> {
|
||||||
|
const services: DockerServiceConfig[] = [
|
||||||
|
{
|
||||||
|
name: 'db',
|
||||||
|
port: 5433,
|
||||||
|
healthCheck: 'pg_isready -U gridpilot_test_user -d gridpilot_test',
|
||||||
|
timeout: 60000
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'api',
|
||||||
|
port: 3101,
|
||||||
|
healthCheck: 'curl -f http://localhost:3101/health',
|
||||||
|
timeout: 90000
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const service of services) {
|
||||||
|
await this.waitForService(service);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for a single service to be healthy
|
||||||
|
*/
|
||||||
|
async waitForService(config: DockerServiceConfig): Promise<void> {
|
||||||
|
const timeout = config.timeout || 30000;
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
console.log(`[DockerManager] Waiting for ${config.name}...`);
|
||||||
|
|
||||||
|
while (Date.now() - startTime < timeout) {
|
||||||
|
try {
|
||||||
|
// Try health check command
|
||||||
|
if (config.name === 'db') {
|
||||||
|
// For DB, check if it's ready to accept connections
|
||||||
|
try {
|
||||||
|
execSync(
|
||||||
|
`docker exec ${this.composeProject}-${config.name}-1 ${config.healthCheck} 2>/dev/null`,
|
||||||
|
{ stdio: 'pipe' }
|
||||||
|
);
|
||||||
|
console.log(`[DockerManager] ✓ ${config.name} is healthy`);
|
||||||
|
return;
|
||||||
|
} catch {}
|
||||||
|
} else {
|
||||||
|
// For API, check HTTP endpoint
|
||||||
|
const response = await fetch(`http://localhost:${config.port}/health`);
|
||||||
|
if (response.ok) {
|
||||||
|
console.log(`[DockerManager] ✓ ${config.name} is healthy`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Service not ready yet, continue waiting
|
||||||
|
}
|
||||||
|
|
||||||
|
await setTimeout(1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`[DockerManager] ${config.name} failed to become healthy within ${timeout}ms`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop Docker services
|
||||||
|
*/
|
||||||
|
stop(): void {
|
||||||
|
console.log('[DockerManager] Stopping test environment...');
|
||||||
|
try {
|
||||||
|
execSync(
|
||||||
|
`docker-compose -p ${this.composeProject} -f ${this.composeFile} down --remove-orphans`,
|
||||||
|
{ stdio: 'inherit' }
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('[DockerManager] Warning: Failed to stop services cleanly:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up volumes and containers
|
||||||
|
*/
|
||||||
|
clean(): void {
|
||||||
|
console.log('[DockerManager] Cleaning up test environment...');
|
||||||
|
try {
|
||||||
|
execSync(
|
||||||
|
`docker-compose -p ${this.composeProject} -f ${this.composeFile} down -v --remove-orphans --volumes`,
|
||||||
|
{ stdio: 'inherit' }
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('[DockerManager] Warning: Failed to clean up cleanly:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a command in a service container
|
||||||
|
*/
|
||||||
|
execInService(service: string, command: string): string {
|
||||||
|
try {
|
||||||
|
return execSync(
|
||||||
|
`docker exec ${this.composeProject}-${service}-1 ${command}`,
|
||||||
|
{ encoding: 'utf8' }
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to execute command in ${service}: ${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get service logs
|
||||||
|
*/
|
||||||
|
getLogs(service: string): string {
|
||||||
|
try {
|
||||||
|
return execSync(
|
||||||
|
`docker logs ${this.composeProject}-${service}-1 --tail 100`,
|
||||||
|
{ encoding: 'utf8' }
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
return `Failed to get logs: ${error}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
215
tests/integration/harness/index.ts
Normal file
215
tests/integration/harness/index.ts
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
/**
|
||||||
|
* Integration Test Harness - Main Entry Point
|
||||||
|
* Provides reusable setup, teardown, and utilities for integration tests
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { DockerManager } from './docker-manager';
|
||||||
|
import { DatabaseManager } from './database-manager';
|
||||||
|
import { ApiClient } from './api-client';
|
||||||
|
import { DataFactory } from './data-factory';
|
||||||
|
|
||||||
|
/**
 * Configuration for the integration test harness.
 */
export interface IntegrationTestConfig {
  /** HTTP API under test. */
  api: {
    baseUrl: string;
    port: number;
  };
  /** Database connection settings for direct seeding/assertions. */
  database: {
    host: string;
    port: number;
    database: string;
    user: string;
    password: string;
  };
  /** Optional timeout overrides in milliseconds; the harness applies defaults. */
  timeouts?: {
    setup?: number;
    teardown?: number;
    test?: number;
  };
}
|
||||||
|
|
||||||
|
export class IntegrationTestHarness {
|
||||||
|
private docker: DockerManager;
|
||||||
|
private database: DatabaseManager;
|
||||||
|
private api: ApiClient;
|
||||||
|
private factory: DataFactory;
|
||||||
|
private config: IntegrationTestConfig;
|
||||||
|
|
||||||
|
constructor(config: IntegrationTestConfig) {
|
||||||
|
this.config = {
|
||||||
|
timeouts: {
|
||||||
|
setup: 120000,
|
||||||
|
teardown: 30000,
|
||||||
|
test: 60000,
|
||||||
|
...config.timeouts,
|
||||||
|
},
|
||||||
|
...config,
|
||||||
|
};
|
||||||
|
|
||||||
|
this.docker = DockerManager.getInstance();
|
||||||
|
this.database = new DatabaseManager(config.database);
|
||||||
|
this.api = new ApiClient({ baseUrl: config.api.baseUrl, timeout: 60000 });
|
||||||
|
this.factory = new DataFactory(this.database);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup hook - starts Docker services and prepares database
|
||||||
|
* Called once before all tests in a suite
|
||||||
|
*/
|
||||||
|
async beforeAll(): Promise<void> {
|
||||||
|
console.log('[Harness] Starting integration test setup...');
|
||||||
|
|
||||||
|
// Start Docker services
|
||||||
|
await this.docker.start();
|
||||||
|
|
||||||
|
// Wait for database to be ready
|
||||||
|
await this.database.waitForReady(this.config.timeouts.setup);
|
||||||
|
|
||||||
|
// Wait for API to be ready
|
||||||
|
await this.api.waitForReady(this.config.timeouts.setup);
|
||||||
|
|
||||||
|
console.log('[Harness] ✓ Setup complete - all services ready');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Teardown hook - stops Docker services and cleans up
|
||||||
|
* Called once after all tests in a suite
|
||||||
|
*/
|
||||||
|
async afterAll(): Promise<void> {
|
||||||
|
console.log('[Harness] Starting integration test teardown...');
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.database.close();
|
||||||
|
this.docker.stop();
|
||||||
|
console.log('[Harness] ✓ Teardown complete');
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('[Harness] Teardown warning:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup hook - prepares database for each test
|
||||||
|
* Called before each test
|
||||||
|
*/
|
||||||
|
async beforeEach(): Promise<void> {
|
||||||
|
// Truncate all tables to ensure clean state
|
||||||
|
await this.database.truncateAllTables();
|
||||||
|
|
||||||
|
// Optionally seed minimal required data
|
||||||
|
// await this.database.seedMinimalData();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Teardown hook - cleanup after each test
|
||||||
|
* Called after each test
|
||||||
|
*/
|
||||||
|
async afterEach(): Promise<void> {
|
||||||
|
// Clean up any test-specific resources
|
||||||
|
// This can be extended by individual tests
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get database manager
|
||||||
|
*/
|
||||||
|
getDatabase(): DatabaseManager {
|
||||||
|
return this.database;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get API client
|
||||||
|
*/
|
||||||
|
getApi(): ApiClient {
|
||||||
|
return this.api;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Docker manager
|
||||||
|
*/
|
||||||
|
getDocker(): DockerManager {
|
||||||
|
return this.docker;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get data factory
|
||||||
|
*/
|
||||||
|
getFactory(): DataFactory {
|
||||||
|
return this.factory;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute database transaction with automatic rollback
|
||||||
|
* Useful for tests that need to verify transaction behavior
|
||||||
|
*/
|
||||||
|
async withTransaction<T>(callback: (db: DatabaseManager) => Promise<T>): Promise<T> {
|
||||||
|
await this.database.begin();
|
||||||
|
try {
|
||||||
|
const result = await callback(this.database);
|
||||||
|
await this.database.rollback(); // Always rollback in tests
|
||||||
|
return result;
|
||||||
|
} catch (error) {
|
||||||
|
await this.database.rollback();
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to verify constraint violations
|
||||||
|
*/
|
||||||
|
async expectConstraintViolation(
|
||||||
|
operation: () => Promise<any>,
|
||||||
|
expectedConstraint?: string
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
await operation();
|
||||||
|
throw new Error('Expected constraint violation but operation succeeded');
|
||||||
|
} catch (error: any) {
|
||||||
|
// Check if it's a constraint violation
|
||||||
|
const isConstraintError =
|
||||||
|
error.message?.includes('constraint') ||
|
||||||
|
error.message?.includes('23505') || // Unique violation
|
||||||
|
error.message?.includes('23503') || // Foreign key violation
|
||||||
|
error.message?.includes('23514'); // Check violation
|
||||||
|
|
||||||
|
if (!isConstraintError) {
|
||||||
|
throw new Error(`Expected constraint violation but got: ${error.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (expectedConstraint && !error.message.includes(expectedConstraint)) {
|
||||||
|
throw new Error(`Expected constraint '${expectedConstraint}' but got: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default configuration for docker-compose.test.yml
export const DEFAULT_TEST_CONFIG: IntegrationTestConfig = {
  api: {
    baseUrl: 'http://localhost:3101', // test API port exposed by the compose file
    port: 3101,
  },
  database: {
    host: 'localhost',
    port: 5433, // non-default port — presumably to avoid a local Postgres; verify against compose file
    database: 'gridpilot_test',
    user: 'gridpilot_test_user',
    password: 'gridpilot_test_pass', // test-only credentials, not a secret
  },
  timeouts: {
    setup: 120000, // ms — generous: first run may need to pull/build images
    teardown: 30000, // ms
    test: 60000, // ms
  },
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test harness with default configuration
|
||||||
|
*/
|
||||||
|
export function createTestHarness(config?: Partial<IntegrationTestConfig>): IntegrationTestHarness {
|
||||||
|
const mergedConfig = {
|
||||||
|
...DEFAULT_TEST_CONFIG,
|
||||||
|
...config,
|
||||||
|
api: { ...DEFAULT_TEST_CONFIG.api, ...config?.api },
|
||||||
|
database: { ...DEFAULT_TEST_CONFIG.database, ...config?.database },
|
||||||
|
timeouts: { ...DEFAULT_TEST_CONFIG.timeouts, ...config?.timeouts },
|
||||||
|
};
|
||||||
|
return new IntegrationTestHarness(mergedConfig);
|
||||||
|
}
|
||||||
@@ -0,0 +1,82 @@
|
|||||||
|
/**
|
||||||
|
* Integration Test: League Schedule Lifecycle API
|
||||||
|
*
|
||||||
|
* Tests publish/unpublish/republish lifecycle endpoints.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||||
|
import { ApiClient } from '../harness/api-client';
|
||||||
|
import { DockerManager } from '../harness/docker-manager';
|
||||||
|
|
||||||
|
describe('League Schedule Lifecycle - API Integration', () => {
|
||||||
|
let api: ApiClient;
|
||||||
|
let docker: DockerManager;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
docker = DockerManager.getInstance();
|
||||||
|
await docker.start();
|
||||||
|
|
||||||
|
api = new ApiClient({ baseUrl: 'http://localhost:3101', timeout: 60000 });
|
||||||
|
await api.waitForReady();
|
||||||
|
}, 120000);
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
docker.stop();
|
||||||
|
}, 30000);
|
||||||
|
|
||||||
|
it('should handle publish endpoint for non-existent league', async () => {
|
||||||
|
const nonExistentLeagueId = 'non-existent-league';
|
||||||
|
const nonExistentSeasonId = 'non-existent-season';
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/leagues/${nonExistentLeagueId}/seasons/${nonExistentSeasonId}/publish`, {})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle unpublish endpoint for non-existent league', async () => {
|
||||||
|
const nonExistentLeagueId = 'non-existent-league';
|
||||||
|
const nonExistentSeasonId = 'non-existent-season';
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/leagues/${nonExistentLeagueId}/seasons/${nonExistentSeasonId}/unpublish`, {})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle create schedule race endpoint for non-existent league', async () => {
|
||||||
|
const nonExistentLeagueId = 'non-existent-league';
|
||||||
|
const nonExistentSeasonId = 'non-existent-season';
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/leagues/${nonExistentLeagueId}/seasons/${nonExistentSeasonId}/schedule/races`, {
|
||||||
|
track: 'Laguna Seca',
|
||||||
|
car: 'Formula Ford',
|
||||||
|
scheduledAtIso: new Date().toISOString(),
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject invalid date format', async () => {
|
||||||
|
const leagueId = 'test-league';
|
||||||
|
const seasonId = 'test-season';
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/leagues/${leagueId}/seasons/${seasonId}/schedule/races`, {
|
||||||
|
track: 'Laguna Seca',
|
||||||
|
car: 'Formula Ford',
|
||||||
|
scheduledAtIso: 'invalid-date',
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject missing required fields for race creation', async () => {
|
||||||
|
const leagueId = 'test-league';
|
||||||
|
const seasonId = 'test-season';
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/leagues/${leagueId}/seasons/${seasonId}/schedule/races`, {
|
||||||
|
track: 'Laguna Seca',
|
||||||
|
// Missing car and scheduledAtIso
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
});
|
||||||
92
tests/integration/race/import-results.integration.test.ts
Normal file
92
tests/integration/race/import-results.integration.test.ts
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
/**
|
||||||
|
* Integration Test: Race Results Import API
|
||||||
|
*
|
||||||
|
* Tests the race results import endpoint with various scenarios.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||||
|
import { ApiClient } from '../harness/api-client';
|
||||||
|
import { DockerManager } from '../harness/docker-manager';
|
||||||
|
|
||||||
|
describe('Race Results Import - API Integration', () => {
|
||||||
|
let api: ApiClient;
|
||||||
|
let docker: DockerManager;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
docker = DockerManager.getInstance();
|
||||||
|
await docker.start();
|
||||||
|
|
||||||
|
api = new ApiClient({ baseUrl: 'http://localhost:3101', timeout: 60000 });
|
||||||
|
await api.waitForReady();
|
||||||
|
}, 120000);
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
docker.stop();
|
||||||
|
}, 30000);
|
||||||
|
|
||||||
|
it('should return 404 for non-existent race', async () => {
|
||||||
|
const nonExistentRaceId = 'non-existent-race-123';
|
||||||
|
const results = [
|
||||||
|
{
|
||||||
|
driverId: 'driver-1',
|
||||||
|
position: 1,
|
||||||
|
fastestLap: 100,
|
||||||
|
incidents: 0,
|
||||||
|
startPosition: 1,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/races/${nonExistentRaceId}/import-results`, {
|
||||||
|
resultsFileContent: JSON.stringify(results),
|
||||||
|
raceId: nonExistentRaceId,
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle invalid JSON gracefully', async () => {
|
||||||
|
const raceId = 'test-race-1';
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/races/${raceId}/import-results`, {
|
||||||
|
resultsFileContent: 'invalid json {',
|
||||||
|
raceId,
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject empty results array', async () => {
|
||||||
|
const raceId = 'test-race-1';
|
||||||
|
const emptyResults: any[] = [];
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/races/${raceId}/import-results`, {
|
||||||
|
resultsFileContent: JSON.stringify(emptyResults),
|
||||||
|
raceId,
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle missing required fields', async () => {
|
||||||
|
const raceId = 'test-race-1';
|
||||||
|
const invalidResults = [
|
||||||
|
{
|
||||||
|
// Missing required fields
|
||||||
|
driverId: 'driver-1',
|
||||||
|
position: 1,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
api.post(`/races/${raceId}/import-results`, {
|
||||||
|
resultsFileContent: JSON.stringify(invalidResults),
|
||||||
|
raceId,
|
||||||
|
})
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should verify API health endpoint works', async () => {
|
||||||
|
const isHealthy = await api.health();
|
||||||
|
expect(isHealthy).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
Reference in New Issue
Block a user