wip
This commit is contained in:
628
src/data/fileExamples.ts
Normal file
628
src/data/fileExamples.ts
Normal file
@@ -0,0 +1,628 @@
|
||||
/**
|
||||
* File Examples Data Structure
|
||||
*
|
||||
* This module manages file examples for blog posts.
|
||||
* Each example includes the file content, metadata, and can be easily copied or downloaded.
|
||||
*/
|
||||
|
||||
/**
 * A single copyable/downloadable file example attached to a blog post.
 */
export interface FileExample {
  /** Unique identifier, e.g. "python-data-processor". */
  id: string;
  /** Display filename, e.g. "data_processor.py". */
  filename: string;
  /** Full raw file content. */
  content: string;
  /** Language key used for highlighting and MIME-type lookup. */
  language: string;
  /** Optional human-readable summary of the example. */
  description?: string;
  /** Optional tags used for filtering and search. */
  tags?: string[];
  /** Optional slug of the blog post this example belongs to. */
  postSlug?: string;
  /** ISO-8601 creation timestamp. */
  createdAt: string;
  /** ISO-8601 last-modified timestamp. */
  updatedAt: string;
}
|
||||
|
||||
/**
 * A named collection of related file examples that are shown together.
 */
export interface FileExampleGroup {
  /** Unique identifier for the group, e.g. "docker-deployment". */
  groupId: string;
  /** Display title for the group. */
  title: string;
  /** Optional summary of what the group demonstrates. */
  description?: string;
  /** The examples contained in this group. */
  files: FileExample[];
}
|
||||
|
||||
// In-memory storage (for development)
// In production, this could be backed by a database or file system
// NOTE: keyed by FileExample.id; contents are lost on process restart.
const fileExamplesStore = new Map<string, FileExample>();
|
||||
|
||||
// Sample file examples for demonstration
// NOTE: the `content` fields below are verbatim file contents (data, not code
// in this module) — do not edit them when refactoring this file.
export const sampleFileExamples: FileExampleGroup[] = [
  // Group 1: Python data-processing walkthrough (used by "debugging-tips").
  {
    groupId: "python-data-processing",
    title: "Python Data Processing Example",
    description: "A complete example of processing data with error handling",
    files: [
      {
        id: "python-data-processor",
        filename: "data_processor.py",
        content: `import json
import logging
from typing import List, Dict, Any
from pathlib import Path

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class DataProcessor:
    def __init__(self, input_path: str, output_path: str):
        self.input_path = Path(input_path)
        self.output_path = Path(output_path)

    def load_data(self) -> List[Dict[str, Any]]:
        """Load JSON data from input file."""
        if not self.input_path.exists():
            raise FileNotFoundError(f"Input file not found: {self.input_path}")

        with open(self.input_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        logger.info(f"Loaded {len(data)} records")
        return data

    def process_records(self, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Process records and add computed fields."""
        processed = []
        for record in data:
            # Add timestamp
            import time
            record['processed_at'] = time.time()

            # Normalize keys
            record['id'] = record.get('id', '').lower()

            processed.append(record)

        logger.info(f"Processed {len(processed)} records")
        return processed

    def save_data(self, data: List[Dict[str, Any]]) -> None:
        """Save processed data to output file."""
        self.output_path.parent.mkdir(parents=True, exist_ok=True)

        with open(self.output_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)

        logger.info(f"Saved {len(data)} records to {self.output_path}")

    def run(self) -> None:
        """Execute the complete processing pipeline."""
        try:
            data = self.load_data()
            processed = self.process_records(data)
            self.save_data(processed)
            logger.info("Processing completed successfully")
        except Exception as e:
            logger.error(f"Processing failed: {e}")
            raise

if __name__ == "__main__":
    # Example usage
    processor = DataProcessor(
        input_path="data/input.json",
        output_path="data/processed.json"
    )
    processor.run()`,
        language: "python",
        description: "A robust data processor with logging and error handling",
        tags: ["python", "data-processing", "logging"],
        postSlug: "debugging-tips",
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      },
      {
        id: "python-config-example",
        filename: "config.py",
        content: `"""
Configuration management for the data processor
"""

from dataclasses import dataclass
from typing import Optional

@dataclass
class Config:
    """Configuration for data processing."""

    input_path: str
    output_path: str
    batch_size: int = 1000
    max_workers: int = 4
    enable_caching: bool = True
    log_level: str = "INFO"

    @classmethod
    def from_dict(cls, data: dict) -> 'Config':
        """Create config from dictionary."""
        return cls(**data)

    def to_dict(self) -> dict:
        """Convert config to dictionary."""
        return {
            'input_path': self.input_path,
            'output_path': self.output_path,
            'batch_size': self.batch_size,
            'max_workers': self.max_workers,
            'enable_caching': self.enable_caching,
            'log_level': self.log_level
        }

# Default configuration
DEFAULT_CONFIG = Config(
    input_path="data/input.json",
    output_path="data/output.json",
    batch_size=500,
    max_workers=2
)`,
        language: "python",
        description: "Configuration management using dataclasses",
        tags: ["python", "configuration", "dataclasses"],
        postSlug: "debugging-tips",
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      }
    ]
  },
  // Group 2: TypeScript architecture patterns (used by "architecture-patterns").
  {
    groupId: "typescript-architecture",
    title: "TypeScript Architecture Patterns",
    description: "Modern TypeScript patterns for scalable applications",
    files: [
      {
        id: "ts-interface-example",
        filename: "interfaces.ts",
        content: `/**
 * Core interfaces for a scalable TypeScript application
 */

// Repository pattern
export interface Repository<T> {
  findById(id: string): Promise<T | null>;
  findAll(): Promise<T[]>;
  create(entity: Omit<T, 'id'>): Promise<T>;
  update(id: string, entity: Partial<T>): Promise<T>;
  delete(id: string): Promise<boolean>;
}

// Service layer interface
export interface Service<T> {
  get(id: string): Promise<T>;
  list(): Promise<T[]>;
  create(data: any): Promise<T>;
  update(id: string, data: any): Promise<T>;
  remove(id: string): Promise<void>;
}

// Event system
export interface DomainEvent {
  type: string;
  payload: any;
  timestamp: Date;
  source: string;
}

export interface EventHandler {
  handle(event: DomainEvent): Promise<void>;
}

export interface EventPublisher {
  publish(event: DomainEvent): Promise<void>;
  subscribe(handler: EventHandler): void;
}

// Result type for error handling
export type Result<T, E = Error> =
  | { success: true; value: T }
  | { success: false; error: E };

export namespace Result {
  export function ok<T>(value: T): Result<T> {
    return { success: true, value };
  }

  export function fail<E extends Error>(error: E): Result<never, E> {
    return { success: false, error };
  }

  export function isOk<T, E>(result: Result<T, E>): result is { success: true; value: T } {
    return result.success;
  }

  export function isFail<T, E>(result: Result<T, E>): result is { success: false; error: E } {
    return !result.success;
  }
}`,
        language: "typescript",
        description: "TypeScript interfaces for clean architecture",
        tags: ["typescript", "architecture", "interfaces"],
        postSlug: "architecture-patterns",
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      },
      {
        id: "ts-service-example",
        filename: "userService.ts",
        content: `import { Repository, Service, Result, DomainEvent, EventPublisher } from './interfaces';

interface User {
  id: string;
  email: string;
  name: string;
  createdAt: Date;
  updatedAt: Date;
}

interface CreateUserDTO {
  email: string;
  name: string;
}

class UserService implements Service<User> {
  constructor(
    private readonly userRepository: Repository<User>,
    private readonly eventPublisher: EventPublisher
  ) {}

  async get(id: string): Promise<User> {
    const user = await this.userRepository.findById(id);
    if (!user) {
      throw new Error(\`User with id \${id} not found\`);
    }
    return user;
  }

  async list(): Promise<User[]> {
    return this.userRepository.findAll();
  }

  async create(data: CreateUserDTO): Promise<User> {
    // Validate email
    if (!this.isValidEmail(data.email)) {
      throw new Error('Invalid email format');
    }

    // Create user
    const user = await this.userRepository.create({
      ...data,
      createdAt: new Date(),
      updatedAt: new Date()
    });

    // Publish event
    const event: DomainEvent = {
      type: 'USER_CREATED',
      payload: { userId: user.id, email: user.email },
      timestamp: new Date(),
      source: 'UserService'
    };

    await this.eventPublisher.publish(event);

    return user;
  }

  async update(id: string, data: Partial<User>): Promise<User> {
    const existing = await this.get(id);
    const updated = await this.userRepository.update(id, {
      ...data,
      updatedAt: new Date()
    });

    const event: DomainEvent = {
      type: 'USER_UPDATED',
      payload: { userId: id, changes: data },
      timestamp: new Date(),
      source: 'UserService'
    };

    await this.eventPublisher.publish(event);

    return updated;
  }

  async remove(id: string): Promise<void> {
    const success = await this.userRepository.delete(id);
    if (!success) {
      throw new Error(\`Failed to delete user \${id}\`);
    }

    const event: DomainEvent = {
      type: 'USER_DELETED',
      payload: { userId: id },
      timestamp: new Date(),
      source: 'UserService'
    };

    await this.eventPublisher.publish(event);
  }

  private isValidEmail(email: string): boolean {
    const emailRegex = /^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$/;
    return emailRegex.test(email);
  }

  // Additional business logic
  async getUserByEmail(email: string): Promise<User | null> {
    const users = await this.userRepository.findAll();
    return users.find(u => u.email === email) || null;
  }
}

export { UserService, type User, type CreateUserDTO };`,
        language: "typescript",
        description: "Service implementation with domain events",
        tags: ["typescript", "service-layer", "domain-events"],
        postSlug: "architecture-patterns",
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      }
    ]
  },
  // Group 3: Docker deployment configuration (used by "docker-deployment").
  {
    groupId: "docker-deployment",
    title: "Docker Deployment Configuration",
    description: "Production-ready Docker setup",
    files: [
      {
        id: "dockerfile",
        filename: "Dockerfile",
        content: `# Multi-stage build for optimized production image
FROM node:20-alpine AS base

# Install dependencies
FROM base AS deps
WORKDIR /app
COPY package*.json ./
RUN npm ci --only=production --ignore-scripts

# Build stage
FROM base AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build

# Production stage
FROM base AS production
WORKDIR /app

# Create non-root user
RUN addgroup -g 1001 -S nodejs
RUN adduser -S astro -u 1001

# Copy built assets
COPY --from=builder --chown=astro:nodejs /app/dist ./dist
COPY --from=deps --chown=astro:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=astro:nodejs /app/package*.json ./

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD node -e "require('http').get('http://localhost:4321/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"

# Run as non-root
USER astro

EXPOSE 4321

CMD ["node", "dist/server/entry.mjs"]`,
        language: "dockerfile",
        description: "Multi-stage Docker build for production",
        tags: ["docker", "production", "multi-stage"],
        postSlug: "docker-deployment",
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      },
      {
        id: "docker-compose",
        filename: "docker-compose.yml",
        content: `version: '3.8'

services:
  web:
    build:
      context: .
      target: production
    ports:
      - "8080:4321"
    environment:
      - NODE_ENV=production
      - PORT=4321
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "node", "-e", "require('http').get('http://localhost:4321/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          cpus: '1'
          memory: 512M
        reservations:
          cpus: '0.5'
          memory: 256M

  # Optional: Add Redis for caching
  redis:
    image: redis:7-alpine
    restart: unless-stopped
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    command: redis-server --appendonly yes
    deploy:
      resources:
        limits:
          memory: 256M

  # Optional: Add Caddy for reverse proxy
  caddy:
    image: caddy:2-alpine
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./docker/Caddyfile:/etc/caddy/Caddyfile
      - caddy_data:/data
      - caddy_config:/config

volumes:
  redis_data:
  caddy_data:
  caddy_config:`,
        language: "yaml",
        description: "Multi-service Docker Compose setup",
        tags: ["docker", "compose", "orchestration"],
        postSlug: "docker-deployment",
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      }
    ]
  }
];
|
||||
|
||||
// Helper functions for managing file examples
|
||||
export class FileExampleManager {
|
||||
static async getFileExample(id: string): Promise<FileExample | undefined> {
|
||||
// First check in-memory store
|
||||
const stored = fileExamplesStore.get(id);
|
||||
if (stored) return stored;
|
||||
|
||||
// Search in sample data
|
||||
for (const group of sampleFileExamples) {
|
||||
const file = group.files.find(f => f.id === id);
|
||||
if (file) return file;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
static async getFilesByTag(tag: string): Promise<FileExample[]> {
|
||||
const results: FileExample[] = [];
|
||||
|
||||
for (const group of sampleFileExamples) {
|
||||
for (const file of group.files) {
|
||||
if (file.tags?.includes(tag)) {
|
||||
results.push(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
static async searchFiles(query: string): Promise<FileExample[]> {
|
||||
const lowerQuery = query.toLowerCase();
|
||||
|
||||
const results: FileExample[] = [];
|
||||
|
||||
for (const group of sampleFileExamples) {
|
||||
for (const file of group.files) {
|
||||
const searchable = [
|
||||
file.filename,
|
||||
file.description,
|
||||
file.language,
|
||||
...(file.tags || [])
|
||||
].join(' ').toLowerCase();
|
||||
|
||||
if (searchable.includes(lowerQuery)) {
|
||||
results.push(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
static async getAvailableTags(): Promise<string[]> {
|
||||
const tags = new Set<string>();
|
||||
|
||||
for (const group of sampleFileExamples) {
|
||||
for (const file of group.files) {
|
||||
file.tags?.forEach(tag => tags.add(tag));
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(tags).sort();
|
||||
}
|
||||
|
||||
static async createFileExample(example: Omit<FileExample, 'id' | 'createdAt' | 'updatedAt'>): Promise<FileExample> {
|
||||
const id = `${example.filename.replace(/[^a-zA-Z0-9]/g, '-')}-${Date.now()}`;
|
||||
const newExample: FileExample = {
|
||||
...example,
|
||||
id,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString()
|
||||
};
|
||||
|
||||
fileExamplesStore.set(id, newExample);
|
||||
return newExample;
|
||||
}
|
||||
|
||||
static async updateFileExample(id: string, updates: Partial<FileExample>): Promise<FileExample | undefined> {
|
||||
const existing = await this.getFileExample(id);
|
||||
if (!existing) return undefined;
|
||||
|
||||
const updated: FileExample = {
|
||||
...existing,
|
||||
...updates,
|
||||
updatedAt: new Date().toISOString()
|
||||
};
|
||||
|
||||
fileExamplesStore.set(id, updated);
|
||||
return updated;
|
||||
}
|
||||
|
||||
static async deleteFileExample(id: string): Promise<boolean> {
|
||||
return fileExamplesStore.delete(id);
|
||||
}
|
||||
|
||||
static async getAllGroups(): Promise<FileExampleGroup[]> {
|
||||
return sampleFileExamples;
|
||||
}
|
||||
|
||||
static async getGroup(groupId: string): Promise<FileExampleGroup | undefined> {
|
||||
return sampleFileExamples.find(g => g.groupId === groupId);
|
||||
}
|
||||
|
||||
static async downloadFile(id: string): Promise<{ filename: string; content: string; mimeType: string } | null> {
|
||||
const file = await this.getFileExample(id);
|
||||
if (!file) return null;
|
||||
|
||||
const mimeType = this.getMimeType(file.language);
|
||||
return {
|
||||
filename: file.filename,
|
||||
content: file.content,
|
||||
mimeType
|
||||
};
|
||||
}
|
||||
|
||||
static async downloadMultiple(ids: string[]): Promise<Array<{ filename: string; content: string }>> {
|
||||
const files = await Promise.all(ids.map(id => this.getFileExample(id)));
|
||||
return files
|
||||
.filter((f): f is FileExample => f !== undefined)
|
||||
.map(f => ({ filename: f.filename, content: f.content }));
|
||||
}
|
||||
|
||||
private static getMimeType(language: string): string {
|
||||
const mimeTypes: Record<string, string> = {
|
||||
'python': 'text/x-python',
|
||||
'typescript': 'text/x-typescript',
|
||||
'javascript': 'text/javascript',
|
||||
'dockerfile': 'text/x-dockerfile',
|
||||
'yaml': 'text/yaml',
|
||||
'json': 'application/json',
|
||||
'html': 'text/html',
|
||||
'css': 'text/css',
|
||||
'sql': 'text/x-sql',
|
||||
'bash': 'text/x-shellscript',
|
||||
'text': 'text/plain'
|
||||
};
|
||||
return mimeTypes[language] || 'text/plain';
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user