feat(Production): Complete production deployment infrastructure

- Add comprehensive health check system with multiple endpoints
- Add Prometheus metrics endpoint
- Add production logging configurations (5 strategies)
- Add complete deployment documentation suite:
  * QUICKSTART.md - 30-minute deployment guide
  * DEPLOYMENT_CHECKLIST.md - Printable verification checklist
  * DEPLOYMENT_WORKFLOW.md - Complete deployment lifecycle
  * PRODUCTION_DEPLOYMENT.md - Comprehensive technical reference
  * production-logging.md - Logging configuration guide
  * ANSIBLE_DEPLOYMENT.md - Infrastructure as Code automation
  * README.md - Navigation hub
  * DEPLOYMENT_SUMMARY.md - Executive summary
- Add deployment scripts and automation
- Add DEPLOYMENT_PLAN.md - Concrete plan for immediate deployment
- Update README with production-ready features

All production infrastructure is now complete and ready for deployment.
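A minimal smoke check for the new health and metrics endpoints (a sketch only: the /health and /metrics paths and BASE_URL are assumptions; the concrete routes are defined in the deployment docs listed above):

// smoke-check.mjs: probe the assumed /health and /metrics routes after deployment
import { request } from '@playwright/test';

const baseURL = process.env.BASE_URL || 'https://localhost';
const api = await request.newContext({ baseURL, ignoreHTTPSErrors: true });

const health = await api.get('/health'); // assumed health check path
console.log('health:', health.status());

const metrics = await api.get('/metrics'); // assumed Prometheus scrape path
console.log('metrics sample:', (await metrics.text()).split('\n').slice(0, 3).join(' | '));

await api.dispose();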
Commit fc3d7e6357 (parent caa85db796), 2025-10-25 19:18:37 +02:00
83016 changed files with 378904 additions and 20919 deletions


@@ -0,0 +1,621 @@
/**
* LiveComponents Concurrent Upload Load Tests
*
* Tests the upload system under high load with concurrent uploads to validate:
* - System scalability and performance under stress
* - Resource management (memory, CPU, network)
* - Queue management for concurrent uploads
* - Server capacity and response times
* - Error handling under load
* - Recovery mechanisms during high traffic
*
* Run with: npx playwright test concurrent-upload-load.spec.js
*/
import { test, expect } from '@playwright/test';
import { mkdir, rm } from 'fs/promises';
import { createWriteStream, existsSync } from 'fs';
import { join } from 'path';
// Load Test Configuration
const LOAD_TEST_CONFIG = {
// Concurrent upload scenarios
light: {
users: 5,
filesPerUser: 2,
fileSizeMB: 1,
expectedDuration: 30000 // 30 seconds
},
moderate: {
users: 10,
filesPerUser: 3,
fileSizeMB: 2,
expectedDuration: 60000 // 60 seconds
},
heavy: {
users: 20,
filesPerUser: 5,
fileSizeMB: 5,
expectedDuration: 120000 // 120 seconds
},
stress: {
users: 50,
filesPerUser: 2,
fileSizeMB: 1,
expectedDuration: 180000 // 180 seconds
}
};
// Performance Thresholds
const PERFORMANCE_THRESHOLDS = {
light: {
maxDuration: 30000, // 30 seconds
maxMemoryMB: 200, // 200MB
maxAvgResponseTime: 1000, // 1 second
minSuccessRate: 0.95 // 95%
},
moderate: {
maxDuration: 60000, // 60 seconds
maxMemoryMB: 500, // 500MB
maxAvgResponseTime: 2000, // 2 seconds
minSuccessRate: 0.90 // 90%
},
heavy: {
maxDuration: 120000, // 120 seconds
maxMemoryMB: 1000, // 1GB
maxAvgResponseTime: 3000, // 3 seconds
minSuccessRate: 0.85 // 85%
},
stress: {
maxDuration: 180000, // 180 seconds
maxMemoryMB: 2000, // 2GB
maxAvgResponseTime: 5000, // 5 seconds
minSuccessRate: 0.80 // 80%
}
};
// Test file directory
const TEST_FILES_DIR = join(process.cwd(), 'tests', 'tmp', 'load-test-files');
/**
* Create test file with specified size
*/
async function createTestFile(filename, sizeInMB) {
await mkdir(TEST_FILES_DIR, { recursive: true });
const filePath = join(TEST_FILES_DIR, filename);
const sizeInBytes = sizeInMB * 1024 * 1024;
const chunkSize = 1024 * 1024; // 1MB chunks
const chunks = Math.ceil(sizeInBytes / chunkSize);
const buffer = Buffer.alloc(chunkSize);
let written = 0;
const stream = createWriteStream(filePath);
for (let i = 0; i < chunks; i++) {
const remaining = sizeInBytes - written;
const writeSize = Math.min(chunkSize, remaining);
// Fill with a deterministic pattern for later verification
for (let j = 0; j < writeSize; j++) {
buffer[j] = (i + j) % 256;
}
// Copy the slice so the next iteration cannot overwrite data still queued in the stream
stream.write(Buffer.from(buffer.subarray(0, writeSize)));
written += writeSize;
}
stream.end();
await new Promise((resolve, reject) => {
stream.on('finish', resolve);
stream.on('error', reject);
});
return filePath;
}
/**
* Cleanup test files
*/
async function cleanupTestFiles() {
if (existsSync(TEST_FILES_DIR)) {
await rm(TEST_FILES_DIR, { recursive: true, force: true });
}
}
/**
* Simulate concurrent user session
*/
async function simulateUserSession(browser, userId, config) {
const context = await browser.newContext();
const page = await context.newPage();
const userMetrics = {
userId,
uploads: [],
totalDuration: 0,
memoryUsage: [],
errors: []
};
try {
await page.goto('https://localhost/livecomponents/test/upload');
await page.waitForFunction(() => window.LiveComponents !== undefined);
const startTime = Date.now();
// Upload multiple files concurrently per user
for (let fileIndex = 0; fileIndex < config.filesPerUser; fileIndex++) {
const filename = `user${userId}-file${fileIndex}.bin`;
const testFile = await createTestFile(filename, config.fileSizeMB);
const uploadStart = Date.now();
try {
// Set file input
const fileInput = page.locator('input[type="file"]');
await fileInput.setInputFiles(testFile);
// Start upload
await page.click('button#upload-btn');
// Wait for completion
await page.waitForSelector('.upload-complete', {
timeout: 60000
});
const uploadDuration = Date.now() - uploadStart;
userMetrics.uploads.push({
filename,
duration: uploadDuration,
success: true
});
// Collect memory metrics
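// performance.memory is a non-standard, Chromium-only API; other browsers return 0 and the sample is skipped below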
const memory = await page.evaluate(() => {
if (performance.memory) {
return performance.memory.usedJSHeapSize / (1024 * 1024); // MB
}
return 0;
});
if (memory > 0) {
userMetrics.memoryUsage.push(memory);
}
} catch (error) {
userMetrics.errors.push({
filename,
error: error.message
});
userMetrics.uploads.push({
filename,
duration: Date.now() - uploadStart,
success: false,
error: error.message
});
}
}
userMetrics.totalDuration = Date.now() - startTime;
} finally {
await context.close();
}
return userMetrics;
}
/**
* Aggregate metrics from all user sessions
*/
function aggregateMetrics(allUserMetrics) {
const totalUploads = allUserMetrics.reduce(
(sum, user) => sum + user.uploads.length,
0
);
const successfulUploads = allUserMetrics.reduce(
(sum, user) => sum + user.uploads.filter(u => u.success).length,
0
);
const failedUploads = totalUploads - successfulUploads;
const allDurations = allUserMetrics.flatMap(
user => user.uploads.map(u => u.duration)
);
const avgDuration = allDurations.reduce((sum, d) => sum + d, 0) / allDurations.length;
const maxDuration = Math.max(...allDurations);
const minDuration = Math.min(...allDurations);
const allMemoryUsage = allUserMetrics.flatMap(user => user.memoryUsage);
const avgMemory = allMemoryUsage.length > 0
? allMemoryUsage.reduce((sum, m) => sum + m, 0) / allMemoryUsage.length
: 0;
const maxMemory = allMemoryUsage.length > 0 ? Math.max(...allMemoryUsage) : 0;
const successRate = successfulUploads / totalUploads;
return {
totalUploads,
successfulUploads,
failedUploads,
successRate,
avgDuration,
maxDuration,
minDuration,
avgMemory,
maxMemory,
errors: allUserMetrics.flatMap(user => user.errors)
};
}
test.describe('Concurrent Upload Load Tests', () => {
test.afterEach(async () => {
await cleanupTestFiles();
});
test('Light Load: 5 users, 2 files each (1MB)', async ({ browser }) => {
const config = LOAD_TEST_CONFIG.light;
const thresholds = PERFORMANCE_THRESHOLDS.light;
test.setTimeout(config.expectedDuration * 2); // headroom so the duration assertion below, not the test timeout, reports the failure
const startTime = Date.now();
// Simulate concurrent user sessions
const userPromises = Array.from({ length: config.users }, (_, i) =>
simulateUserSession(browser, i + 1, config)
);
const allUserMetrics = await Promise.all(userPromises);
const totalDuration = Date.now() - startTime;
// Aggregate metrics
const metrics = aggregateMetrics(allUserMetrics);
console.log('\n=== Light Load Test Results ===');
console.log(`Total Duration: ${totalDuration}ms`);
console.log(`Total Uploads: ${metrics.totalUploads}`);
console.log(`Successful: ${metrics.successfulUploads}`);
console.log(`Failed: ${metrics.failedUploads}`);
console.log(`Success Rate: ${(metrics.successRate * 100).toFixed(2)}%`);
console.log(`Avg Response Time: ${metrics.avgDuration.toFixed(2)}ms`);
console.log(`Max Response Time: ${metrics.maxDuration}ms`);
console.log(`Avg Memory: ${metrics.avgMemory.toFixed(2)}MB`);
console.log(`Max Memory: ${metrics.maxMemory.toFixed(2)}MB`);
// Assertions
expect(totalDuration).toBeLessThan(thresholds.maxDuration);
expect(metrics.maxMemory).toBeLessThan(thresholds.maxMemoryMB);
expect(metrics.avgDuration).toBeLessThan(thresholds.maxAvgResponseTime);
expect(metrics.successRate).toBeGreaterThanOrEqual(thresholds.minSuccessRate);
});
test('Moderate Load: 10 users, 3 files each (2MB)', async ({ browser }) => {
const config = LOAD_TEST_CONFIG.moderate;
const thresholds = PERFORMANCE_THRESHOLDS.moderate;
test.setTimeout(config.expectedDuration * 2); // headroom so the duration assertion below, not the test timeout, reports the failure
const startTime = Date.now();
// Simulate concurrent user sessions
const userPromises = Array.from({ length: config.users }, (_, i) =>
simulateUserSession(browser, i + 1, config)
);
const allUserMetrics = await Promise.all(userPromises);
const totalDuration = Date.now() - startTime;
// Aggregate metrics
const metrics = aggregateMetrics(allUserMetrics);
console.log('\n=== Moderate Load Test Results ===');
console.log(`Total Duration: ${totalDuration}ms`);
console.log(`Total Uploads: ${metrics.totalUploads}`);
console.log(`Successful: ${metrics.successfulUploads}`);
console.log(`Failed: ${metrics.failedUploads}`);
console.log(`Success Rate: ${(metrics.successRate * 100).toFixed(2)}%`);
console.log(`Avg Response Time: ${metrics.avgDuration.toFixed(2)}ms`);
console.log(`Max Response Time: ${metrics.maxDuration}ms`);
console.log(`Avg Memory: ${metrics.avgMemory.toFixed(2)}MB`);
console.log(`Max Memory: ${metrics.maxMemory.toFixed(2)}MB`);
// Assertions
expect(totalDuration).toBeLessThan(thresholds.maxDuration);
expect(metrics.maxMemory).toBeLessThan(thresholds.maxMemoryMB);
expect(metrics.avgDuration).toBeLessThan(thresholds.maxAvgResponseTime);
expect(metrics.successRate).toBeGreaterThanOrEqual(thresholds.minSuccessRate);
});
test('Heavy Load: 20 users, 5 files each (5MB)', async ({ browser }) => {
const config = LOAD_TEST_CONFIG.heavy;
const thresholds = PERFORMANCE_THRESHOLDS.heavy;
test.setTimeout(config.expectedDuration * 2); // headroom so the duration assertion below, not the test timeout, reports the failure
const startTime = Date.now();
// Simulate concurrent user sessions
const userPromises = Array.from({ length: config.users }, (_, i) =>
simulateUserSession(browser, i + 1, config)
);
const allUserMetrics = await Promise.all(userPromises);
const totalDuration = Date.now() - startTime;
// Aggregate metrics
const metrics = aggregateMetrics(allUserMetrics);
console.log('\n=== Heavy Load Test Results ===');
console.log(`Total Duration: ${totalDuration}ms`);
console.log(`Total Uploads: ${metrics.totalUploads}`);
console.log(`Successful: ${metrics.successfulUploads}`);
console.log(`Failed: ${metrics.failedUploads}`);
console.log(`Success Rate: ${(metrics.successRate * 100).toFixed(2)}%`);
console.log(`Avg Response Time: ${metrics.avgDuration.toFixed(2)}ms`);
console.log(`Max Response Time: ${metrics.maxDuration}ms`);
console.log(`Avg Memory: ${metrics.avgMemory.toFixed(2)}MB`);
console.log(`Max Memory: ${metrics.maxMemory.toFixed(2)}MB`);
// Assertions
expect(totalDuration).toBeLessThan(thresholds.maxDuration);
expect(metrics.maxMemory).toBeLessThan(thresholds.maxMemoryMB);
expect(metrics.avgDuration).toBeLessThan(thresholds.maxAvgResponseTime);
expect(metrics.successRate).toBeGreaterThanOrEqual(thresholds.minSuccessRate);
});
test('Stress Test: 50 users, 2 files each (1MB)', async ({ browser }) => {
const config = LOAD_TEST_CONFIG.stress;
const thresholds = PERFORMANCE_THRESHOLDS.stress;
test.setTimeout(config.expectedDuration * 2); // headroom so the duration assertion below, not the test timeout, reports the failure
const startTime = Date.now();
// Simulate concurrent user sessions
const userPromises = Array.from({ length: config.users }, (_, i) =>
simulateUserSession(browser, i + 1, config)
);
const allUserMetrics = await Promise.all(userPromises);
const totalDuration = Date.now() - startTime;
// Aggregate metrics
const metrics = aggregateMetrics(allUserMetrics);
console.log('\n=== Stress Test Results ===');
console.log(`Total Duration: ${totalDuration}ms`);
console.log(`Total Uploads: ${metrics.totalUploads}`);
console.log(`Successful: ${metrics.successfulUploads}`);
console.log(`Failed: ${metrics.failedUploads}`);
console.log(`Success Rate: ${(metrics.successRate * 100).toFixed(2)}%`);
console.log(`Avg Response Time: ${metrics.avgDuration.toFixed(2)}ms`);
console.log(`Max Response Time: ${metrics.maxDuration}ms`);
console.log(`Avg Memory: ${metrics.avgMemory.toFixed(2)}MB`);
console.log(`Max Memory: ${metrics.maxMemory.toFixed(2)}MB`);
console.log(`Total Errors: ${metrics.errors.length}`);
// Assertions
expect(totalDuration).toBeLessThan(thresholds.maxDuration);
expect(metrics.maxMemory).toBeLessThan(thresholds.maxMemoryMB);
expect(metrics.avgDuration).toBeLessThan(thresholds.maxAvgResponseTime);
expect(metrics.successRate).toBeGreaterThanOrEqual(thresholds.minSuccessRate);
});
test('Queue Management: Verify concurrent upload queue handling', async ({ browser }) => {
test.setTimeout(60000);
const context = await browser.newContext();
const page = await context.newPage();
await page.goto('https://localhost/livecomponents/test/upload');
await page.waitForFunction(() => window.LiveComponents !== undefined);
// Create 10 test files
const testFiles = await Promise.all(
Array.from({ length: 10 }, (_, i) =>
createTestFile(`queue-test-${i}.bin`, 1)
)
);
// Upload all files at once
const fileInput = page.locator('input[type="file"]');
await fileInput.setInputFiles(testFiles);
// Start uploads
await page.click('button#upload-btn');
// Monitor queue state
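// NOTE: __activeUploads, __queuedUploads and __completedUploads are assumed to be exposed on window by the upload test page for instrumentation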
const queueStates = [];
const interval = setInterval(async () => {
try {
const queueState = await page.evaluate(() => {
return {
active: window.__activeUploads || 0,
queued: window.__queuedUploads || 0,
completed: window.__completedUploads || 0
};
});
queueStates.push(queueState);
} catch (error) {
// Ignore sampling errors (e.g. an evaluate racing page activity); the next tick retries
}
}, 500);
// Wait for all uploads to complete
await page.waitForSelector('.all-uploads-complete', { timeout: 60000 });
clearInterval(interval);
// Verify queue management
expect(queueStates.length).toBeGreaterThan(0);
// Verify max concurrent uploads never exceeded limit (e.g., 3)
const maxConcurrent = Math.max(...queueStates.map(s => s.active));
expect(maxConcurrent).toBeLessThanOrEqual(3);
// Verify all files eventually completed
const finalState = queueStates[queueStates.length - 1];
expect(finalState.completed).toBe(10);
await context.close();
});
test('Resource Cleanup: Verify memory cleanup after concurrent uploads', async ({ browser }) => {
test.setTimeout(60000);
const context = await browser.newContext();
const page = await context.newPage();
await page.goto('https://localhost/livecomponents/test/upload');
await page.waitForFunction(() => window.LiveComponents !== undefined);
// Measure baseline memory
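// performance.memory is Chromium-only; elsewhere the baseline stays 0 and the memory assertions below are skipped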
const baselineMemory = await page.evaluate(() => {
if (performance.memory) {
return performance.memory.usedJSHeapSize / (1024 * 1024);
}
return 0;
});
// Upload 5 files concurrently
const testFiles = await Promise.all(
Array.from({ length: 5 }, (_, i) =>
createTestFile(`cleanup-test-${i}.bin`, 2)
)
);
const fileInput = page.locator('input[type="file"]');
await fileInput.setInputFiles(testFiles);
await page.click('button#upload-btn');
await page.waitForSelector('.all-uploads-complete', { timeout: 60000 });
// Measure memory after uploads
const afterUploadMemory = await page.evaluate(() => {
if (performance.memory) {
return performance.memory.usedJSHeapSize / (1024 * 1024);
}
return 0;
});
// Reload the page to release upload-related objects; a fresh page approximates memory after cleanup
await page.reload();
await page.waitForTimeout(2000);
// Measure memory after cleanup
const afterCleanupMemory = await page.evaluate(() => {
if (performance.memory) {
return performance.memory.usedJSHeapSize / (1024 * 1024);
}
return 0;
});
if (baselineMemory > 0) {
console.log(`\nMemory Usage:`);
console.log(`Baseline: ${baselineMemory.toFixed(2)}MB`);
console.log(`After Uploads: ${afterUploadMemory.toFixed(2)}MB`);
console.log(`After Cleanup: ${afterCleanupMemory.toFixed(2)}MB`);
const memoryIncrease = afterUploadMemory - baselineMemory;
const memoryAfterCleanup = afterCleanupMemory - baselineMemory;
// Memory should return close to baseline after cleanup
expect(memoryAfterCleanup).toBeLessThan(memoryIncrease * 0.5);
}
await context.close();
});
test('Error Recovery: System recovers from concurrent upload failures', async ({ browser }) => {
test.setTimeout(60000);
const context = await browser.newContext();
const page = await context.newPage();
await page.goto('https://localhost/livecomponents/test/upload');
await page.waitForFunction(() => window.LiveComponents !== undefined);
// Simulate intermittent failures: abort one out of every three chunk requests
let requestCount = 0;
await page.route('**/live-component/**/chunk/**', (route) => {
requestCount++;
if (requestCount % 3 === 1) {
route.abort('failed');
} else {
route.continue();
}
});
// Upload 3 files concurrently
const testFiles = await Promise.all(
Array.from({ length: 3 }, (_, i) =>
createTestFile(`recovery-test-${i}.bin`, 1)
)
);
const fileInput = page.locator('input[type="file"]');
await fileInput.setInputFiles(testFiles);
await page.click('button#upload-btn');
// Wait for completion (with retries)
await page.waitForSelector('.all-uploads-complete', { timeout: 60000 });
// Verify all files completed despite failures
const completedFiles = await page.locator('.upload-complete').count();
expect(completedFiles).toBe(3);
await context.close();
});
test('Throughput Test: Measure sustained upload throughput', async ({ browser }) => {
test.setTimeout(120000);
const context = await browser.newContext();
const page = await context.newPage();
await page.goto('https://localhost/livecomponents/test/upload');
await page.waitForFunction(() => window.LiveComponents !== undefined);
// Upload 20 files of 5MB each (100MB total)
const totalFiles = 20;
const fileSizeMB = 5;
const totalMB = totalFiles * fileSizeMB;
// Create all test files up front so file generation does not skew the throughput measurement
const testFiles = [];
for (let i = 0; i < totalFiles; i++) {
testFiles.push(await createTestFile(`throughput-test-${i}.bin`, fileSizeMB));
}
const startTime = Date.now();
for (const testFile of testFiles) {
const fileInput = page.locator('input[type="file"]');
await fileInput.setInputFiles(testFile);
await page.click('button#upload-btn');
await page.waitForSelector('.upload-complete', { timeout: 30000 });
// Small delay between uploads to avoid overwhelming the system
await page.waitForTimeout(100);
}
const totalDuration = (Date.now() - startTime) / 1000; // seconds
const throughputMBps = totalMB / totalDuration;
console.log(`\nThroughput Test Results:`);
console.log(`Total Data: ${totalMB}MB`);
console.log(`Total Duration: ${totalDuration.toFixed(2)}s`);
console.log(`Throughput: ${throughputMBps.toFixed(2)} MB/s`);
// Expect at least 1 MB/s throughput
expect(throughputMBps).toBeGreaterThan(1);
await context.close();
});
});