feat(Production): Complete production deployment infrastructure

- Add comprehensive health check system with multiple endpoints
- Add Prometheus metrics endpoint
- Add production logging configurations (5 strategies)
- Add complete deployment documentation suite:
  * QUICKSTART.md - 30-minute deployment guide
  * DEPLOYMENT_CHECKLIST.md - Printable verification checklist
  * DEPLOYMENT_WORKFLOW.md - Complete deployment lifecycle
  * PRODUCTION_DEPLOYMENT.md - Comprehensive technical reference
  * production-logging.md - Logging configuration guide
  * ANSIBLE_DEPLOYMENT.md - Infrastructure as Code automation
  * README.md - Navigation hub
  * DEPLOYMENT_SUMMARY.md - Executive summary
- Add deployment scripts and automation
- Add DEPLOYMENT_PLAN.md - Concrete plan for immediate deployment
- Update README with production-ready features

All production infrastructure is now complete and ready for deployment.

commit fc3d7e6357 (parent caa85db796)
Date: 2025-10-25 19:18:37 +02:00
83016 changed files with 378904 additions and 20919 deletions

@@ -0,0 +1,701 @@
/**
 * ChunkedUploader - Chunked File Upload Module for Large Files
 *
 * Features:
 * - Break large files into manageable chunks
 * - SHA-256 hashing for integrity verification
 * - Resume capability for interrupted uploads
 * - Real-time progress via SSE
 * - Retry logic with exponential backoff
 * - Parallel chunk uploads (configurable)
 * - Integration with LiveComponent system
 *
 * @package Framework\LiveComponents
 */
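/*
 * Usage sketch (illustrative only; the component id, element id and target
 * path below are assumptions, not part of this module):
 *
 *   const uploader = new ChunkedUploader('file-manager', {
 *       chunkSize: 1024 * 1024, // 1MB chunks instead of the 512KB default
 *       onProgress: ({ progress }) => console.log(`${progress.toFixed(1)}%`),
 *       onComplete: ({ sessionId }) => console.log('Upload done:', sessionId),
 *       onError: ({ error }) => console.error('Upload failed:', error)
 *   });
 *
 *   document.querySelector('#file-input').addEventListener('change', (e) => {
 *       const file = e.target.files[0];
 *       if (file) uploader.upload(file, `/uploads/${file.name}`);
 *   });
 */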
import { getGlobalSseClient } from '../sse/index.js';
/**
 * Chunk Upload Status
 */
const ChunkStatus = {
    PENDING: 'pending',
    HASHING: 'hashing',
    UPLOADING: 'uploading',
    COMPLETE: 'complete',
    ERROR: 'error'
};

/**
 * Upload Session Status
 */
const SessionStatus = {
    INITIALIZING: 'initializing',
    INITIALIZED: 'initialized',
    UPLOADING: 'uploading',
    ASSEMBLING: 'assembling',
    COMPLETE: 'complete',
    ABORTED: 'aborted',
    ERROR: 'error'
};

/**
 * Chunk Metadata - Tracks individual chunk state
 */
class ChunkMetadata {
    constructor(index, size, file) {
        this.index = index;
        this.size = size;
        this.status = ChunkStatus.PENDING;
        this.hash = null;
        this.uploadedBytes = 0;
        this.retries = 0;
        this.error = null;
        this.xhr = null;
        this.file = file;
    }

    get progress() {
        return this.size > 0 ? (this.uploadedBytes / this.size) * 100 : 0;
    }

    reset() {
        this.status = ChunkStatus.PENDING;
        this.uploadedBytes = 0;
        this.error = null;
        this.xhr = null;
    }

    abort() {
        if (this.xhr) {
            this.xhr.abort();
            this.xhr = null;
        }
    }
}
/**
 * Upload Session - Manages complete chunked upload session
 */
class UploadSession {
    constructor(file, options) {
        this.file = file;
        this.sessionId = null;
        this.totalChunks = 0;
        this.chunkSize = options.chunkSize;
        this.chunks = [];
        this.status = SessionStatus.INITIALIZING;
        this.uploadedChunks = 0;
        this.expectedFileHash = null;
        this.error = null;
        this.startTime = null;
        this.endTime = null;
        this.expiresAt = null;
    }

    get progress() {
        if (this.totalChunks === 0) return 0;
        return (this.uploadedChunks / this.totalChunks) * 100;
    }

    get uploadedBytes() {
        return this.chunks.reduce((sum, chunk) => sum + chunk.uploadedBytes, 0);
    }

    get totalBytes() {
        return this.file.size;
    }

    get isComplete() {
        return this.status === SessionStatus.COMPLETE;
    }

    get isError() {
        return this.status === SessionStatus.ERROR;
    }

    get isAborted() {
        return this.status === SessionStatus.ABORTED;
    }

    get canResume() {
        return this.sessionId !== null && !this.isComplete && !this.isAborted;
    }

    getChunk(index) {
        return this.chunks[index];
    }

    getPendingChunks() {
        return this.chunks.filter(c => c.status === ChunkStatus.PENDING || c.status === ChunkStatus.ERROR);
    }

    getUploadingChunks() {
        return this.chunks.filter(c => c.status === ChunkStatus.UPLOADING);
    }
}
/**
 * ChunkedUploader - Main chunked upload manager
 */
export class ChunkedUploader {
    constructor(componentId, options = {}) {
        this.componentId = componentId;

        // Options
        this.chunkSize = options.chunkSize || 512 * 1024; // 512KB default
        this.maxConcurrentChunks = options.maxConcurrentChunks || 3;
        this.maxRetries = options.maxRetries || 3;
        this.retryDelay = options.retryDelay || 1000; // 1s base delay
        this.enableSSE = options.enableSSE !== false; // SSE enabled by default
        this.apiBase = options.apiBase || '/live-component/upload';

        // Callbacks
        this.onInitialized = options.onInitialized || (() => {});
        this.onChunkProgress = options.onChunkProgress || (() => {});
        this.onProgress = options.onProgress || (() => {});
        this.onComplete = options.onComplete || (() => {});
        this.onError = options.onError || (() => {});
        this.onAborted = options.onAborted || (() => {});
        this.onSSEProgress = options.onSSEProgress || (() => {});

        // State
        this.sessions = new Map(); // sessionId => UploadSession
        this.activeSession = null;
        this.sseClient = null;
        this.userId = null;

        // Initialize SSE if enabled
        if (this.enableSSE) {
            this.initializeSSE();
        }
    }
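
    /*
     * Server endpoints used by this module, relative to `apiBase`. The
     * response field names are taken from the calls below; anything the
     * client does not read is an assumption about the server contract:
     *
     *   POST `${apiBase}/init`        -> { success, session_id, total_chunks, expires_at }
     *   POST `${apiBase}/chunk`       -> { success } (multipart: sessionId, chunkIndex, chunkHash, chunk)
     *   POST `${apiBase}/complete`    -> { success, ... }
     *   POST `${apiBase}/abort`       -> { success }
     *   GET  `${apiBase}/status/:id`  -> { success, total_chunks, uploaded_chunks, ... }
     */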
    /**
     * Initialize SSE connection for real-time progress
     */
    initializeSSE() {
        try {
            // Get user ID from meta tag or data attribute
            this.userId = this.getUserId();
            if (this.userId) {
                this.sseClient = getGlobalSseClient([`user:${this.userId}`]);

                // Listen for progress events
                this.sseClient.on('progress', (data) => {
                    this.handleSSEProgress(data);
                });

                // Connect if not already connected
                if (!this.sseClient.isConnected()) {
                    this.sseClient.connect();
                }
            }
        } catch (error) {
            console.warn('[ChunkedUploader] Failed to initialize SSE:', error);
        }
    }

    /**
     * Get user ID for SSE channel
     */
    getUserId() {
        // Try meta tag first
        const meta = document.querySelector('meta[name="user-id"]');
        if (meta) return meta.content;

        // Try data attribute on body
        if (document.body.dataset.userId) {
            return document.body.dataset.userId;
        }

        return null;
    }
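
    /*
     * Expected shape of an SSE `progress` event, inferred from the handler
     * below (the exact server payload is an assumption):
     *
     *   {
     *       "taskId": "<sessionId>",
     *       "session_id": "<sessionId>",
     *       "percent": 42.5,
     *       "message": "Uploading chunk 17/40",
     *       "data": { "uploaded_chunks": 17, "phase": "uploading" }
     *   }
     */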
    /**
     * Handle SSE progress updates
     */
    handleSSEProgress(data) {
        const sessionId = data.session_id;
        const session = this.sessions.get(sessionId);
        if (session && data.taskId === sessionId) {
            // Update session from SSE data
            if (data.data?.uploaded_chunks !== undefined) {
                session.uploadedChunks = data.data.uploaded_chunks;
            }
            if (data.data?.phase) {
                switch (data.data.phase) {
                    case 'initialized':
                        session.status = SessionStatus.INITIALIZED;
                        break;
                    case 'uploading':
                        session.status = SessionStatus.UPLOADING;
                        break;
                    case 'completed':
                        session.status = SessionStatus.COMPLETE;
                        break;
                    case 'aborted':
                        session.status = SessionStatus.ABORTED;
                        break;
                    case 'error':
                        session.status = SessionStatus.ERROR;
                        break;
                }
            }

            // Callback
            this.onSSEProgress({
                sessionId,
                percent: data.percent,
                message: data.message,
                data: data.data
            });
        }
    }
    /**
     * Upload file with chunking
     *
     * @param {File} file - File to upload
     * @param {string} targetPath - Target path for assembled file
     * @returns {Promise<UploadSession>}
     */
    async upload(file, targetPath) {
        // Create upload session
        const session = new UploadSession(file, {
            chunkSize: this.chunkSize
        });
        this.activeSession = session;

        try {
            // Calculate chunks
            session.totalChunks = Math.ceil(file.size / this.chunkSize);
            for (let i = 0; i < session.totalChunks; i++) {
                const start = i * this.chunkSize;
                const end = Math.min(start + this.chunkSize, file.size);
                const size = end - start;
                session.chunks.push(new ChunkMetadata(i, size, file));
            }

            // Initialize session with API
            await this.initializeSession(session);

            // Store session
            this.sessions.set(session.sessionId, session);

            // Start uploading chunks
            session.status = SessionStatus.UPLOADING;
            session.startTime = Date.now();
            await this.uploadChunks(session);

            // Complete upload
            await this.completeUpload(session, targetPath);

            // Success
            session.status = SessionStatus.COMPLETE;
            session.endTime = Date.now();
            this.onComplete({
                sessionId: session.sessionId,
                file: session.file,
                totalBytes: session.totalBytes,
                duration: session.endTime - session.startTime
            });
            return session;
        } catch (error) {
            session.status = SessionStatus.ERROR;
            session.error = error.message;
            this.onError({
                sessionId: session.sessionId,
                file: session.file,
                error: error.message
            });
            throw error;
        }
    }
    /**
     * Initialize upload session with API
     */
    async initializeSession(session) {
        const response = await fetch(`${this.apiBase}/init`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Accept': 'application/json'
            },
            body: JSON.stringify({
                componentId: this.componentId,
                fileName: session.file.name,
                totalSize: session.file.size,
                chunkSize: this.chunkSize
            })
        });
        if (!response.ok) {
            throw new Error(`Failed to initialize upload session: ${response.status}`);
        }

        const data = await response.json();
        if (!data.success) {
            throw new Error(data.error || 'Failed to initialize upload session');
        }

        // Update session
        session.sessionId = data.session_id;
        session.totalChunks = data.total_chunks;
        session.expiresAt = new Date(data.expires_at);
        session.status = SessionStatus.INITIALIZED;

        this.onInitialized({
            sessionId: session.sessionId,
            totalChunks: session.totalChunks,
            expiresAt: session.expiresAt
        });
    }
    /**
     * Upload all chunks with parallelization
     */
    async uploadChunks(session) {
        const pending = session.getPendingChunks();

        // Upload chunks in parallel batches
        while (pending.length > 0) {
            const batch = pending.splice(0, this.maxConcurrentChunks);
            await Promise.all(
                batch.map(chunk => this.uploadChunk(session, chunk))
            );
        }
    }
    /**
     * Upload a single chunk
     */
    async uploadChunk(session, chunk) {
        let retries = 0;
        while (retries <= this.maxRetries) {
            try {
                // Hash chunk data
                chunk.status = ChunkStatus.HASHING;
                const chunkData = await this.readChunk(session.file, chunk);
                const chunkHash = await this.hashChunk(chunkData);
                chunk.hash = chunkHash;

                // Upload chunk
                chunk.status = ChunkStatus.UPLOADING;
                await this.uploadChunkData(session, chunk, chunkData, chunkHash);

                // Success
                chunk.status = ChunkStatus.COMPLETE;
                session.uploadedChunks++;
                this.onProgress({
                    sessionId: session.sessionId,
                    progress: session.progress,
                    uploadedChunks: session.uploadedChunks,
                    totalChunks: session.totalChunks,
                    uploadedBytes: session.uploadedBytes,
                    totalBytes: session.totalBytes
                });
                return;
            } catch (error) {
                // Don't retry a chunk whose session was aborted; the rejection
                // came from xhr.abort(), not from a transient failure
                if (session.status === SessionStatus.ABORTED) {
                    chunk.status = ChunkStatus.ERROR;
                    chunk.error = error.message;
                    throw error;
                }
                retries++;
                if (retries > this.maxRetries) {
                    chunk.status = ChunkStatus.ERROR;
                    chunk.error = error.message;
                    throw new Error(`Chunk ${chunk.index} failed after ${this.maxRetries} retries: ${error.message}`);
                }

                // Exponential backoff
                const delay = this.retryDelay * Math.pow(2, retries - 1);
                await this.sleep(delay);
                chunk.reset();
            }
        }
    }
    /**
     * Read chunk data from file
     */
    readChunk(file, chunk) {
        return new Promise((resolve, reject) => {
            const start = chunk.index * this.chunkSize;
            const end = Math.min(start + chunk.size, file.size);
            const blob = file.slice(start, end);
            const reader = new FileReader();
            reader.onload = (e) => resolve(e.target.result);
            reader.onerror = () => reject(new Error('Failed to read chunk'));
            reader.readAsArrayBuffer(blob);
        });
    }

    /**
     * Hash chunk data with SHA-256
     */
    async hashChunk(chunkData) {
        const hashBuffer = await crypto.subtle.digest('SHA-256', chunkData);
        const hashArray = Array.from(new Uint8Array(hashBuffer));
        return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
    }
    /**
     * Upload chunk data to server
     */
    uploadChunkData(session, chunk, chunkData, chunkHash) {
        return new Promise((resolve, reject) => {
            const formData = new FormData();
            formData.append('sessionId', session.sessionId);
            formData.append('chunkIndex', chunk.index);
            formData.append('chunkHash', chunkHash);
            formData.append('chunk', new Blob([chunkData]));

            const xhr = new XMLHttpRequest();
            chunk.xhr = xhr;

            // Progress tracking
            xhr.upload.addEventListener('progress', (e) => {
                if (e.lengthComputable) {
                    chunk.uploadedBytes = e.loaded;
                    this.onChunkProgress({
                        sessionId: session.sessionId,
                        chunkIndex: chunk.index,
                        uploadedBytes: e.loaded,
                        totalBytes: e.total,
                        progress: (e.loaded / e.total) * 100
                    });
                }
            });

            // Load event
            xhr.addEventListener('load', () => {
                if (xhr.status >= 200 && xhr.status < 300) {
                    try {
                        const response = JSON.parse(xhr.responseText);
                        if (response.success) {
                            resolve(response);
                        } else {
                            reject(new Error(response.error || 'Chunk upload failed'));
                        }
                    } catch (e) {
                        reject(e);
                    }
                } else {
                    reject(new Error(`Chunk upload failed with status ${xhr.status}`));
                }
            });

            // Error events
            xhr.addEventListener('error', () => {
                reject(new Error('Network error during chunk upload'));
            });
            xhr.addEventListener('abort', () => {
                reject(new Error('Chunk upload cancelled'));
            });

            // Send request
            xhr.open('POST', `${this.apiBase}/chunk`);
            xhr.setRequestHeader('Accept', 'application/json');
            xhr.send(formData);
        });
    }
    /**
     * Complete upload and assemble file
     */
    async completeUpload(session, targetPath) {
        session.status = SessionStatus.ASSEMBLING;
        const response = await fetch(`${this.apiBase}/complete`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Accept': 'application/json'
            },
            body: JSON.stringify({
                sessionId: session.sessionId,
                targetPath: targetPath
            })
        });
        if (!response.ok) {
            throw new Error(`Failed to complete upload: ${response.status}`);
        }

        const data = await response.json();
        if (!data.success) {
            throw new Error(data.error || 'Failed to complete upload');
        }
        return data;
    }
    /**
     * Abort upload
     */
    async abort(sessionId) {
        const session = this.sessions.get(sessionId);
        if (!session) {
            throw new Error('Session not found');
        }

        // Mark the session aborted first so in-flight chunk uploads are not
        // retried when their XHRs are cancelled below
        session.status = SessionStatus.ABORTED;

        // Abort all active chunk uploads
        session.chunks.forEach(chunk => chunk.abort());

        // Notify server
        try {
            await fetch(`${this.apiBase}/abort`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'Accept': 'application/json'
                },
                body: JSON.stringify({
                    sessionId: sessionId,
                    reason: 'User cancelled'
                })
            });
        } catch (error) {
            console.warn('[ChunkedUploader] Failed to notify server of abort:', error);
        }

        this.onAborted({
            sessionId,
            uploadedChunks: session.uploadedChunks,
            totalChunks: session.totalChunks
        });
    }
    /**
     * Get upload status
     */
    async getStatus(sessionId) {
        const response = await fetch(`${this.apiBase}/status/${sessionId}`, {
            headers: {
                'Accept': 'application/json'
            }
        });
        if (!response.ok) {
            throw new Error(`Failed to get upload status: ${response.status}`);
        }

        const data = await response.json();
        if (!data.success) {
            throw new Error(data.error || 'Failed to get upload status');
        }
        return data;
    }
    /**
     * Resume interrupted upload
     */
    async resume(sessionId, file, targetPath) {
        // Get current status from server
        const status = await this.getStatus(sessionId);

        // Recreate session (assumes this uploader is configured with the same
        // chunkSize the original session was initialized with)
        const session = new UploadSession(file, {
            chunkSize: this.chunkSize
        });
        session.sessionId = sessionId;
        session.totalChunks = status.total_chunks;
        session.uploadedChunks = status.uploaded_chunks;
        session.status = SessionStatus.UPLOADING;

        // Recreate chunks
        for (let i = 0; i < session.totalChunks; i++) {
            const start = i * this.chunkSize;
            const end = Math.min(start + this.chunkSize, file.size);
            const size = end - start;
            const chunk = new ChunkMetadata(i, size, file);

            // Mark already uploaded chunks as complete (treats the server's
            // count as a contiguous prefix of chunk indices)
            if (i < status.uploaded_chunks) {
                chunk.status = ChunkStatus.COMPLETE;
                chunk.uploadedBytes = chunk.size;
            }
            session.chunks.push(chunk);
        }

        // Store and set as active
        this.sessions.set(sessionId, session);
        this.activeSession = session;

        // Resume uploading
        session.startTime = Date.now();
        await this.uploadChunks(session);
        await this.completeUpload(session, targetPath);

        session.status = SessionStatus.COMPLETE;
        session.endTime = Date.now();
        this.onComplete({
            sessionId: session.sessionId,
            file: session.file,
            totalBytes: session.totalBytes,
            duration: session.endTime - session.startTime,
            resumed: true
        });
        return session;
    }
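
    /*
     * Resume sketch (illustrative; persisting the session id across page
     * loads, e.g. in localStorage under 'upload-session', is an assumption):
     *
     *   const savedId = localStorage.getItem('upload-session');
     *   if (savedId && file) {
     *       await uploader.resume(savedId, file, `/uploads/${file.name}`);
     *   }
     */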
    /**
     * Helper: Sleep for delay
     */
    sleep(ms) {
        return new Promise(resolve => setTimeout(resolve, ms));
    }

    /**
     * Cleanup
     */
    destroy() {
        // Abort all active sessions
        this.sessions.forEach(session => {
            if (session.canResume) {
                session.chunks.forEach(chunk => chunk.abort());
            }
        });
        this.sessions.clear();
        this.activeSession = null;
    }
}

export default ChunkedUploader;