/**
 * E2E Tests for LiveComponents Chunked Upload System
 *
 * Tests chunked upload functionality in a real browser environment:
 * - Upload initialization and session management
 * - Chunk splitting and parallel uploads
 * - Progress tracking via SSE
 * - Resume capability after interruption
 * - Integrity verification (SHA-256)
 * - Error handling and retry logic
 * - Quarantine system integration
 * - Multiple file uploads
 *
 * Run with: npx playwright test chunked-upload.spec.js
 */

import { test, expect } from '@playwright/test';
import { createWriteStream } from 'fs';
import { mkdir } from 'fs/promises';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Test file generation
const TEST_FILES_DIR = join(__dirname, '../../tmp/upload-test-files');

/**
 * Helper: Create test file of specified size
 */
async function createTestFile(filename, sizeInMB) {
  await mkdir(TEST_FILES_DIR, { recursive: true });

  const filePath = join(TEST_FILES_DIR, filename);
  const sizeInBytes = sizeInMB * 1024 * 1024;

  // Generate deterministic pattern data, 1MB at a time
  const chunkSize = 1024 * 1024; // 1MB chunks
  const chunks = Math.ceil(sizeInBytes / chunkSize);
  const buffer = Buffer.alloc(chunkSize);
  let written = 0;

  const stream = createWriteStream(filePath);

  for (let i = 0; i < chunks; i++) {
    const remaining = sizeInBytes - written;
    const writeSize = Math.min(chunkSize, remaining);

    // Fill with pattern for verification
    for (let j = 0; j < writeSize; j++) {
      buffer[j] = (i + j) % 256;
    }

    // Write a copy: stream.write() does not copy Buffers, and this buffer is
    // mutated again on the next iteration
    stream.write(Buffer.from(buffer.subarray(0, writeSize)));
    written += writeSize;
  }

  stream.end();
  await new Promise((resolve, reject) => {
    stream.on('finish', resolve);
    stream.on('error', reject);
  });

  return filePath;
}

/**
 * Helper: Clean up test files
 */
async function cleanupTestFiles() {
  try {
    const fs = await import('fs/promises');
    const files = await fs.readdir(TEST_FILES_DIR);
    for (const file of files) {
      await fs.unlink(join(TEST_FILES_DIR, file));
    }
  } catch (error) {
    // Directory might not exist
  }
}

test.describe('Chunked Upload System', () => {
  test.beforeAll(async () => {
    // Clean up any existing test files
    await cleanupTestFiles();
  });

  test.afterAll(async () => {
    // Clean up test files
    await cleanupTestFiles();
  });

  test.beforeEach(async ({ page }) => {
    await page.goto('https://localhost/livecomponents/test/upload');
    await page.waitForFunction(() => window.LiveComponents !== undefined);
    await page.waitForFunction(() => window.ChunkedUploader !== undefined);
  });

  test('should initialize upload session successfully', async ({ page }) => {
    // Create small test file (1MB)
    const testFile = await createTestFile('test-small.bin', 1);

    // Set file input
    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);

    // Click upload button
    await page.click('button#upload-btn');

    // Wait for session initialization
    await page.waitForTimeout(500);

    // Verify session created
    const sessionId = await page.evaluate(() => {
      return window.__uploadSession?.sessionId;
    });

    expect(sessionId).toBeTruthy();
    expect(sessionId).toMatch(/^[a-f0-9-]{36}$/); // UUID format
  });

  test('should split file into correct number of chunks', async ({ page }) => {
    // Create 2MB file (should result in 4 chunks with 512KB chunk size)
    const testFile = await createTestFile('test-chunks.bin', 2);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Wait for chunk splitting
    await page.waitForTimeout(500);

    const uploadInfo = await page.evaluate(() => {
      return {
        totalChunks: window.__uploadSession?.totalChunks,
        chunkSize: window.__uploadSession?.chunkSize
      };
    });

    expect(uploadInfo.totalChunks).toBe(4); // 2MB / 512KB = 4
    expect(uploadInfo.chunkSize).toBe(512 * 1024); // 512KB
  });
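
  // The expected chunk count above is just ceil(fileSize / chunkSize). A minimal
  // sketch of that arithmetic, assuming the uploader's 512KB chunk size asserted
  // above; the helper is illustrative only and is not used by these tests:
  //
  //   expectedChunkCount(2 * 1024 * 1024) === 4
  function expectedChunkCount(fileSizeBytes, chunkSizeBytes = 512 * 1024) {
    return Math.ceil(fileSizeBytes / chunkSizeBytes);
  }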

  test('should upload chunks in parallel', async ({ page }) => {
    // Create 5MB file
    const testFile = await createTestFile('test-parallel.bin', 5);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);

    // Monitor network requests
    const chunkRequests = [];
    page.on('request', request => {
      if (request.url().includes('/live-component/') && request.url().includes('/chunk')) {
        chunkRequests.push({ time: Date.now(), url: request.url() });
      }
    });

    await page.click('button#upload-btn');

    // Wait for upload to complete
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Verify parallel uploads (chunks should overlap in time)
    expect(chunkRequests.length).toBeGreaterThan(3);

    // Check if requests were concurrent (within 100ms window)
    const timeWindows = chunkRequests.reduce((windows, req) => {
      const window = Math.floor(req.time / 100);
      windows[window] = (windows[window] || 0) + 1;
      return windows;
    }, {});

    const hasParallelUploads = Object.values(timeWindows).some(count => count > 1);
    expect(hasParallelUploads).toBe(true);
  });

  test('should track upload progress accurately', async ({ page }) => {
    // Create 3MB file
    const testFile = await createTestFile('test-progress.bin', 3);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);

    // Monitor progress updates logged by the uploader in the page context
    await page.evaluate(() => {
      window.__progressUpdates = [];
      const originalLog = console.log;
      console.log = function(...args) {
        if (args[0] && typeof args[0] === 'string' && args[0].includes('Progress:')) {
          window.__progressUpdates.push(parseFloat(args[1]));
        }
        originalLog.apply(console, args);
      };
    });

    await page.click('button#upload-btn');

    // Wait for completion
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Get progress updates
    const updates = await page.evaluate(() => window.__progressUpdates || []);

    // Verify progress increases monotonically
    for (let i = 1; i < updates.length; i++) {
      expect(updates[i]).toBeGreaterThanOrEqual(updates[i - 1]);
    }

    // Verify final progress is 100%
    expect(updates[updates.length - 1]).toBeCloseTo(100, 0);
  });

  test('should verify chunk integrity with SHA-256', async ({ page }) => {
    // Create test file
    const testFile = await createTestFile('test-integrity.bin', 1);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);

    // Monitor chunk upload requests
    const chunkHashes = [];
    page.on('requestfinished', async request => {
      if (request.url().includes('/chunk')) {
        const postData = request.postDataJSON();
        if (postData && postData.chunkHash) {
          chunkHashes.push(postData.chunkHash);
        }
      }
    });

    await page.click('button#upload-btn');
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Verify hashes were sent
    expect(chunkHashes.length).toBeGreaterThan(0);

    // Verify hash format (SHA-256 is 64 hex characters)
    chunkHashes.forEach(hash => {
      expect(hash).toMatch(/^[a-f0-9]{64}$/);
    });
  });
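
  // For reference, a 64-hex-character digest like the ones asserted above can be
  // produced with Node's built-in crypto module. This helper is an illustrative
  // sketch only (it is not used by these tests); the in-browser uploader
  // presumably computes its chunk hashes with the Web Crypto API instead.
  async function sha256Hex(data) {
    const { createHash } = await import('node:crypto');
    return createHash('sha256').update(data).digest('hex');
  }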

  test('should handle upload interruption and resume', async ({ page }) => {
    // Create larger file (10MB)
    const testFile = await createTestFile('test-resume.bin', 10);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Wait for some chunks to upload
    await page.waitForTimeout(2000);

    // Confirm the upload is in progress but not yet complete
    const progressBefore = await page.locator('#progress-text').textContent();
    const percentBefore = parseFloat(progressBefore);
    expect(percentBefore).toBeGreaterThan(0);
    expect(percentBefore).toBeLessThan(100);

    // Simulate interruption (reload page)
    await page.reload();
    await page.waitForFunction(() => window.LiveComponents !== undefined);

    // Re-select file
    const fileInput2 = page.locator('input[type="file"]');
    await fileInput2.setInputFiles(testFile);

    // Resume upload
    await page.click('button#resume-upload-btn');

    // Wait for completion
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Verify upload completed
    const finalProgress = await page.locator('#progress-text').textContent();
    expect(finalProgress).toBe('100%');

    // Verify resume actually happened (should skip uploaded chunks)
    const uploadedChunks = await page.evaluate(() => window.__uploadedChunks || 0);
    expect(uploadedChunks).toBeGreaterThan(0);
  });

  test('should retry failed chunks with exponential backoff', async ({ page }) => {
    // Create test file
    const testFile = await createTestFile('test-retry.bin', 2);

    // Intercept and fail the first chunk upload attempts
    let attemptCount = 0;
    await page.route('**/live-component/**/chunk/**', route => {
      attemptCount++;
      if (attemptCount <= 2) {
        // Fail first 2 attempts
        route.abort('failed');
      } else {
        // Allow subsequent attempts
        route.continue();
      }
    });

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Upload should eventually succeed after retries
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Verify retries occurred
    expect(attemptCount).toBeGreaterThan(2);
  });

  test('should handle concurrent multi-file uploads', async ({ page }) => {
    // Create multiple test files
    const files = [
      await createTestFile('test-multi-1.bin', 1),
      await createTestFile('test-multi-2.bin', 1),
      await createTestFile('test-multi-3.bin', 1)
    ];

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(files);

    // Verify all files queued
    const queuedCount = await page.evaluate(() => {
      return window.__uploadQueue?.length || 0;
    });
    expect(queuedCount).toBe(3);

    // Start uploads
    await page.click('button#upload-all-btn');

    // Wait for all uploads to complete
    await page.waitForSelector('.all-uploads-complete', { timeout: 60000 });

    // Verify all files uploaded
    const completedFiles = await page.locator('.uploaded-file').count();
    expect(completedFiles).toBe(3);
  });

  test('should receive real-time progress via SSE', async ({ page }) => {
    // Create test file
    const testFile = await createTestFile('test-sse.bin', 5);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);

    // Monitor SSE connections; listen for the request being issued rather than
    // finished, since an open SSE stream does not complete until it is closed
    let sseConnected = false;
    page.on('request', request => {
      if (request.url().includes('/sse/upload-progress')) {
        sseConnected = true;
      }
    });

    await page.click('button#upload-btn');

    // Wait a bit for SSE connection
    await page.waitForTimeout(1000);

    // Verify SSE connection established
    expect(sseConnected).toBe(true);

    // Verify progress updates are real-time (not just on completion)
    const progressElement = page.locator('#progress-text');

    // Should see intermediate progress values
    const intermediateProgress = [];
    for (let i = 0; i < 5; i++) {
      await page.waitForTimeout(500);
      const progress = await progressElement.textContent();
      intermediateProgress.push(parseFloat(progress));
    }

    // Should have varying progress values
    const uniqueValues = new Set(intermediateProgress);
    expect(uniqueValues.size).toBeGreaterThan(1);
  });
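
  // A rough sketch of the client-side wiring the SSE test above relies on. It is
  // illustrative only and never invoked here: the URL query, event names, and
  // payload shape are assumptions rather than the documented LiveComponents API,
  // and EventSource is a browser global, not available in this Node test process.
  function exampleProgressListener(sessionId) {
    const source = new EventSource(`/sse/upload-progress?session=${sessionId}`);
    source.addEventListener('progress', event => {
      const { percent } = JSON.parse(event.data);
      document.querySelector('#progress-text').textContent = `${percent}%`;
    });
    source.addEventListener('complete', () => source.close());
    return source;
  }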

  test('should handle upload cancellation', async ({ page }) => {
    // Create larger file
    const testFile = await createTestFile('test-cancel.bin', 20);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Wait for upload to start
    await page.waitForTimeout(1000);

    // Get progress before cancellation
    const progressBefore = await page.locator('#progress-text').textContent();
    const percentBefore = parseFloat(progressBefore);
    expect(percentBefore).toBeGreaterThan(0);
    expect(percentBefore).toBeLessThan(100);

    // Cancel upload
    await page.click('button#cancel-upload-btn');

    // Wait for cancellation
    await page.waitForTimeout(500);

    // Verify upload cancelled
    const status = await page.locator('#upload-status').textContent();
    expect(status).toContain('Cancelled');

    // Verify progress stopped
    await page.waitForTimeout(1000);
    const progressAfter = await page.locator('#progress-text').textContent();
    expect(progressAfter).toBe(progressBefore); // Should not have increased
  });

  test('should validate file size limits', async ({ page }) => {
    // Create file exceeding limit (e.g., 100MB)
    const testFile = await createTestFile('test-too-large.bin', 100);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Should show error
    await page.waitForSelector('.upload-error', { timeout: 5000 });

    const errorMessage = await page.locator('.upload-error').textContent();
    expect(errorMessage).toContain('File too large');
  });

  test('should validate file types', async ({ page }) => {
    // Create file with disallowed extension
    const testFile = await createTestFile('test-invalid.exe', 1);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Should show error
    await page.waitForSelector('.upload-error', { timeout: 5000 });

    const errorMessage = await page.locator('.upload-error').textContent();
    expect(errorMessage).toContain('File type not allowed');
  });

  test('should display uploaded file in component', async ({ page }) => {
    // Create test file
    const testFile = await createTestFile('test-display.jpg', 1);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Wait for upload complete
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Wait for component update
    await page.waitForTimeout(500);

    // Verify file appears in uploaded files list
    const uploadedFile = page.locator('[data-lc-fragment="file-list"] li').first();
    await expect(uploadedFile).toBeVisible();

    const fileName = await uploadedFile.textContent();
    expect(fileName).toContain('test-display.jpg');
  });

  test('should handle quarantine system integration', async ({ page }) => {
    // Create test file
    const testFile = await createTestFile('test-quarantine.bin', 1);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Check for quarantine status
    const quarantineStatus = await page.evaluate(() => {
      return window.__uploadResult?.quarantineStatus;
    });

    // Should have a quarantine status (scanning, passed, or failed)
    expect(['scanning', 'passed', 'failed']).toContain(quarantineStatus);
  });
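
  // The quarantine check above accepts 'scanning' as a transient state. A test
  // that needs the final verdict could wait it out with something like the
  // sketch below, which assumes window.__uploadResult.quarantineStatus is
  // updated in place once scanning finishes (not used by these tests):
  async function waitForQuarantineVerdict(page, timeout = 30000) {
    await page.waitForFunction(
      () => ['passed', 'failed'].includes(window.__uploadResult?.quarantineStatus),
      null,
      { timeout }
    );
    return page.evaluate(() => window.__uploadResult.quarantineStatus);
  }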

  test('should update component state after successful upload', async ({ page }) => {
    // Get initial file count
    const initialCount = await page.locator('[data-lc-fragment="file-list"] li').count();

    // Create and upload file
    const testFile = await createTestFile('test-state.pdf', 1);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');
    await page.waitForSelector('.upload-complete', { timeout: 30000 });

    // Wait for component state update
    await page.waitForTimeout(1000);

    // Verify file count increased
    const finalCount = await page.locator('[data-lc-fragment="file-list"] li').count();
    expect(finalCount).toBe(initialCount + 1);
  });

  test('should handle network interruption gracefully', async ({ page }) => {
    // Create test file
    const testFile = await createTestFile('test-network.bin', 5);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');

    // Simulate network interruption after 2 seconds of uploading
    await page.waitForTimeout(2000);
    await page.route('**/live-component/**', route => route.abort('failed'));

    // Should show error or retry notification
    await page.waitForSelector('.upload-error, .upload-retrying', { timeout: 10000 });

    const status = await page.locator('#upload-status').textContent();
    expect(status).toMatch(/Error|Retrying/);
  });
});

test.describe('Chunked Upload Performance', () => {
  test.beforeEach(async ({ page }) => {
    await page.goto('https://localhost/livecomponents/test/upload');
    await page.waitForFunction(() => window.LiveComponents !== undefined);
  });

  test('should upload 10MB file in under 30 seconds', async ({ page }) => {
    const testFile = await createTestFile('test-perf-10mb.bin', 10);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);

    const startTime = Date.now();
    await page.click('button#upload-btn');
    await page.waitForSelector('.upload-complete', { timeout: 30000 });
    const duration = Date.now() - startTime;

    console.log(`10MB upload took ${duration}ms`);
    expect(duration).toBeLessThan(30000); // < 30 seconds
  });

  test('should handle memory efficiently with large files', async ({ page }) => {
    // Monitor memory if available (Chromium only)
    const memoryBefore = await page.evaluate(() => {
      return performance.memory ? performance.memory.usedJSHeapSize : 0;
    });

    // Upload 50MB file
    const testFile = await createTestFile('test-memory-50mb.bin', 50);

    const fileInput = page.locator('input[type="file"]');
    await fileInput.setInputFiles(testFile);
    await page.click('button#upload-btn');
    await page.waitForSelector('.upload-complete', { timeout: 120000 });

    const memoryAfter = await page.evaluate(() => {
      return performance.memory ? performance.memory.usedJSHeapSize : 0;
    });

    if (memoryBefore > 0) {
      const memoryIncrease = memoryAfter - memoryBefore;
      // Memory increase should be reasonable (not loading entire file into memory)
      // Should be < 10MB for 50MB file (chunked processing)
      expect(memoryIncrease).toBeLessThan(10 * 1024 * 1024);
      console.log(`Memory increase: ${(memoryIncrease / 1024 / 1024).toFixed(2)} MB`);
    }
  });
});