- Add comprehensive health check system with multiple endpoints
- Add Prometheus metrics endpoint
- Add production logging configurations (5 strategies)
- Add complete deployment documentation suite:
  * QUICKSTART.md - 30-minute deployment guide
  * DEPLOYMENT_CHECKLIST.md - Printable verification checklist
  * DEPLOYMENT_WORKFLOW.md - Complete deployment lifecycle
  * PRODUCTION_DEPLOYMENT.md - Comprehensive technical reference
  * production-logging.md - Logging configuration guide
  * ANSIBLE_DEPLOYMENT.md - Infrastructure as Code automation
  * README.md - Navigation hub
  * DEPLOYMENT_SUMMARY.md - Executive summary
- Add deployment scripts and automation
- Add DEPLOYMENT_PLAN.md - Concrete plan for immediate deployment
- Update README with production-ready features

All production infrastructure is now complete and ready for deployment.
/**
 * ChunkedUploader Tests
 *
 * Comprehensive test suite for the ChunkedUploader module:
 * - File chunking and upload initialization
 * - SHA-256 hashing and integrity verification
 * - Parallel chunk uploads with concurrency control
 * - Retry logic with exponential backoff
 * - Resume capability for interrupted uploads
 * - SSE real-time progress tracking
 * - Abort functionality
 * - Error handling and recovery
 *
 * @package Framework\LiveComponents
 */

import { ChunkedUploader } from '../../../../resources/js/modules/livecomponent/ChunkedUploader.js';

// Mock fetch globally
global.fetch = jest.fn();

// Mock crypto.subtle for SHA-256 hashing
global.crypto = {
    subtle: {
        digest: jest.fn()
    }
};

// Mock XMLHttpRequest. Note: `send` lives on the prototype (as a no-op
// default) rather than as an own property assigned in the constructor, so the
// per-test `MockXMLHttpRequest.prototype.send = ...` overrides below actually
// take effect instead of being shadowed by an instance property.
class MockXMLHttpRequest {
    constructor() {
        this.upload = {
            addEventListener: jest.fn()
        };
        this.addEventListener = jest.fn();
        this.open = jest.fn();
        this.setRequestHeader = jest.fn();
        this.abort = jest.fn();
        this.status = 200;
        this.responseText = '{"success": true}';
    }

    send() {} // overridden per test
}

global.XMLHttpRequest = MockXMLHttpRequest;
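
// Several suites below stub MockXMLHttpRequest.prototype.send inline. A
// shared helper along these lines (purely illustrative, not part of the
// module under test) could remove that duplication:
//
//   function stubXhrSend(respond) {
//       MockXMLHttpRequest.prototype.send = function () {
//           setTimeout(() => respond(this), 10);
//       };
//   }
//
// where `respond` sets status/responseText and invokes the registered
// 'load' or 'error' listener, exactly as the inline stubs do.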

// Mock SSE client
jest.mock('../../../../resources/js/modules/sse/index.js', () => ({
    getGlobalSseClient: jest.fn(() => ({
        on: jest.fn(),
        connect: jest.fn(),
        isConnected: jest.fn(() => false)
    }))
}));

describe('ChunkedUploader', () => {
    let uploader;
    let mockFile;

    beforeEach(() => {
        jest.clearAllMocks();

        // Reset fetch mock
        global.fetch.mockReset();

        // Create mock file
        mockFile = new File(['a'.repeat(2 * 1024 * 1024)], 'test.txt', {
            type: 'text/plain'
        });

        // Mock crypto.subtle.digest
        global.crypto.subtle.digest.mockResolvedValue(
            new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]).buffer
        );

        uploader = new ChunkedUploader('test-component', {
            chunkSize: 512 * 1024, // 512KB
            maxConcurrentChunks: 3,
            maxRetries: 3,
            retryDelay: 100,
            enableSSE: false // Disable SSE for most tests
        });
    });

    afterEach(() => {
        uploader.destroy();
    });

    describe('Constructor and Configuration', () => {
        it('initializes with default options', () => {
            const defaultUploader = new ChunkedUploader('component-1');

            expect(defaultUploader.componentId).toBe('component-1');
            expect(defaultUploader.chunkSize).toBe(512 * 1024);
            expect(defaultUploader.maxConcurrentChunks).toBe(3);
            expect(defaultUploader.maxRetries).toBe(3);
            expect(defaultUploader.retryDelay).toBe(1000);
            expect(defaultUploader.enableSSE).toBe(true);
        });

        it('accepts custom options', () => {
            const customUploader = new ChunkedUploader('component-2', {
                chunkSize: 1024 * 1024, // 1MB
                maxConcurrentChunks: 5,
                maxRetries: 5,
                retryDelay: 2000,
                enableSSE: false,
                apiBase: '/custom/upload'
            });

            expect(customUploader.chunkSize).toBe(1024 * 1024);
            expect(customUploader.maxConcurrentChunks).toBe(5);
            expect(customUploader.maxRetries).toBe(5);
            expect(customUploader.retryDelay).toBe(2000);
            expect(customUploader.enableSSE).toBe(false);
            expect(customUploader.apiBase).toBe('/custom/upload');
        });

        it('registers callbacks correctly', () => {
            const callbacks = {
                onInitialized: jest.fn(),
                onChunkProgress: jest.fn(),
                onProgress: jest.fn(),
                onComplete: jest.fn(),
                onError: jest.fn(),
                onAborted: jest.fn()
            };

            const callbackUploader = new ChunkedUploader('component-3', {
                ...callbacks,
                enableSSE: false
            });

            expect(callbackUploader.onInitialized).toBe(callbacks.onInitialized);
            expect(callbackUploader.onProgress).toBe(callbacks.onProgress);
            expect(callbackUploader.onComplete).toBe(callbacks.onComplete);
        });
    });

    describe('Session Initialization', () => {
        it('initializes upload session successfully', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    session_id: 'session-123',
                    total_chunks: 4,
                    expires_at: new Date(Date.now() + 3600000).toISOString()
                })
            });

            const onInitialized = jest.fn();
            uploader.onInitialized = onInitialized;

            const session = {
                file: mockFile,
                totalChunks: 0,
                sessionId: null,
                status: 'initializing',
                expiresAt: null
            };

            await uploader.initializeSession(session);

            expect(session.sessionId).toBe('session-123');
            expect(session.totalChunks).toBe(4);
            expect(session.status).toBe('initialized');
            expect(session.expiresAt).toBeInstanceOf(Date);
            expect(onInitialized).toHaveBeenCalledWith({
                sessionId: 'session-123',
                totalChunks: 4,
                expiresAt: session.expiresAt
            });
        });

        it('throws error on failed initialization', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: false,
                status: 500
            });

            const session = {
                file: mockFile,
                totalChunks: 0
            };

            await expect(uploader.initializeSession(session))
                .rejects.toThrow('Failed to initialize upload session: 500');
        });

        it('handles API error response', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: false,
                    error: 'Invalid file type'
                })
            });

            const session = {
                file: mockFile
            };

            await expect(uploader.initializeSession(session))
                .rejects.toThrow('Invalid file type');
        });
    });

    describe('File Chunking', () => {
        it('calculates correct number of chunks', async () => {
            const file = new File(['a'.repeat(2 * 1024 * 1024)], 'test.txt'); // 2MB file
            const chunkSize = 512 * 1024; // 512KB chunks

            const expectedChunks = Math.ceil(file.size / chunkSize); // 4 chunks
            expect(expectedChunks).toBe(4);
        });

        it('creates chunk metadata correctly', async () => {
            const session = {
                file: mockFile,
                chunks: [],
                totalChunks: 0
            };

            const totalChunks = Math.ceil(mockFile.size / uploader.chunkSize);
            session.totalChunks = totalChunks;

            for (let i = 0; i < totalChunks; i++) {
                const start = i * uploader.chunkSize;
                const end = Math.min(start + uploader.chunkSize, mockFile.size);
                const size = end - start;

                expect(size).toBeGreaterThan(0);
                expect(size).toBeLessThanOrEqual(uploader.chunkSize);
            }
        });

        it('handles last chunk size correctly', async () => {
            const file = new File(['a'.repeat(1536 * 1024)], 'test.txt'); // 1.5MB file
            const chunkSize = 512 * 1024; // 512KB chunks

            const totalChunks = Math.ceil(file.size / chunkSize); // 3 chunks
            expect(totalChunks).toBe(3);

            // Last chunk should be 512KB (exactly)
            const lastChunkStart = 2 * chunkSize;
            const lastChunkSize = file.size - lastChunkStart;
            expect(lastChunkSize).toBe(512 * 1024);
        });
    });

    describe('SHA-256 Hashing', () => {
        it('hashes chunk data correctly', async () => {
            const chunkData = new Uint8Array([1, 2, 3, 4, 5]).buffer;

            global.crypto.subtle.digest.mockResolvedValueOnce(
                new Uint8Array([0xde, 0xad, 0xbe, 0xef]).buffer
            );

            const hash = await uploader.hashChunk(chunkData);

            expect(global.crypto.subtle.digest).toHaveBeenCalledWith('SHA-256', chunkData);
            expect(hash).toBe('deadbeef');
        });

        it('converts hash to hex string correctly', async () => {
            const chunkData = new Uint8Array([10, 20, 30]).buffer;

            global.crypto.subtle.digest.mockResolvedValueOnce(
                new Uint8Array([0x01, 0x0a, 0xff]).buffer
            );

            const hash = await uploader.hashChunk(chunkData);

            expect(hash).toBe('010aff');
            expect(hash).toHaveLength(6); // 3 bytes = 6 hex chars
        });
    });
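
    // For reference, a minimal sketch of the behaviour the hashing tests
    // above pin down, assuming hashChunk digests with SHA-256 and zero-pads
    // each byte to two hex digits (the helper name is illustrative, not the
    // module's actual internal):
    //
    //   async function sha256Hex(buffer) {
    //       const digest = await crypto.subtle.digest('SHA-256', buffer);
    //       return Array.from(new Uint8Array(digest))
    //           .map(byte => byte.toString(16).padStart(2, '0'))
    //           .join('');
    //   }
    //
    // Under that mapping, bytes [0x01, 0x0a, 0xff] yield '010aff', matching
    // the expectation above.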

    describe('Chunk Reading', () => {
        it('reads chunk data from file', async () => {
            const file = new File(['Hello World'], 'test.txt');
            const chunk = {
                index: 0,
                size: 11
            };

            const chunkData = await uploader.readChunk(file, chunk);

            expect(chunkData).toBeInstanceOf(ArrayBuffer);
            expect(chunkData.byteLength).toBe(11);
        });

        it('reads partial chunk correctly', async () => {
            const content = 'a'.repeat(1024);
            const file = new File([content], 'test.txt');

            const chunk = {
                index: 0,
                size: 512
            };

            uploader.chunkSize = 512;
            const chunkData = await uploader.readChunk(file, chunk);

            expect(chunkData.byteLength).toBe(512);
        });
    });

    describe('Chunk Upload', () => {
        it('uploads chunk successfully', async () => {
            const session = {
                sessionId: 'session-123',
                file: mockFile,
                uploadedChunks: 0,
                progress: 0
            };

            const chunk = {
                index: 0,
                size: 512 * 1024,
                status: 'pending',
                uploadedBytes: 0,
                hash: null,
                xhr: null
            };

            // Mock FileReader
            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onload({ target: { result: new ArrayBuffer(512 * 1024) } });
                    }, 0);
                }
            };

            global.crypto.subtle.digest.mockResolvedValueOnce(
                new Uint8Array([1, 2, 3, 4]).buffer
            );

            const onProgress = jest.fn();
            uploader.onProgress = onProgress;

            // Mock successful XHR
            MockXMLHttpRequest.prototype.send = function() {
                setTimeout(() => {
                    this.status = 200;
                    this.responseText = JSON.stringify({ success: true });
                    const loadEvent = this.addEventListener.mock.calls.find(
                        call => call[0] === 'load'
                    );
                    if (loadEvent) loadEvent[1]();
                }, 10);
            };

            await uploader.uploadChunk(session, chunk);

            expect(chunk.status).toBe('complete');
            expect(session.uploadedChunks).toBe(1);
            expect(onProgress).toHaveBeenCalled();
        });

        it('retries failed chunk upload', async () => {
            const session = {
                sessionId: 'session-123',
                file: mockFile,
                uploadedChunks: 0
            };

            const chunk = {
                index: 0,
                size: 512 * 1024,
                status: 'pending',
                uploadedBytes: 0,
                reset: jest.fn()
            };

            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onload({ target: { result: new ArrayBuffer(512 * 1024) } });
                    }, 0);
                }
            };

            let attempts = 0;
            MockXMLHttpRequest.prototype.send = function() {
                setTimeout(() => {
                    attempts++;
                    if (attempts < 3) {
                        // Fail first 2 attempts
                        this.status = 500;
                        const errorEvent = this.addEventListener.mock.calls.find(
                            call => call[0] === 'error'
                        );
                        if (errorEvent) errorEvent[1]();
                    } else {
                        // Succeed on 3rd attempt
                        this.status = 200;
                        this.responseText = JSON.stringify({ success: true });
                        const loadEvent = this.addEventListener.mock.calls.find(
                            call => call[0] === 'load'
                        );
                        if (loadEvent) loadEvent[1]();
                    }
                }, 10);
            };

            uploader.maxRetries = 3;
            uploader.retryDelay = 10;

            await uploader.uploadChunk(session, chunk);

            expect(attempts).toBe(3);
            expect(chunk.reset).toHaveBeenCalledTimes(2);
        });

        it('fails after max retries exceeded', async () => {
            const session = {
                sessionId: 'session-123',
                file: mockFile
            };

            const chunk = {
                index: 0,
                size: 512 * 1024,
                status: 'pending',
                reset: jest.fn()
            };

            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onload({ target: { result: new ArrayBuffer(512 * 1024) } });
                    }, 0);
                }
            };

            MockXMLHttpRequest.prototype.send = function() {
                setTimeout(() => {
                    this.status = 500;
                    const errorEvent = this.addEventListener.mock.calls.find(
                        call => call[0] === 'error'
                    );
                    if (errorEvent) errorEvent[1]();
                }, 10);
            };

            uploader.maxRetries = 2;
            uploader.retryDelay = 10;

            await expect(uploader.uploadChunk(session, chunk))
                .rejects.toThrow('Chunk 0 failed after 2 retries');

            expect(chunk.status).toBe('error');
        });
    });
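
    // A sketch of the retry loop the tests above assume (illustrative only;
    // the module's docblock promises exponential backoff, so the delay is
    // shown scaling per attempt -- the real uploadChunk may differ):
    //
    //   for (let attempt = 0; attempt <= maxRetries; attempt++) {
    //       try {
    //           return await sendChunk(session, chunk);
    //       } catch (error) {
    //           if (attempt === maxRetries) {
    //               chunk.status = 'error';
    //               throw new Error(`Chunk ${chunk.index} failed after ${maxRetries} retries`);
    //           }
    //           chunk.reset();
    //           await new Promise(resolve => setTimeout(resolve, retryDelay * 2 ** attempt));
    //       }
    //   }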

    describe('Parallel Chunk Upload', () => {
        it('uploads chunks in parallel batches', async () => {
            const session = {
                sessionId: 'session-123',
                file: mockFile,
                uploadedChunks: 0,
                chunks: [],
                getPendingChunks: jest.fn()
            };

            // Create 6 pending chunks
            const chunks = [];
            for (let i = 0; i < 6; i++) {
                chunks.push({
                    index: i,
                    size: 512 * 1024,
                    status: 'pending'
                });
            }

            session.chunks = chunks;
            session.getPendingChunks.mockReturnValue([...chunks]);

            uploader.uploadChunk = jest.fn().mockResolvedValue();
            uploader.maxConcurrentChunks = 3;

            await uploader.uploadChunks(session);

            // Should process 6 chunks in 2 batches of 3
            expect(uploader.uploadChunk).toHaveBeenCalledTimes(6);
        });

        it('respects max concurrent chunks limit', async () => {
            const session = {
                sessionId: 'session-123',
                file: mockFile,
                chunks: [],
                getPendingChunks: jest.fn()
            };

            const chunks = [];
            for (let i = 0; i < 10; i++) {
                chunks.push({ index: i, status: 'pending' });
            }

            session.chunks = chunks;
            session.getPendingChunks.mockReturnValue([...chunks]);

            let concurrentUploads = 0;
            let maxConcurrent = 0;

            uploader.uploadChunk = jest.fn().mockImplementation(async () => {
                concurrentUploads++;
                maxConcurrent = Math.max(maxConcurrent, concurrentUploads);
                await new Promise(resolve => setTimeout(resolve, 10));
                concurrentUploads--;
            });

            uploader.maxConcurrentChunks = 3;

            await uploader.uploadChunks(session);

            expect(maxConcurrent).toBeLessThanOrEqual(3);
        });
    });
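
    // A minimal sketch of the batching strategy these tests assume
    // (illustrative; a worker pool that refills slots as chunks finish would
    // also satisfy the concurrency assertion above):
    //
    //   async function uploadInBatches(uploader, session) {
    //       const pending = session.getPendingChunks();
    //       for (let i = 0; i < pending.length; i += uploader.maxConcurrentChunks) {
    //           const batch = pending.slice(i, i + uploader.maxConcurrentChunks);
    //           await Promise.all(batch.map(chunk => uploader.uploadChunk(session, chunk)));
    //       }
    //   }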

    describe('Upload Completion', () => {
        it('completes upload successfully', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    file_path: '/uploads/test.txt',
                    file_hash: 'abc123'
                })
            });

            const session = {
                sessionId: 'session-123',
                status: 'uploading'
            };

            const result = await uploader.completeUpload(session, '/uploads/test.txt');

            expect(session.status).toBe('assembling');
            expect(result.success).toBe(true);
            expect(global.fetch).toHaveBeenCalledWith(
                '/live-component/upload/complete',
                expect.objectContaining({
                    method: 'POST',
                    body: JSON.stringify({
                        sessionId: 'session-123',
                        targetPath: '/uploads/test.txt'
                    })
                })
            );
        });

        it('throws error on completion failure', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: false,
                status: 500
            });

            const session = {
                sessionId: 'session-123'
            };

            await expect(uploader.completeUpload(session, '/uploads/test.txt'))
                .rejects.toThrow('Failed to complete upload: 500');
        });
    });

    describe('Full Upload Flow', () => {
        it('executes complete upload successfully', async () => {
            // Mock initialization
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    session_id: 'session-123',
                    total_chunks: 2,
                    expires_at: new Date(Date.now() + 3600000).toISOString()
                })
            });

            // Mock completion
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    file_path: '/uploads/test.txt'
                })
            });

            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onload({ target: { result: new ArrayBuffer(512 * 1024) } });
                    }, 0);
                }
            };

            MockXMLHttpRequest.prototype.send = function() {
                setTimeout(() => {
                    this.status = 200;
                    this.responseText = JSON.stringify({ success: true });
                    const loadEvent = this.addEventListener.mock.calls.find(
                        call => call[0] === 'load'
                    );
                    if (loadEvent) loadEvent[1]();
                }, 10);
            };

            const onComplete = jest.fn();
            uploader.onComplete = onComplete;

            const file = new File(['a'.repeat(1024 * 1024)], 'test.txt'); // 1MB file
            const session = await uploader.upload(file, '/uploads/test.txt');

            expect(session.status).toBe('complete');
            expect(session.sessionId).toBe('session-123');
            expect(onComplete).toHaveBeenCalledWith(expect.objectContaining({
                sessionId: 'session-123',
                file: file,
                totalBytes: file.size
            }));
        });

        it('handles upload error correctly', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: false,
                status: 500
            });

            const onError = jest.fn();
            uploader.onError = onError;

            const file = new File(['test'], 'test.txt');

            await expect(uploader.upload(file, '/uploads/test.txt'))
                .rejects.toThrow();

            expect(onError).toHaveBeenCalledWith(expect.objectContaining({
                file: file
            }));
        });
    });

    describe('Upload Abort', () => {
        it('aborts upload successfully', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({ success: true })
            });

            const chunks = [
                { abort: jest.fn() },
                { abort: jest.fn() },
                { abort: jest.fn() }
            ];

            const session = {
                sessionId: 'session-123',
                status: 'uploading',
                chunks: chunks,
                uploadedChunks: 1,
                totalChunks: 3
            };

            uploader.sessions.set('session-123', session);

            const onAborted = jest.fn();
            uploader.onAborted = onAborted;

            await uploader.abort('session-123');

            expect(chunks[0].abort).toHaveBeenCalled();
            expect(chunks[1].abort).toHaveBeenCalled();
            expect(chunks[2].abort).toHaveBeenCalled();
            expect(session.status).toBe('aborted');
            expect(onAborted).toHaveBeenCalledWith({
                sessionId: 'session-123',
                uploadedChunks: 1,
                totalChunks: 3
            });
        });

        it('throws error for non-existent session', async () => {
            await expect(uploader.abort('non-existent'))
                .rejects.toThrow('Session not found');
        });
    });

    describe('Resume Capability', () => {
        it('resumes interrupted upload', async () => {
            // Mock status request
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    session_id: 'session-123',
                    total_chunks: 4,
                    uploaded_chunks: 2,
                    status: 'uploading'
                })
            });

            // Mock completion
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    file_path: '/uploads/test.txt'
                })
            });

            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onload({ target: { result: new ArrayBuffer(512 * 1024) } });
                    }, 0);
                }
            };

            MockXMLHttpRequest.prototype.send = function() {
                setTimeout(() => {
                    this.status = 200;
                    this.responseText = JSON.stringify({ success: true });
                    const loadEvent = this.addEventListener.mock.calls.find(
                        call => call[0] === 'load'
                    );
                    if (loadEvent) loadEvent[1]();
                }, 10);
            };

            const onComplete = jest.fn();
            uploader.onComplete = onComplete;

            const file = new File(['a'.repeat(2 * 1024 * 1024)], 'test.txt');
            const session = await uploader.resume('session-123', file, '/uploads/test.txt');

            expect(session.status).toBe('complete');
            expect(session.uploadedChunks).toBe(4);
            expect(onComplete).toHaveBeenCalledWith(expect.objectContaining({
                sessionId: 'session-123',
                resumed: true
            }));
        });

        it('gets upload status correctly', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => ({
                    success: true,
                    session_id: 'session-123',
                    total_chunks: 5,
                    uploaded_chunks: 3,
                    status: 'uploading'
                })
            });

            const status = await uploader.getStatus('session-123');

            expect(status.session_id).toBe('session-123');
            expect(status.uploaded_chunks).toBe(3);
            expect(status.total_chunks).toBe(5);
        });
    });
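
    // The resume path exercised above reduces to three steps (a sketch,
    // assuming the server-reported uploaded_chunks count maps onto the
    // lowest chunk indexes):
    //
    //   1. Fetch session status to learn total_chunks and uploaded_chunks.
    //   2. Rebuild chunk metadata from the file, marking the first
    //      uploaded_chunks entries complete.
    //   3. Upload only the remaining chunks, then call completeUpload().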

    describe('Progress Tracking', () => {
        it('tracks chunk progress correctly', () => {
            const chunk = {
                index: 0,
                size: 1024,
                uploadedBytes: 512,
                status: 'uploading'
            };

            const progress = (chunk.uploadedBytes / chunk.size) * 100;
            expect(progress).toBe(50);
        });

        it('tracks session progress correctly', () => {
            const session = {
                totalChunks: 10,
                uploadedChunks: 5
            };

            const progress = (session.uploadedChunks / session.totalChunks) * 100;
            expect(progress).toBe(50);
        });

        it('calls onProgress callback with correct data', async () => {
            const onProgress = jest.fn();
            uploader.onProgress = onProgress;

            const session = {
                sessionId: 'session-123',
                file: mockFile,
                uploadedChunks: 0,
                totalChunks: 4,
                uploadedBytes: 0,
                totalBytes: 2 * 1024 * 1024,
                progress: 0,
                chunks: []
            };

            const chunk = {
                index: 0,
                size: 512 * 1024,
                status: 'pending',
                uploadedBytes: 0
            };

            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onload({ target: { result: new ArrayBuffer(512 * 1024) } });
                    }, 0);
                }
            };

            MockXMLHttpRequest.prototype.send = function() {
                setTimeout(() => {
                    this.status = 200;
                    this.responseText = JSON.stringify({ success: true });
                    const loadEvent = this.addEventListener.mock.calls.find(
                        call => call[0] === 'load'
                    );
                    if (loadEvent) loadEvent[1]();
                }, 10);
            };

            await uploader.uploadChunk(session, chunk);

            expect(onProgress).toHaveBeenCalledWith(expect.objectContaining({
                sessionId: 'session-123',
                uploadedChunks: 1,
                totalChunks: 4
            }));
        });
    });

    describe('SSE Integration', () => {
        it('initializes SSE connection when enabled', () => {
            // Mock user ID
            document.body.dataset.userId = 'user-123';

            const { getGlobalSseClient } = require('../../../../resources/js/modules/sse/index.js');

            const sseUploader = new ChunkedUploader('component-sse', {
                enableSSE: true
            });

            expect(getGlobalSseClient).toHaveBeenCalledWith(['user:user-123']);
        });

        it('handles SSE progress updates', () => {
            const session = {
                sessionId: 'session-123',
                uploadedChunks: 0,
                status: 'uploading'
            };

            uploader.sessions.set('session-123', session);

            const onSSEProgress = jest.fn();
            uploader.onSSEProgress = onSSEProgress;

            uploader.handleSSEProgress({
                session_id: 'session-123',
                taskId: 'session-123',
                percent: 50,
                message: 'Uploading...',
                data: {
                    uploaded_chunks: 5,
                    phase: 'uploading'
                }
            });

            expect(session.uploadedChunks).toBe(5);
            expect(session.status).toBe('uploading');
            expect(onSSEProgress).toHaveBeenCalledWith({
                sessionId: 'session-123',
                percent: 50,
                message: 'Uploading...',
                data: expect.objectContaining({
                    uploaded_chunks: 5,
                    phase: 'uploading'
                })
            });
        });
    });

    describe('Cleanup', () => {
        it('destroys uploader and cleans up resources', () => {
            const chunk1 = { abort: jest.fn() };
            const chunk2 = { abort: jest.fn() };

            const session1 = {
                sessionId: 'session-1',
                canResume: true,
                chunks: [chunk1]
            };

            const session2 = {
                sessionId: 'session-2',
                canResume: true,
                chunks: [chunk2]
            };

            uploader.sessions.set('session-1', session1);
            uploader.sessions.set('session-2', session2);
            uploader.activeSession = session1;

            uploader.destroy();

            expect(chunk1.abort).toHaveBeenCalled();
            expect(chunk2.abort).toHaveBeenCalled();
            expect(uploader.sessions.size).toBe(0);
            expect(uploader.activeSession).toBeNull();
        });
    });

    describe('Error Scenarios', () => {
        it('handles network errors gracefully', async () => {
            global.fetch.mockRejectedValueOnce(new Error('Network error'));

            const file = new File(['test'], 'test.txt');

            await expect(uploader.upload(file, '/uploads/test.txt'))
                .rejects.toThrow();
        });

        it('handles malformed API responses', async () => {
            global.fetch.mockResolvedValueOnce({
                ok: true,
                json: async () => {
                    throw new Error('Invalid JSON');
                }
            });

            const session = { file: mockFile };

            await expect(uploader.initializeSession(session))
                .rejects.toThrow();
        });

        it('handles file reading errors', async () => {
            const file = new File(['test'], 'test.txt');
            const chunk = { index: 0, size: 4 };

            global.FileReader = class {
                readAsArrayBuffer() {
                    setTimeout(() => {
                        this.onerror(new Error('Read failed'));
                    }, 0);
                }
            };

            await expect(uploader.readChunk(file, chunk))
                .rejects.toThrow('Failed to read chunk');
        });
    });

    describe('Performance Characteristics', () => {
        it('handles large files efficiently', async () => {
            const largeFile = new File(
                ['a'.repeat(100 * 1024 * 1024)],
                'large.txt'
            ); // 100MB

            const chunkSize = 1024 * 1024; // 1MB chunks
            const expectedChunks = Math.ceil(largeFile.size / chunkSize);

            expect(expectedChunks).toBe(100);
        });

        it('limits concurrent chunk uploads', async () => {
            uploader.maxConcurrentChunks = 5;

            const session = {
                sessionId: 'session-123',
                chunks: [],
                getPendingChunks: jest.fn()
            };

            const chunks = [];
            for (let i = 0; i < 50; i++) {
                chunks.push({ index: i, status: 'pending' });
            }

            session.getPendingChunks.mockReturnValue([...chunks]);

            let activeCalls = 0;
            let maxActive = 0;
            uploader.uploadChunk = jest.fn().mockImplementation(async () => {
                activeCalls++;
                maxActive = Math.max(maxActive, activeCalls);
                await new Promise(resolve => setTimeout(resolve, 5));
                activeCalls--;
            });

            await uploader.uploadChunks(session);

            expect(uploader.uploadChunk).toHaveBeenCalledTimes(50);
            // Assert the limit the test name promises, not just the call count
            expect(maxActive).toBeLessThanOrEqual(5);
        });
    });
});