michaelschiemer/tests/Unit/Framework/LiveComponents/ChunkedUploadEdgeCasesTest.php

<?php

declare(strict_types=1);

namespace Tests\Unit\Framework\LiveComponents;

use App\Framework\Core\ValueObjects\Byte;
use App\Framework\Filesystem\InMemoryStorage;
use App\Framework\LiveComponents\Services\ChunkAssembler;
use App\Framework\LiveComponents\Services\ChunkedUploadManager;
use App\Framework\LiveComponents\Services\IntegrityValidator;
use App\Framework\LiveComponents\Services\UploadSessionIdGenerator;
use App\Framework\LiveComponents\ValueObjects\ChunkHash;
use App\Framework\LiveComponents\ValueObjects\UploadSession;
use App\Framework\Random\TestableRandomGenerator;
use Tests\Support\InMemoryUploadSessionStore;
use Tests\Support\InMemoryUploadProgressTracker;
use DateTimeImmutable;

/**
 * Edge Cases & Error Recovery Tests for Chunked Upload System
 *
 * Tests critical edge cases and error recovery scenarios:
 * - Concurrency issues (simultaneous uploads, race conditions)
 * - Data corruption scenarios (partial writes, filesystem corruption)
 * - Resource exhaustion (disk space, memory limits, session limits)
 * - Network failure recovery (timeouts, resume, idempotency)
 * - Stress & performance (simultaneous sessions, large files)
 */
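
// A minimal happy-path sketch of the lifecycle these tests exercise, using the
// same API that appears throughout this file (names and sizes are illustrative):
//
//     $session = $manager->initializeUpload(
//         componentId: 'uploader', fileName: 'file.bin',
//         totalSize: Byte::fromBytes(1024), chunkSize: Byte::fromBytes(512)
//     );
//     $manager->uploadChunk($session->sessionId, 0, $chunk, ChunkHash::fromData($chunk));
//     // ... remaining chunks ...
//     $manager->completeUpload($session->sessionId, '/target/path');
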
beforeEach(function () {
    // Setup dependencies
    $this->randomGen = new TestableRandomGenerator();
    $this->sessionIdGenerator = new UploadSessionIdGenerator($this->randomGen);
    $this->sessionStore = new InMemoryUploadSessionStore();
    $this->integrityValidator = new IntegrityValidator();
    $this->fileStorage = new InMemoryStorage();
    $this->chunkAssembler = new ChunkAssembler($this->fileStorage);
    $this->progressTracker = new InMemoryUploadProgressTracker();
    $this->uploadManager = new ChunkedUploadManager(
        $this->sessionIdGenerator,
        $this->sessionStore,
        $this->integrityValidator,
        $this->chunkAssembler,
        $this->fileStorage,
        $this->progressTracker,
        '/tmp/test-uploads'
    );
});

// ============================================================================
// CONCURRENCY EDGE CASES
// ============================================================================
describe('Concurrency Edge Cases', function () {
    it('handles duplicate chunk upload (idempotency)', function () {
        // Initialize session
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'test-file.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(512)
        );

        // Prepare chunk data
        $chunkData = str_repeat('A', 512);
        $chunkHash = ChunkHash::fromData($chunkData);

        // Upload same chunk twice (simulate network retry)
        $result1 = $this->uploadManager->uploadChunk(
            sessionId: $session->sessionId,
            chunkIndex: 0,
            chunkData: $chunkData,
            providedHash: $chunkHash
        );
        $result2 = $this->uploadManager->uploadChunk(
            sessionId: $session->sessionId,
            chunkIndex: 0,
            chunkData: $chunkData,
            providedHash: $chunkHash
        );

        // Should be idempotent - both uploads succeed
        expect($result1->getUploadedChunks())->toHaveCount(1);
        expect($result2->getUploadedChunks())->toHaveCount(1);
        expect($result1->getProgress())->toBe(50.0);
        expect($result2->getProgress())->toBe(50.0);
    });
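
    // Because duplicate identical chunks are accepted, a client can safely
    // retry on transient failures. A hedged sketch (hypothetical retry loop;
    // the manager API is the one used above):
    //
    //     for ($attempt = 0; $attempt < 3; $attempt++) {
    //         try {
    //             $manager->uploadChunk($sessionId, $i, $data, ChunkHash::fromData($data));
    //             break; // re-sending the same bytes is safe
    //         } catch (\RuntimeException $e) {
    //             // assumed transient transport error; retry
    //         }
    //     }
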
    it('rejects chunk upload with different data but same index', function () {
        // Initialize session
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'test-file.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(512)
        );

        // Upload first chunk
        $chunkData1 = str_repeat('A', 512);
        $chunkHash1 = ChunkHash::fromData($chunkData1);
        $this->uploadManager->uploadChunk(
            sessionId: $session->sessionId,
            chunkIndex: 0,
            chunkData: $chunkData1,
            providedHash: $chunkHash1
        );

        // Try to upload different data for same chunk index
        $chunkData2 = str_repeat('B', 512);
        $chunkHash2 = ChunkHash::fromData($chunkData2);
        expect(fn() => $this->uploadManager->uploadChunk(
            sessionId: $session->sessionId,
            chunkIndex: 0,
            chunkData: $chunkData2,
            providedHash: $chunkHash2
        ))->toThrow(\InvalidArgumentException::class, 'already uploaded with different');
    });

    it('handles out-of-order chunk uploads', function () {
        // Initialize session with 4 chunks
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'test-file.txt',
            totalSize: Byte::fromBytes(2048),
            chunkSize: Byte::fromBytes(512)
        );

        // Upload chunks in random order: 2, 0, 3, 1
        $chunks = [
            2 => str_repeat('C', 512),
            0 => str_repeat('A', 512),
            3 => str_repeat('D', 512),
            1 => str_repeat('B', 512),
        ];
        foreach ($chunks as $index => $data) {
            $hash = ChunkHash::fromData($data);
            $this->uploadManager->uploadChunk(
                sessionId: $session->sessionId,
                chunkIndex: $index,
                chunkData: $data,
                providedHash: $hash
            );
        }

        // All chunks uploaded - should be complete
        $finalSession = $this->uploadManager->getStatus($session->sessionId);
        expect($finalSession->isComplete())->toBeTrue();
        expect($finalSession->getUploadedChunks())->toHaveCount(4);
    });

    it('handles rapid successive chunk uploads', function () {
        // Initialize session with many small chunks
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'rapid-upload.txt',
            totalSize: Byte::fromBytes(1000),
            chunkSize: Byte::fromBytes(100)
        );

        // Rapidly upload all 10 chunks
        for ($i = 0; $i < 10; $i++) {
            $chunkData = str_repeat(chr(65 + $i), 100);
            $chunkHash = ChunkHash::fromData($chunkData);
            $this->uploadManager->uploadChunk(
                sessionId: $session->sessionId,
                chunkIndex: $i,
                chunkData: $chunkData,
                providedHash: $chunkHash
            );
        }

        $finalSession = $this->uploadManager->getStatus($session->sessionId);
        expect($finalSession->isComplete())->toBeTrue();
        expect($finalSession->getProgress())->toBe(100.0);
    });
});

// ============================================================================
// DATA CORRUPTION & INTEGRITY EDGE CASES
// ============================================================================
describe('Data Corruption Edge Cases', function () {
    it('detects corrupted chunk data via hash mismatch', function () {
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'test.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(512)
        );

        // Create chunk with intentionally wrong hash
        $chunkData = str_repeat('A', 512);
        $corruptedHash = ChunkHash::fromData('corrupted');
        expect(fn() => $this->uploadManager->uploadChunk(
            sessionId: $session->sessionId,
            chunkIndex: 0,
            chunkData: $chunkData,
            providedHash: $corruptedHash
        ))->toThrow(\InvalidArgumentException::class, 'hash mismatch');
    });

    it('validates final assembled file hash', function () {
        $chunk1Data = str_repeat('A', 512);
        $chunk2Data = str_repeat('B', 512);
        $expectedHash = ChunkHash::fromData($chunk1Data . $chunk2Data);
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'validated.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(512),
            expectedFileHash: $expectedHash
        );

        // Upload all chunks
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            0,
            $chunk1Data,
            ChunkHash::fromData($chunk1Data)
        );
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            1,
            $chunk2Data,
            ChunkHash::fromData($chunk2Data)
        );

        // Complete upload - should succeed with matching hash
        $targetPath = '/tmp/validated-file.txt';
        $completedSession = $this->uploadManager->completeUpload(
            $session->sessionId,
            $targetPath
        );
        expect($completedSession->isComplete())->toBeTrue();
        expect($this->fileStorage->exists($targetPath))->toBeTrue();
    });

    it('rejects final assembly with mismatched file hash', function () {
        $wrongHash = ChunkHash::fromData('wrong expected hash');
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'mismatch.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(512),
            expectedFileHash: $wrongHash
        );

        // Upload all chunks
        $chunk1Data = str_repeat('A', 512);
        $chunk2Data = str_repeat('B', 512);
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            0,
            $chunk1Data,
            ChunkHash::fromData($chunk1Data)
        );
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            1,
            $chunk2Data,
            ChunkHash::fromData($chunk2Data)
        );

        // Completion should fail due to hash mismatch
        expect(fn() => $this->uploadManager->completeUpload(
            $session->sessionId,
            '/tmp/mismatch-file.txt'
        ))->toThrow(\InvalidArgumentException::class);
    });

    // Note: Chunk size validation removed - not feasible because:
    // 1. Session doesn't store original chunkSize parameter
    // 2. Client determines chunk size, server only calculates totalChunks
    // 3. Last chunk can be any size
    // 4. Hash validation provides sufficient integrity guarantee
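    //
    // The totalChunks derivation is presumably a plain ceiling division. A
    // sketch (Byte::toBytes() is an assumption; only the from*() factories and
    // toGigabytes() appear in this file):
    //
    //     $totalChunks = (int) ceil($totalSize->toBytes() / $chunkSize->toBytes());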

    it('handles last chunk with partial size correctly', function () {
        // Initialize with size not evenly divisible by chunk size
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'partial-last.txt',
            totalSize: Byte::fromBytes(1300), // 512 + 512 + 276
            chunkSize: Byte::fromBytes(512)
        );
        expect($session->totalChunks)->toBe(3);

        // Upload first two full chunks
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            0,
            str_repeat('A', 512),
            ChunkHash::fromData(str_repeat('A', 512))
        );
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            1,
            str_repeat('B', 512),
            ChunkHash::fromData(str_repeat('B', 512))
        );

        // Upload last chunk with partial size (276 bytes)
        $lastChunkData = str_repeat('C', 276);
        $lastChunkHash = ChunkHash::fromData($lastChunkData);
        $finalSession = $this->uploadManager->uploadChunk(
            $session->sessionId,
            2,
            $lastChunkData,
            $lastChunkHash
        );
        expect($finalSession->isComplete())->toBeTrue();
        expect($finalSession->getProgress())->toBe(100.0);
    });
});

// ============================================================================
// RESOURCE EXHAUSTION EDGE CASES
// ============================================================================
describe('Resource Exhaustion Edge Cases', function () {
    it('handles zero-byte file gracefully', function () {
        expect(fn() => $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'empty.txt',
            totalSize: Byte::fromBytes(0),
            chunkSize: Byte::fromBytes(512)
        ))->toThrow(\InvalidArgumentException::class, 'Total size must be greater than zero');
    });

    it('handles extremely small chunk size', function () {
        expect(fn() => $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'tiny-chunks.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(0)
        ))->toThrow(\InvalidArgumentException::class, 'Chunk size must be greater than zero');
    });

    it('handles very large file simulation (1GB file, 100MB chunks)', function () {
        // Simulate 1GB file with 100MB chunks
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'large-file.bin',
            totalSize: Byte::fromGigabytes(1),
            chunkSize: Byte::fromMegabytes(100)
        );
        // 1GB = 1,073,741,824 bytes; a 100MB chunk = 104,857,600 bytes.
        // 1,073,741,824 / 104,857,600 = 10.24, and ceil(10.24) = 11 chunks
        // (the sizes are not evenly divisible, so the last chunk is partial).
        expect($session->totalChunks)->toBe(11);
        expect($session->totalSize->toGigabytes())->toBe(1.0);
    });
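
    // Sanity check of that arithmetic in plain PHP, independent of the framework:
    //
    //     (int) ceil(1_073_741_824 / 104_857_600) === 11  // 10.24 rounds up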

    it('calculates correct chunk count for various file sizes', function () {
        // Test various size combinations
        $testCases = [
            ['total' => 1000, 'chunk' => 100, 'expected' => 10], // Exact division
            ['total' => 1050, 'chunk' => 100, 'expected' => 11], // Partial last chunk
            ['total' => 999,  'chunk' => 100, 'expected' => 10], // Partial last chunk
            ['total' => 100,  'chunk' => 100, 'expected' => 1],  // Single chunk
            ['total' => 50,   'chunk' => 100, 'expected' => 1],  // Chunk larger than file
        ];
        foreach ($testCases as $case) {
            $session = $this->uploadManager->initializeUpload(
                componentId: 'test-uploader',
                fileName: 'test.txt',
                totalSize: Byte::fromBytes($case['total']),
                chunkSize: Byte::fromBytes($case['chunk'])
            );
            expect($session->totalChunks)->toBe(
                $case['expected'],
                "Failed for total={$case['total']}, chunk={$case['chunk']}"
            );
        }
    });

    it('handles many simultaneous upload sessions', function () {
        $sessions = [];

        // Create 50 simultaneous upload sessions
        for ($i = 0; $i < 50; $i++) {
            $sessions[] = $this->uploadManager->initializeUpload(
                componentId: "uploader-{$i}",
                fileName: "file-{$i}.txt",
                totalSize: Byte::fromKilobytes(10),
                chunkSize: Byte::fromKilobytes(2)
            );
        }
        expect($sessions)->toHaveCount(50);

        // Verify all sessions are independent
        foreach ($sessions as $session) {
            expect($session->sessionId)->not->toBeNull();
            $retrieved = $this->uploadManager->getStatus($session->sessionId);
            expect($retrieved)->not->toBeNull();
        }
    });
});

// ============================================================================
// NETWORK FAILURE & RECOVERY EDGE CASES
// ============================================================================
describe('Network Failure & Recovery Edge Cases', function () {
    it('handles session expiration gracefully', function () {
        // Create expired session manually
        $sessionId = $this->sessionIdGenerator->generate();
        $expiredSession = new UploadSession(
            sessionId: $sessionId,
            componentId: 'test-uploader',
            fileName: 'expired.txt',
            totalSize: Byte::fromBytes(1024),
            totalChunks: 2,
            createdAt: new DateTimeImmutable('-2 hours'),
            expiresAt: new DateTimeImmutable('-1 hour') // Expired 1 hour ago
        );
        $this->sessionStore->save($expiredSession);

        // Try to upload chunk for expired session
        $chunkData = str_repeat('A', 512);
        $chunkHash = ChunkHash::fromData($chunkData);
        expect(fn() => $this->uploadManager->uploadChunk(
            sessionId: $sessionId,
            chunkIndex: 0,
            chunkData: $chunkData,
            providedHash: $chunkHash
        ))->toThrow(\InvalidArgumentException::class, 'expired');
    });

    it('supports resume after partial upload', function () {
        // Initialize and upload first chunk
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'resumable.txt',
            totalSize: Byte::fromBytes(1536), // 3 chunks of 512 bytes
            chunkSize: Byte::fromBytes(512)
        );

        // Upload first chunk
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            0,
            str_repeat('A', 512),
            ChunkHash::fromData(str_repeat('A', 512))
        );

        // Simulate network interruption - check status
        $currentSession = $this->uploadManager->getStatus($session->sessionId);
        $missingChunks = $currentSession->getMissingChunkIndices();
        expect($missingChunks)->toBe([1, 2]);
        expect($currentSession->getProgress())->toBeGreaterThan(0);
        expect($currentSession->getProgress())->toBeLessThan(100.0);

        // Resume - upload remaining chunks
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            1,
            str_repeat('B', 512),
            ChunkHash::fromData(str_repeat('B', 512))
        );
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            2,
            str_repeat('C', 512),
            ChunkHash::fromData(str_repeat('C', 512))
        );

        $finalSession = $this->uploadManager->getStatus($session->sessionId);
        expect($finalSession->isComplete())->toBeTrue();
    });
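
    // A client resuming after a dropped connection would presumably query the
    // missing indices and re-send only those. A sketch ($readChunk is a
    // hypothetical helper; the manager API is the one exercised above):
    //
    //     foreach ($manager->getStatus($sessionId)->getMissingChunkIndices() as $i) {
    //         $data = $readChunk($i);
    //         $manager->uploadChunk($sessionId, $i, $data, ChunkHash::fromData($data));
    //     }
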
    it('handles abort during active upload', function () {
        $session = $this->uploadManager->initializeUpload(
            componentId: 'test-uploader',
            fileName: 'aborted.txt',
            totalSize: Byte::fromBytes(1024),
            chunkSize: Byte::fromBytes(512)
        );

        // Upload first chunk
        $this->uploadManager->uploadChunk(
            $session->sessionId,
            0,
            str_repeat('A', 512),
            ChunkHash::fromData(str_repeat('A', 512))
        );

        // Abort mid-upload
        $this->uploadManager->abortUpload(
            sessionId: $session->sessionId,
            reason: 'User cancelled'
        );

        // Verify session is completely removed
        expect($this->uploadManager->getStatus($session->sessionId))->toBeNull();

        // Verify cannot upload to aborted session
        expect(fn() => $this->uploadManager->uploadChunk(
            $session->sessionId,
            1,
            str_repeat('B', 512),
            ChunkHash::fromData(str_repeat('B', 512))
        ))->toThrow(\InvalidArgumentException::class, 'Session not found');
    });

    it('validates session exists before chunk upload', function () {
        $nonExistentSessionId = $this->sessionIdGenerator->generate();
        $chunkData = str_repeat('A', 512);
        $chunkHash = ChunkHash::fromData($chunkData);
        expect(fn() => $this->uploadManager->uploadChunk(
            sessionId: $nonExistentSessionId,
            chunkIndex: 0,
            chunkData: $chunkData,
            providedHash: $chunkHash
        ))->toThrow(\InvalidArgumentException::class, 'Session not found');
    });

    it('handles rapid abort-reinitialize cycles', function () {
        // Initialize, upload chunk, abort - repeat multiple times
        for ($i = 0; $i < 5; $i++) {
            $session = $this->uploadManager->initializeUpload(
                componentId: "uploader-{$i}",
                fileName: "cycle-{$i}.txt",
                totalSize: Byte::fromBytes(1024),
                chunkSize: Byte::fromBytes(512)
            );

            // Upload one chunk
            $this->uploadManager->uploadChunk(
                $session->sessionId,
                0,
                str_repeat('A', 512),
                ChunkHash::fromData(str_repeat('A', 512))
            );

            // Abort immediately
            $this->uploadManager->abortUpload($session->sessionId);

            // Verify cleanup
            expect($this->uploadManager->getStatus($session->sessionId))->toBeNull();
        }

        // All abort cycles completed successfully
        expect(true)->toBeTrue();
    });
});

// ============================================================================
// STRESS & PERFORMANCE EDGE CASES
// ============================================================================
describe('Stress & Performance Edge Cases', function () {
    it('handles burst of chunk uploads for same session', function () {
        $session = $this->uploadManager->initializeUpload(
            componentId: 'stress-test',
            fileName: 'burst-upload.txt',
            totalSize: Byte::fromBytes(5000),
            chunkSize: Byte::fromBytes(100)
        );

        // Burst upload all 50 chunks as fast as possible
        for ($i = 0; $i < 50; $i++) {
            $chunkData = str_repeat(chr(65 + ($i % 26)), 100);
            $chunkHash = ChunkHash::fromData($chunkData);
            $this->uploadManager->uploadChunk(
                $session->sessionId,
                $i,
                $chunkData,
                $chunkHash
            );
        }

        $finalSession = $this->uploadManager->getStatus($session->sessionId);
        expect($finalSession->isComplete())->toBeTrue();
        expect($finalSession->getUploadedChunks())->toHaveCount(50);
    });

    it('maintains session isolation across multiple uploads', function () {
        // Create multiple sessions and upload chunks in interleaved manner
        $session1 = $this->uploadManager->initializeUpload(
            componentId: 'uploader-1',
            fileName: 'file-1.txt',
            totalSize: Byte::fromBytes(300),
            chunkSize: Byte::fromBytes(100)
        );
        $session2 = $this->uploadManager->initializeUpload(
            componentId: 'uploader-2',
            fileName: 'file-2.txt',
            totalSize: Byte::fromBytes(300),
            chunkSize: Byte::fromBytes(100)
        );

        // Interleaved uploads: session1 chunk 0, session2 chunk 0, session1 chunk 1, etc.
        $this->uploadManager->uploadChunk($session1->sessionId, 0, str_repeat('A', 100), ChunkHash::fromData(str_repeat('A', 100)));
        $this->uploadManager->uploadChunk($session2->sessionId, 0, str_repeat('X', 100), ChunkHash::fromData(str_repeat('X', 100)));
        $this->uploadManager->uploadChunk($session1->sessionId, 1, str_repeat('B', 100), ChunkHash::fromData(str_repeat('B', 100)));
        $this->uploadManager->uploadChunk($session2->sessionId, 1, str_repeat('Y', 100), ChunkHash::fromData(str_repeat('Y', 100)));
        $this->uploadManager->uploadChunk($session1->sessionId, 2, str_repeat('C', 100), ChunkHash::fromData(str_repeat('C', 100)));
        $this->uploadManager->uploadChunk($session2->sessionId, 2, str_repeat('Z', 100), ChunkHash::fromData(str_repeat('Z', 100)));

        // Both sessions should be complete and independent
        $status1 = $this->uploadManager->getStatus($session1->sessionId);
        $status2 = $this->uploadManager->getStatus($session2->sessionId);
        expect($status1->isComplete())->toBeTrue();
        expect($status2->isComplete())->toBeTrue();

        // Complete both and verify file content
        $path1 = '/tmp/file-1.txt';
        $path2 = '/tmp/file-2.txt';
        $this->uploadManager->completeUpload($session1->sessionId, $path1);
        $this->uploadManager->completeUpload($session2->sessionId, $path2);
        expect($this->fileStorage->get($path1))->toBe(str_repeat('A', 100) . str_repeat('B', 100) . str_repeat('C', 100));
        expect($this->fileStorage->get($path2))->toBe(str_repeat('X', 100) . str_repeat('Y', 100) . str_repeat('Z', 100));
    });

    it('tracks progress accurately across many chunks', function () {
        $session = $this->uploadManager->initializeUpload(
            componentId: 'progress-test',
            fileName: 'many-chunks.txt',
            totalSize: Byte::fromBytes(10000),
            chunkSize: Byte::fromBytes(100)
        );
        expect($session->totalChunks)->toBe(100);

        // Upload chunks and verify progress increments correctly.
        // getProgress() returns floats elsewhere in this file (50.0, 100.0),
        // so the expected samples must be floats for the strict toBe() check.
        $expectedProgress = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0];
        $actualProgress = [];
        for ($i = 0; $i < 100; $i++) {
            $chunkData = str_repeat('X', 100);
            $chunkHash = ChunkHash::fromData($chunkData);
            $updatedSession = $this->uploadManager->uploadChunk(
                $session->sessionId,
                $i,
                $chunkData,
                $chunkHash
            );

            // Sample progress at 10% intervals
            if (($i + 1) % 10 === 0) {
                $actualProgress[] = $updatedSession->getProgress();
            }
        }
        expect($actualProgress)->toBe($expectedProgress);
    });
});