- Add comprehensive health check system with multiple endpoints
- Add Prometheus metrics endpoint
- Add production logging configurations (5 strategies)
- Add complete deployment documentation suite:
  * QUICKSTART.md - 30-minute deployment guide
  * DEPLOYMENT_CHECKLIST.md - Printable verification checklist
  * DEPLOYMENT_WORKFLOW.md - Complete deployment lifecycle
  * PRODUCTION_DEPLOYMENT.md - Comprehensive technical reference
  * production-logging.md - Logging configuration guide
  * ANSIBLE_DEPLOYMENT.md - Infrastructure as Code automation
  * README.md - Navigation hub
  * DEPLOYMENT_SUMMARY.md - Executive summary
- Add deployment scripts and automation
- Add DEPLOYMENT_PLAN.md - Concrete plan for immediate deployment
- Update README with production-ready features

All production infrastructure is now complete and ready for deployment.
146 lines · 4.7 KiB · PHP
<?php

declare(strict_types=1);

use App\Framework\LiveComponents\Services\IntegrityValidator;
use App\Framework\LiveComponents\ValueObjects\ChunkHash;

describe('IntegrityValidator Service', function () {
    it('verifies chunk data with matching hash', function () {
        $validator = new IntegrityValidator();
        $data = 'test chunk data';
        $hash = ChunkHash::fromData($data);

        expect($validator->verifyChunk($data, $hash))->toBeTrue();
    });

    it('rejects chunk data with mismatched hash', function () {
        $validator = new IntegrityValidator();
        $data = 'test chunk data';
        $wrongData = 'different chunk data';
        $hash = ChunkHash::fromData($data);

        expect($validator->verifyChunk($wrongData, $hash))->toBeFalse();
    });

    it('verifies file with matching hash', function () {
        $validator = new IntegrityValidator();
        $tempFile = tempnam(sys_get_temp_dir(), 'integrity_test_');
        file_put_contents($tempFile, 'test file content');

        try {
            $hash = ChunkHash::fromFile($tempFile);

            expect($validator->verifyFile($tempFile, $hash))->toBeTrue();
        } finally {
            unlink($tempFile);
        }
    });

    it('rejects file with mismatched hash', function () {
        $validator = new IntegrityValidator();
        $tempFile = tempnam(sys_get_temp_dir(), 'integrity_test_');
        file_put_contents($tempFile, 'original content');

        try {
            $hash = ChunkHash::fromFile($tempFile);

            // Modify file content after the hash was calculated
            file_put_contents($tempFile, 'modified content');

            expect($validator->verifyFile($tempFile, $hash))->toBeFalse();
        } finally {
            unlink($tempFile);
        }
    });

    it('rejects verification for non-existent file', function () {
        $validator = new IntegrityValidator();
        $nonExistentFile = '/tmp/does_not_exist_' . uniqid();
        $hash = ChunkHash::fromData('some data');

        expect($validator->verifyFile($nonExistentFile, $hash))->toBeFalse();
    });

    it('calculates chunk hash correctly', function () {
        $validator = new IntegrityValidator();
        $data = 'test chunk data';

        $hash = $validator->calculateChunkHash($data);

        expect($hash)->toBeInstanceOf(ChunkHash::class);
        expect($hash->verify($data))->toBeTrue();
    });

    it('calculates file hash correctly', function () {
        $validator = new IntegrityValidator();
        $tempFile = tempnam(sys_get_temp_dir(), 'integrity_test_');
        file_put_contents($tempFile, 'test file content');

        try {
            $hash = $validator->calculateFileHash($tempFile);

            expect($hash)->toBeInstanceOf(ChunkHash::class);
            expect($hash->verifyFile($tempFile))->toBeTrue();
        } finally {
            unlink($tempFile);
        }
    });

    it('produces consistent hashes for same data', function () {
        $validator = new IntegrityValidator();
        $data = 'consistent test data';

        $hash1 = $validator->calculateChunkHash($data);
        $hash2 = $validator->calculateChunkHash($data);

        expect($hash1->equals($hash2))->toBeTrue();
    });

    it('produces different hashes for different data', function () {
        $validator = new IntegrityValidator();
        $data1 = 'first chunk';
        $data2 = 'second chunk';

        $hash1 = $validator->calculateChunkHash($data1);
        $hash2 = $validator->calculateChunkHash($data2);

        expect($hash1->equals($hash2))->toBeFalse();
    });

    it('verifies large chunk data correctly', function () {
        $validator = new IntegrityValidator();
        // Create 1MB of data
        $largeData = str_repeat('A', 1024 * 1024);
        $hash = ChunkHash::fromData($largeData);

        expect($validator->verifyChunk($largeData, $hash))->toBeTrue();
    });

    it('verifies empty chunk data', function () {
        $validator = new IntegrityValidator();
        $emptyData = '';
        $hash = ChunkHash::fromData($emptyData);

        expect($validator->verifyChunk($emptyData, $hash))->toBeTrue();
    });

    it('detects corrupted data in middle of chunk', function () {
        $validator = new IntegrityValidator();
        $originalData = 'start' . str_repeat('middle', 1000) . 'end';
        $hash = ChunkHash::fromData($originalData);

        $corruptedData = 'start' . str_repeat('CORRUPTED', 1000) . 'end';

        expect($validator->verifyChunk($corruptedData, $hash))->toBeFalse();
    });

    it('handles binary data correctly', function () {
        $validator = new IntegrityValidator();
        $binaryData = random_bytes(1024);
        $hash = $validator->calculateChunkHash($binaryData);

        expect($validator->verifyChunk($binaryData, $hash))->toBeTrue();
    });
});
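The IntegrityValidator and ChunkHash classes under test are not shown on this page. Inferred purely from how the tests above call them, and assuming SHA-256 as the underlying digest (an assumption, not something the tests state), a minimal sketch of the two classes might look like the following; the real App\Framework\LiveComponents implementation may differ:

<?php

declare(strict_types=1);

// Hypothetical sketch only, reconstructed from the test calls above.
// Assumes SHA-256 digests and hex-encoded hash values.
final class ChunkHash
{
    private function __construct(private string $value)
    {
    }

    public static function fromData(string $data): self
    {
        return new self(hash('sha256', $data));
    }

    public static function fromFile(string $path): self
    {
        return new self(hash_file('sha256', $path));
    }

    public function verify(string $data): bool
    {
        return hash_equals($this->value, hash('sha256', $data));
    }

    public function verifyFile(string $path): bool
    {
        return is_file($path) && hash_equals($this->value, hash_file('sha256', $path));
    }

    public function equals(self $other): bool
    {
        return hash_equals($this->value, $other->value);
    }
}

final class IntegrityValidator
{
    public function verifyChunk(string $data, ChunkHash $hash): bool
    {
        return $hash->verify($data);
    }

    public function verifyFile(string $path, ChunkHash $hash): bool
    {
        // A missing file fails verification instead of throwing, matching
        // the 'rejects verification for non-existent file' test above.
        return is_file($path) && $hash->verifyFile($path);
    }

    public function calculateChunkHash(string $data): ChunkHash
    {
        return ChunkHash::fromData($data);
    }

    public function calculateFileHash(string $path): ChunkHash
    {
        return ChunkHash::fromFile($path);
    }
}

In this sketch, hash_equals() is used for the comparisons so that hash checks are constant-time; whether the production classes do the same is not visible from the tests.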