- Create AnsibleDeployStage using framework's Process module for secure command execution
- Integrate AnsibleDeployStage into DeploymentPipelineCommands for production deployments
- Add force_deploy flag support in Ansible playbook to override stale locks
- Use PHP deployment module as orchestrator (php console.php deploy:production)
- Fix ErrorAggregationInitializer to use Environment class instead of $_ENV superglobal

Architecture:
- BuildStage → AnsibleDeployStage → HealthCheckStage for production
- Process module provides timeout, error handling, and output capture
- Ansible playbook supports rollback via rollback-git-based.yml
- Zero-downtime deployments with health checks
177 lines
6.3 KiB
PHP
<?php

declare(strict_types=1);

/**
 * Manual Test for ML Performance Monitoring
 *
 * Tests ModelPerformanceMonitor and AlertingService integration
 */

require_once __DIR__ . '/../../vendor/autoload.php';

use App\Framework\Core\AppBootstrapper;
use App\Framework\MachineLearning\ModelManagement\ModelPerformanceMonitor;
use App\Framework\MachineLearning\ModelManagement\AlertingService;
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelMetadata;
use App\Framework\Core\ValueObjects\Version;

echo "=== ML Performance Monitoring Test ===\n\n";

try {
    // Step 1: stand up the framework. The performance collector is created
    // disabled because this manual test only exercises the ML services.
    echo "1. Bootstrapping framework...\n";
    $projectRoot = dirname(__DIR__, 2);

    $systemClock = new \App\Framework\DateTime\SystemClock();
    $hrClock = new \App\Framework\DateTime\SystemHighResolutionClock();
    $memMonitor = new \App\Framework\Performance\MemoryMonitor();
    $perfCollector = new \App\Framework\Performance\EnhancedPerformanceCollector(
        $systemClock,
        $hrClock,
        $memMonitor,
        enabled: false
    );

    $appBootstrapper = new AppBootstrapper($projectRoot, $perfCollector, $memMonitor);
    $container = $appBootstrapper->bootstrapWorker();
    echo "   ✓ Framework bootstrapped\n\n";

    // Step 2: wire the ML model-management services into the container.
    echo "2. Initializing ML Model Management...\n";
    $initializer = new \App\Framework\MachineLearning\ModelManagement\MLModelManagementInitializer($container);
    $initializer->initialize();
    echo "   ✓ ML Model Management initialized\n\n";

    // Step 3: resolve the three services under test from the container.
    echo "3. Retrieving Services...\n";
    $monitor = $container->get(ModelPerformanceMonitor::class);
    echo "   ✓ ModelPerformanceMonitor retrieved\n";

    $alerts = $container->get(AlertingService::class);
    echo "   ✓ AlertingService retrieved\n";

    $modelRegistry = $container->get(ModelRegistry::class);
    echo "   ✓ ModelRegistry retrieved\n\n";

    // Step 4: ensure the test model exists. A duplicate registration on a
    // repeated run raises an exception and is reported as expected, not fatal.
    echo "4. Registering Test Model...\n";
    $metadata = ModelMetadata::forQueueAnomaly(
        Version::fromString('1.0.0')
    );

    try {
        $modelRegistry->register($metadata);
        echo "   ✓ Test model registered: queue-anomaly v1.0.0\n\n";
    } catch (\Exception $e) {
        echo "   ℹ Test model already exists (expected): " . $e->getMessage() . "\n\n";
    }

    // Step 5: feed three correct predictions into the monitor
    // (two true negatives, one true positive).
    echo "5. Recording Performance Metrics...\n";
    try {
        $monitor->trackPrediction(
            modelName: 'queue-anomaly',
            version: Version::fromString('1.0.0'),
            prediction: false, // No anomaly
            actual: false,     // Correct prediction
            confidence: 0.85
        );
        echo "   ✓ First prediction tracked\n";

        $monitor->trackPrediction(
            modelName: 'queue-anomaly',
            version: Version::fromString('1.0.0'),
            prediction: true, // Anomaly detected
            actual: true,     // Correct prediction
            confidence: 0.92
        );
        echo "   ✓ Second prediction tracked\n";

        $monitor->trackPrediction(
            modelName: 'queue-anomaly',
            version: Version::fromString('1.0.0'),
            prediction: false, // No anomaly
            actual: false,     // Correct prediction
            confidence: 0.78
        );
        echo "   ✓ Third prediction tracked\n\n";
    } catch (\Throwable $e) {
        echo "   ✗ Recording error: " . $e->getMessage() . "\n";
        echo "     File: " . $e->getFile() . ":" . $e->getLine() . "\n\n";
    }

    // Step 6: read back the aggregate metrics for the tracked model version.
    // Missing keys fall back to 'N/A' via null coalescing.
    echo "6. Retrieving Performance Metrics...\n";
    try {
        $currentMetrics = $monitor->getCurrentMetrics(
            'queue-anomaly',
            Version::fromString('1.0.0')
        );

        echo "   ✓ Metrics retrieved:\n";
        echo "     - Accuracy: " . ($currentMetrics['accuracy'] ?? 'N/A') . "\n";
        echo "     - Precision: " . ($currentMetrics['precision'] ?? 'N/A') . "\n";
        echo "     - Recall: " . ($currentMetrics['recall'] ?? 'N/A') . "\n";
        echo "     - F1 Score: " . ($currentMetrics['f1_score'] ?? 'N/A') . "\n";
        echo "     - Total Predictions: " . ($currentMetrics['total_predictions'] ?? 'N/A') . "\n";
    } catch (\Throwable $e) {
        echo "   ✗ Metrics retrieval error: " . $e->getMessage() . "\n";
        echo "     File: " . $e->getFile() . ":" . $e->getLine() . "\n";
    }
    echo "\n";

    // Step 7: degradation check — with only three data points, "no
    // degradation" is the expected outcome.
    echo "7. Testing Degradation Detection...\n";
    try {
        $degraded = $monitor->hasPerformanceDegraded(
            'queue-anomaly',
            Version::fromString('1.0.0')
        );

        echo $degraded
            ? "   ⚠ Performance degradation detected\n"
            : "   ✓ No performance degradation (expected with limited data)\n";
    } catch (\Throwable $e) {
        echo "   ✗ Degradation detection error: " . $e->getMessage() . "\n";
        echo "     File: " . $e->getFile() . ":" . $e->getLine() . "\n";
    }
    echo "\n";

    // Step 8: emit an informational alert carrying the metrics gathered in
    // step 6. $currentMetrics may be unset if step 6 failed; ?? covers that.
    echo "8. Testing Alerting System...\n";
    try {
        // Send a test alert
        $alerts->sendAlert(
            level: 'info',
            title: 'Performance Monitoring Test',
            message: 'Test alert: Model performance is within acceptable range',
            data: [
                'model' => 'queue-anomaly',
                'version' => '1.0.0',
                'accuracy' => $currentMetrics['accuracy'] ?? 'N/A',
                'total_predictions' => $currentMetrics['total_predictions'] ?? 0
            ]
        );
        echo "   ✓ Test alert sent successfully\n";
        echo "     - Alert logged with level: info\n";
    } catch (\Throwable $e) {
        echo "   ✗ Alerting error: " . $e->getMessage() . "\n";
        echo "     File: " . $e->getFile() . ":" . $e->getLine() . "\n";
    }
    echo "\n";

    echo "=== Performance Monitoring Test Completed ===\n";
    echo "✓ All monitoring components functional\n";

} catch (\Throwable $e) {
    // Any unhandled failure anywhere above is fatal for this manual test.
    echo "\n!!! FATAL ERROR !!!\n";
    echo "Error: " . $e->getMessage() . "\n";
    echo "File: " . $e->getFile() . ":" . $e->getLine() . "\n";
    echo "\nStack trace:\n" . $e->getTraceAsString() . "\n";
    exit(1);
}