<?php

declare(strict_types=1);

/**
 * ML Model Management Usage Examples
 *
 * Comprehensive examples demonstrating all features of the ML Model Management system:
 * 1. Model Registration and Versioning
 * 2. A/B Testing and Model Comparison
 * 3. Real-Time Performance Monitoring
 * 4. Automatic Threshold Optimization
 * 5. Multi-Version Performance Comparison
 * 6. Production Deployment Workflow
 */

require_once __DIR__ . '/../vendor/autoload.php';
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\MachineLearning\ModelManagement\ABTestingService;
use App\Framework\MachineLearning\ModelManagement\ModelPerformanceMonitor;
use App\Framework\MachineLearning\ModelManagement\AutoTuningEngine;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelMetadata;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelType;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ABTestConfig;
use App\Framework\Core\ValueObjects\Version;
use App\Framework\Core\ValueObjects\Timestamp;
use App\Framework\Core\ValueObjects\Duration;
use App\Framework\DI\Container;

// ============================================================================
// Setup: Get services from DI container
// ============================================================================

$container = require __DIR__ . '/../bootstrap/container.php';

$registry = $container->get(ModelRegistry::class);
$abTesting = $container->get(ABTestingService::class);
$performanceMonitor = $container->get(ModelPerformanceMonitor::class);
$autoTuning = $container->get(AutoTuningEngine::class);

echo "=== ML Model Management System - Usage Examples ===\n\n";

// ============================================================================
// Example 1: Register N+1 Detection Model Versions
// ============================================================================

echo "1. Model Registration and Versioning\n";
echo str_repeat('-', 60) . "\n";

// Register initial version
$n1DetectorV1 = ModelMetadata::forN1Detector(
    version: Version::fromString('1.0.0'),
    configuration: [
        'threshold' => 0.7,
        'window_size' => 100,
        'min_cluster_size' => 3,
    ]
);

$n1DetectorV1 = $n1DetectorV1->withPerformanceMetrics([
    'accuracy' => 0.92,
    'precision' => 0.89,
    'recall' => 0.88,
    'f1_score' => 0.885,
    'false_positive_rate' => 0.11,
]);

$registry->register($n1DetectorV1);
echo "✓ Registered N+1 Detector v1.0.0\n";
echo " Accuracy: {$n1DetectorV1->getAccuracy()}\n";
echo " F1-Score: {$n1DetectorV1->getF1Score()}\n\n";
// Register improved version
$n1DetectorV11 = ModelMetadata::forN1Detector(
    version: Version::fromString('1.1.0'),
    configuration: [
        'threshold' => 0.75, // Optimized threshold
        'window_size' => 100,
        'min_cluster_size' => 3,
    ]
);

$n1DetectorV11 = $n1DetectorV11->withPerformanceMetrics([
    'accuracy' => 0.95,
    'precision' => 0.93,
    'recall' => 0.91,
    'f1_score' => 0.92,
    'false_positive_rate' => 0.07,
]);

$registry->register($n1DetectorV11);
echo "✓ Registered N+1 Detector v1.1.0 (improved)\n";
echo " Accuracy: {$n1DetectorV11->getAccuracy()}\n";
echo " F1-Score: {$n1DetectorV11->getF1Score()}\n\n";

// Retrieve and list all versions
$allVersions = $registry->getAll('n1-detector');
echo "Total versions registered: " . count($allVersions) . "\n";

foreach ($allVersions as $model) {
    echo " - {$model->version->toString()} (Accuracy: {$model->getAccuracy()})\n";
}

echo "\n";

// ============================================================================
// Example 2: A/B Testing - Compare Model Versions
// ============================================================================

echo "2. A/B Testing and Model Comparison\n";
echo str_repeat('-', 60) . "\n";

// Create A/B test configuration
$abTestConfig = ABTestConfig::create(
    modelName: 'n1-detector',
    versionA: Version::fromString('1.0.0'),
    versionB: Version::fromString('1.1.0'),
    trafficSplit: 0.5 // 50/50 split
);

echo "Test Configuration:\n";
echo " Model: {$abTestConfig->modelName}\n";
// Note: arithmetic is not allowed inside {...} string interpolation, so the
// percentage calculations are concatenated instead.
echo " Version A: {$abTestConfig->versionA->toString()} (" . ($abTestConfig->trafficSplitA * 100) . "%)\n";
echo " Version B: {$abTestConfig->versionB->toString()} (" . ($abTestConfig->getTrafficSplitB() * 100) . "%)\n";
echo " Primary Metric: {$abTestConfig->primaryMetric}\n\n";
// Run A/B test
$abTestResult = $abTesting->runTest($abTestConfig);

echo "A/B Test Results:\n";
echo " Winner: {$abTestResult->winner}\n";
echo " Statistically Significant: " . ($abTestResult->isStatisticallySignificant ? 'Yes' : 'No') . "\n";
echo " Primary Metric Improvement: " . sprintf('%+.2f%%', $abTestResult->getPrimaryMetricImprovementPercent()) . "\n";
echo "\nRecommendation:\n";
echo " {$abTestResult->recommendation}\n\n";
// Metrics comparison
echo "Detailed Metrics Comparison:\n";
$summary = $abTestResult->getMetricsSummary();

echo " Version A ({$abTestConfig->versionA->toString()}):\n";
echo " Accuracy: " . sprintf('%.4f', $summary['version_a']['accuracy']) . "\n";
echo " Precision: " . sprintf('%.4f', $summary['version_a']['precision']) . "\n";
echo " Recall: " . sprintf('%.4f', $summary['version_a']['recall']) . "\n";
echo " F1-Score: " . sprintf('%.4f', $summary['version_a']['f1_score']) . "\n\n";

echo " Version B ({$abTestConfig->versionB->toString()}):\n";
echo " Accuracy: " . sprintf('%.4f', $summary['version_b']['accuracy']) . "\n";
echo " Precision: " . sprintf('%.4f', $summary['version_b']['precision']) . "\n";
echo " Recall: " . sprintf('%.4f', $summary['version_b']['recall']) . "\n";
echo " F1-Score: " . sprintf('%.4f', $summary['version_b']['f1_score']) . "\n\n";

// Gradual rollout plan
if ($abTestResult->shouldDeployVersionB()) {
    echo "Gradual Rollout Plan:\n";
    $rolloutPlan = $abTesting->generateRolloutPlan(steps: 5);

    foreach ($rolloutPlan as $step => $trafficSplitB) {
        echo sprintf(
            " Step %d: %.0f%% traffic to v%s\n",
            $step,
            $trafficSplitB * 100,
            $abTestConfig->versionB->toString()
        );
    }
}
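// For context (an assumption, not the actual generateRolloutPlan() output
// format): a gradual rollout plan is typically a monotonically increasing
// traffic ramp. A simple linear five-step ramp could be built with this
// hypothetical helper:
$linearRolloutPlan = static function (int $steps): array {
    $plan = [];
    for ($step = 1; $step <= $steps; $step++) {
        $plan[$step] = $step / $steps; // e.g. 0.2, 0.4, 0.6, 0.8, 1.0 for 5 steps
    }

    return $plan;
};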
echo "\n";

// ============================================================================
// Example 3: Real-Time Performance Monitoring
// ============================================================================

echo "3. Real-Time Performance Monitoring\n";
echo str_repeat('-', 60) . "\n";

// Simulate some predictions
echo "Simulating predictions...\n";

$version = Version::fromString('1.1.0');

// Simulate 20 predictions
for ($i = 0; $i < 20; $i++) {
    $prediction = $i % 3 !== 0; // Predict false every 3rd
    $actual = $i % 4 !== 0; // Actual false every 4th
    $confidence = 0.75 + ($i * 0.01);

    $performanceMonitor->trackPrediction(
        modelName: 'n1-detector',
        version: $version,
        prediction: $prediction,
        actual: $actual,
        confidence: $confidence
    );
}

echo "✓ Tracked 20 predictions\n\n";
// Get current metrics
$currentMetrics = $performanceMonitor->getCurrentMetrics(
    'n1-detector',
    $version,
    Duration::fromHours(1)
);

echo "Current Performance Metrics (last 1 hour):\n";
echo " Total Predictions: {$currentMetrics['total_predictions']}\n";
echo " Correct: {$currentMetrics['correct_predictions']}\n";
echo " Accuracy: " . sprintf('%.4f', $currentMetrics['accuracy']) . "\n";
echo " Precision: " . sprintf('%.4f', $currentMetrics['precision']) . "\n";
echo " Recall: " . sprintf('%.4f', $currentMetrics['recall']) . "\n";
echo " F1-Score: " . sprintf('%.4f', $currentMetrics['f1_score']) . "\n";
echo " Avg Confidence: " . sprintf('%.4f', $currentMetrics['average_confidence']) . "\n\n";

// Check for performance degradation
$degradationInfo = $performanceMonitor->getPerformanceDegradationInfo(
    'n1-detector',
    $version,
    thresholdPercent: 0.05
);

echo "Performance Degradation Check:\n";
echo " Has Degraded: " . ($degradationInfo['has_degraded'] ? 'Yes' : 'No') . "\n";
echo " Baseline Accuracy: " . sprintf('%.4f', $degradationInfo['baseline_accuracy']) . "\n";
echo " Current Accuracy: " . sprintf('%.4f', $degradationInfo['current_accuracy']) . "\n";
echo " Degradation: " . sprintf('%.2f%%', $degradationInfo['degradation_percent']) . "\n";
echo "\nRecommendation:\n";
echo " {$degradationInfo['recommendation']}\n\n";

// Concept drift detection
$hasDrift = $performanceMonitor->detectConceptDrift('n1-detector', $version);
echo "Concept Drift Detected: " . ($hasDrift ? 'Yes' : 'No') . "\n\n";
// ============================================================================
// Example 4: Automatic Threshold Optimization
// ============================================================================

echo "4. Automatic Threshold Optimization\n";
echo str_repeat('-', 60) . "\n";

// Optimize threshold for maximum F1-score
$optimizationResult = $autoTuning->optimizeThreshold(
    modelName: 'n1-detector',
    version: $version,
    metricToOptimize: 'f1_score',
    thresholdRange: [0.5, 0.9],
    step: 0.05,
    timeWindow: Duration::fromHours(1)
);

echo "Threshold Optimization Results:\n";
echo " Current Threshold: " . sprintf('%.2f', $optimizationResult['current_threshold']) . "\n";
echo " Optimal Threshold: " . sprintf('%.2f', $optimizationResult['optimal_threshold']) . "\n";
echo " Current F1-Score: " . sprintf('%.4f', $optimizationResult['current_metric_value']) . "\n";
echo " Optimal F1-Score: " . sprintf('%.4f', $optimizationResult['optimal_metric_value']) . "\n";
echo " Improvement: " . sprintf('%+.2f%%', $optimizationResult['improvement_percent']) . "\n";
echo "\nRecommendation:\n";
echo " {$optimizationResult['recommendation']}\n\n";
// Adaptive threshold adjustment
$adaptiveResult = $autoTuning->adaptiveThresholdAdjustment('n1-detector', $version);

echo "Adaptive Threshold Adjustment:\n";
echo " Current Threshold: " . sprintf('%.2f', $adaptiveResult['current_threshold']) . "\n";
echo " Recommended Threshold: " . sprintf('%.2f', $adaptiveResult['recommended_threshold']) . "\n";
echo " Reason: {$adaptiveResult['adjustment_reason']}\n";
echo "\nExpected Improvement:\n";
echo " Accuracy: " . sprintf('%+.4f', $adaptiveResult['expected_improvement']['accuracy']) . "\n";
echo " Precision: " . sprintf('%+.4f', $adaptiveResult['expected_improvement']['precision']) . "\n";
echo " Recall: " . sprintf('%+.4f', $adaptiveResult['expected_improvement']['recall']) . "\n\n";

// Precision-recall trade-off optimization
$tradeoffResult = $autoTuning->optimizePrecisionRecallTradeoff(
    modelName: 'n1-detector',
    version: $version,
    targetPrecision: 0.95 // Target 95% precision
);

echo "Precision-Recall Trade-off Optimization:\n";
echo " Target Precision: 0.95\n";
echo " Optimal Threshold: " . sprintf('%.2f', $tradeoffResult['optimal_threshold']) . "\n";
echo " Achieved Precision: " . sprintf('%.4f', $tradeoffResult['achieved_precision']) . "\n";
echo " Achieved Recall: " . sprintf('%.4f', $tradeoffResult['achieved_recall']) . "\n";
echo " F1-Score: " . sprintf('%.4f', $tradeoffResult['f1_score']) . "\n\n";

// ============================================================================
// Example 5: Version Comparison Across Multiple Models
// ============================================================================

echo "5. Multi-Version Performance Comparison\n";
echo str_repeat('-', 60) . "\n";

$versions = [
    Version::fromString('1.0.0'),
    Version::fromString('1.1.0'),
];

$comparison = $performanceMonitor->compareVersions(
    'n1-detector',
    $versions,
    Duration::fromHours(24)
);

echo "Performance Comparison (last 24 hours):\n\n";

foreach ($comparison as $versionStr => $data) {
    echo "Version {$versionStr}:\n";
    echo " Environment: " . ($data['environment'] ?? 'N/A') . "\n";
    echo " Deployed: " . ($data['deployed_at'] ?? 'Not deployed') . "\n";
    echo " Current Accuracy: " . sprintf('%.4f', $data['current_metrics']['accuracy']) . "\n";
    echo " Total Predictions: {$data['current_metrics']['total_predictions']}\n";
    echo "\n";
}

// ============================================================================
// Example 6: Production Deployment Workflow
// ============================================================================

echo "6. Production Deployment Workflow\n";
echo str_repeat('-', 60) . "\n";

// Deploy winning version to production
if ($abTestResult->shouldDeployVersionB()) {
    $winningVersion = $abTestResult->getWinningVersion();

    echo "Deploying version {$winningVersion->toString()} to production...\n";

    // Update model with deployment info
    $updatedMetadata = $n1DetectorV11->withDeployment(
        environment: 'production',
        deployedAt: Timestamp::now()
    );

    $registry->update($updatedMetadata);

    echo "✓ Successfully deployed to production\n";
    echo " Environment: {$updatedMetadata->environment}\n";
    echo " Deployed At: {$updatedMetadata->deployedAt->format('Y-m-d H:i:s')}\n";
    echo "\n";
}

// Get all production models
$productionModels = $registry->getProductionModels();
echo "Active Production Models: " . count($productionModels) . "\n";

foreach ($productionModels as $model) {
    echo " - {$model->modelName} v{$model->version->toString()}\n";
    echo " Type: {$model->modelType->value}\n";
    echo " Accuracy: {$model->getAccuracy()}\n";
    echo " Deployed: {$model->deployedAt->format('Y-m-d H:i:s')}\n\n";
}

echo "\n=== Examples Completed Successfully ===\n";