feat(Deployment): Integrate Ansible deployment via PHP deployment pipeline

- Create AnsibleDeployStage using framework's Process module for secure command execution
- Integrate AnsibleDeployStage into DeploymentPipelineCommands for production deployments
- Add force_deploy flag support in Ansible playbook to override stale locks
- Use PHP deployment module as orchestrator (php console.php deploy:production)
- Fix ErrorAggregationInitializer to use Environment class instead of $_ENV superglobal
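
For context, the ErrorAggregationInitializer change is easiest to read as a before/after. The Environment accessor below is a hypothetical stand-in for the framework class, and the ERROR_AGGREGATION_ENDPOINT variable name is invented for illustration:

<?php
declare(strict_types=1);
// Hypothetical illustration of the ErrorAggregationInitializer change; the
// Environment API and the ERROR_AGGREGATION_ENDPOINT name are assumptions.
final readonly class EnvironmentSketch
{
    /** @param array<string, string> $values */
    public function __construct(private array $values) {}

    public function get(string $key, ?string $default = null): ?string
    {
        return $this->values[$key] ?? $default;
    }
}

// Before: reading configuration straight from the superglobal.
$endpoint = $_ENV['ERROR_AGGREGATION_ENDPOINT'] ?? null;

// After: going through an injected Environment abstraction, which keeps the
// initializer testable without mutating globals.
$environment = new EnvironmentSketch(['ERROR_AGGREGATION_ENDPOINT' => 'https://errors.example.test']);
$endpoint = $environment->get('ERROR_AGGREGATION_ENDPOINT');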

Architecture:
- BuildStage → AnsibleDeployStage → HealthCheckStage for production (see the stage sketch below)
- Process module provides timeout, error handling, and output capture
- Ansible playbook supports rollback via rollback-git-based.yml
- Zero-downtime deployments with health checks
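
For reviewers unfamiliar with the new stage, the following is a minimal, self-contained sketch of what the Ansible step does, assuming a simple run() contract; the class name and the proc_open plumbing are illustrative stand-ins for the framework's Process module, not its actual API:

<?php
declare(strict_types=1);
// Illustrative sketch only: the real AnsibleDeployStage uses the framework's
// Process module; class, method, and flag handling here are assumptions.
final readonly class AnsibleDeployStageSketch
{
    public function __construct(
        private string $playbookPath,
        private string $inventoryPath,
        private bool $forceDeploy = false,
    ) {}

    public function run(): string
    {
        // force_deploy mirrors the playbook flag that overrides stale locks.
        $command = [
            'ansible-playbook',
            '-i', $this->inventoryPath,
            $this->playbookPath,
            '-e', 'force_deploy=' . ($this->forceDeploy ? 'true' : 'false'),
        ];

        // The Process module would normally supply timeout, error handling and
        // output capture; proc_open keeps this sketch self-contained.
        $descriptors = [
            1 => ['pipe', 'w'],   // capture stdout
            2 => ['redirect', 1], // merge stderr into stdout (PHP >= 7.4)
        ];
        $process = proc_open($command, $descriptors, $pipes);
        if ($process === false) {
            throw new \RuntimeException('Unable to start ansible-playbook');
        }

        $output = stream_get_contents($pipes[1]);
        fclose($pipes[1]);

        $exitCode = proc_close($process);
        if ($exitCode !== 0) {
            throw new \RuntimeException("Ansible deployment failed ({$exitCode}):\n{$output}");
        }

        return $output;
    }
}

In the actual pipeline this stage sits between BuildStage and HealthCheckStage and is triggered through the orchestrator command (php console.php deploy:production).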
commit 3b623e7afb (parent a90263d3be)
2025-10-26 14:08:07 +01:00
170 changed files with 19888 additions and 575 deletions


@@ -0,0 +1,175 @@
<?php
declare(strict_types=1);
namespace App\Application\Admin\MachineLearning;
use App\Application\Admin\Service\AdminLayoutProcessor;
use App\Framework\Attributes\Route;
use App\Framework\Auth\Auth;
use App\Framework\Core\ValueObjects\Duration;
use App\Framework\Http\HttpRequest;
use App\Framework\Http\Method;
use App\Framework\MachineLearning\ModelManagement\ModelPerformanceMonitor;
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\Meta\MetaData;
use App\Framework\Router\AdminRoutes;
use App\Framework\Router\Result\ViewResult;
final readonly class MLDashboardAdminController
{
public function __construct(
private ModelRegistry $registry,
private ModelPerformanceMonitor $performanceMonitor,
private AdminLayoutProcessor $layoutProcessor
) {}
#[Auth]
#[Route(path: '/admin/ml/dashboard', method: Method::GET, name: AdminRoutes::ML_DASHBOARD)]
public function dashboard(HttpRequest $request): ViewResult
{
$timeWindowHours = (int) ($request->queryParameters['timeWindow'] ?? 24);
$timeWindow = Duration::fromHours($timeWindowHours);
// Get all models
$allModels = $this->getAllModels();
// Collect performance overview
$performanceOverview = [];
$totalPredictions = 0;
$accuracySum = 0.0;
$healthyCount = 0;
$degradedCount = 0;
$criticalCount = 0;
foreach ($allModels as $metadata) {
$metrics = $this->performanceMonitor->getCurrentMetrics(
$metadata->modelName,
$metadata->version,
$timeWindow
);
$accuracy = $metrics['accuracy'];
$isHealthy = $accuracy >= 0.85;
$isCritical = $accuracy < 0.7;
if ($isHealthy) {
$healthyCount++;
} elseif ($isCritical) {
$criticalCount++;
} else {
$degradedCount++;
}
$performanceOverview[] = [
'model_name' => $metadata->modelName,
'version' => $metadata->version->toString(),
'type' => $metadata->modelType->value,
'accuracy' => round($accuracy * 100, 2),
'precision' => isset($metrics['precision']) ? round($metrics['precision'] * 100, 2) : null,
'recall' => isset($metrics['recall']) ? round($metrics['recall'] * 100, 2) : null,
'f1_score' => isset($metrics['f1_score']) ? round($metrics['f1_score'] * 100, 2) : null,
'total_predictions' => number_format($metrics['total_predictions']),
'average_confidence' => isset($metrics['average_confidence']) ? round($metrics['average_confidence'] * 100, 2) : null,
'threshold' => $metadata->configuration['threshold'] ?? null,
'status' => $isHealthy ? 'healthy' : ($isCritical ? 'critical' : 'degraded'),
'status_badge' => $isHealthy ? 'success' : ($isCritical ? 'danger' : 'warning'),
];
$totalPredictions += $metrics['total_predictions'];
$accuracySum += $accuracy;
}
// Calculate degradation alerts
$degradationAlerts = [];
foreach ($performanceOverview as $model) {
if ($model['status'] !== 'healthy') {
$degradationAlerts[] = [
'model_name' => $model['model_name'],
'version' => $model['version'],
'current_accuracy' => $model['accuracy'],
'threshold' => 85.0,
'severity' => $model['status'],
'severity_badge' => $model['status_badge'],
'recommendation' => 'Consider retraining or rolling back to previous version',
];
}
}
// Calculate health indicators
$modelCount = count($allModels);
$averageAccuracy = $modelCount > 0 ? ($accuracySum / $modelCount) * 100 : 0.0;
$healthPercentage = $modelCount > 0 ? ($healthyCount / $modelCount) * 100 : 0.0;
$overallStatus = $criticalCount > 0 ? 'critical' : ($degradedCount > $modelCount / 2 ? 'warning' : 'healthy');
$overallBadge = $criticalCount > 0 ? 'danger' : ($degradedCount > $modelCount / 2 ? 'warning' : 'success');
// Count by type
$byType = [
'supervised' => 0,
'unsupervised' => 0,
'reinforcement' => 0,
];
foreach ($allModels as $metadata) {
$typeName = strtolower($metadata->modelType->value);
$byType[$typeName] = ($byType[$typeName] ?? 0) + 1;
}
$data = [
'title' => 'ML Model Dashboard',
'page_title' => 'Machine Learning Model Dashboard',
'current_path' => '/admin/ml/dashboard',
'time_window_hours' => $timeWindowHours,
// Summary stats
'total_models' => $modelCount,
'healthy_models' => $healthyCount,
'degraded_models' => $degradedCount,
'critical_models' => $criticalCount,
'total_predictions' => number_format($totalPredictions),
'average_accuracy' => round($averageAccuracy, 2),
'health_percentage' => round($healthPercentage, 2),
'overall_status' => ucfirst($overallStatus),
'overall_badge' => $overallBadge,
// Type distribution
'supervised_count' => $byType['supervised'],
'unsupervised_count' => $byType['unsupervised'],
'reinforcement_count' => $byType['reinforcement'],
// Models and alerts
'models' => $performanceOverview,
'alerts' => $degradationAlerts,
'has_alerts' => count($degradationAlerts) > 0,
'alert_count' => count($degradationAlerts),
// Links
'api_dashboard_url' => '/api/ml/dashboard',
'api_health_url' => '/api/ml/dashboard/health',
];
$finalData = $this->layoutProcessor->processLayoutFromArray($data);
return new ViewResult(
template: 'ml-dashboard',
metaData: new MetaData('ML Dashboard', 'Machine Learning Model Monitoring and Performance'),
data: $finalData
);
}
/**
* Get all models from registry (all names and all versions)
*/
private function getAllModels(): array
{
$modelNames = $this->registry->getAllModelNames();
$allModels = [];
foreach ($modelNames as $modelName) {
$versions = $this->registry->getAll($modelName);
$allModels = array_merge($allModels, $versions);
}
return $allModels;
}
}


@@ -0,0 +1,253 @@
<layout name="admin" />
<x-breadcrumbs items='[
{"label": "Admin", "url": "/admin"},
{"label": "ML Dashboard", "url": "/admin/ml/dashboard"}
]' />
<div class="admin-page">
<div class="admin-page__header">
<div class="admin-page__header-content">
<h1 class="admin-page__title">{{ $page_title }}</h1>
<p class="admin-page__subtitle">Monitor machine learning model performance and health metrics</p>
</div>
<div class="admin-page__actions">
<a href="{{ $api_dashboard_url }}" class="admin-button admin-button--secondary" target="_blank">
<svg class="admin-icon" width="16" height="16" fill="currentColor">
<path d="M8 2a6 6 0 100 12A6 6 0 008 2zm0 10a4 4 0 110-8 4 4 0 010 8z"/>
</svg>
View API
</a>
</div>
</div>
<!-- Summary Cards -->
<div class="admin-grid admin-grid--3-col">
<!-- System Health Card -->
<div class="admin-card">
<div class="admin-card__header">
<h3 class="admin-card__title">System Health</h3>
</div>
<div class="admin-card__content">
<div class="admin-stat-list">
<div class="admin-stat-item">
<span class="admin-stat-item__label">Overall Status</span>
<span class="admin-stat-item__value">
<span class="admin-badge admin-badge--{{ $overall_badge }}">{{ $overall_status }}</span>
</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Health Percentage</span>
<span class="admin-stat-item__value">{{ $health_percentage }}%</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Average Accuracy</span>
<span class="admin-stat-item__value">{{ $average_accuracy }}%</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Time Window</span>
<span class="admin-stat-item__value">{{ $time_window_hours }} hours</span>
</div>
</div>
</div>
</div>
<!-- Model Statistics Card -->
<div class="admin-card">
<div class="admin-card__header">
<h3 class="admin-card__title">Model Statistics</h3>
</div>
<div class="admin-card__content">
<div class="admin-stat-list">
<div class="admin-stat-item">
<span class="admin-stat-item__label">Total Models</span>
<span class="admin-stat-item__value">{{ $total_models }}</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Healthy</span>
<span class="admin-stat-item__value">
<span class="admin-badge admin-badge--success">{{ $healthy_models }}</span>
</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Degraded</span>
<span class="admin-stat-item__value">
<span class="admin-badge admin-badge--warning">{{ $degraded_models }}</span>
</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Critical</span>
<span class="admin-stat-item__value">
<span class="admin-badge admin-badge--danger">{{ $critical_models }}</span>
</span>
</div>
</div>
</div>
</div>
<!-- Performance Metrics Card -->
<div class="admin-card">
<div class="admin-card__header">
<h3 class="admin-card__title">Performance Metrics</h3>
</div>
<div class="admin-card__content">
<div class="admin-stat-list">
<div class="admin-stat-item">
<span class="admin-stat-item__label">Total Predictions</span>
<span class="admin-stat-item__value">{{ $total_predictions }}</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Supervised Models</span>
<span class="admin-stat-item__value">{{ $supervised_count }}</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Unsupervised Models</span>
<span class="admin-stat-item__value">{{ $unsupervised_count }}</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Reinforcement Models</span>
<span class="admin-stat-item__value">{{ $reinforcement_count }}</span>
</div>
</div>
</div>
</div>
</div>
<!-- Degradation Alerts Section -->
<div class="admin-card" if="{{ $has_alerts }}">
<div class="admin-card__header">
<h3 class="admin-card__title">
Degradation Alerts
<span class="admin-badge admin-badge--danger">{{ $alert_count }}</span>
</h3>
</div>
<div class="admin-card__content">
<div class="admin-table-container">
<table class="admin-table">
<thead>
<tr>
<th>Model</th>
<th>Version</th>
<th>Current Accuracy</th>
<th>Threshold</th>
<th>Severity</th>
<th>Recommendation</th>
</tr>
</thead>
<tbody>
<tr foreach="$alerts as $alert">
<td>
<strong>{{ $alert['model_name'] }}</strong>
</td>
<td>
<code>{{ $alert['version'] }}</code>
</td>
<td>
<span class="admin-badge admin-badge--{{ $alert['severity_badge'] }}">
{{ $alert['current_accuracy'] }}%
</span>
</td>
<td>{{ $alert['threshold'] }}%</td>
<td>
<span class="admin-badge admin-badge--{{ $alert['severity_badge'] }}">
{{ $alert['severity'] }}
</span>
</td>
<td>{{ $alert['recommendation'] }}</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
<!-- Models Overview Section -->
<div class="admin-card">
<div class="admin-card__header">
<h3 class="admin-card__title">Models Overview</h3>
</div>
<div class="admin-card__content">
<div class="admin-table-container">
<table class="admin-table">
<thead>
<tr>
<th>Model Name</th>
<th>Version</th>
<th>Type</th>
<th>Accuracy</th>
<th>Precision</th>
<th>Recall</th>
<th>F1 Score</th>
<th>Predictions</th>
<th>Avg Confidence</th>
<th>Threshold</th>
<th>Status</th>
</tr>
</thead>
<tbody>
<tr foreach="$models as $model">
<td>
<strong>{{ $model['model_name'] }}</strong>
</td>
<td>
<code>{{ $model['version'] }}</code>
</td>
<td>
<span class="admin-badge admin-badge--info">
{{ $model['type'] }}
</span>
</td>
<td>{{ $model['accuracy'] }}%</td>
<td>
<span if="!{{ $model['precision'] }}">-</span>
<span if="{{ $model['precision'] }}">{{ $model['precision'] }}%</span>
</td>
<td>
<span if="!{{ $model['recall'] }}">-</span>
<span if="{{ $model['recall'] }}">{{ $model['recall'] }}%</span>
</td>
<td>
<span if="!{{ $model['f1_score'] }}">-</span>
<span if="{{ $model['f1_score'] }}">{{ $model['f1_score'] }}%</span>
</td>
<td>{{ $model['total_predictions'] }}</td>
<td>
<span if="!{{ $model['average_confidence'] }}">-</span>
<span if="{{ $model['average_confidence'] }}">{{ $model['average_confidence'] }}%</span>
</td>
<td>{{ $model['threshold'] }}</td>
<td>
<span class="admin-badge admin-badge--{{ $model['status_badge'] }}">
{{ $model['status'] }}
</span>
</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
<!-- API Information Card -->
<div class="admin-card">
<div class="admin-card__header">
<h3 class="admin-card__title">API Endpoints</h3>
</div>
<div class="admin-card__content">
<div class="admin-stat-list">
<div class="admin-stat-item">
<span class="admin-stat-item__label">Dashboard Data</span>
<span class="admin-stat-item__value">
<code>GET {{ $api_dashboard_url }}</code>
</span>
</div>
<div class="admin-stat-item">
<span class="admin-stat-item__label">Health Check</span>
<span class="admin-stat-item__value">
<code>GET {{ $api_health_url }}</code>
</span>
</div>
</div>
</div>
</div>
</div>


@@ -0,0 +1,455 @@
<?php
declare(strict_types=1);
namespace App\Application\Api\MachineLearning;
use App\Framework\Attributes\Route;
use App\Framework\Core\ValueObjects\Version;
use App\Framework\Http\HttpRequest;
use App\Framework\Http\Method;
use App\Framework\Http\Status;
use App\Framework\MachineLearning\ModelManagement\ABTestingService;
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ABTestConfig;
use App\Framework\OpenApi\Attributes\ApiEndpoint;
use App\Framework\OpenApi\Attributes\ApiParameter;
use App\Framework\OpenApi\Attributes\ApiRequestBody;
use App\Framework\OpenApi\Attributes\ApiResponse;
use App\Framework\OpenApi\Attributes\ApiSecurity;
use App\Framework\Router\Result\JsonResult;
/**
* ML A/B Testing API Controller
*
* RESTful API endpoints for A/B testing machine learning models:
* - Start A/B tests
* - Get test results
* - Generate rollout plans
* - Calculate sample sizes
*/
#[ApiSecurity('bearerAuth')]
final readonly class MLABTestingController
{
public function __construct(
private ABTestingService $abTesting,
private ModelRegistry $registry
) {}
#[Route(path: '/api/ml/ab-test', method: Method::POST)]
#[ApiEndpoint(
summary: 'Start A/B test',
description: 'Create and start an A/B test comparing two model versions',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'A/B test configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'version_a' => '1.0.0',
'version_b' => '2.0.0',
'traffic_split_a' => 0.5,
'primary_metric' => 'accuracy',
'minimum_improvement' => 0.05,
],
)]
#[ApiResponse(
statusCode: 201,
description: 'A/B test created successfully',
example: [
'test_id' => 'test_123',
'model_name' => 'fraud-detector',
'version_a' => '1.0.0',
'version_b' => '2.0.0',
'traffic_split' => [
'version_a' => 0.5,
'version_b' => 0.5,
],
'status' => 'running',
'created_at' => '2024-01-01T00:00:00Z',
],
)]
#[ApiResponse(
statusCode: 400,
description: 'Invalid test configuration',
)]
public function startTest(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
// Validate required fields
if (!isset($data['model_name'], $data['version_a'], $data['version_b'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version_a', 'version_b'],
], Status::BAD_REQUEST);
}
// Parse versions
$versionA = Version::fromString($data['version_a']);
$versionB = Version::fromString($data['version_b']);
// Verify models exist
$metadataA = $this->registry->get($data['model_name'], $versionA);
$metadataB = $this->registry->get($data['model_name'], $versionB);
if ($metadataA === null) {
return new JsonResult([
'error' => 'Version A not found',
'model_name' => $data['model_name'],
'version' => $data['version_a'],
], Status::NOT_FOUND);
}
if ($metadataB === null) {
return new JsonResult([
'error' => 'Version B not found',
'model_name' => $data['model_name'],
'version' => $data['version_b'],
], Status::NOT_FOUND);
}
// Create A/B test config
$config = new ABTestConfig(
modelName: $data['model_name'],
versionA: $versionA,
versionB: $versionB,
trafficSplitA: (float) ($data['traffic_split_a'] ?? 0.5),
primaryMetric: $data['primary_metric'] ?? 'accuracy',
minimumImprovement: (float) ($data['minimum_improvement'] ?? 0.05),
significanceLevel: (float) ($data['significance_level'] ?? 0.05)
);
// Generate test ID (in production, store in database)
$testId = 'test_' . bin2hex(random_bytes(8));
return new JsonResult([
'test_id' => $testId,
'model_name' => $config->modelName,
'version_a' => $config->versionA->toString(),
'version_b' => $config->versionB->toString(),
'traffic_split' => [
'version_a' => $config->trafficSplitA,
'version_b' => 1.0 - $config->trafficSplitA,
],
'primary_metric' => $config->primaryMetric,
'minimum_improvement' => $config->minimumImprovement,
'status' => 'running',
'description' => $config->getDescription(),
'created_at' => date('c'),
], Status::CREATED);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid test configuration',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/ab-test/compare', method: Method::POST)]
#[ApiEndpoint(
summary: 'Compare model versions',
description: 'Compare performance of two model versions and get winner recommendation',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Model comparison configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'version_a' => '1.0.0',
'version_b' => '2.0.0',
'primary_metric' => 'f1_score',
'minimum_improvement' => 0.05,
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Comparison completed successfully',
example: [
'winner' => 'B',
'statistically_significant' => true,
'metrics_difference' => [
'accuracy' => 0.07,
'f1_score' => 0.08,
],
'primary_metric_improvement' => 8.5,
'recommendation' => 'Version B wins with 8.5% improvement - deploy new version',
'summary' => 'Version B significantly outperforms Version A',
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model version not found',
)]
public function compareVersions(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['version_a'], $data['version_b'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version_a', 'version_b'],
], Status::BAD_REQUEST);
}
$versionA = Version::fromString($data['version_a']);
$versionB = Version::fromString($data['version_b']);
$config = new ABTestConfig(
modelName: $data['model_name'],
versionA: $versionA,
versionB: $versionB,
trafficSplitA: 0.5,
primaryMetric: $data['primary_metric'] ?? 'accuracy',
minimumImprovement: (float) ($data['minimum_improvement'] ?? 0.05)
);
// Run comparison
$result = $this->abTesting->runTest($config);
return new JsonResult([
'winner' => $result->winner,
'statistically_significant' => $result->isStatisticallySignificant,
'metrics_difference' => $result->metricsDifference,
'primary_metric_improvement' => $result->getPrimaryMetricImprovementPercent(),
'recommendation' => $result->recommendation,
'summary' => $result->getSummary(),
'should_deploy_version_b' => $result->shouldDeployVersionB(),
'is_inconclusive' => $result->isInconclusive(),
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid comparison parameters',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/ab-test/rollout-plan', method: Method::POST)]
#[ApiEndpoint(
summary: 'Generate rollout plan',
description: 'Generate a gradual rollout plan for deploying a new model version',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Rollout configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'current_version' => '1.0.0',
'new_version' => '2.0.0',
'steps' => 5,
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Rollout plan generated successfully',
example: [
'model_name' => 'fraud-detector',
'current_version' => '1.0.0',
'new_version' => '2.0.0',
'rollout_stages' => [
[
'stage' => 1,
'current_version_traffic' => 80,
'new_version_traffic' => 20,
],
[
'stage' => 2,
'current_version_traffic' => 60,
'new_version_traffic' => 40,
],
],
],
)]
public function generateRolloutPlan(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['current_version'], $data['new_version'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'current_version', 'new_version'],
], Status::BAD_REQUEST);
}
$steps = (int) ($data['steps'] ?? 5);
if ($steps < 2 || $steps > 10) {
return new JsonResult([
'error' => 'Steps must be between 2 and 10',
], Status::BAD_REQUEST);
}
// Generate rollout plan
$plan = $this->abTesting->generateRolloutPlan($steps);
// Format response
$stages = [];
foreach ($plan as $stage => $newVersionTraffic) {
$stages[] = [
'stage' => $stage,
'current_version_traffic' => (int) ((1.0 - $newVersionTraffic) * 100),
'new_version_traffic' => (int) ($newVersionTraffic * 100),
];
}
return new JsonResult([
'model_name' => $data['model_name'],
'current_version' => $data['current_version'],
'new_version' => $data['new_version'],
'total_stages' => $steps,
'rollout_stages' => $stages,
'recommendation' => 'Monitor performance at each stage before proceeding to next',
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid rollout configuration',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/ab-test/sample-size', method: Method::GET)]
#[ApiEndpoint(
summary: 'Calculate required sample size',
description: 'Calculate the required sample size for statistically significant A/B test',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'confidence_level',
in: 'query',
description: 'Confidence level (0.90, 0.95, 0.99)',
required: false,
type: 'number',
example: 0.95,
)]
#[ApiParameter(
name: 'margin_of_error',
in: 'query',
description: 'Margin of error (typically 0.01-0.10)',
required: false,
type: 'number',
example: 0.05,
)]
#[ApiResponse(
statusCode: 200,
description: 'Sample size calculated successfully',
example: [
'required_samples_per_version' => 385,
'total_samples_needed' => 770,
'confidence_level' => 0.95,
'margin_of_error' => 0.05,
'recommendation' => 'Collect at least 385 predictions per version',
],
)]
public function calculateSampleSize(HttpRequest $request): JsonResult
{
$confidenceLevel = (float) ($request->queryParameters['confidence_level'] ?? 0.95);
$marginOfError = (float) ($request->queryParameters['margin_of_error'] ?? 0.05);
// Validate parameters
if ($confidenceLevel < 0.5 || $confidenceLevel > 0.99) {
return new JsonResult([
'error' => 'Confidence level must be between 0.5 and 0.99',
], Status::BAD_REQUEST);
}
if ($marginOfError < 0.01 || $marginOfError > 0.20) {
return new JsonResult([
'error' => 'Margin of error must be between 0.01 and 0.20',
], Status::BAD_REQUEST);
}
// Calculate sample size
$samplesPerVersion = $this->abTesting->calculateRequiredSampleSize(
$confidenceLevel,
$marginOfError
);
return new JsonResult([
'required_samples_per_version' => $samplesPerVersion,
'total_samples_needed' => $samplesPerVersion * 2,
'confidence_level' => $confidenceLevel,
'margin_of_error' => $marginOfError,
'confidence_level_percent' => ($confidenceLevel * 100) . '%',
'margin_of_error_percent' => ($marginOfError * 100) . '%',
'recommendation' => "Collect at least {$samplesPerVersion} predictions per version for statistically significant results",
]);
}
#[Route(path: '/api/ml/ab-test/select-version', method: Method::POST)]
#[ApiEndpoint(
summary: 'Select model version for traffic routing',
description: 'Randomly select a model version based on A/B test traffic split configuration',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Traffic routing configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'version_a' => '1.0.0',
'version_b' => '2.0.0',
'traffic_split_a' => 0.8,
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Version selected successfully',
example: [
'selected_version' => '2.0.0',
'model_name' => 'fraud-detector',
'traffic_split' => [
'version_a' => 0.8,
'version_b' => 0.2,
],
],
)]
public function selectVersion(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['version_a'], $data['version_b'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version_a', 'version_b'],
], Status::BAD_REQUEST);
}
$versionA = Version::fromString($data['version_a']);
$versionB = Version::fromString($data['version_b']);
$config = new ABTestConfig(
modelName: $data['model_name'],
versionA: $versionA,
versionB: $versionB,
trafficSplitA: (float) ($data['traffic_split_a'] ?? 0.5),
primaryMetric: 'accuracy'
);
// Select version based on traffic split
$selectedVersion = $this->abTesting->selectVersion($config);
return new JsonResult([
'selected_version' => $selectedVersion->toString(),
'model_name' => $config->modelName,
'traffic_split' => [
'version_a' => $config->trafficSplitA,
'version_b' => 1.0 - $config->trafficSplitA,
],
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid routing configuration',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
}


@@ -0,0 +1,386 @@
<?php
declare(strict_types=1);
namespace App\Application\Api\MachineLearning;
use App\Framework\Attributes\Route;
use App\Framework\Core\ValueObjects\Version;
use App\Framework\Http\HttpRequest;
use App\Framework\Http\Method;
use App\Framework\Http\Status;
use App\Framework\MachineLearning\ModelManagement\AutoTuningEngine;
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\OpenApi\Attributes\ApiEndpoint;
use App\Framework\OpenApi\Attributes\ApiParameter;
use App\Framework\OpenApi\Attributes\ApiRequestBody;
use App\Framework\OpenApi\Attributes\ApiResponse;
use App\Framework\OpenApi\Attributes\ApiSecurity;
use App\Framework\Router\Result\JsonResult;
/**
* ML Auto-Tuning API Controller
*
* RESTful API endpoints for automatic ML model optimization:
* - Threshold optimization
* - Adaptive threshold adjustment
* - Precision-recall trade-off optimization
* - Hyperparameter tuning
*/
#[ApiSecurity('bearerAuth')]
final readonly class MLAutoTuningController
{
public function __construct(
private AutoTuningEngine $autoTuning,
private ModelRegistry $registry
) {}
#[Route(path: '/api/ml/optimize/threshold', method: Method::POST)]
#[ApiEndpoint(
summary: 'Optimize model threshold',
description: 'Find optimal threshold using grid search to maximize specified metric',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Threshold optimization configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'metric_to_optimize' => 'f1_score',
'threshold_range' => [0.5, 0.9],
'step' => 0.05,
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Threshold optimization completed',
example: [
'optimal_threshold' => 0.75,
'optimal_metric_value' => 0.92,
'current_threshold' => 0.7,
'current_metric_value' => 0.89,
'improvement_percent' => 3.37,
'recommendation' => 'MODERATE IMPROVEMENT: Consider updating threshold from 0.70 to 0.75 (3.4% gain)',
'tested_thresholds' => 9,
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
#[ApiResponse(
statusCode: 400,
description: 'Insufficient data for optimization (requires minimum 100 predictions)',
)]
public function optimizeThreshold(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['version'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version'],
], Status::BAD_REQUEST);
}
$version = Version::fromString($data['version']);
// Verify model exists
$metadata = $this->registry->get($data['model_name'], $version);
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $data['model_name'],
'version' => $data['version'],
], Status::NOT_FOUND);
}
// Optimize threshold
$result = $this->autoTuning->optimizeThreshold(
modelName: $data['model_name'],
version: $version,
metricToOptimize: $data['metric_to_optimize'] ?? 'f1_score',
thresholdRange: $data['threshold_range'] ?? [0.5, 0.9],
step: (float) ($data['step'] ?? 0.05)
);
return new JsonResult([
'optimal_threshold' => $result['optimal_threshold'],
'optimal_metric_value' => $result['optimal_metric_value'],
'current_threshold' => $result['current_threshold'],
'current_metric_value' => $result['current_metric_value'],
'improvement_percent' => $result['improvement_percent'],
'metric_optimized' => $result['metric_optimized'],
'recommendation' => $result['recommendation'],
'tested_thresholds' => count($result['all_results']),
'all_results' => $result['all_results'],
]);
} catch (\RuntimeException $e) {
return new JsonResult([
'error' => 'Optimization failed',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid optimization parameters',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/optimize/adaptive-threshold', method: Method::POST)]
#[ApiEndpoint(
summary: 'Adaptive threshold adjustment',
description: 'Dynamically adjust threshold based on false positive/negative rates',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Model identification',
required: true,
example: [
'model_name' => 'fraud-detector',
'version' => '1.0.0',
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Adaptive adjustment calculated',
example: [
'recommended_threshold' => 0.75,
'current_threshold' => 0.7,
'adjustment_reason' => 'High false positive rate - increasing threshold to reduce false alarms',
'current_fp_rate' => 0.12,
'current_fn_rate' => 0.05,
'expected_improvement' => [
'accuracy' => 0.03,
'precision' => 0.05,
'recall' => -0.02,
],
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
public function adaptiveThresholdAdjustment(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['version'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version'],
], Status::BAD_REQUEST);
}
$version = Version::fromString($data['version']);
// Verify model exists
$metadata = $this->registry->get($data['model_name'], $version);
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $data['model_name'],
'version' => $data['version'],
], Status::NOT_FOUND);
}
// Calculate adaptive adjustment
$result = $this->autoTuning->adaptiveThresholdAdjustment(
$data['model_name'],
$version
);
return new JsonResult([
'recommended_threshold' => $result['recommended_threshold'],
'current_threshold' => $result['current_threshold'],
'adjustment_reason' => $result['adjustment_reason'],
'current_fp_rate' => $result['current_fp_rate'],
'current_fn_rate' => $result['current_fn_rate'],
'expected_improvement' => $result['expected_improvement'],
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid adjustment parameters',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/optimize/precision-recall', method: Method::POST)]
#[ApiEndpoint(
summary: 'Optimize precision-recall trade-off',
description: 'Find threshold that achieves target precision while maximizing recall',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Precision-recall optimization configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'target_precision' => 0.95,
'threshold_range' => [0.5, 0.99],
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Precision-recall optimization completed',
example: [
'optimal_threshold' => 0.82,
'achieved_precision' => 0.95,
'achieved_recall' => 0.78,
'f1_score' => 0.86,
'target_precision' => 0.95,
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
public function optimizePrecisionRecall(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['version'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version'],
], Status::BAD_REQUEST);
}
$version = Version::fromString($data['version']);
// Verify model exists
$metadata = $this->registry->get($data['model_name'], $version);
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $data['model_name'],
'version' => $data['version'],
], Status::NOT_FOUND);
}
// Optimize precision-recall trade-off
$result = $this->autoTuning->optimizePrecisionRecallTradeoff(
modelName: $data['model_name'],
version: $version,
targetPrecision: (float) ($data['target_precision'] ?? 0.95),
thresholdRange: $data['threshold_range'] ?? [0.5, 0.99]
);
return new JsonResult([
'optimal_threshold' => $result['optimal_threshold'],
'achieved_precision' => $result['achieved_precision'],
'achieved_recall' => $result['achieved_recall'],
'f1_score' => $result['f1_score'],
'target_precision' => (float) ($data['target_precision'] ?? 0.95),
'recommendation' => sprintf(
'Use threshold %.2f to achieve %.1f%% precision with %.1f%% recall',
$result['optimal_threshold'],
$result['achieved_precision'] * 100,
$result['achieved_recall'] * 100
),
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid optimization parameters',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/optimize/apply-threshold', method: Method::POST)]
#[ApiEndpoint(
summary: 'Apply optimized threshold',
description: 'Update model configuration with optimized threshold',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Threshold application configuration',
required: true,
example: [
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'new_threshold' => 0.75,
],
)]
#[ApiResponse(
statusCode: 200,
description: 'Threshold applied successfully',
example: [
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'old_threshold' => 0.7,
'new_threshold' => 0.75,
'message' => 'Threshold updated successfully',
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
public function applyThreshold(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
if (!isset($data['model_name'], $data['version'], $data['new_threshold'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'version', 'new_threshold'],
], Status::BAD_REQUEST);
}
$version = Version::fromString($data['version']);
$newThreshold = (float) $data['new_threshold'];
// Validate threshold range
if ($newThreshold < 0.0 || $newThreshold > 1.0) {
return new JsonResult([
'error' => 'Threshold must be between 0.0 and 1.0',
], Status::BAD_REQUEST);
}
// Get current model
$metadata = $this->registry->get($data['model_name'], $version);
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $data['model_name'],
'version' => $data['version'],
], Status::NOT_FOUND);
}
$oldThreshold = $metadata->configuration['threshold'] ?? null;
// Update configuration
$updatedMetadata = $metadata->withConfiguration([
...$metadata->configuration,
'threshold' => $newThreshold,
'threshold_updated_at' => date('c'),
'threshold_update_reason' => $data['reason'] ?? 'Manual optimization',
]);
$this->registry->update($updatedMetadata);
return new JsonResult([
'model_name' => $data['model_name'],
'version' => $version->toString(),
'old_threshold' => $oldThreshold,
'new_threshold' => $newThreshold,
'message' => 'Threshold updated successfully',
'updated_at' => date('c'),
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid threshold update',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
}


@@ -0,0 +1,472 @@
<?php
declare(strict_types=1);
namespace App\Application\Api\MachineLearning;
use App\Framework\Attributes\Route;
use App\Framework\Core\ValueObjects\Duration;
use App\Framework\Core\ValueObjects\Timestamp;
use App\Framework\Http\HttpRequest;
use App\Framework\Http\Method;
use App\Framework\MachineLearning\ModelManagement\ModelPerformanceMonitor;
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\OpenApi\Attributes\ApiEndpoint;
use App\Framework\OpenApi\Attributes\ApiParameter;
use App\Framework\OpenApi\Attributes\ApiResponse;
use App\Framework\OpenApi\Attributes\ApiSecurity;
use App\Framework\Router\Result\JsonResult;
/**
* ML Dashboard API Controller
*
* RESTful API endpoints for ML monitoring dashboard:
* - Performance overview
* - Degradation alerts
* - System health indicators
* - Comprehensive dashboard data export
*/
#[ApiSecurity('bearerAuth')]
final readonly class MLDashboardController
{
public function __construct(
private ModelRegistry $registry,
private ModelPerformanceMonitor $performanceMonitor
) {}
/**
* Get all models from registry (all names and all versions)
*/
private function getAllModels(): array
{
$modelNames = $this->registry->getAllModelNames();
$allModels = [];
foreach ($modelNames as $modelName) {
$versions = $this->registry->getAll($modelName);
$allModels = array_merge($allModels, $versions);
}
return $allModels;
}
#[Route(path: '/api/ml/dashboard', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get complete dashboard data',
description: 'Retrieve comprehensive ML system dashboard data including performance, alerts, and health',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'timeWindow',
in: 'query',
description: 'Time window in hours for metrics (default: 24)',
required: false,
type: 'integer',
example: 24,
)]
#[ApiResponse(
statusCode: 200,
description: 'Dashboard data retrieved successfully',
example: [
'timestamp' => '2024-01-01T00:00:00Z',
'summary' => [
'total_models' => 5,
'healthy_models' => 4,
'degraded_models' => 1,
'total_predictions' => 10523,
'average_accuracy' => 0.91,
'overall_status' => 'healthy',
],
'models' => [
[
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'type' => 'supervised',
'accuracy' => 0.94,
'status' => 'healthy',
],
],
'alerts' => [],
],
)]
public function getDashboardData(HttpRequest $request): JsonResult
{
$timeWindowHours = (int) ($request->queryParameters['timeWindow'] ?? 24);
$timeWindow = Duration::fromHours($timeWindowHours);
// Get all models
$allModels = $this->getAllModels();
// Collect performance overview
$performanceOverview = [];
$totalPredictions = 0;
$accuracySum = 0.0;
$healthyCount = 0;
$degradedCount = 0;
foreach ($allModels as $metadata) {
$metrics = $this->performanceMonitor->getCurrentMetrics(
$metadata->modelName,
$metadata->version,
$timeWindow
);
$isHealthy = $metrics['accuracy'] >= 0.85;
if ($isHealthy) {
$healthyCount++;
} else {
$degradedCount++;
}
$performanceOverview[] = [
'model_name' => $metadata->modelName,
'version' => $metadata->version->toString(),
'type' => $metadata->modelType->value,
'accuracy' => $metrics['accuracy'],
'precision' => $metrics['precision'] ?? null,
'recall' => $metrics['recall'] ?? null,
'f1_score' => $metrics['f1_score'] ?? null,
'total_predictions' => $metrics['total_predictions'],
'average_confidence' => $metrics['average_confidence'] ?? null,
'threshold' => $metadata->configuration['threshold'] ?? null,
'status' => $isHealthy ? 'healthy' : 'degraded',
];
$totalPredictions += $metrics['total_predictions'];
$accuracySum += $metrics['accuracy'];
}
// Calculate degradation alerts
$degradationAlerts = [];
foreach ($performanceOverview as $model) {
if ($model['status'] === 'degraded') {
$degradationAlerts[] = [
'model_name' => $model['model_name'],
'version' => $model['version'],
'current_accuracy' => $model['accuracy'],
'threshold' => 0.85,
'severity' => $model['accuracy'] < 0.7 ? 'critical' : 'warning',
'recommendation' => 'Consider retraining or rolling back to previous version',
];
}
}
// Calculate health indicators
$modelCount = count($allModels);
$averageAccuracy = $modelCount > 0 ? $accuracySum / $modelCount : 0.0;
$overallStatus = $degradedCount === 0 ? 'healthy' : ($degradedCount > $modelCount / 2 ? 'critical' : 'warning');
// Build dashboard data
$dashboardData = [
'timestamp' => Timestamp::now()->format('Y-m-d\TH:i:s\Z'),
'time_window_hours' => $timeWindowHours,
'summary' => [
'total_models' => $modelCount,
'healthy_models' => $healthyCount,
'degraded_models' => $degradedCount,
'total_predictions' => $totalPredictions,
'average_accuracy' => round($averageAccuracy, 4),
'overall_status' => $overallStatus,
],
'models' => $performanceOverview,
'alerts' => $degradationAlerts,
];
return new JsonResult($dashboardData);
}
#[Route(path: '/api/ml/dashboard/health', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get system health indicators',
description: 'Retrieve ML system health status and key indicators',
tags: ['Machine Learning'],
)]
#[ApiResponse(
statusCode: 200,
description: 'Health indicators retrieved successfully',
example: [
'overall_status' => 'healthy',
'healthy_models' => 4,
'degraded_models' => 1,
'critical_models' => 0,
'total_models' => 5,
'health_percentage' => 80.0,
'average_accuracy' => 0.91,
],
)]
public function getHealthIndicators(): JsonResult
{
$allModels = $this->getAllModels();
$timeWindow = Duration::fromHours(1);
$healthyCount = 0;
$degradedCount = 0;
$criticalCount = 0;
$accuracySum = 0.0;
foreach ($allModels as $metadata) {
$metrics = $this->performanceMonitor->getCurrentMetrics(
$metadata->modelName,
$metadata->version,
$timeWindow
);
$accuracy = $metrics['accuracy'];
$accuracySum += $accuracy;
if ($accuracy >= 0.85) {
$healthyCount++;
} elseif ($accuracy >= 0.7) {
$degradedCount++;
} else {
$criticalCount++;
}
}
$modelCount = count($allModels);
$healthPercentage = $modelCount > 0 ? ($healthyCount / $modelCount) * 100 : 0.0;
$averageAccuracy = $modelCount > 0 ? $accuracySum / $modelCount : 0.0;
$overallStatus = match (true) {
$criticalCount > 0 => 'critical',
$degradedCount > $modelCount / 2 => 'warning',
$degradedCount > 0 => 'warning',
default => 'healthy'
};
return new JsonResult([
'overall_status' => $overallStatus,
'healthy_models' => $healthyCount,
'degraded_models' => $degradedCount,
'critical_models' => $criticalCount,
'total_models' => $modelCount,
'health_percentage' => round($healthPercentage, 2),
'average_accuracy' => round($averageAccuracy, 4),
'timestamp' => date('c'),
]);
}
#[Route(path: '/api/ml/dashboard/alerts', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get active alerts',
description: 'Retrieve all active degradation and performance alerts',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'severity',
in: 'query',
description: 'Filter by severity (warning, critical)',
required: false,
type: 'string',
example: 'critical',
)]
#[ApiResponse(
statusCode: 200,
description: 'Alerts retrieved successfully',
example: [
'alerts' => [
[
'model_name' => 'spam-classifier',
'version' => '1.0.0',
'severity' => 'warning',
'current_accuracy' => 0.78,
'threshold' => 0.85,
'recommendation' => 'Consider retraining or rolling back',
],
],
'total_alerts' => 1,
],
)]
public function getAlerts(HttpRequest $request): JsonResult
{
$severityFilter = $request->queryParameters['severity'] ?? null;
$allModels = $this->getAllModels();
$timeWindow = Duration::fromHours(1);
$alerts = [];
foreach ($allModels as $metadata) {
$metrics = $this->performanceMonitor->getCurrentMetrics(
$metadata->modelName,
$metadata->version,
$timeWindow
);
$accuracy = $metrics['accuracy'];
if ($accuracy < 0.85) {
$severity = $accuracy < 0.7 ? 'critical' : 'warning';
// Apply severity filter if specified
if ($severityFilter !== null && $severity !== strtolower($severityFilter)) {
continue;
}
$alerts[] = [
'model_name' => $metadata->modelName,
'version' => $metadata->version->toString(),
'type' => $metadata->modelType->value,
'severity' => $severity,
'current_accuracy' => $accuracy,
'threshold' => 0.85,
'deviation' => round((0.85 - $accuracy) * 100, 2),
'total_predictions' => $metrics['total_predictions'],
'recommendation' => 'Consider retraining or rolling back to previous version',
'detected_at' => date('c'),
];
}
}
return new JsonResult([
'alerts' => $alerts,
'total_alerts' => count($alerts),
'severity_filter' => $severityFilter,
'timestamp' => date('c'),
]);
}
#[Route(path: '/api/ml/dashboard/confusion-matrices', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get confusion matrices',
description: 'Retrieve confusion matrices for all models with classification metrics',
tags: ['Machine Learning'],
)]
#[ApiResponse(
statusCode: 200,
description: 'Confusion matrices retrieved successfully',
example: [
'matrices' => [
[
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'confusion_matrix' => [
'true_positive' => 234,
'true_negative' => 145,
'false_positive' => 12,
'false_negative' => 9,
],
'fp_rate' => 0.03,
'fn_rate' => 0.023,
],
],
],
)]
public function getConfusionMatrices(): JsonResult
{
$allModels = $this->getAllModels();
$timeWindow = Duration::fromHours(24);
$matrices = [];
foreach ($allModels as $metadata) {
$metrics = $this->performanceMonitor->getCurrentMetrics(
$metadata->modelName,
$metadata->version,
$timeWindow
);
if (isset($metrics['confusion_matrix'])) {
$cm = $metrics['confusion_matrix'];
$total = $metrics['total_predictions'];
$matrices[] = [
'model_name' => $metadata->modelName,
'version' => $metadata->version->toString(),
'type' => $metadata->modelType->value,
'confusion_matrix' => $cm,
'fp_rate' => $total > 0 ? round($cm['false_positive'] / $total, 4) : 0.0,
'fn_rate' => $total > 0 ? round($cm['false_negative'] / $total, 4) : 0.0,
'total_predictions' => $total,
];
}
}
return new JsonResult([
'matrices' => $matrices,
'total_models' => count($matrices),
'timestamp' => date('c'),
]);
}
#[Route(path: '/api/ml/dashboard/registry-summary', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get registry summary',
description: 'Retrieve summary statistics about the model registry',
tags: ['Machine Learning'],
)]
#[ApiResponse(
statusCode: 200,
description: 'Registry summary retrieved successfully',
example: [
'total_models' => 5,
'by_type' => [
'supervised' => 3,
'unsupervised' => 2,
'reinforcement' => 0,
],
'total_versions' => 12,
'models' => [
[
'model_name' => 'fraud-detector',
'version_count' => 3,
'latest_version' => '3.0.0',
],
],
],
)]
public function getRegistrySummary(): JsonResult
{
$allModels = $this->getAllModels();
// Count by type
$byType = [
'supervised' => 0,
'unsupervised' => 0,
'reinforcement' => 0,
];
// Group by model name
$modelGroups = [];
foreach ($allModels as $metadata) {
$typeName = strtolower($metadata->modelType->value);
$byType[$typeName] = ($byType[$typeName] ?? 0) + 1;
$modelName = $metadata->modelName;
if (!isset($modelGroups[$modelName])) {
$modelGroups[$modelName] = [
'model_name' => $modelName,
'type' => $metadata->modelType->value,
'versions' => [],
];
}
$modelGroups[$modelName]['versions'][] = $metadata->version->toString();
}
// Calculate summary per model
$modelsSummary = [];
foreach ($modelGroups as $modelName => $group) {
// Sort versions
$versions = $group['versions'];
usort($versions, 'version_compare');
$modelsSummary[] = [
'model_name' => $modelName,
'type' => $group['type'],
'version_count' => count($versions),
'latest_version' => end($versions),
'oldest_version' => reset($versions),
];
}
return new JsonResult([
'total_models' => count($modelGroups),
'by_type' => $byType,
'total_versions' => count($allModels),
'models' => $modelsSummary,
'timestamp' => date('c'),
]);
}
}


@@ -0,0 +1,478 @@
<?php
declare(strict_types=1);
namespace App\Application\Api\MachineLearning;
use App\Framework\Attributes\Route;
use App\Framework\Core\ValueObjects\Duration;
use App\Framework\Core\ValueObjects\Timestamp;
use App\Framework\Core\ValueObjects\Version;
use App\Framework\Http\HttpRequest;
use App\Framework\Http\Method;
use App\Framework\Http\Status;
use App\Framework\MachineLearning\ModelManagement\ModelPerformanceMonitor;
use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelMetadata;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelType;
use App\Framework\OpenApi\Attributes\ApiEndpoint;
use App\Framework\OpenApi\Attributes\ApiParameter;
use App\Framework\OpenApi\Attributes\ApiRequestBody;
use App\Framework\OpenApi\Attributes\ApiResponse;
use App\Framework\OpenApi\Attributes\ApiSecurity;
use App\Framework\Router\Result\JsonResult;
/**
* ML Models Management API Controller
*
* RESTful API endpoints for managing machine learning models:
* - Model registration
* - Performance metrics retrieval
* - Model listing and versioning
*/
#[ApiSecurity('bearerAuth')]
final readonly class MLModelsController
{
public function __construct(
private ModelRegistry $registry,
private ModelPerformanceMonitor $performanceMonitor
) {}
#[Route(path: '/api/ml/models', method: Method::GET)]
#[ApiEndpoint(
summary: 'List all ML models',
description: 'Retrieve a list of all registered machine learning models with their versions',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'type',
in: 'query',
description: 'Filter by model type (supervised, unsupervised, reinforcement)',
required: false,
type: 'string',
example: 'supervised',
)]
#[ApiResponse(
statusCode: 200,
description: 'List of ML models retrieved successfully',
example: [
'models' => [
[
'model_name' => 'fraud-detector',
'type' => 'supervised',
'versions' => [
[
'version' => '1.0.0',
'created_at' => '2024-01-01T00:00:00Z',
'is_latest' => true,
],
],
],
],
'total_models' => 5,
],
)]
public function listModels(HttpRequest $request): JsonResult
{
$typeFilter = $request->queryParameters['type'] ?? null;
// Get all model names
$modelNames = $this->registry->getAllModelNames();
// Get all versions for each model
$allModels = [];
foreach ($modelNames as $modelName) {
$versions = $this->registry->getAll($modelName);
$allModels = array_merge($allModels, $versions);
}
// Filter by type if specified
if ($typeFilter !== null) {
$allModels = array_filter($allModels, function (ModelMetadata $metadata) use ($typeFilter) {
return strtolower($metadata->modelType->value) === strtolower($typeFilter);
});
}
// Group by model name
$groupedModels = [];
foreach ($allModels as $metadata) {
$modelName = $metadata->modelName;
if (!isset($groupedModels[$modelName])) {
$groupedModels[$modelName] = [
'model_name' => $modelName,
'type' => $metadata->modelType->value,
'versions' => [],
];
}
$groupedModels[$modelName]['versions'][] = [
'version' => $metadata->version->toString(),
'created_at' => $metadata->createdAt->format('Y-m-d\TH:i:s\Z'),
'configuration' => $metadata->configuration,
];
}
return new JsonResult([
'models' => array_values($groupedModels),
'total_models' => count($groupedModels),
]);
}
#[Route(path: '/api/ml/models/{modelName}', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get model details',
description: 'Retrieve detailed information about a specific ML model',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'modelName',
in: 'path',
description: 'Model identifier',
required: true,
type: 'string',
example: 'fraud-detector',
)]
#[ApiParameter(
name: 'version',
in: 'query',
description: 'Specific version (optional, defaults to latest)',
required: false,
type: 'string',
example: '1.0.0',
)]
#[ApiResponse(
statusCode: 200,
description: 'Model details retrieved successfully',
example: [
'model_name' => 'fraud-detector',
'type' => 'supervised',
'version' => '1.0.0',
'configuration' => [
'threshold' => 0.7,
'algorithm' => 'random_forest',
],
'created_at' => '2024-01-01T00:00:00Z',
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
public function getModel(string $modelName, HttpRequest $request): JsonResult
{
$versionString = $request->queryParameters['version'] ?? null;
try {
if ($versionString !== null) {
$version = Version::fromString($versionString);
$metadata = $this->registry->get($modelName, $version);
} else {
$metadata = $this->registry->getLatest($modelName);
}
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $modelName,
], Status::NOT_FOUND);
}
return new JsonResult([
'model_name' => $metadata->modelName,
'type' => $metadata->modelType->value,
'version' => $metadata->version->toString(),
'configuration' => $metadata->configuration,
'performance_metrics' => $metadata->performanceMetrics,
'created_at' => $metadata->createdAt->format('Y-m-d\TH:i:s\Z'),
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid version format',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/models/{modelName}/metrics', method: Method::GET)]
#[ApiEndpoint(
summary: 'Get model performance metrics',
description: 'Retrieve real-time performance metrics for a specific model',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'modelName',
in: 'path',
description: 'Model identifier',
required: true,
type: 'string',
example: 'fraud-detector',
)]
#[ApiParameter(
name: 'version',
in: 'query',
description: 'Model version',
required: false,
type: 'string',
example: '1.0.0',
)]
#[ApiParameter(
name: 'timeWindow',
in: 'query',
description: 'Time window in hours (default: 1)',
required: false,
type: 'integer',
example: 24,
)]
#[ApiResponse(
statusCode: 200,
description: 'Performance metrics retrieved successfully',
example: [
'model_name' => 'fraud-detector',
'version' => '1.0.0',
'time_window_hours' => 24,
'metrics' => [
'accuracy' => 0.92,
'precision' => 0.89,
'recall' => 0.94,
'f1_score' => 0.91,
'total_predictions' => 1523,
'average_confidence' => 0.85,
],
'confusion_matrix' => [
'true_positive' => 1234,
'true_negative' => 156,
'false_positive' => 89,
'false_negative' => 44,
],
],
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
public function getMetrics(string $modelName, HttpRequest $request): JsonResult
{
$versionString = $request->queryParameters['version'] ?? null;
$timeWindowHours = (int) ($request->queryParameters['timeWindow'] ?? 1);
try {
if ($versionString !== null) {
$version = Version::fromString($versionString);
} else {
$metadata = $this->registry->getLatest($modelName);
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $modelName,
], Status::NOT_FOUND);
}
$version = $metadata->version;
}
$timeWindow = Duration::fromHours($timeWindowHours);
$metrics = $this->performanceMonitor->getCurrentMetrics(
$modelName,
$version,
$timeWindow
);
return new JsonResult([
'model_name' => $modelName,
'version' => $version->toString(),
'time_window_hours' => $timeWindowHours,
'metrics' => [
'accuracy' => $metrics['accuracy'],
'precision' => $metrics['precision'] ?? null,
'recall' => $metrics['recall'] ?? null,
'f1_score' => $metrics['f1_score'] ?? null,
'total_predictions' => $metrics['total_predictions'],
'average_confidence' => $metrics['average_confidence'] ?? null,
],
'confusion_matrix' => $metrics['confusion_matrix'] ?? null,
'timestamp' => date('c'),
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid parameters',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
#[Route(path: '/api/ml/models', method: Method::POST)]
#[ApiEndpoint(
summary: 'Register a new ML model',
description: 'Register a new machine learning model or version in the system',
tags: ['Machine Learning'],
)]
#[ApiRequestBody(
description: 'Model metadata for registration',
required: true,
example: [
'model_name' => 'fraud-detector',
'type' => 'supervised',
'version' => '2.0.0',
'configuration' => [
'threshold' => 0.75,
'algorithm' => 'xgboost',
'features' => 30,
],
'performance_metrics' => [
'accuracy' => 0.94,
'precision' => 0.91,
'recall' => 0.96,
],
],
)]
#[ApiResponse(
statusCode: 201,
description: 'Model registered successfully',
example: [
'model_name' => 'fraud-detector',
'version' => '2.0.0',
'created_at' => '2024-01-01T00:00:00Z',
'message' => 'Model registered successfully',
],
)]
#[ApiResponse(
statusCode: 400,
description: 'Invalid model data',
)]
#[ApiResponse(
statusCode: 409,
description: 'Model version already exists',
)]
public function registerModel(HttpRequest $request): JsonResult
{
try {
$data = $request->parsedBody->toArray();
// Validate required fields
if (!isset($data['model_name'], $data['type'], $data['version'])) {
return new JsonResult([
'error' => 'Missing required fields',
'required' => ['model_name', 'type', 'version'],
], Status::BAD_REQUEST);
}
// Parse model type
$modelType = match (strtolower($data['type'])) {
'supervised' => ModelType::SUPERVISED,
'unsupervised' => ModelType::UNSUPERVISED,
'reinforcement' => ModelType::REINFORCEMENT,
default => throw new \InvalidArgumentException("Invalid model type: {$data['type']}")
};
// Create metadata
$metadata = new ModelMetadata(
modelName: $data['model_name'],
modelType: $modelType,
version: Version::fromString($data['version']),
configuration: $data['configuration'] ?? [],
createdAt: Timestamp::now(),
performanceMetrics: $data['performance_metrics'] ?? []
);
// Check if already exists
$existing = $this->registry->get($metadata->modelName, $metadata->version);
if ($existing !== null) {
return new JsonResult([
'error' => 'Model version already exists',
'model_name' => $metadata->modelName,
'version' => $metadata->version->toString(),
], Status::CONFLICT);
}
// Register model
$this->registry->register($metadata);
return new JsonResult([
'model_name' => $metadata->modelName,
'version' => $metadata->version->toString(),
'type' => $metadata->modelType->value,
'created_at' => $metadata->createdAt->format('Y-m-d\TH:i:s\Z'),
'message' => 'Model registered successfully',
], Status::CREATED);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid model data',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
} catch (\Throwable $e) {
return new JsonResult([
'error' => 'Failed to register model',
'message' => $e->getMessage(),
], Status::INTERNAL_SERVER_ERROR);
}
}
#[Route(path: '/api/ml/models/{modelName}', method: Method::DELETE)]
#[ApiEndpoint(
summary: 'Unregister ML model',
description: 'Remove a specific version of an ML model from the registry',
tags: ['Machine Learning'],
)]
#[ApiParameter(
name: 'modelName',
in: 'path',
description: 'Model identifier',
required: true,
type: 'string',
example: 'fraud-detector',
)]
#[ApiParameter(
name: 'version',
in: 'query',
description: 'Model version to unregister',
required: true,
type: 'string',
example: '1.0.0',
)]
#[ApiResponse(
statusCode: 200,
description: 'Model unregistered successfully',
)]
#[ApiResponse(
statusCode: 404,
description: 'Model not found',
)]
public function unregisterModel(string $modelName, HttpRequest $request): JsonResult
{
$versionString = $request->queryParameters['version'] ?? null;
if ($versionString === null) {
return new JsonResult([
'error' => 'Version parameter is required',
], Status::BAD_REQUEST);
}
try {
$version = Version::fromString($versionString);
// Check if model exists
$metadata = $this->registry->get($modelName, $version);
if ($metadata === null) {
return new JsonResult([
'error' => 'Model not found',
'model_name' => $modelName,
'version' => $versionString,
], Status::NOT_FOUND);
}
// Unregister
$this->registry->unregister($modelName, $version);
return new JsonResult([
'message' => 'Model unregistered successfully',
'model_name' => $modelName,
'version' => $versionString,
]);
} catch (\InvalidArgumentException $e) {
return new JsonResult([
'error' => 'Invalid version format',
'message' => $e->getMessage(),
], Status::BAD_REQUEST);
}
}
}