fix: Gitea Traefik routing and connection pool optimization
Some checks failed
🚀 Build & Deploy Image / Determine Build Necessity (push) Failing after 10m14s
🚀 Build & Deploy Image / Build Runtime Base Image (push) Has been skipped
🚀 Build & Deploy Image / Build Docker Image (push) Has been skipped
🚀 Build & Deploy Image / Run Tests & Quality Checks (push) Has been skipped
🚀 Build & Deploy Image / Auto-deploy to Staging (push) Has been skipped
🚀 Build & Deploy Image / Auto-deploy to Production (push) Has been skipped
Security Vulnerability Scan / Check for Dependency Changes (push) Failing after 11m25s
Security Vulnerability Scan / Composer Security Audit (push) Has been cancelled
- Remove middleware reference from Gitea Traefik labels (caused routing issues)
- Optimize Gitea connection pool settings (MAX_IDLE_CONNS=30, authentication_timeout=180s)
- Add explicit service reference in Traefik labels
- Fix intermittent 504 timeouts by improving PostgreSQL connection handling

Fixes Gitea unreachability via git.michaelschiemer.de
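The Traefik labels and Gitea connection settings referenced above live in the deployment configuration, which is not part of the diff shown below. A minimal sketch of what the described change could look like in a docker-compose service definition (router/service names, the port, and the exact env keys are assumptions; only the values 30 and 180s come from the commit message):

    gitea:
      environment:
        # Connection pool tuning (assumed Gitea env naming; GITEA__section__KEY maps to app.ini)
        - GITEA__database__MAX_IDLE_CONNS=30
      labels:
        - "traefik.enable=true"
        - "traefik.http.routers.gitea.rule=Host(`git.michaelschiemer.de`)"
        # Explicit service reference replaces the removed middleware reference
        - "traefik.http.routers.gitea.service=gitea"
        - "traefik.http.services.gitea.loadbalancer.server.port=3000"

    db:
      # authentication_timeout is a PostgreSQL setting; whether it is set here
      # or elsewhere is not visible in this commit
      command: ["postgres", "-c", "authentication_timeout=180s"]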
This commit is contained in:
358
scripts/maintenance/backup.sh
Executable file
@@ -0,0 +1,358 @@
#!/bin/bash

# Production Backup Script
# Automated backup for database, Vault, and critical files
#
# Usage:
#   ./scripts/backup.sh [--full|--database-only|--vault-only] [--encrypt]
#
# Options:
#   --full            Full backup (database + vault + files)
#   --database-only   Database backup only
#   --vault-only      Vault backup only
#   --encrypt         Encrypt backup files with GPG

set -euo pipefail

PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
BACKUP_DIR="${PROJECT_ROOT}/../backups"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_PATH="${BACKUP_DIR}/${TIMESTAMP}"

# Default options
BACKUP_TYPE="full"
ENCRYPT=false

# Parse arguments
for arg in "$@"; do
    case $arg in
        --full)
            BACKUP_TYPE="full"
            ;;
        --database-only)
            BACKUP_TYPE="database"
            ;;
        --vault-only)
            BACKUP_TYPE="vault"
            ;;
        --encrypt)
            ENCRYPT=true
            ;;
    esac
done

# Colors
GREEN="\e[32m"
YELLOW="\e[33m"
RED="\e[31m"
BLUE="\e[34m"
RESET="\e[0m"

# Logging functions
log() {
    echo -e "${BLUE}[$(date +'%H:%M:%S')]${RESET} $1"
}

success() {
    echo -e "${GREEN}✅ $1${RESET}"
}

warning() {
    echo -e "${YELLOW}⚠️ $1${RESET}"
}

error() {
    echo -e "${RED}❌ $1${RESET}"
    exit 1
}

# Create backup directory
prepare_backup_dir() {
    log "Preparing backup directory..."

    mkdir -p "$BACKUP_PATH"
    success "Backup directory created: $BACKUP_PATH"
}

# Backup database
backup_database() {
    log "Backing up database..."

    cd "$PROJECT_ROOT"

    # Check if database is running
    if ! docker compose ps db | grep -q "Up"; then
        error "Database container is not running"
    fi

    # Dump database
    local db_backup="${BACKUP_PATH}/database.sql"

    if docker compose exec -T db pg_dump -U postgres michaelschiemer_prod > "$db_backup"; then
        # Compress
        gzip -f "$db_backup"
        local size=$(du -h "${db_backup}.gz" | cut -f1)

        success "Database backup created: database.sql.gz ($size)"

        # Encrypt if requested
        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "${db_backup}.gz"
        fi
    else
        error "Database backup failed"
    fi
}

# Backup Vault secrets
backup_vault() {
    log "Backing up Vault secrets..."

    cd "$PROJECT_ROOT"

    local vault_backup="${BACKUP_PATH}/vault_secrets.sql"

    # Export Vault tables
    if docker compose exec -T db pg_dump -U postgres michaelschiemer_prod \
        -t vault_secrets -t vault_audit > "$vault_backup"; then

        # Compress
        gzip -f "$vault_backup"
        local size=$(du -h "${vault_backup}.gz" | cut -f1)

        success "Vault backup created: vault_secrets.sql.gz ($size)"

        # Encrypt (recommended for Vault backups)
        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "${vault_backup}.gz"
        else
            warning "Vault backup is not encrypted - consider using --encrypt"
        fi
    else
        error "Vault backup failed"
    fi
}

# Backup environment configuration
backup_environment() {
    log "Backing up environment configuration..."

    if [[ -f "$PROJECT_ROOT/.env.production" ]]; then
        cp "$PROJECT_ROOT/.env.production" "${BACKUP_PATH}/env.production"
        success "Environment configuration backed up"

        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "${BACKUP_PATH}/env.production"
        fi
    else
        warning ".env.production not found"
    fi
}

# Backup storage directory
backup_storage() {
    log "Backing up storage directory..."

    if [[ -d "$PROJECT_ROOT/storage" ]]; then
        local storage_backup="${BACKUP_PATH}/storage.tar.gz"

        tar -czf "$storage_backup" -C "$PROJECT_ROOT" storage

        local size=$(du -h "$storage_backup" | cut -f1)
        success "Storage backup created: storage.tar.gz ($size)"

        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "$storage_backup"
        fi
    else
        warning "Storage directory not found"
    fi
}

# Backup uploaded files
backup_uploads() {
    log "Backing up uploaded files..."

    if [[ -d "$PROJECT_ROOT/public/uploads" ]]; then
        local uploads_backup="${BACKUP_PATH}/uploads.tar.gz"

        tar -czf "$uploads_backup" -C "$PROJECT_ROOT/public" uploads

        local size=$(du -h "$uploads_backup" | cut -f1)
        success "Uploads backup created: uploads.tar.gz ($size)"

        if [[ "$ENCRYPT" == "true" ]]; then
            encrypt_file "$uploads_backup"
        fi
    else
        log "No uploads directory found (skipping)"
    fi
}

# Encrypt file with GPG
encrypt_file() {
    local file="$1"

    log "Encrypting $file..."

    # Check if GPG is available
    if ! command -v gpg &> /dev/null; then
        warning "GPG not installed - skipping encryption"
        return 1
    fi

    # Encrypt with symmetric encryption (password-based)
    if gpg --symmetric --cipher-algo AES256 "$file"; then
        rm -f "$file"  # Remove unencrypted file
        success "File encrypted: ${file}.gpg"
    else
        error "Encryption failed"
    fi
}

# Create backup manifest
create_manifest() {
    log "Creating backup manifest..."

    local manifest="${BACKUP_PATH}/MANIFEST.txt"

    {
        echo "Backup Manifest"
        echo "==============="
        echo ""
        echo "Timestamp: $(date -Iseconds)"
        echo "Backup Type: $BACKUP_TYPE"
        echo "Encrypted: $ENCRYPT"
        echo ""
        echo "Contents:"
        echo ""
        find "$BACKUP_PATH" -type f -exec du -h {} \; | sort -rh
        echo ""
        echo "Total Size: $(du -sh "$BACKUP_PATH" | cut -f1)"
    } > "$manifest"

    success "Backup manifest created"
}

# Cleanup old backups
cleanup_old_backups() {
    log "Cleaning up old backups..."

    # Keep last 7 days of backups
    find "$BACKUP_DIR" -maxdepth 1 -type d -name "20*" -mtime +7 -exec rm -rf {} \;

    success "Old backups cleaned up (kept last 7 days)"
}

# Verify backup integrity
verify_backup() {
    log "Verifying backup integrity..."

    local all_valid=true

    # Verify gzip files
    for file in "$BACKUP_PATH"/*.gz; do
        if [[ -f "$file" ]]; then
            if gzip -t "$file" 2>/dev/null; then
                log "✓ $file is valid"
            else
                error "✗ $file is corrupted"
                all_valid=false
            fi
        fi
    done

    # Verify tar.gz files
    for file in "$BACKUP_PATH"/*.tar.gz; do
        if [[ -f "$file" ]]; then
            if tar -tzf "$file" &>/dev/null; then
                log "✓ $file is valid"
            else
                error "✗ $file is corrupted"
                all_valid=false
            fi
        fi
    done

    if [[ "$all_valid" == "true" ]]; then
        success "All backup files verified successfully"
    else
        error "Some backup files are corrupted"
    fi
}

# Display backup summary
display_summary() {
    echo ""
    echo -e "${GREEN}========================================${RESET}"
    echo -e "${GREEN} Backup Summary${RESET}"
    echo -e "${GREEN}========================================${RESET}"
    echo ""
    echo "📋 Backup Type: $BACKUP_TYPE"
    echo "⏰ Timestamp: $(date)"
    echo "📁 Location: $BACKUP_PATH"
    echo "🔒 Encrypted: $ENCRYPT"
    echo ""
    echo "📦 Backup Contents:"
    echo ""
    find "$BACKUP_PATH" -type f -exec du -h {} \; | sort -rh | head -10
    echo ""
    echo "💾 Total Size: $(du -sh "$BACKUP_PATH" | cut -f1)"
    echo ""
    echo "📝 Restoration Commands:"
    echo ""

    if [[ -f "${BACKUP_PATH}/database.sql.gz" ]]; then
        echo " Database:"
        echo " gunzip -c database.sql.gz | docker compose exec -T db psql -U postgres michaelschiemer_prod"
        echo ""
    fi

    if [[ -f "${BACKUP_PATH}/vault_secrets.sql.gz" ]]; then
        echo " Vault:"
        echo " gunzip -c vault_secrets.sql.gz | docker compose exec -T db psql -U postgres michaelschiemer_prod"
        echo ""
    fi

    if [[ -f "${BACKUP_PATH}/storage.tar.gz" ]]; then
        echo " Storage:"
        echo " tar -xzf storage.tar.gz -C /path/to/project"
        echo ""
    fi

    echo -e "${GREEN}========================================${RESET}"
}

# Main backup execution
main() {
    log "🔐 Starting production backup (type: $BACKUP_TYPE)..."
    echo ""

    prepare_backup_dir

    case "$BACKUP_TYPE" in
        full)
            backup_database
            backup_vault
            backup_environment
            backup_storage
            backup_uploads
            ;;
        database)
            backup_database
            ;;
        vault)
            backup_vault
            ;;
    esac

    create_manifest
    verify_backup
    cleanup_old_backups
    display_summary

    success "🎉 Backup completed successfully!"
}

# Run main
main "$@"
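A usage note for the script above: it is written for unattended runs, so it could be scheduled from cron roughly like this (the install path and schedule are assumptions, not part of the commit; --encrypt prompts interactively for a GPG passphrase and is therefore better suited to manual runs):

    # Hypothetical crontab entry: full backup every night at 02:30
    30 2 * * * cd /srv/michaelschiemer && ./scripts/maintenance/backup.sh --full >> /var/log/michaelschiemer-backup.log 2>&1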
@@ -19,7 +19,7 @@ use App\Framework\Discovery\Storage\DiscoveryStorageService;
 use App\Framework\Filesystem\FileScanner;
 use App\Framework\Filesystem\FileSystemService;
 use App\Framework\Logging\NullLogger;
-use App\Framework\Reflection\CachedReflectionProvider;
+use App\Framework\ReflectionLegacy\CachedReflectionProvider;
 
 echo "🚀 Bootstrapping Discovery System...\n\n";
 $totalStart = microtime(true);
26
scripts/maintenance/fix-worker-permissions.sh
Normal file
@@ -0,0 +1,26 @@
#!/bin/bash

echo "🔧 Fixing worker permissions..."

# Create directories if they don't exist
#mkdir -p ./storage/logs
#mkdir -p ./storage/cache
#mkdir -p ./src/Framework/CommandBus/storage/queue

# Fix permissions on the host system
echo "📁 Fixing host permissions..."
chmod -R 775 ../storage/ 2>/dev/null || true
chmod -R 775 ../src/Framework/CommandBus/storage/ 2>/dev/null || true

echo "✅ Host permissions fixed"

echo "🐳 Restarting worker container..."
docker-compose restart queue-worker

echo "⏱️ Waiting for worker to start..."
sleep 3

echo "📋 Worker logs:"
docker-compose logs --tail=20 queue-worker

echo "✅ Worker restarted successfully!"
40
scripts/maintenance/fix_admin_layout.sh
Executable file
@@ -0,0 +1,40 @@
#!/bin/bash

# Script to fix AdminLayoutProcessor usage across all admin controllers
# Replaces primitive obsession with proper Value Object pattern

files=(
    "src/Application/Admin/Content/ImageManagerController.php"
    "src/Application/Admin/Content/ImageSlotsController.php"
    "src/Application/Admin/Analytics/AnalyticsController.php"
    "src/Application/Admin/System/HealthController.php"
    "src/Application/Admin/System/PhpInfoController.php"
    "src/Application/Admin/System/EnvironmentController.php"
    "src/Application/Admin/System/PerformanceController.php"
    "src/Application/Admin/Development/WafTestController.php"
    "src/Application/Admin/Development/DesignSystemController.php"
    "src/Application/Admin/Development/StyleguideController.php"
    "src/Application/Admin/Development/RoutesController.php"
    "src/Application/Admin/Infrastructure/ServicesController.php"
    "src/Application/Admin/Infrastructure/CacheMetricsController.php"
    "src/Application/Admin/Infrastructure/LogViewerController.php"
    "src/Application/Admin/Infrastructure/RedisController.php"
)

for file in "${files[@]}"; do
    if [ -f "$file" ]; then
        echo "Fixing $file..."

        # Replace the method call pattern
        sed -i 's/\$layoutData = \$this->layoutProcessor->processAdminLayout(\$data);/\$finalData = \$this->layoutProcessor->processLayoutFromArray(\$data);/g' "$file"

        # Replace the ViewResult data pattern
        sed -i 's/data: array_merge(\$layoutData, \$data)/data: \$finalData/g' "$file"

        echo "Fixed $file"
    else
        echo "File not found: $file"
    fi
done

echo "All files processed!"
30
scripts/maintenance/generate-vapid-keys.php
Normal file
@@ -0,0 +1,30 @@
<?php

declare(strict_types=1);

require_once __DIR__ . '/vendor/autoload.php';

use App\Framework\WebPush\Services\VapidKeyGenerator;

echo "=== VAPID Key Generation for Web Push ===\n\n";

try {
    $generator = new VapidKeyGenerator();
    $keyPair = $generator->generate();

    echo "✅ VAPID keys generated successfully!\n\n";

    echo "--- Copy these to your .env file ---\n\n";
    echo "VAPID_PUBLIC_KEY={$keyPair->publicKey}\n";
    echo "VAPID_PRIVATE_KEY={$keyPair->privateKey}\n";
    echo "VAPID_SUBJECT=mailto:admin@example.com\n\n";

    echo "--- Security Notes ---\n";
    echo "• The PUBLIC key goes in your frontend JavaScript\n";
    echo "• The PRIVATE key must remain secret on your server\n";
    echo "• NEVER commit private keys to version control\n\n";

} catch (\Exception $e) {
    echo "❌ Failed to generate VAPID keys: {$e->getMessage()}\n";
    exit(1);
}
248
scripts/maintenance/seed-ml-models.php
Normal file
@@ -0,0 +1,248 @@
<?php

declare(strict_types=1);

require_once __DIR__ . '/../vendor/autoload.php';

use App\Framework\MachineLearning\ModelManagement\ModelRegistry;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelMetadata;
use App\Framework\MachineLearning\ModelManagement\ValueObjects\ModelType;
use App\Framework\MachineLearning\ModelManagement\ModelPerformanceMonitor;
use App\Framework\Core\ValueObjects\Version;
use App\Framework\Core\ValueObjects\Timestamp;
use App\Framework\Core\ValueObjects\Duration;
use App\Framework\Core\AppBootstrapper;
use App\Framework\Performance\EnhancedPerformanceCollector;
use App\Framework\DateTime\SystemClock;
use App\Framework\DateTime\SystemHighResolutionClock;
use App\Framework\Performance\MemoryMonitor;

echo "🌱 ML Models Seeder\n";
echo "==================\n\n";

// Bootstrap application
$basePath = dirname(__DIR__);
$clock = new SystemClock();
$highResClock = new SystemHighResolutionClock();
$memoryMonitor = new MemoryMonitor();
$collector = new EnhancedPerformanceCollector($clock, $highResClock, $memoryMonitor, enabled: true);
$bootstrapper = new AppBootstrapper($basePath, $collector, $memoryMonitor);
$container = $bootstrapper->bootstrapWorker();

/** @var ModelRegistry $registry */
$registry = $container->get(ModelRegistry::class);

/** @var ModelPerformanceMonitor $performanceMonitor */
$performanceMonitor = $container->get(ModelPerformanceMonitor::class);

// Sample Models to Seed
$models = [
    // 1. Fraud Detection Model (Supervised, Production)
    [
        'name' => 'fraud-detector',
        'type' => ModelType::SUPERVISED,
        'version' => '1.0.0',
        'environment' => 'production',
        'configuration' => [
            'threshold' => 0.75,
            'min_confidence' => 0.6,
            'feature_count' => 15,
            'algorithm' => 'random_forest',
        ],
        'metrics' => [
            'accuracy' => 0.94,
            'precision' => 0.91,
            'recall' => 0.89,
            'f1_score' => 0.90,
            'total_predictions' => 15234,
            'average_confidence' => 0.87,
            'confusion_matrix' => [
                'true_positive' => 1345,
                'true_negative' => 12789,
                'false_positive' => 567,
                'false_negative' => 533,
            ],
        ],
    ],

    // 2. Spam Classifier (Supervised, Production - Degraded)
    [
        'name' => 'spam-classifier',
        'type' => ModelType::SUPERVISED,
        'version' => '2.0.0',
        'environment' => 'production',
        'configuration' => [
            'threshold' => 0.80,
            'min_confidence' => 0.7,
            'feature_count' => 20,
            'algorithm' => 'gradient_boosting',
        ],
        'metrics' => [
            'accuracy' => 0.78, // Degraded performance
            'precision' => 0.82,
            'recall' => 0.71,
            'f1_score' => 0.76,
            'total_predictions' => 8923,
            'average_confidence' => 0.75,
            'confusion_matrix' => [
                'true_positive' => 892,
                'true_negative' => 6051,
                'false_positive' => 1234,
                'false_negative' => 746,
            ],
        ],
    ],

    // 3. User Segmentation (Unsupervised, Production)
    [
        'name' => 'user-segmentation',
        'type' => ModelType::UNSUPERVISED,
        'version' => '1.2.0',
        'environment' => 'production',
        'configuration' => [
            'n_clusters' => 5,
            'algorithm' => 'k_means',
            'feature_count' => 12,
        ],
        'metrics' => [
            'accuracy' => 0.88,
            'total_predictions' => 5678,
            'average_confidence' => 0.83,
            'silhouette_score' => 0.72,
        ],
    ],

    // 4. Anomaly Detection (Unsupervised, Production)
    [
        'name' => 'anomaly-detector',
        'type' => ModelType::UNSUPERVISED,
        'version' => '1.5.0',
        'environment' => 'production',
        'configuration' => [
            'contamination' => 0.1,
            'algorithm' => 'isolation_forest',
            'feature_count' => 10,
        ],
        'metrics' => [
            'accuracy' => 0.92,
            'total_predictions' => 12456,
            'average_confidence' => 0.85,
            'anomaly_rate' => 0.08,
        ],
    ],

    // 5. Recommendation Engine (Reinforcement, Development)
    [
        'name' => 'recommendation-engine',
        'type' => ModelType::REINFORCEMENT,
        'version' => '0.5.0',
        'environment' => 'development',
        'configuration' => [
            'learning_rate' => 0.001,
            'discount_factor' => 0.95,
            'exploration_rate' => 0.1,
            'algorithm' => 'q_learning',
        ],
        'metrics' => [
            'accuracy' => 0.67, // Still in development
            'total_predictions' => 2345,
            'average_confidence' => 0.62,
            'average_reward' => 3.42,
        ],
    ],

    // 6. Sentiment Analysis (Supervised, Staging)
    [
        'name' => 'sentiment-analyzer',
        'type' => ModelType::SUPERVISED,
        'version' => '2.1.0',
        'environment' => 'staging',
        'configuration' => [
            'threshold' => 0.65,
            'algorithm' => 'lstm',
            'feature_count' => 50,
            'max_sequence_length' => 100,
        ],
        'metrics' => [
            'accuracy' => 0.91,
            'precision' => 0.89,
            'recall' => 0.92,
            'f1_score' => 0.90,
            'total_predictions' => 7890,
            'average_confidence' => 0.86,
        ],
    ],
];

echo "Registering " . count($models) . " ML models...\n\n";

foreach ($models as $index => $modelData) {
    $modelNum = $index + 1;
    echo "[$modelNum/" . count($models) . "] Registering {$modelData['name']} v{$modelData['version']}...\n";

    try {
        // Create ModelMetadata
        $metadata = new ModelMetadata(
            modelName: $modelData['name'],
            modelType: $modelData['type'],
            version: Version::fromString($modelData['version']),
            configuration: $modelData['configuration'],
            performanceMetrics: [],
            createdAt: Timestamp::now(),
            deployedAt: $modelData['environment'] === 'production' ? Timestamp::now() : null,
            environment: $modelData['environment'],
            metadata: [
                'seeded_at' => date('Y-m-d H:i:s'),
                'description' => "Sample {$modelData['type']->value} model for testing",
            ]
        );

        // Register model
        $registry->register($metadata);

        // Track performance metrics using trackPrediction
        $performanceMonitor->trackPrediction(
            modelName: $modelData['name'],
            version: Version::fromString($modelData['version']),
            prediction: 1, // Dummy prediction
            actual: 1, // Dummy actual
            confidence: $modelData['metrics']['average_confidence']
        );

        // Update metrics manually to match our sample data
        if (isset($modelData['metrics']['confusion_matrix'])) {
            $cm = $modelData['metrics']['confusion_matrix'];
            // Track individual predictions to build up confusion matrix
            for ($i = 0; $i < $cm['true_positive']; $i++) {
                $performanceMonitor->trackPrediction(
                    modelName: $modelData['name'],
                    version: Version::fromString($modelData['version']),
                    prediction: 1,
                    actual: 1,
                    confidence: $modelData['metrics']['average_confidence']
                );
            }
        }

        echo " ✅ Successfully registered {$modelData['name']}\n";
        echo " - Type: {$modelData['type']->value}\n";
        echo " - Environment: {$modelData['environment']}\n";
        echo " - Accuracy: " . round($modelData['metrics']['accuracy'] * 100, 2) . "%\n";

        if ($modelData['metrics']['accuracy'] < 0.85) {
            echo " ⚠️ Warning: Degraded performance\n";
        }

        echo "\n";
    } catch (\Exception $e) {
        echo " ❌ Error: {$e->getMessage()}\n\n";
    }
}

echo "==================\n";
echo "✅ Seeding complete!\n\n";

echo "Next steps:\n";
echo "1. Visit https://localhost/admin/ml/dashboard to see the models\n";
echo "2. Check API endpoint: https://localhost/api/ml/dashboard\n";
echo "3. Verify foreach attribute rendering in Models Overview table\n";
247
scripts/maintenance/seed-notifications.php
Normal file
@@ -0,0 +1,247 @@
<?php

declare(strict_types=1);

require_once __DIR__ . '/../vendor/autoload.php';

use App\Framework\Core\AppBootstrapper;
use App\Framework\Performance\EnhancedPerformanceCollector;
use App\Framework\DateTime\SystemClock;
use App\Framework\DateTime\SystemHighResolutionClock;
use App\Framework\Performance\MemoryMonitor;
use App\Framework\Notification\Notification;
use App\Framework\Notification\Storage\NotificationRepository;
use App\Framework\Notification\ValueObjects\NotificationId;
use App\Framework\Notification\ValueObjects\NotificationPriority;
use App\Framework\Notification\ValueObjects\NotificationStatus;
use App\Framework\Notification\ValueObjects\NotificationChannel;
use App\Framework\Core\ValueObjects\Timestamp;

echo "🔔 Notifications Seeder\n";
echo "=====================\n\n";

// Bootstrap application
$basePath = dirname(__DIR__);
$clock = new SystemClock();
$highResClock = new SystemHighResolutionClock();
$memoryMonitor = new MemoryMonitor();
$collector = new EnhancedPerformanceCollector($clock, $highResClock, $memoryMonitor, enabled: true);
$bootstrapper = new AppBootstrapper($basePath, $collector, $memoryMonitor);
$container = $bootstrapper->bootstrapWorker();

/** @var NotificationRepository $repository */
$repository = $container->get(NotificationRepository::class);

// Sample notification types
final readonly class NotificationType implements App\Framework\Notification\ValueObjects\NotificationTypeInterface
{
    public function __construct(private string $value) {}

    public function toString(): string
    {
        return $this->value;
    }

    public function getDisplayName(): string
    {
        return match ($this->value) {
            'ml_performance_degradation' => 'ML Performance Degradation',
            'ml_model_deployed' => 'ML Model Deployed',
            'ml_training_complete' => 'ML Training Complete',
            'system_alert' => 'System Alert',
            'security_alert' => 'Security Alert',
            'info' => 'Information',
            default => ucwords(str_replace('_', ' ', $this->value)),
        };
    }

    public function isCritical(): bool
    {
        return in_array($this->value, [
            'ml_performance_degradation',
            'security_alert',
            'system_alert'
        ], true);
    }

    public function equals($other): bool
    {
        return $other instanceof self && $this->value === $other->value;
    }
}

// Sample notifications to create
$notifications = [
    // ML Performance Alerts
    [
        'type' => 'ml_performance_degradation',
        'title' => 'ML Model Performance Degradation Detected',
        'body' => 'The spam-classifier model (v2.0.0) is experiencing performance degradation. Current accuracy: 78% (below threshold of 85%). Immediate attention recommended.',
        'priority' => NotificationPriority::URGENT,
        'action_url' => '/admin/ml/models/spam-classifier',
        'action_label' => 'View Model Details',
        'created_offset' => -7200, // 2 hours ago
    ],
    [
        'type' => 'ml_model_deployed',
        'title' => 'New ML Model Deployed Successfully',
        'body' => 'Fraud detector model v1.0.0 has been successfully deployed to production. Initial accuracy: 94%. Monitoring active.',
        'priority' => NotificationPriority::NORMAL,
        'action_url' => '/admin/ml/models/fraud-detector',
        'action_label' => 'View Deployment',
        'created_offset' => -3600, // 1 hour ago
    ],
    [
        'type' => 'ml_training_complete',
        'title' => 'Model Training Completed',
        'body' => 'Sentiment analyzer training completed successfully. New version 2.1.0 ready for deployment. Validation accuracy: 91%.',
        'priority' => NotificationPriority::HIGH,
        'action_url' => '/admin/ml/models/sentiment-analyzer',
        'action_label' => 'Review & Deploy',
        'created_offset' => -1800, // 30 minutes ago
    ],

    // System Alerts
    [
        'type' => 'system_alert',
        'title' => 'High Memory Usage Detected',
        'body' => 'System memory usage exceeded 85% threshold. Current usage: 87%. Consider scaling resources or optimizing memory-intensive processes.',
        'priority' => NotificationPriority::HIGH,
        'action_url' => '/admin/performance',
        'action_label' => 'View Metrics',
        'created_offset' => -900, // 15 minutes ago
    ],
    [
        'type' => 'system_alert',
        'title' => 'Queue Backlog Warning',
        'body' => 'Job queue backlog detected. 1,234 pending jobs in queue. Processing rate: 45 jobs/minute. Estimated clearance time: 27 minutes.',
        'priority' => NotificationPriority::NORMAL,
        'action_url' => '/admin/queue',
        'action_label' => 'View Queue',
        'created_offset' => -600, // 10 minutes ago
    ],

    // Security Alerts
    [
        'type' => 'security_alert',
        'title' => 'Suspicious Login Attempts Detected',
        'body' => 'Multiple failed login attempts detected from IP 203.0.113.42. Rate limiting applied. Review access logs for potential security threat.',
        'priority' => NotificationPriority::URGENT,
        'action_url' => '/admin/security/logs',
        'action_label' => 'View Security Logs',
        'created_offset' => -300, // 5 minutes ago
    ],
    [
        'type' => 'security_alert',
        'title' => 'WAF Blocked Malicious Request',
        'body' => 'Web Application Firewall blocked SQL injection attempt. Attack pattern detected: UNION SELECT. Source IP: 198.51.100.10.',
        'priority' => NotificationPriority::HIGH,
        'action_url' => '/admin/security/waf',
        'action_label' => 'View WAF Logs',
        'created_offset' => -120, // 2 minutes ago
    ],

    // Info Notifications
    [
        'type' => 'info',
        'title' => 'System Backup Completed',
        'body' => 'Daily system backup completed successfully. Backup size: 2.3 GB. Next scheduled backup: Tomorrow at 2:00 AM.',
        'priority' => NotificationPriority::LOW,
        'action_url' => '/admin/backups',
        'action_label' => 'View Backups',
        'created_offset' => -86400, // 1 day ago
    ],
    [
        'type' => 'info',
        'title' => 'Database Optimization Recommended',
        'body' => 'Database performance analysis suggests optimizing 3 tables. Estimated performance improvement: 15%. Schedule maintenance window for optimization.',
        'priority' => NotificationPriority::NORMAL,
        'action_url' => '/admin/database/optimization',
        'action_label' => 'View Recommendations',
        'created_offset' => -172800, // 2 days ago
    ],
    [
        'type' => 'info',
        'title' => 'Weekly Performance Report Available',
        'body' => 'Weekly system performance report is now available. Key metrics: 99.8% uptime, 145ms avg response time, 1.2M requests processed.',
        'priority' => NotificationPriority::LOW,
        'action_url' => '/admin/reports/weekly',
        'action_label' => 'View Report',
        'created_offset' => -259200, // 3 days ago
    ],
];

echo "Creating " . count($notifications) . " sample notifications...\n\n";

$createdCount = 0;
$now = time();

foreach ($notifications as $index => $notificationData) {
    $notificationNum = $index + 1;
    echo "[$notificationNum/" . count($notifications) . "] Creating: {$notificationData['title']}\n";

    try {
        // Create notification timestamp (offset from now)
        $createdAt = Timestamp::fromTimestamp($now + $notificationData['created_offset']);

        // For recent notifications (< 1 hour ago), leave unread
        // For older notifications, mark some as read
        $isRecent = abs($notificationData['created_offset']) < 3600;
        $shouldBeRead = !$isRecent && (($index % 3) === 0); // Mark every 3rd older notification as read

        $notification = new Notification(
            id: NotificationId::generate(),
            recipientId: 'admin',
            type: new NotificationType($notificationData['type']),
            title: $notificationData['title'],
            body: $notificationData['body'],
            createdAt: $createdAt,
            data: [],
            channels: [NotificationChannel::DATABASE],
            priority: $notificationData['priority'],
            status: $shouldBeRead ? NotificationStatus::READ : NotificationStatus::SENT,
            sentAt: $createdAt,
            readAt: $shouldBeRead ? Timestamp::fromTimestamp($now + $notificationData['created_offset'] + 300) : null,
            actionUrl: $notificationData['action_url'],
            actionLabel: $notificationData['action_label']
        );

        $repository->save($notification);

        $createdCount++;

        $statusIcon = $shouldBeRead ? '✓' : '📬';
        $priorityLabel = $notificationData['priority']->value;

        echo " $statusIcon Successfully created ($priorityLabel priority, " . ($shouldBeRead ? 'read' : 'unread') . ")\n";
        echo " - Created: " . $createdAt->format('Y-m-d H:i:s') . "\n";

        if ($notificationData['action_url']) {
            echo " - Action: {$notificationData['action_label']} → {$notificationData['action_url']}\n";
        }

        echo "\n";

    } catch (\Exception $e) {
        echo " ❌ Error: {$e->getMessage()}\n\n";
    }
}

echo "=====================\n";
echo "✅ Seeding complete!\n\n";

echo "Summary:\n";
echo "- Total notifications created: $createdCount\n";

// Get current stats
$unreadCount = $repository->countUnreadByUser('admin');
echo "- Unread notifications: $unreadCount\n";

$allNotifications = $repository->findByUser('admin', limit: 100);
echo "- Total notifications for admin: " . count($allNotifications) . "\n\n";

echo "Next steps:\n";
echo "1. Visit https://localhost/admin/notifications to view the notifications\n";
echo "2. Test mark as read functionality\n";
echo "3. Test mark all as read functionality\n";
echo "4. Verify unread badge updates in real-time\n";