chore: complete update

2025-07-17 16:24:20 +02:00
parent 899227b0a4
commit 64a7051137
1300 changed files with 85570 additions and 2756 deletions

63
src/Domain/AI/AiModel.php Normal file

@@ -0,0 +1,63 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI;
enum AiModel: string
{
// OpenAI Models
case GPT_35_TURBO = 'gpt-3.5-turbo';
case GPT_4 = 'gpt-4';
case GPT_4_TURBO = 'gpt-4-turbo';
case GPT_4O = 'gpt-4o';
case GPT_4O_MINI = 'gpt-4o-mini';
// GPT4All Models
case LLAMA2_7B = 'llama2-7b';
case MISTRAL_7B = 'mistral-7b-instruct';
case CODE_LLAMA_7B = 'codellama-7b-instruct';
case ORCA_MINI_3B = 'orca-mini-3b';
case VICUNA_7B = 'vicuna-7b';
// Ollama Models
case OLLAMA_LLAMA2 = 'llama2';
case OLLAMA_LLAMA2_13B = 'llama2:13b';
case OLLAMA_LLAMA2_70B = 'llama2:70b';
case OLLAMA_MISTRAL = 'mistral';
case OLLAMA_MISTRAL_7B = 'mistral:7b';
case OLLAMA_CODELLAMA = 'codellama';
case OLLAMA_CODELLAMA_13B = 'codellama:13b';
case OLLAMA_GEMMA = 'gemma';
case OLLAMA_GEMMA_7B = 'gemma:7b';
case OLLAMA_NEURAL_CHAT = 'neural-chat';
case OLLAMA_STARLING = 'starling-lm';
case OLLAMA_ORCA_MINI = 'orca-mini';
case OLLAMA_VICUNA = 'vicuna';
case OLLAMA_LLAMA3 = 'llama3';
case OLLAMA_LLAMA3_8B = 'llama3:8b';
case OLLAMA_LLAMA3_70B = 'llama3:70b';
case OLLAMA_PHI3 = 'phi3';
case OLLAMA_QWEN = 'qwen';
case OLLAMA_QWEN2_5 = 'qwen2.5:7b';
case OLLAMA_QWEN2_5_CODER = 'qwen2.5-coder:7b';
case OLLAMA_LLAMA3_1 = 'llama3.1:8b';
case OLLAMA_LLAMA3_2_3B = 'llama3.2:3b';
case OLLAMA_DEEPSEEK_CODER = 'deepseek-coder:6.7b';
public function getProvider(): AiProvider
{
return match($this) {
self::GPT_35_TURBO, self::GPT_4, self::GPT_4_TURBO, self::GPT_4O, self::GPT_4O_MINI => AiProvider::OPENAI,
self::LLAMA2_7B, self::MISTRAL_7B, self::CODE_LLAMA_7B, self::ORCA_MINI_3B, self::VICUNA_7B => AiProvider::GPT4ALL,
self::OLLAMA_LLAMA2, self::OLLAMA_LLAMA2_13B, self::OLLAMA_LLAMA2_70B,
self::OLLAMA_MISTRAL, self::OLLAMA_MISTRAL_7B, self::OLLAMA_CODELLAMA,
self::OLLAMA_CODELLAMA_13B, self::OLLAMA_GEMMA, self::OLLAMA_GEMMA_7B,
self::OLLAMA_NEURAL_CHAT, self::OLLAMA_STARLING, self::OLLAMA_ORCA_MINI,
self::OLLAMA_VICUNA, self::OLLAMA_LLAMA3, self::OLLAMA_LLAMA3_8B,
self::OLLAMA_LLAMA3_70B, self::OLLAMA_PHI3, self::OLLAMA_QWEN,
self::OLLAMA_QWEN2_5, self::OLLAMA_QWEN2_5_CODER, self::OLLAMA_LLAMA3_1, self::OLLAMA_LLAMA3_2_3B, self::OLLAMA_DEEPSEEK_CODER => AiProvider::OLLAMA,
};
}
}
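
A quick usage sketch, not part of the commit, showing how a caller might resolve the backing provider for a model; the variable names and the echoed output are illustrative assumptions based on the match arms above:

<?php
declare(strict_types=1);
use App\Domain\AI\AiModel;
use App\Domain\AI\AiProvider;

// 'llama3:8b' is the backing value of AiModel::OLLAMA_LLAMA3_8B.
$model = AiModel::from('llama3:8b');

// The match expression maps every Ollama-prefixed case to AiProvider::OLLAMA.
assert($model->getProvider() === AiProvider::OLLAMA);
echo $model->getProvider()->value; // "ollama"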

11
src/Domain/AI/AiProvider.php Normal file

@@ -0,0 +1,11 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI;
enum AiProvider: string
{
case OPENAI = 'openai';
case GPT4ALL = 'gpt4all';
case OLLAMA = 'ollama';
}

15
src/Domain/AI/AiQuery.php Normal file

@@ -0,0 +1,15 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI;
final class AiQuery
{
public function __construct(
public string $message,
public AiModel $model,
public array $messages = [],
public float $temperature = 0.7,
public ?int $maxTokens = null
) {}
}

9
src/Domain/AI/AiQueryHandlerInterface.php Normal file

@@ -0,0 +1,9 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI;
interface AiQueryHandlerInterface
{
public function __invoke(AiQuery $query): AiResponse;
}

14
src/Domain/AI/AiResponse.php Normal file

@@ -0,0 +1,14 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI;
final class AiResponse
{
public function __construct(
public string $content,
public string $provider,
public string $model,
public ?int $tokensUsed = null
) {}
}
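
A hypothetical handler, not in this commit, sketching how AiQuery, AiResponse and AiQueryHandlerInterface are meant to be wired together; the EchoAiQueryHandler class name and the App\Infrastructure\AI namespace are assumptions:

<?php
declare(strict_types=1);
namespace App\Infrastructure\AI;

use App\Domain\AI\AiQuery;
use App\Domain\AI\AiQueryHandlerInterface;
use App\Domain\AI\AiResponse;

// Illustrative only: echoes the prompt back instead of calling a real backend,
// but shows the query-in / response-out contract of the interface.
final class EchoAiQueryHandler implements AiQueryHandlerInterface
{
    public function __invoke(AiQuery $query): AiResponse
    {
        return new AiResponse(
            content: $query->message,
            provider: $query->model->getProvider()->value,
            model: $query->model->value,
            tokensUsed: null
        );
    }
}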

19
src/Domain/AI/Exception/AiProviderUnavailableException.php Normal file

@@ -0,0 +1,19 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI\Exception;
use App\Framework\Exception\FrameworkException;
use App\Domain\AI\AiProvider;
class AiProviderUnavailableException extends FrameworkException
{
public function __construct(AiProvider $provider, string $reason = '')
{
$message = "AI Provider '{$provider->value}' ist nicht verfügbar";
if ($reason) {
$message .= ": $reason";
}
parent::__construct($message);
}
}
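
A hedged example of a throw site, not taken from this commit; the reachability check and Ollama's default port 11434 are assumptions:

<?php
declare(strict_types=1);
use App\Domain\AI\AiProvider;
use App\Domain\AI\Exception\AiProviderUnavailableException;

// Hypothetical guard inside an Ollama client.
$socket = @fsockopen('127.0.0.1', 11434, $errno, $errstr, 1.0);
if ($socket === false) {
    throw new AiProviderUnavailableException(AiProvider::OLLAMA, "connection refused: $errstr");
}
fclose($socket);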

11
src/Domain/AI/Role.php Normal file

@@ -0,0 +1,11 @@
<?php
declare(strict_types=1);
namespace App\Domain\AI;
enum Role: string
{
case SYSTEM = 'system';
case USER = 'user';
case ASSISTANT = 'assistant';
}
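
Finally, a sketch of how Role might be combined with AiQuery to build a chat-style message list; the ['role' => ..., 'content' => ...] array shape is an assumption, since AiQuery::$messages is an untyped array in this commit:

<?php
declare(strict_types=1);
use App\Domain\AI\AiModel;
use App\Domain\AI\AiQuery;
use App\Domain\AI\Role;

$query = new AiQuery(
    message: 'Summarise the latest changelog entry.',
    model: AiModel::GPT_4O_MINI,
    messages: [
        ['role' => Role::SYSTEM->value, 'content' => 'You are a concise assistant.'],
        ['role' => Role::USER->value, 'content' => 'Summarise the latest changelog entry.'],
    ],
    temperature: 0.2
);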