feat: add LLM prediction providers with structured output support
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -2,6 +2,11 @@
|
||||
|
||||
namespace App\Providers;
|
||||
|
||||
use App\Services\ApiLogger;
|
||||
use App\Services\LlmPrediction\AnthropicPredictionProvider;
|
||||
use App\Services\LlmPrediction\GeminiPredictionProvider;
|
||||
use App\Services\LlmPrediction\OilPredictionProvider;
|
||||
use App\Services\LlmPrediction\OpenAiPredictionProvider;
|
||||
use Carbon\CarbonImmutable;
|
||||
use Illuminate\Support\Facades\Date;
|
||||
use Illuminate\Support\Facades\DB;
|
||||
@@ -15,7 +20,15 @@ class AppServiceProvider extends ServiceProvider
|
||||
*/
|
||||
/**
 * Register application services.
 *
 * Binds {@see OilPredictionProvider} to a concrete implementation selected
 * by the `services.llm.provider` config value ('openai' or 'gemini');
 * any other value — including null — falls back to the Anthropic provider.
 */
public function register(): void
{
    $this->app->bind(OilPredictionProvider::class, static function ($container) {
        // Shared dependency for every concrete provider.
        $apiLogger = $container->make(ApiLogger::class);

        // Strict match on the configured driver; unknown values use the default arm.
        $configuredDriver = config('services.llm.provider');

        return match ($configuredDriver) {
            'openai' => new OpenAiPredictionProvider($apiLogger),
            'gemini' => new GeminiPredictionProvider($apiLogger),
            default => new AnthropicPredictionProvider($apiLogger),
        };
    });
}
|
||||
|
||||
/**
|
||||
|
||||
Reference in New Issue
Block a user