feat: add LLM prediction providers with structured output support
Some checks failed
linter / quality (push) Has been cancelled
tests / ci (8.3) (push) Has been cancelled
tests / ci (8.4) (push) Has been cancelled
tests / ci (8.5) (push) Has been cancelled

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Ovidiu U
2026-04-07 14:42:44 +01:00
parent e9612666e3
commit 6a80c11f38
18 changed files with 1101 additions and 484 deletions

View File

@@ -5,6 +5,7 @@ use App\Enums\TrendDirection;
use App\Models\BrentPrice;
use App\Models\PricePrediction;
use App\Services\ApiLogger;
use App\Services\LlmPrediction\OilPredictionProvider;
use App\Services\OilPriceService;
use Illuminate\Foundation\Testing\RefreshDatabase;
use Illuminate\Support\Facades\Http;
@@ -13,7 +14,8 @@ uses(RefreshDatabase::class);
// Shared setup: forbid any real HTTP traffic and wire the service to a mocked
// LLM prediction provider so each test fully controls the prediction outcome.
beforeEach(function (): void {
    Http::preventStrayRequests();

    // NOTE: the previous bare `new OilPriceService(new ApiLogger)` instance was
    // dead code — it was overwritten immediately by the provider-backed
    // instance below, so it has been removed.
    $this->provider = Mockery::mock(OilPredictionProvider::class);
    $this->service = new OilPriceService(new ApiLogger, $this->provider);
});
// --- fetchBrentPrices ---
@@ -115,173 +117,9 @@ it('returns null when fewer than 14 prices are available for EWMA', function ():
expect($this->service->generateEwmaPrediction($prices))->toBeNull();
});
// --- generateLlmPrediction ---
// A well-formed Anthropic JSON response must yield a fully populated LLM prediction.
it('generates an LLM prediction and stores it', function (): void {
    $history = collect(range(1, 14))->map(function (int $day): BrentPrice {
        return new BrentPrice([
            'date' => now()->subDays(14 - $day)->toDateString(),
            'price_usd' => 75.0 + $day,
        ]);
    });

    $payload = '{"direction":"rising","confidence":72,"reasoning":"Consistent upward trend over 14 days."}';

    Http::fake([
        'https://api.anthropic.com/*' => Http::response([
            'content' => [['text' => $payload]],
        ]),
    ]);

    $result = $this->service->generateLlmPrediction($history);

    expect($result->direction)->toBe(TrendDirection::Rising)
        ->and($result->source)->toBe(PredictionSource::Llm)
        ->and($result->confidence)->toBe(72)
        ->and($result->reasoning)->toBe('Consistent upward trend over 14 days.');
});
// An LLM-reported confidence above the ceiling must be clamped to 85.
it('caps LLM confidence at 85', function (): void {
    $history = collect(range(1, 14))->map(fn (int $day): BrentPrice => new BrentPrice([
        'date' => now()->subDays(14 - $day)->toDateString(),
        'price_usd' => 75.0,
    ]));

    Http::fake([
        'https://api.anthropic.com/*' => Http::response([
            'content' => [
                ['text' => '{"direction":"falling","confidence":99,"reasoning":"Very confident."}'],
            ],
        ]),
    ]);

    $result = $this->service->generateLlmPrediction($history);

    expect($result->confidence)->toBe(85);
});
// A non-JSON model reply must not produce a prediction.
it('returns null when LLM returns malformed JSON', function (): void {
    $history = collect(range(1, 14))->map(fn (int $day): BrentPrice => new BrentPrice([
        'date' => now()->subDays(14 - $day)->toDateString(),
        'price_usd' => 75.0,
    ]));

    Http::fake([
        'https://api.anthropic.com/*' => Http::response([
            'content' => [['text' => 'Sorry, I cannot help with that.']],
        ]),
    ]);

    expect($this->service->generateLlmPrediction($history))->toBeNull();
});
// --- generateLlmPredictionWithContext ---
// The context-aware path must tag its result with the LlmWithContext source
// and complete in a single request when the model answers on the first turn.
it('generates LLM prediction with context and returns LlmWithContext source', function (): void {
    config(['services.anthropic.api_key' => 'test-key']);

    $history = collect(range(1, 20))->map(fn (int $day): BrentPrice => new BrentPrice([
        'date' => now()->subDays(20 - $day)->toDateString(),
        'price_usd' => 80.0 + $day * 0.5,
    ]));

    Http::fake([
        'https://api.anthropic.com/*' => Http::response([
            'content' => [['type' => 'text', 'text' => '{"direction":"rising","confidence":72,"reasoning":"OPEC+ extended cuts while prices trend upward."}']],
            'stop_reason' => 'end_turn',
        ]),
    ]);

    $result = $this->service->generateLlmPredictionWithContext($history);

    expect($result)->not->toBeNull()
        ->and($result->direction)->toBe(TrendDirection::Rising)
        ->and($result->confidence)->toBe(72)
        ->and($result->source)->toBe(PredictionSource::LlmWithContext)
        ->and($result->reasoning)->toBe('OPEC+ extended cuts while prices trend upward.');

    Http::assertSentCount(1);
});
// The context-aware request must advertise Anthropic's web_search tool.
it('sends web_search tool in the context prediction request', function (): void {
    config(['services.anthropic.api_key' => 'test-key']);

    $prices = collect(range(1, 20))->map(fn (int $i): BrentPrice => new BrentPrice([
        'date' => now()->subDays(20 - $i)->toDateString(),
        'price_usd' => 80.0,
    ]));

    Http::fake([
        'https://api.anthropic.com/*' => Http::response([
            'content' => [['type' => 'text', 'text' => '{"direction":"flat","confidence":50,"reasoning":"No clear trend."}']],
            'stop_reason' => 'end_turn',
        ]),
    ]);

    $this->service->generateLlmPredictionWithContext($prices);

    Http::assertSent(function ($request): bool {
        $tools = $request->data()['tools'] ?? [];

        // Guard with `?? null`: a tool entry without a 'type' key would
        // otherwise raise an "undefined array key" error (fatal under the
        // test suite's error-to-exception handler) instead of failing the
        // assertion cleanly.
        return collect($tools)->contains(
            fn (array $tool): bool => ($tool['type'] ?? null) === 'web_search_20250305'
        );
    });
});
// The context prompt must be free of pre-computed EWMA indicator text so the
// model reasons from raw prices and searched context only.
it('does not include EWMA indicators in the context prediction prompt', function (): void {
    config(['services.anthropic.api_key' => 'test-key']);

    $history = collect(range(1, 20))->map(fn (int $day): BrentPrice => new BrentPrice([
        'date' => now()->subDays(20 - $day)->toDateString(),
        'price_usd' => 80.0,
    ]));

    Http::fake([
        'https://api.anthropic.com/*' => Http::response([
            'content' => [['type' => 'text', 'text' => '{"direction":"flat","confidence":50,"reasoning":"No clear trend."}']],
            'stop_reason' => 'end_turn',
        ]),
    ]);

    $this->service->generateLlmPredictionWithContext($history);

    Http::assertSent(function ($request): bool {
        $prompt = $request->data()['messages'][0]['content'] ?? '';

        if (str_contains($prompt, 'EWMA')) {
            return false;
        }

        return ! str_contains($prompt, 'Pre-computed');
    });
});
// When the API pauses mid-turn for a server-side tool call, the service must
// issue a follow-up request and surface the final-turn answer.
it('continues on pause_turn and returns final answer', function (): void {
    config(['services.anthropic.api_key' => 'test-key']);

    $history = collect(range(1, 20))->map(fn (int $day): BrentPrice => new BrentPrice([
        'date' => now()->subDays(20 - $day)->toDateString(),
        'price_usd' => 80.0,
    ]));

    $pausedTurn = [
        'content' => [['type' => 'server_tool_use', 'id' => 'sttool_1', 'name' => 'web_search', 'input' => ['query' => 'Brent crude news']]],
        'stop_reason' => 'pause_turn',
    ];
    $finalTurn = [
        'content' => [['type' => 'text', 'text' => '{"direction":"falling","confidence":60,"reasoning":"Demand fears weigh on prices."}']],
        'stop_reason' => 'end_turn',
    ];

    Http::fake([
        'https://api.anthropic.com/*' => Http::sequence()->push($pausedTurn)->push($finalTurn),
    ]);

    $result = $this->service->generateLlmPredictionWithContext($history);

    expect($result)->not->toBeNull()
        ->and($result->direction)->toBe(TrendDirection::Falling);

    Http::assertSentCount(2);
});
// --- generatePrediction (orchestrator) ---
it('uses LLM with context when API key is configured', function (): void {
config(['services.anthropic.api_key' => 'test-key']);
it('stores both EWMA and LLM predictions when provider succeeds', function (): void {
BrentPrice::insert(
collect(range(1, 20))->map(fn (int $i) => [
'date' => now()->subDays(20 - $i)->toDateString(),
@@ -289,12 +127,14 @@ it('uses LLM with context when API key is configured', function (): void {
])->all()
);
Http::fake([
'https://api.anthropic.com/*' => Http::response([
'content' => [['type' => 'text', 'text' => '{"direction":"rising","confidence":70,"reasoning":"Trend is up."}']],
'stop_reason' => 'end_turn',
]),
]);
$this->provider->shouldReceive('predict')->once()->andReturn(new PricePrediction([
'predicted_for' => now()->toDateString(),
'source' => PredictionSource::LlmWithContext,
'direction' => TrendDirection::Rising,
'confidence' => 70,
'reasoning' => 'Trend is up.',
'generated_at' => now(),
]));
$prediction = $this->service->generatePrediction();
@@ -302,33 +142,31 @@ it('uses LLM with context when API key is configured', function (): void {
->and(PricePrediction::count())->toBe(2);
});
it('falls back to plain LLM when context method fails', function (): void {
config(['services.anthropic.api_key' => 'test-key']);
it('returns LLM prediction when provider succeeds', function (): void {
BrentPrice::insert(
collect(range(1, 20))->map(fn (int $i) => [
'date' => now()->subDays(20 - $i)->toDateString(),
'price_usd' => 75.0 + ($i * 0.8),
'price_usd' => 75.0 + $i,
])->all()
);
Http::fake([
'https://api.anthropic.com/*' => Http::sequence()
->push([], 500)
->push([
'content' => [['text' => '{"direction":"rising","confidence":70,"reasoning":"Trend up."}']],
]),
$llmPrediction = new PricePrediction([
'predicted_for' => now()->toDateString(),
'source' => PredictionSource::Llm,
'direction' => TrendDirection::Rising,
'confidence' => 65,
'reasoning' => 'Rising trend.',
'generated_at' => now(),
]);
$this->provider->shouldReceive('predict')->once()->andReturn($llmPrediction);
$prediction = $this->service->generatePrediction();
expect($prediction->source)->toBe(PredictionSource::Llm)
->and(PricePrediction::count())->toBe(2);
expect($prediction->source)->toBe(PredictionSource::Llm);
});
it('falls back to EWMA when both LLM methods fail', function (): void {
config(['services.anthropic.api_key' => 'test-key']);
it('falls back to EWMA when provider returns null', function (): void {
BrentPrice::insert(
collect(range(1, 20))->map(fn (int $i) => [
'date' => now()->subDays(20 - $i)->toDateString(),
@@ -336,9 +174,7 @@ it('falls back to EWMA when both LLM methods fail', function (): void {
])->all()
);
Http::fake([
'https://api.anthropic.com/*' => Http::response([], 500),
]);
$this->provider->shouldReceive('predict')->once()->andReturn(null);
$prediction = $this->service->generatePrediction();
@@ -352,6 +188,8 @@ it('returns null when there is insufficient price data', function (): void {
['date' => now()->subDay()->toDateString(), 'price_usd' => 76.0],
]);
$this->provider->shouldNotReceive('predict');
expect($this->service->generatePrediction())->toBeNull()
->and(PricePrediction::count())->toBe(0);
});