feat: add LLM prediction providers with structured output support
Some checks failed
linter / quality (push) Has been cancelled
tests / ci (8.3) (push) Has been cancelled
tests / ci (8.4) (push) Has been cancelled
tests / ci (8.5) (push) Has been cancelled

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Ovidiu U
2026-04-07 14:42:44 +01:00
parent e9612666e3
commit 6a80c11f38
18 changed files with 1101 additions and 484 deletions

View File

@@ -6,6 +6,7 @@ use App\Enums\PredictionSource;
use App\Enums\TrendDirection;
use App\Models\BrentPrice;
use App\Models\PricePrediction;
use App\Services\LlmPrediction\OilPredictionProvider;
use Illuminate\Support\Collection;
use Illuminate\Support\Facades\Http;
use Illuminate\Support\Facades\Log;
@@ -28,11 +29,6 @@ class OilPriceService
*/
private const int EWMA_MAX_CONFIDENCE = 65;
/**
* LLM confidence is capped — no model should be certain about oil prices.
*/
private const int LLM_MAX_CONFIDENCE = 85;
/**
* Minimum price rows needed before EWMA is meaningful.
*/
@@ -40,6 +36,7 @@ class OilPriceService
public function __construct(
private readonly ApiLogger $apiLogger,
private readonly OilPredictionProvider $provider,
) {}
/**
@@ -87,7 +84,7 @@ class OilPriceService
/**
* Generate predictions from all available sources and store each one.
* EWMA always runs. LLM runs when an API key is configured.
* EWMA always runs. LLM provider runs and returns null if not configured.
* Returns the highest-confidence prediction (LLM preferred over EWMA).
*/
public function generatePrediction(): ?PricePrediction
@@ -108,207 +105,15 @@ class OilPriceService
PricePrediction::create($ewma->toArray());
}
$llm = null;
$llm = $this->provider->predict($prices);
if (config('services.anthropic.api_key')) {
$llm = $this->generateLlmPredictionWithContext($prices);
$llm ??= $this->generateLlmPrediction($prices);
if ($llm !== null) {
PricePrediction::create($llm->toArray());personal_access_tokens
}
if ($llm !== null) {
PricePrediction::create($llm->toArray());
}
return $llm ?? $ewma;
}
/**
 * Option B — LLM prediction via the Anthropic Messages API.
 *
 * Sends the recent price history plus pre-computed EWMA indicators and asks
 * the model for a direction + confidence + one-sentence reasoning as JSON.
 *
 * @param  Collection<int, BrentPrice>  $prices  Recent price rows; sorted chronologically here.
 * @return PricePrediction|null  Unsaved prediction, or null on any request/parse failure.
 */
public function generateLlmPrediction(Collection $prices): ?PricePrediction
{
    $chronological = $prices->sortBy('date');

    // Pre-computed trend context: short (last 3), medium (last 7), and full-window EWMA.
    $ewma3 = $this->computeEwma($chronological->take(-3)->pluck('price_usd')->values()->all());
    $ewma7 = $this->computeEwma($chronological->take(-7)->pluck('price_usd')->values()->all());
    $ewma14 = $this->computeEwma($chronological->pluck('price_usd')->values()->all());

    $priceList = $chronological
        ->map(fn (BrentPrice $p) => "{$p->date->toDateString()}: \${$p->price_usd}")
        ->implode("\n");

    $prompt = <<<PROMPT
    You are analyzing Brent crude oil price data for a UK fuel price alert service.
    Your goal is to predict the short-term direction over the next 3-5 days.

    Recent Brent crude prices (USD/barrel):
    {$priceList}

    Pre-computed indicators:
    - 3-day EWMA: \${$ewma3}
    - 7-day EWMA: \${$ewma7}
    - 14-day EWMA: \${$ewma14}

    Respond with JSON only, no other text:
    {"direction": "rising|falling|flat", "confidence": 0-85, "reasoning": "one sentence"}
    PROMPT;

    $url = 'https://api.anthropic.com/v1/messages';

    try {
        $response = $this->apiLogger->send('anthropic', 'POST', $url, fn () => Http::timeout(15)
            ->withHeaders([
                'x-api-key' => config('services.anthropic.api_key'),
                'anthropic-version' => '2023-06-01',
            ])
            ->post($url, [
                'model' => config('services.anthropic.model', 'claude-haiku-4-5-20251001'),
                'max_tokens' => 256,
                'messages' => [
                    ['role' => 'user', 'content' => $prompt],
                ],
            ]));

        if (! $response->successful()) {
            Log::error('OilPriceService: Anthropic request failed', ['status' => $response->status()]);

            return null;
        }

        $text = $response->json('content.0.text') ?? '';
        $data = $this->extractJson($text);

        if (! isset($data['direction'], $data['confidence'], $data['reasoning'])) {
            Log::error('OilPriceService: unexpected LLM response format', ['text' => $text]);

            return null;
        }

        $direction = TrendDirection::tryFrom($data['direction']);

        if ($direction === null) {
            Log::error('OilPriceService: invalid direction in LLM response', ['direction' => $data['direction']]);

            return null;
        }

        // Clamp to [0, LLM_MAX_CONFIDENCE]: the prompt asks for 0-85 but the model
        // is not guaranteed to comply, and a negative value would be nonsense.
        $confidence = max(0, min((int) $data['confidence'], self::LLM_MAX_CONFIDENCE));

        return new PricePrediction([
            'predicted_for' => now()->toDateString(),
            'source' => PredictionSource::Llm,
            'direction' => $direction,
            'confidence' => $confidence,
            'reasoning' => $data['reasoning'],
            'generated_at' => now(),
        ]);
    } catch (Throwable $e) {
        Log::error('OilPriceService: generateLlmPrediction failed', ['error' => $e->getMessage()]);

        return null;
    }
}
/**
 * LLM prediction with 48h geopolitical context via Anthropic web search.
 *
 * Claude searches for recent oil/geopolitical news before answering.
 * Reasons from raw prices only — no pre-computed indicators in the prompt.
 * Long web-search turns may pause server-side (`stop_reason: pause_turn`);
 * the loop resends the conversation to continue, bounded at 5 rounds.
 *
 * @param  Collection<int, BrentPrice>  $prices  Recent price rows; sorted chronologically here.
 * @return PricePrediction|null  Unsaved prediction, or null on any request/parse failure.
 */
public function generateLlmPredictionWithContext(Collection $prices): ?PricePrediction
{
    $priceList = $prices->sortBy('date')
        ->map(fn (BrentPrice $p) => "{$p->date->toDateString()}: \${$p->price_usd}")
        ->implode("\n");

    $prompt = <<<PROMPT
    You are analyzing Brent crude oil price data for a UK fuel price alert service.
    Your goal is to predict the short-term direction over the next 3-5 days.

    First, search for recent news (last 48 hours) about:
    - Brent crude oil price movements
    - OPEC+ production decisions or announcements
    - Major geopolitical events affecting oil supply (Middle East, Russia, US sanctions)
    - Global demand signals (China economic data, US inventory reports)

    Then, combining the news context with the price history below, predict the direction.

    Recent Brent crude prices (USD/barrel):
    {$priceList}

    Respond with JSON only, no other text:
    {"direction": "rising|falling|flat", "confidence": 0-85, "reasoning": "one sentence combining price trend and key news factor"}
    PROMPT;

    $url = 'https://api.anthropic.com/v1/messages';
    $messages = [['role' => 'user', 'content' => $prompt]];

    try {
        // The loop always runs at least once, so $response is non-null after it.
        for ($i = 0, $response = null; $i < 5; $i++) {
            $response = $this->apiLogger->send('anthropic', 'POST', $url, fn () => Http::timeout(30)
                ->withHeaders([
                    'x-api-key' => config('services.anthropic.api_key'),
                    'anthropic-version' => '2023-06-01',
                ])
                ->post($url, [
                    'model' => config('services.anthropic.model', 'claude-sonnet-4-6'),
                    'max_tokens' => 1024,
                    'tools' => [['type' => 'web_search_20250305', 'name' => 'web_search']],
                    'messages' => $messages,
                ]));

            if (! $response->successful()) {
                Log::error('OilPriceService: Anthropic context request failed', [
                    'status' => $response->status(),
                    'body' => $response->body(),
                ]);

                return null;
            }

            if ($response->json('stop_reason') !== 'pause_turn') {
                break;
            }

            // Paused mid-turn: append the partial assistant content and resend to continue.
            $messages[] = ['role' => 'assistant', 'content' => $response->json('content')];
        }

        // Concatenate the text blocks; tool-use/search blocks are skipped.
        $content = $response->json('content') ?? [];
        $text = collect($content)
            ->filter(fn ($b) => ($b['type'] ?? '') === 'text')
            ->implode('text', '');

        $data = $this->extractJson($text);

        if (! isset($data['direction'], $data['confidence'], $data['reasoning'])) {
            Log::error('OilPriceService: unexpected context LLM response format', ['text' => $text]);

            return null;
        }

        $direction = TrendDirection::tryFrom($data['direction']);

        if ($direction === null) {
            Log::error('OilPriceService: invalid direction in context LLM response', ['direction' => $data['direction']]);

            return null;
        }

        // Clamp to [0, LLM_MAX_CONFIDENCE]: the prompt asks for 0-85 but the model
        // is not guaranteed to comply, and a negative value would be nonsense.
        $confidence = max(0, min((int) $data['confidence'], self::LLM_MAX_CONFIDENCE));

        return new PricePrediction([
            'predicted_for' => now()->toDateString(),
            'source' => PredictionSource::LlmWithContext,
            'direction' => $direction,
            'confidence' => $confidence,
            'reasoning' => $data['reasoning'],
            'generated_at' => now(),
        ]);
    } catch (Throwable $e) {
        Log::error('OilPriceService: generateLlmPredictionWithContext failed', ['error' => $e->getMessage()]);

        return null;
    }
}
/**
* Option A — EWMA-based trend extrapolation. Used as a fallback when the LLM is unavailable.
* Compares the 3-day EWMA against the 7-day EWMA to detect direction.
@@ -372,23 +177,6 @@ class OilPriceService
return round($ema, 4);
}
/**
 * Strip markdown code fences from a string and extract the first JSON object found.
 * Handles prose preambles that Claude sometimes adds before the JSON.
 *
 * @return array<string, mixed>|null  Decoded object, or null when no valid JSON object is present.
 */
private function extractJson(string $text): ?array
{
    // Remove opening ```/```json fences and closing ``` fences; preg_replace
    // returns null on PCRE error, so coalesce to keep the pipeline string-typed.
    $text = preg_replace('/^```(?:json)?\s*/m', '', trim($text)) ?? '';
    $text = preg_replace('/```\s*$/m', '', $text) ?? '';

    $start = strpos($text, '{');
    $end = strrpos($text, '}');

    // Require a well-ordered {...} span; strrpos can land before strpos (e.g. "} text {").
    if ($start === false || $end === false || $end < $start) {
        return null;
    }

    $decoded = json_decode(substr($text, $start, $end - $start + 1), true);

    // Only an object/array satisfies the ?array contract; `?:` would also have
    // wrongly collapsed a valid empty object ({} -> []) to null.
    return is_array($decoded) ? $decoded : null;
}
/**
* Map a % change magnitude to a 0–EWMA_MAX_CONFIDENCE confidence score.
* 1.5% → ~30, 3% → ~50, 5%+ → 65.