feat: add postcode resolution to /api/stations and Filament SearchResource
Some checks failed
linter / quality (push) Has been cancelled
tests / ci (8.3) (push) Has been cancelled
tests / ci (8.4) (push) Has been cancelled
tests / ci (8.5) (push) Has been cancelled

Extends NearbyStationsRequest to accept `postcode` (full or outcode) as an alternative to lat/lng. PostcodeService resolves it via postcodes.io and falls through to coordinates. Also adds SearchResource to the Filament admin panel for viewing logged search activity with fuel type filter and price/distance stats columns. Includes SQLite GREATEST/LEAST function polyfills in AppServiceProvider for test compatibility.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Ovidiu U
2026-04-05 19:10:25 +01:00
parent 3ccdc28763
commit 7101ed3550
15 changed files with 392 additions and 45 deletions

View File

@@ -86,8 +86,9 @@ class OilPriceService
}
/**
* Generate a prediction using LLM first, falling back to EWMA.
* Stores the result in price_predictions and returns it.
* Generate predictions from all available sources and store each one.
* EWMA always runs. LLM runs when an API key is configured.
* Returns the highest-confidence prediction (LLM preferred over EWMA).
*/
public function generatePrediction(): ?PricePrediction
{
@@ -101,20 +102,24 @@ class OilPriceService
return null;
}
$prediction = null;
$ewma = $this->generateEwmaPrediction($prices);
if ($ewma !== null) {
PricePrediction::create($ewma->toArray());
}
$llm = null;
if (config('services.anthropic.api_key')) {
$prediction = $this->generateLlmPredictionWithContext($prices);
$prediction ??= $this->generateLlmPrediction($prices);
$llm = $this->generateLlmPredictionWithContext($prices);
$llm ??= $this->generateLlmPrediction($prices);
if ($llm !== null) {
PricePrediction::create($llm->toArray());
}
}
$prediction ??= $this->generateEwmaPrediction($prices);
if ($prediction !== null) {
PricePrediction::create($prediction->toArray());
}
return $prediction;
return $llm ?? $ewma;
}
/**
@@ -171,9 +176,7 @@ class OilPriceService
}
$text = $response->json('content.0.text') ?? '';
$text = preg_replace('/^```(?:json)?\s*/m', '', trim($text));
$text = preg_replace('/```\s*$/m', '', $text);
$data = json_decode(trim($text), true);
$data = $this->extractJson($text);
if (! isset($data['direction'], $data['confidence'], $data['reasoning'])) {
Log::error('OilPriceService: unexpected LLM response format', ['text' => $text]);
@@ -237,10 +240,9 @@ class OilPriceService
$url = 'https://api.anthropic.com/v1/messages';
$messages = [['role' => 'user', 'content' => $prompt]];
$response = null;
try {
for ($i = 0; $i < 5; $i++) {
for ($i = 0, $response = null; $i < 5; $i++) {
$response = $this->apiLogger->send('anthropic', 'POST', $url, fn () => Http::timeout(30)
->withHeaders([
'x-api-key' => config('services.anthropic.api_key'),
@@ -249,12 +251,15 @@ class OilPriceService
->post($url, [
'model' => config('services.anthropic.model', 'claude-sonnet-4-6'),
'max_tokens' => 1024,
'tools' => [['type' => 'web_search_20260209', 'name' => 'web_search']],
'tools' => [['type' => 'web_search_20250305', 'name' => 'web_search']],
'messages' => $messages,
]));
if (! $response->successful()) {
Log::error('OilPriceService: Anthropic context request failed', ['status' => $response->status()]);
Log::error('OilPriceService: Anthropic context request failed', [
'status' => $response->status(),
'body' => $response->body(),
]);
return null;
}
@@ -266,12 +271,13 @@ class OilPriceService
$messages[] = ['role' => 'assistant', 'content' => $response->json('content')];
}
$text = collect($response->json('content') ?? [])
->firstWhere('type', 'text')['text'] ?? '';
$content = $response->json('content') ?? [];
$text = preg_replace('/^```(?:json)?\s*/m', '', trim($text));
$text = preg_replace('/```\s*$/m', '', $text);
$data = json_decode(trim($text), true);
$text = collect($content)
->filter(fn ($b) => ($b['type'] ?? '') === 'text')
->implode('text', '');
$data = $this->extractJson($text);
if (! isset($data['direction'], $data['confidence'], $data['reasoning'])) {
Log::error('OilPriceService: unexpected context LLM response format', ['text' => $text]);
@@ -366,6 +372,23 @@ class OilPriceService
return round($ema, 4);
}
/**
 * Extract the first JSON object embedded in an LLM response.
 *
 * Strips any markdown code fences (``` / ```json), then slices from the
 * first "{" to the last "}" so prose before or after the JSON is ignored.
 *
 * @param string $text Raw model output, possibly fenced and/or wrapped in prose.
 * @return array|null Decoded associative array, or null when no braces are
 *                    found or the slice is not valid (non-empty) JSON.
 */
private function extractJson(string $text): ?array
{
    // Remove opening fences first, then trailing fences — same order as the
    // original fence-stripping logic elsewhere in this class.
    $stripped = preg_replace('/```\s*$/m', '', preg_replace('/^```(?:json)?\s*/m', '', trim($text)));

    $open = strpos($stripped, '{');
    $close = strrpos($stripped, '}');

    if ($open === false || $close === false) {
        return null;
    }

    $decoded = json_decode(substr($stripped, $open, $close - $open + 1), true);

    // Falsy decodes (invalid JSON, or an empty "{}") are normalised to null,
    // matching the ?array contract callers check with isset().
    return $decoded ?: null;
}
/**
* Map a % change magnitude to a confidence score in the range 0–EWMA_MAX_CONFIDENCE.
* 1.5% ~30, 3% ~50, 5%+ 65.