apiLogger->send('openai', 'POST', $url, // NOTE(review): receiver is cut off before this chunk — presumably `$response = $this->`; verify in the full file
    // The actual HTTP request is passed as a closure, presumably so apiLogger
    // can time/log it around execution — TODO confirm apiLogger->send semantics.
    fn () => Http::timeout(15) // 15-second request timeout
        ->withToken($apiKey) // Bearer token auth
        ->post($url, [
            // Model name from config, falling back to gpt-4o-mini.
            'model' => config('services.openai.model', 'gpt-4o-mini'),
            // Structured Outputs: force the reply to conform to a strict JSON
            // schema so the decoded payload has a predictable shape.
            'response_format' => [ 'type' => 'json_schema', 'json_schema' => [
                'name' => 'oil_prediction',
                'strict' => true,
                'schema' => [
                    'type' => 'object',
                    'properties' => [
                        // Only these three direction values are permitted.
                        'direction' => ['type' => 'string', 'enum' => ['rising', 'falling', 'flat']],
                        'confidence' => ['type' => 'integer'],
                        'reasoning' => ['type' => 'string'],
                    ],
                    // All three keys must be present; no extras allowed.
                    'required' => ['direction', 'confidence', 'reasoning'],
                    'additionalProperties' => false,
                ],
            ], ],
            // Single user message built from the price history.
            'messages' => [[ 'role' => 'user', 'content' => $this->defaultPrompt($priceList), ]],
        ]));

// Non-2xx response: log the HTTP status and signal failure with null.
if (! $response->successful()) {
    Log::error(self::class.': request failed', ['status' => $response->status()]);
    return null;
}

// The assistant message content is itself a JSON string; decode it to an
// associative array. `?? '{}'` guards against a missing content path so
// json_decode receives a string (yielding [] rather than null from decoding).
$data = json_decode($response->json('choices.0.message.content') ?? '{}', true);

// Validate the decoded payload carries all three expected keys before
// returning it; otherwise log the malformed data and signal failure.
if (! isset($data['direction'], $data['confidence'], $data['reasoning'])) {
    Log::error(self::class.': unexpected response format', ['data' => $data]);
    return null;
}

// Returns array{direction: string, confidence: int, reasoning: string} on
// success, null on any failure path above.
return $data; } }