diff --git a/app/Services/BaccaratPredictionService.php b/app/Services/BaccaratPredictionService.php
index c109f54..3d6f0c4 100644
--- a/app/Services/BaccaratPredictionService.php
+++ b/app/Services/BaccaratPredictionService.php
@@ -125,8 +125,8 @@ PROMPT;
             ->timeout(self::REQUEST_TIMEOUT)
             ->post($endpoint, [
                 'model' => $config->model,
-                'temperature' => 0.3, // 预测任务偏确定性,使用较低温度
-                'max_tokens' => 10, // 只需要输出单个词
+                'temperature' => 0.3, // 预测任务偏确定性,使用较低温度
+                'max_tokens' => 1000, // 推理模型需要大量 token 完成思考后才输出 content
                 'messages' => [
                     [
                         'role' => 'system',
@@ -147,7 +147,20 @@ PROMPT;
         }
 
         $data = $response->json();
-        $reply = trim($data['choices'][0]['message']['content'] ?? '');
+        $message = $data['choices'][0]['message'] ?? [];
+
+        // Reasoning models (e.g. DeepSeek-R1, StepFun) may return a null/empty
+        // content field. Fall back to the last complete prediction keyword in
+        // the reasoning text. DeepSeek's own API exposes it as
+        // `reasoning_content`, while some gateways (e.g. OpenRouter) use `reasoning`.
+        $reply = trim($message['content'] ?? '');
+        if ($reply === '') {
+            $reasoning = $message['reasoning_content'] ?? $message['reasoning'] ?? '';
+            // Alternate on whole keywords so invalid runs such as "大小" or a
+            // lone "豹"/"子" can never be returned as a prediction.
+            if (preg_match_all('/豹子|大|小/u', $reasoning, $m)) {
+                $reply = end($m[0]);
+            }
+        }
 
         $promptTokens = $data['usage']['prompt_tokens'] ?? 0;
         $completionTokens = $data['usage']['completion_tokens'] ?? 0;