From 8636ff97bffec9f2130628bf09c9d0fbb371e2bc Mon Sep 17 00:00:00 2001
From: Junjie <fallin.jie@qq.com>
Date: 星期二, 10 三月 2026 16:53:24 +0800
Subject: [PATCH] #
---
src/main/java/com/zy/ai/service/LlmChatService.java | 795 ++++++++++++++++++++++++++++++--------------------------
 1 file changed, 426 insertions(+), 369 deletions(-)
diff --git a/src/main/java/com/zy/ai/service/LlmChatService.java b/src/main/java/com/zy/ai/service/LlmChatService.java
index ddb333a..e2eddd6 100644
--- a/src/main/java/com/zy/ai/service/LlmChatService.java
+++ b/src/main/java/com/zy/ai/service/LlmChatService.java
@@ -1,45 +1,51 @@
package com.zy.ai.service;
+import com.alibaba.fastjson.JSON;
import com.zy.ai.entity.ChatCompletionRequest;
import com.zy.ai.entity.ChatCompletionResponse;
+import com.zy.ai.entity.LlmCallLog;
+import com.zy.ai.entity.LlmRouteConfig;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
-import org.springframework.web.reactive.function.client.WebClient;
-import reactor.core.publisher.Mono;
+import org.springframework.web.client.RestClientResponseException;
+import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
+import java.util.ArrayList;
+import java.util.Date;
import java.util.HashMap;
import java.util.List;
-import java.util.function.Consumer;
+import java.util.Locale;
+import java.util.UUID;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import com.alibaba.fastjson.JSON;
-import com.alibaba.fastjson.JSONArray;
-import com.alibaba.fastjson.JSONObject;
+import java.util.function.Consumer;
@Slf4j
@Service
@RequiredArgsConstructor
public class LlmChatService {
- private final WebClient llmWebClient;
+ private static final int LOG_TEXT_LIMIT = 16000;
- @Value("${llm.api-key}")
- private String apiKey;
+ private final LlmRoutingService llmRoutingService;
+ private final LlmCallLogService llmCallLogService;
+ private final LlmSpringAiClientService llmSpringAiClientService;
- @Value("${llm.model}")
- private String model;
+ @Value("${llm.base-url:}")
+ private String fallbackBaseUrl;
- @Value("${llm.pythonPlatformUrl}")
- private String pythonPlatformUrl;
+ @Value("${llm.api-key:}")
+ private String fallbackApiKey;
- @Value("${llm.thinking}")
- private String thinking;
+ @Value("${llm.model:}")
+ private String fallbackModel;
+
+ @Value("${llm.thinking:false}")
+ private String fallbackThinking;
/**
* 閫氱敤瀵硅瘽鏂规硶锛氫紶鍏� messages锛岃繑鍥炲ぇ妯″瀷鏂囨湰鍥炲
@@ -49,27 +55,12 @@
Integer maxTokens) {
ChatCompletionRequest req = new ChatCompletionRequest();
- req.setModel(model);
req.setMessages(messages);
req.setTemperature(temperature != null ? temperature : 0.3);
req.setMax_tokens(maxTokens != null ? maxTokens : 1024);
req.setStream(false);
- ChatCompletionResponse response = llmWebClient.post()
- .uri("/chat/completions")
- .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .accept(MediaType.APPLICATION_JSON, MediaType.TEXT_EVENT_STREAM)
- .bodyValue(req)
- .exchangeToMono(resp -> resp.bodyToFlux(String.class)
- .collectList()
- .map(list -> {
- String payload = String.join("\n\n", list);
- return parseCompletion(payload);
- }))
- .doOnError(ex -> log.error("璋冪敤 LLM 澶辫触", ex))
- .onErrorResume(ex -> Mono.empty())
- .block();
+ ChatCompletionResponse response = complete(req, "chat");
if (response == null ||
response.getChoices() == null ||
@@ -88,45 +79,81 @@
Integer maxTokens,
List<Object> tools) {
ChatCompletionRequest req = new ChatCompletionRequest();
- req.setModel(model);
req.setMessages(messages);
req.setTemperature(temperature != null ? temperature : 0.3);
req.setMax_tokens(maxTokens != null ? maxTokens : 1024);
req.setStream(false);
-
- if(thinking.equals("enable")) {
- ChatCompletionRequest.Thinking thinking = new ChatCompletionRequest.Thinking();
- thinking.setType("enable");
- req.setThinking(thinking);
- }
if (tools != null && !tools.isEmpty()) {
req.setTools(tools);
req.setTool_choice("auto");
}
- return complete(req);
+ return complete(req, tools != null && !tools.isEmpty() ? "chat_completion_tools" : "chat_completion");
}
public ChatCompletionResponse complete(ChatCompletionRequest req) {
- try {
- return llmWebClient.post()
- .uri("/chat/completions")
- .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .accept(MediaType.APPLICATION_JSON, MediaType.TEXT_EVENT_STREAM)
- .bodyValue(req)
- .exchangeToMono(resp -> resp.bodyToFlux(String.class)
- .collectList()
- .map(list -> {
- String payload = String.join("\n\n", list);
- return parseCompletion(payload);
- }))
- .doOnError(ex -> log.error("璋冪敤 LLM 澶辫触", ex))
- .onErrorResume(ex -> Mono.empty())
- .block();
- } catch (Exception e) {
- log.error("璋冪敤 LLM 澶辫触", e);
+ return complete(req, "completion");
+ }
+
+ private ChatCompletionResponse complete(ChatCompletionRequest req, String scene) {
+ String traceId = nextTraceId();
+ List<ResolvedRoute> routes = resolveRoutes();
+ if (routes.isEmpty()) {
+ log.error("璋冪敤 LLM 澶辫触: 鏈厤缃彲鐢� LLM 璺敱");
+ recordCall(traceId, scene, false, 1, null, false, null, 0L, req, null, "none",
+ new RuntimeException("鏈厤缃彲鐢� LLM 璺敱"), "no_route");
return null;
}
+
+ Throwable last = null;
+ for (int i = 0; i < routes.size(); i++) {
+ ResolvedRoute route = routes.get(i);
+ boolean hasNext = i < routes.size() - 1;
+ ChatCompletionRequest routeReq = applyRoute(cloneRequest(req), route, false);
+ long start = System.currentTimeMillis();
+ try {
+ CompletionCallResult callResult = callCompletion(route, routeReq);
+ ChatCompletionResponse resp = callResult.response;
+ if (!isValidCompletion(resp)) {
+ RuntimeException ex = new RuntimeException("LLM 鍝嶅簲涓虹┖");
+ boolean canSwitch = shouldSwitch(route, false);
+ markFailure(route, ex, canSwitch);
+ recordCall(traceId, scene, false, i + 1, route, false, callResult.statusCode,
+ System.currentTimeMillis() - start, routeReq, callResult.payload, "error", ex,
+ "invalid_completion");
+ if (hasNext && canSwitch) {
+ log.warn("LLM 鍒囨崲鍒颁笅涓�璺敱, current={}, reason={}", route.tag(), ex.getMessage());
+ continue;
+ }
+ log.error("璋冪敤 LLM 澶辫触, route={}", route.tag(), ex);
+ last = ex;
+ break;
+ }
+ markSuccess(route);
+ recordCall(traceId, scene, false, i + 1, route, true, callResult.statusCode,
+ System.currentTimeMillis() - start, routeReq, buildResponseText(resp, callResult.payload),
+ "none", null, null);
+ return resp;
+ } catch (Throwable ex) {
+ last = ex;
+ boolean quota = isQuotaExhausted(ex);
+ boolean canSwitch = shouldSwitch(route, quota);
+ markFailure(route, ex, canSwitch);
+ recordCall(traceId, scene, false, i + 1, route, false, statusCodeOf(ex),
+ System.currentTimeMillis() - start, routeReq, responseBodyOf(ex),
+ quota ? "quota" : "error", ex, null);
+ if (hasNext && canSwitch) {
+ log.warn("LLM 鍒囨崲鍒颁笅涓�璺敱, current={}, reason={}", route.tag(), errorText(ex));
+ continue;
+ }
+ log.error("璋冪敤 LLM 澶辫触, route={}", route.tag(), ex);
+ break;
+ }
+ }
+
+ if (last != null) {
+ log.error("璋冪敤 LLM 鍏ㄩ儴璺敱澶辫触: {}", errorText(last));
+ }
+ return null;
}
public void chatStream(List<ChatCompletionRequest.Message> messages,
@@ -137,92 +164,12 @@
Consumer<Throwable> onError) {
ChatCompletionRequest req = new ChatCompletionRequest();
- req.setModel(model);
req.setMessages(messages);
req.setTemperature(temperature != null ? temperature : 0.3);
req.setMax_tokens(maxTokens != null ? maxTokens : 1024);
req.setStream(true);
-
- Flux<String> flux = llmWebClient.post()
- .uri("/chat/completions")
- .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .accept(MediaType.TEXT_EVENT_STREAM)
- .bodyValue(req)
- .retrieve()
- .bodyToFlux(String.class)
- .doOnError(ex -> log.error("璋冪敤 LLM 娴佸紡澶辫触", ex));
-
- AtomicBoolean doneSeen = new AtomicBoolean(false);
- AtomicBoolean errorSeen = new AtomicBoolean(false);
- LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();
-
- Thread drain = new Thread(() -> {
- try {
- while (true) {
- String s = queue.poll(2, TimeUnit.SECONDS);
- if (s != null) {
- try { onChunk.accept(s); } catch (Exception ignore) {}
- }
- if (doneSeen.get() && queue.isEmpty()) {
- if (!errorSeen.get()) {
- try { if (onComplete != null) onComplete.run(); } catch (Exception ignore) {}
- }
- break;
- }
- }
- } catch (InterruptedException ignore) {
- ignore.printStackTrace();
- }
- });
- drain.setDaemon(true);
- drain.start();
-
- flux.subscribe(payload -> {
- if (payload == null || payload.isEmpty()) return;
- String[] events = payload.split("\\r?\\n\\r?\\n");
- for (String part : events) {
- String s = part;
- if (s == null || s.isEmpty()) continue;
- if (s.startsWith("data:")) {
- s = s.substring(5);
- if (s.startsWith(" ")) s = s.substring(1);
- }
- if ("[DONE]".equals(s.trim())) {
- doneSeen.set(true);
- continue;
- }
- try {
- JSONObject obj = JSON.parseObject(s);
- JSONArray choices = obj.getJSONArray("choices");
- if (choices != null && !choices.isEmpty()) {
- JSONObject c0 = choices.getJSONObject(0);
- JSONObject delta = c0.getJSONObject("delta");
- if (delta != null) {
- String content = delta.getString("content");
- if (content != null) {
- try { queue.offer(content); } catch (Exception ignore) {}
- }
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }, err -> {
- errorSeen.set(true);
- doneSeen.set(true);
- if (onError != null) onError.accept(err);
- }, () -> {
- if (!doneSeen.get()) {
- errorSeen.set(true);
- doneSeen.set(true);
- if (onError != null) onError.accept(new RuntimeException("LLM 娴佹剰澶栧畬鎴�"));
- } else {
- doneSeen.set(true);
- }
- });
+ streamWithFailover(req, onChunk, onComplete, onError, "chat_stream");
}
public void chatStreamWithTools(List<ChatCompletionRequest.Message> messages,
@@ -233,120 +180,54 @@
Runnable onComplete,
Consumer<Throwable> onError) {
ChatCompletionRequest req = new ChatCompletionRequest();
- req.setModel(model);
req.setMessages(messages);
req.setTemperature(temperature != null ? temperature : 0.3);
req.setMax_tokens(maxTokens != null ? maxTokens : 1024);
req.setStream(true);
- if(thinking.equals("enable")) {
- ChatCompletionRequest.Thinking thinking = new ChatCompletionRequest.Thinking();
- thinking.setType("enable");
- req.setThinking(thinking);
- }
if (tools != null && !tools.isEmpty()) {
req.setTools(tools);
req.setTool_choice("auto");
}
- Flux<String> flux = llmWebClient.post()
- .uri("/chat/completions")
- .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .accept(MediaType.TEXT_EVENT_STREAM)
- .bodyValue(req)
- .retrieve()
- .bodyToFlux(String.class)
- .doOnError(ex -> log.error("璋冪敤 LLM 娴佸紡澶辫触", ex));
-
- AtomicBoolean doneSeen = new AtomicBoolean(false);
- AtomicBoolean errorSeen = new AtomicBoolean(false);
- LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();
-
- Thread drain = new Thread(() -> {
- try {
- while (true) {
- String s = queue.poll(5, TimeUnit.SECONDS);
- if (s != null) {
- try { onChunk.accept(s); } catch (Exception ignore) {}
- }
- if (doneSeen.get() && queue.isEmpty()) {
- if (!errorSeen.get()) {
- try { if (onComplete != null) onComplete.run(); } catch (Exception ignore) {}
- }
- break;
- }
- }
- } catch (InterruptedException ignore) {
- ignore.printStackTrace();
- }
- });
- drain.setDaemon(true);
- drain.start();
-
- flux.subscribe(payload -> {
- if (payload == null || payload.isEmpty()) return;
- String[] events = payload.split("\\r?\\n\\r?\\n");
- for (String part : events) {
- String s = part;
- if (s == null || s.isEmpty()) continue;
- if (s.startsWith("data:")) {
- s = s.substring(5);
- if (s.startsWith(" ")) s = s.substring(1);
- }
- if ("[DONE]".equals(s.trim())) {
- doneSeen.set(true);
- continue;
- }
- try {
- JSONObject obj = JSON.parseObject(s);
- JSONArray choices = obj.getJSONArray("choices");
- if (choices != null && !choices.isEmpty()) {
- JSONObject c0 = choices.getJSONObject(0);
- JSONObject delta = c0.getJSONObject("delta");
- if (delta != null) {
- String content = delta.getString("content");
- if (content != null) {
- try { queue.offer(content); } catch (Exception ignore) {}
- }
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }, err -> {
- errorSeen.set(true);
- doneSeen.set(true);
- if (onError != null) onError.accept(err);
- }, () -> {
- if (!doneSeen.get()) {
- errorSeen.set(true);
- doneSeen.set(true);
- if (onError != null) onError.accept(new RuntimeException("LLM 娴佹剰澶栧畬鎴�"));
- } else {
- doneSeen.set(true);
- }
- });
+ streamWithFailover(req, onChunk, onComplete, onError, tools != null && !tools.isEmpty() ? "chat_stream_tools" : "chat_stream");
}
- public void chatStreamRunPython(String prompt, String chatId, Consumer<String> onChunk,
+ private void streamWithFailover(ChatCompletionRequest req,
+ Consumer<String> onChunk,
Runnable onComplete,
- Consumer<Throwable> onError) {
- HashMap<String, Object> req = new HashMap<>();
- req.put("prompt", prompt);
- req.put("chatId", chatId);
+ Consumer<Throwable> onError,
+ String scene) {
+ String traceId = nextTraceId();
+ List<ResolvedRoute> routes = resolveRoutes();
+ if (routes.isEmpty()) {
+ recordCall(traceId, scene, true, 1, null, false, null, 0L, req, null, "none",
+ new RuntimeException("鏈厤缃彲鐢� LLM 璺敱"), "no_route");
+ if (onError != null) onError.accept(new RuntimeException("鏈厤缃彲鐢� LLM 璺敱"));
+ return;
+ }
+ attemptStream(routes, 0, req, onChunk, onComplete, onError, traceId, scene);
+ }
- Flux<String> flux = llmWebClient.post()
- .uri(pythonPlatformUrl)
- .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
- .contentType(MediaType.APPLICATION_JSON)
- .accept(MediaType.TEXT_EVENT_STREAM)
- .bodyValue(req)
- .retrieve()
- .bodyToFlux(String.class)
- .doOnError(ex -> log.error("璋冪敤 LLM 娴佸紡澶辫触", ex));
+ private void attemptStream(List<ResolvedRoute> routes,
+ int index,
+ ChatCompletionRequest req,
+ Consumer<String> onChunk,
+ Runnable onComplete,
+ Consumer<Throwable> onError,
+ String traceId,
+ String scene) {
+ if (index >= routes.size()) {
+ if (onError != null) onError.accept(new RuntimeException("LLM 璺敱鍏ㄩ儴澶辫触"));
+ return;
+ }
+
+ ResolvedRoute route = routes.get(index);
+ ChatCompletionRequest routeReq = applyRoute(cloneRequest(req), route, true);
+ long start = System.currentTimeMillis();
+ StringBuilder outputBuffer = new StringBuilder();
AtomicBoolean doneSeen = new AtomicBoolean(false);
AtomicBoolean errorSeen = new AtomicBoolean(false);
+ AtomicBoolean emitted = new AtomicBoolean(false);
LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();
Thread drain = new Thread(() -> {
@@ -354,6 +235,7 @@
while (true) {
String s = queue.poll(2, TimeUnit.SECONDS);
if (s != null) {
+ emitted.set(true);
try {
onChunk.accept(s);
} catch (Exception ignore) {
@@ -370,152 +252,327 @@
}
}
} catch (InterruptedException ignore) {
- ignore.printStackTrace();
}
});
drain.setDaemon(true);
drain.start();
- flux.subscribe(payload -> {
+ Flux<String> streamSource = streamFluxWithSpringAi(route, routeReq);
+ streamSource.subscribe(payload -> {
if (payload == null || payload.isEmpty()) return;
- String[] events = payload.split("\\r?\\n\\r?\\n");
- for (String part : events) {
- String s = part;
- if (s == null || s.isEmpty()) continue;
- if (s.startsWith("data:")) {
- s = s.substring(5);
- if (s.startsWith(" ")) s = s.substring(1);
- }
- if ("[DONE]".equals(s.trim())) {
- doneSeen.set(true);
- continue;
- }
- if("<think>".equals(s.trim()) || "</think>".equals(s.trim())) {
- queue.offer(s.trim());
- continue;
- }
- try {
- JSONObject obj = JSON.parseObject(s);
- JSONArray choices = obj.getJSONArray("choices");
- if (choices != null && !choices.isEmpty()) {
- JSONObject c0 = choices.getJSONObject(0);
- JSONObject delta = c0.getJSONObject("delta");
- if (delta != null) {
- String content = delta.getString("content");
- if (content != null) {
- try {
- queue.offer(content);
- } catch (Exception ignore) {
- }
- }
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
+ queue.offer(payload);
+ appendLimited(outputBuffer, payload);
}, err -> {
errorSeen.set(true);
doneSeen.set(true);
+ boolean quota = isQuotaExhausted(err);
+ boolean canSwitch = shouldSwitch(route, quota);
+ markFailure(route, err, canSwitch);
+ recordCall(traceId, scene, true, index + 1, route, false, statusCodeOf(err),
+ System.currentTimeMillis() - start, routeReq, outputBuffer.toString(),
+ quota ? "quota" : "error", err, "emitted=" + emitted.get());
+ if (!emitted.get() && canSwitch && index < routes.size() - 1) {
+ log.warn("LLM 璺敱澶辫触锛岃嚜鍔ㄥ垏鎹紝current={}, reason={}", route.tag(), errorText(err));
+ attemptStream(routes, index + 1, req, onChunk, onComplete, onError, traceId, scene);
+ return;
+ }
if (onError != null) onError.accept(err);
}, () -> {
- if (!doneSeen.get()) {
- errorSeen.set(true);
- doneSeen.set(true);
- if (onError != null) onError.accept(new RuntimeException("LLM 娴佹剰澶栧畬鎴�"));
- } else {
- doneSeen.set(true);
- }
+ markSuccess(route);
+ recordCall(traceId, scene, true, index + 1, route, true, 200,
+ System.currentTimeMillis() - start, routeReq, outputBuffer.toString(),
+ "none", null, null);
+ doneSeen.set(true);
});
}
- private ChatCompletionResponse mergeSseChunk(ChatCompletionResponse acc, String payload) {
- if (payload == null || payload.isEmpty()) return acc;
- String[] events = payload.split("\\r?\\n\\r?\\n");
- for (String part : events) {
- String s = part;
- if (s == null || s.isEmpty()) continue;
- if (s.startsWith("data:")) {
- s = s.substring(5);
- if (s.startsWith(" ")) s = s.substring(1);
- }
- if ("[DONE]".equals(s.trim())) {
- continue;
- }
- try {
- JSONObject obj = JSON.parseObject(s);
- if (obj == null) continue;
- JSONArray choices = obj.getJSONArray("choices");
- if (choices != null && !choices.isEmpty()) {
- JSONObject c0 = choices.getJSONObject(0);
- if (acc.getChoices() == null || acc.getChoices().isEmpty()) {
- ChatCompletionResponse.Choice choice = new ChatCompletionResponse.Choice();
- ChatCompletionRequest.Message msg = new ChatCompletionRequest.Message();
- choice.setMessage(msg);
- java.util.ArrayList<ChatCompletionResponse.Choice> list = new java.util.ArrayList<>();
- list.add(choice);
- acc.setChoices(list);
- }
- ChatCompletionResponse.Choice choice = acc.getChoices().get(0);
- ChatCompletionRequest.Message msg = choice.getMessage();
- if (msg.getRole() == null || msg.getRole().isEmpty()) {
- msg.setRole("assistant");
- }
- JSONObject delta = c0.getJSONObject("delta");
- if (delta != null) {
- String c = delta.getString("content");
- if (c != null) {
- String prev = msg.getContent();
- msg.setContent(prev == null ? c : prev + c);
- }
- String role = delta.getString("role");
- if (role != null && !role.isEmpty()) msg.setRole(role);
- }
- JSONObject message = c0.getJSONObject("message");
- if (message != null) {
- String c = message.getString("content");
- if (c != null) {
- String prev = msg.getContent();
- msg.setContent(prev == null ? c : prev + c);
- }
- String role = message.getString("role");
- if (role != null && !role.isEmpty()) msg.setRole(role);
- }
- String fr = c0.getString("finish_reason");
- if (fr != null && !fr.isEmpty()) choice.setFinishReason(fr);
- }
- String id = obj.getString("id");
- if (id != null && !id.isEmpty()) acc.setId(id);
- Long created = obj.getLong("created");
- if (created != null) acc.setCreated(created);
- String object = obj.getString("object");
- if (object != null && !object.isEmpty()) acc.setObjectName(object);
- } catch (Exception ignore) {}
- }
- return acc;
+ private Flux<String> streamFluxWithSpringAi(ResolvedRoute route, ChatCompletionRequest req) {
+ return llmSpringAiClientService.streamCompletion(route.baseUrl, route.apiKey, req)
+ .doOnError(ex -> log.error("璋冪敤 Spring AI 娴佸紡澶辫触, route={}", route.tag(), ex));
}
- private ChatCompletionResponse parseCompletion(String payload) {
- if (payload == null) return null;
- try {
- ChatCompletionResponse r = JSON.parseObject(payload, ChatCompletionResponse.class);
- if (r != null && r.getChoices() != null && !r.getChoices().isEmpty() && r.getChoices().get(0).getMessage() != null) {
- return r;
- }
- } catch (Exception ignore) {}
- ChatCompletionResponse sse = mergeSseChunk(new ChatCompletionResponse(), payload);
- if (sse.getChoices() != null && !sse.getChoices().isEmpty() && sse.getChoices().get(0).getMessage() != null && sse.getChoices().get(0).getMessage().getContent() != null) {
- return sse;
+ private CompletionCallResult callCompletion(ResolvedRoute route, ChatCompletionRequest req) {
+ return callCompletionWithSpringAi(route, req);
+ }
+
+ private CompletionCallResult callCompletionWithSpringAi(ResolvedRoute route, ChatCompletionRequest req) {
+ LlmSpringAiClientService.CompletionCallResult result =
+ llmSpringAiClientService.callCompletion(route.baseUrl, route.apiKey, req);
+ return new CompletionCallResult(result.getStatusCode(), result.getPayload(), result.getResponse());
+ }
+
+ private ChatCompletionRequest applyRoute(ChatCompletionRequest req, ResolvedRoute route, boolean stream) {
+ req.setModel(route.model);
+ req.setStream(stream);
+ if (route.thinkingEnabled) {
+ ChatCompletionRequest.Thinking t = new ChatCompletionRequest.Thinking();
+ t.setType("enable");
+ req.setThinking(t);
+ } else {
+ req.setThinking(null);
}
- ChatCompletionResponse r = new ChatCompletionResponse();
- ChatCompletionResponse.Choice choice = new ChatCompletionResponse.Choice();
- ChatCompletionRequest.Message msg = new ChatCompletionRequest.Message();
- msg.setRole("assistant");
- msg.setContent(payload);
- choice.setMessage(msg);
- java.util.ArrayList<ChatCompletionResponse.Choice> list = new java.util.ArrayList<>();
- list.add(choice);
- r.setChoices(list);
- return r;
+ return req;
+ }
+
+ private ChatCompletionRequest cloneRequest(ChatCompletionRequest src) {
+ ChatCompletionRequest req = new ChatCompletionRequest();
+ req.setModel(src.getModel());
+ req.setMessages(src.getMessages());
+ req.setTemperature(src.getTemperature());
+ req.setMax_tokens(src.getMax_tokens());
+ req.setStream(src.getStream());
+ req.setTools(src.getTools());
+ req.setTool_choice(src.getTool_choice());
+ req.setThinking(src.getThinking());
+ return req;
+ }
+
+ private boolean isValidCompletion(ChatCompletionResponse response) {
+ if (response == null || response.getChoices() == null || response.getChoices().isEmpty()) {
+ return false;
+ }
+ ChatCompletionRequest.Message message = response.getChoices().get(0).getMessage();
+ if (message == null) {
+ return false;
+ }
+ if (!isBlank(message.getContent())) {
+ return true;
+ }
+ return message.getTool_calls() != null && !message.getTool_calls().isEmpty();
+ }
+
+ private boolean shouldSwitch(ResolvedRoute route, boolean quota) {
+ return quota ? route.switchOnQuota : route.switchOnError;
+ }
+
+ private void markSuccess(ResolvedRoute route) {
+ if (route.id != null) {
+ llmRoutingService.markSuccess(route.id);
+ }
+ }
+
+ private void markFailure(ResolvedRoute route, Throwable ex, boolean enterCooldown) {
+ if (route.id != null) {
+ llmRoutingService.markFailure(route.id, errorText(ex), enterCooldown, route.cooldownSeconds);
+ }
+ }
+
+ private String errorText(Throwable ex) {
+ if (ex == null) return "unknown";
+ if (ex instanceof RestClientResponseException) {
+ RestClientResponseException e = (RestClientResponseException) ex;
+ String body = e.getResponseBodyAsString();
+ if (body != null && body.length() > 240) {
+ body = body.substring(0, 240);
+ }
+ return "status=" + e.getStatusCode().value() + ", body=" + body;
+ }
+ if (ex instanceof WebClientResponseException) {
+ WebClientResponseException e = (WebClientResponseException) ex;
+ String body = e.getResponseBodyAsString();
+ if (body != null && body.length() > 240) {
+ body = body.substring(0, 240);
+ }
+ return "status=" + e.getStatusCode().value() + ", body=" + body;
+ }
+ Integer springAiStatus = llmSpringAiClientService.statusCodeOf(ex);
+ if (springAiStatus != null) {
+ return "status=" + springAiStatus + ", body=" + llmSpringAiClientService.responseBodyOf(ex, 240);
+ }
+ return ex.getMessage() == null ? ex.toString() : ex.getMessage();
+ }
+
+ private boolean isQuotaExhausted(Throwable ex) {
+ Integer status = statusCodeOf(ex);
+ if (status != null && status == 429) {
+ return true;
+ }
+ String text = responseBodyOf(ex);
+ text = text == null ? "" : text.toLowerCase(Locale.ROOT);
+ return text.contains("insufficient_quota")
+ || text.contains("quota")
+ || text.contains("浣欓")
+ || text.contains("鐢ㄩ噺")
+ || text.contains("瓒呴檺")
+ || text.contains("rate limit");
+ }
+
+ private List<ResolvedRoute> resolveRoutes() {
+ List<ResolvedRoute> routes = new ArrayList<>();
+ List<LlmRouteConfig> dbRoutes = llmRoutingService.listAvailableRoutes();
+ for (LlmRouteConfig c : dbRoutes) {
+ routes.add(ResolvedRoute.fromDb(c));
+ }
+ // 鍏煎锛氭暟鎹簱涓虹┖鏃讹紝鍥為��鍒� yml
+ if (routes.isEmpty() && !isBlank(fallbackBaseUrl) && !isBlank(fallbackApiKey) && !isBlank(fallbackModel)) {
+ routes.add(ResolvedRoute.fromFallback(fallbackBaseUrl, fallbackApiKey, fallbackModel, isFallbackThinkingEnabled()));
+ }
+ return routes;
+ }
+
+ private boolean isFallbackThinkingEnabled() {
+ String x = fallbackThinking == null ? "" : fallbackThinking.trim().toLowerCase();
+ return "true".equals(x) || "1".equals(x) || "enable".equals(x);
+ }
+
+ private boolean isBlank(String s) {
+ return s == null || s.trim().isEmpty();
+ }
+
+ private String nextTraceId() {
+ return UUID.randomUUID().toString().replace("-", "");
+ }
+
+ private void appendLimited(StringBuilder sb, String text) {
+ if (sb == null || text == null || text.isEmpty()) {
+ return;
+ }
+ int remain = LOG_TEXT_LIMIT - sb.length();
+ if (remain <= 0) {
+ return;
+ }
+ if (text.length() <= remain) {
+ sb.append(text);
+ } else {
+ sb.append(text, 0, remain);
+ }
+ }
+
+ private Integer statusCodeOf(Throwable ex) {
+ if (ex instanceof RestClientResponseException) {
+ return ((RestClientResponseException) ex).getStatusCode().value();
+ }
+ if (ex instanceof WebClientResponseException) {
+ return ((WebClientResponseException) ex).getStatusCode().value();
+ }
+ return llmSpringAiClientService.statusCodeOf(ex);
+ }
+
+ private String responseBodyOf(Throwable ex) {
+ if (ex instanceof RestClientResponseException) {
+ return cut(((RestClientResponseException) ex).getResponseBodyAsString(), LOG_TEXT_LIMIT);
+ }
+ if (ex instanceof WebClientResponseException) {
+ return cut(((WebClientResponseException) ex).getResponseBodyAsString(), LOG_TEXT_LIMIT);
+ }
+ return cut(llmSpringAiClientService.responseBodyOf(ex, LOG_TEXT_LIMIT), LOG_TEXT_LIMIT);
+ }
+
+ private String buildResponseText(ChatCompletionResponse resp, String fallbackPayload) {
+ if (resp != null && resp.getChoices() != null && !resp.getChoices().isEmpty()
+ && resp.getChoices().get(0) != null && resp.getChoices().get(0).getMessage() != null) {
+ ChatCompletionRequest.Message m = resp.getChoices().get(0).getMessage();
+ if (!isBlank(m.getContent())) {
+ return cut(m.getContent(), LOG_TEXT_LIMIT);
+ }
+ if (m.getTool_calls() != null && !m.getTool_calls().isEmpty()) {
+ return cut(JSON.toJSONString(m), LOG_TEXT_LIMIT);
+ }
+ }
+ return cut(fallbackPayload, LOG_TEXT_LIMIT);
+ }
+
+ private String safeName(Throwable ex) {
+ return ex == null ? null : ex.getClass().getSimpleName();
+ }
+
+ private String cut(String text, int maxLen) {
+ if (text == null) return null;
+ String clean = text.replace("\r", " ");
+ return clean.length() > maxLen ? clean.substring(0, maxLen) : clean;
+ }
+
+ private void recordCall(String traceId,
+ String scene,
+ boolean stream,
+ int attemptNo,
+ ResolvedRoute route,
+ boolean success,
+ Integer httpStatus,
+ long latencyMs,
+ ChatCompletionRequest req,
+ String response,
+ String switchMode,
+ Throwable err,
+ String extra) {
+ LlmCallLog item = new LlmCallLog();
+ item.setTraceId(cut(traceId, 64));
+ item.setScene(cut(scene, 64));
+ item.setStream((short) (stream ? 1 : 0));
+ item.setAttemptNo(attemptNo);
+ if (route != null) {
+ item.setRouteId(route.id);
+ item.setRouteName(cut(route.name, 128));
+ item.setBaseUrl(cut(route.baseUrl, 255));
+ item.setModel(cut(route.model, 128));
+ }
+ item.setSuccess((short) (success ? 1 : 0));
+ item.setHttpStatus(httpStatus);
+ item.setLatencyMs(latencyMs < 0 ? 0 : latencyMs);
+ item.setSwitchMode(cut(switchMode, 32));
+ item.setRequestContent(cut(JSON.toJSONString(req), LOG_TEXT_LIMIT));
+ item.setResponseContent(cut(response, LOG_TEXT_LIMIT));
+ item.setErrorType(cut(safeName(err), 128));
+ item.setErrorMessage(err == null ? null : cut(errorText(err), 1024));
+ item.setExtra(cut(extra, 512));
+ item.setCreateTime(new Date());
+ llmCallLogService.saveIgnoreError(item);
+ }
+
+ private static class CompletionCallResult {
+ private final int statusCode;
+ private final String payload;
+ private final ChatCompletionResponse response;
+
+ private CompletionCallResult(int statusCode, String payload, ChatCompletionResponse response) {
+ this.statusCode = statusCode;
+ this.payload = payload;
+ this.response = response;
+ }
+ }
+
+ private static class ResolvedRoute {
+ private Long id;
+ private String name;
+ private String baseUrl;
+ private String apiKey;
+ private String model;
+ private boolean thinkingEnabled;
+ private boolean switchOnQuota;
+ private boolean switchOnError;
+ private Integer cooldownSeconds;
+
+ private static ResolvedRoute fromDb(LlmRouteConfig c) {
+ ResolvedRoute r = new ResolvedRoute();
+ r.id = c.getId();
+ r.name = c.getName();
+ r.baseUrl = c.getBaseUrl();
+ r.apiKey = c.getApiKey();
+ r.model = c.getModel();
+ r.thinkingEnabled = c.getThinking() != null && c.getThinking() == 1;
+ r.switchOnQuota = c.getSwitchOnQuota() == null || c.getSwitchOnQuota() == 1;
+ r.switchOnError = c.getSwitchOnError() == null || c.getSwitchOnError() == 1;
+ r.cooldownSeconds = c.getCooldownSeconds();
+ return r;
+ }
+
+ private static ResolvedRoute fromFallback(String baseUrl, String apiKey, String model, boolean thinkingEnabled) {
+ ResolvedRoute r = new ResolvedRoute();
+ r.name = "fallback-yml";
+ r.baseUrl = baseUrl;
+ r.apiKey = apiKey;
+ r.model = model;
+ r.thinkingEnabled = thinkingEnabled;
+ r.switchOnQuota = true;
+ r.switchOnError = true;
+ r.cooldownSeconds = 300;
+ return r;
+ }
+
+ private String tag() {
+ String showName = name == null ? "unnamed" : name;
+ String showModel = model == null ? "" : (" model=" + model);
+ return showName + showModel;
+ }
}
}
--
Gitblit v1.9.1