| | |
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zy.ai.entity.ChatCompletionRequest;
import com.zy.ai.entity.ChatCompletionResponse;
import com.zy.ai.entity.LlmCallLog;
import com.zy.ai.entity.LlmRouteConfig;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
| | |
| | | @RequiredArgsConstructor |
| | | public class LlmChatService { |
| | | |
| | | private static final int LOG_TEXT_LIMIT = 16000; |
| | | |
| | | private final LlmRoutingService llmRoutingService; |
| | | private final LlmCallLogService llmCallLogService; |
| | | |
| | | @Value("${llm.base-url:}") |
| | | private String fallbackBaseUrl; |
| | |
| | | req.setMax_tokens(maxTokens != null ? maxTokens : 1024); |
| | | req.setStream(false); |
| | | |
| | | ChatCompletionResponse response = complete(req); |
| | | ChatCompletionResponse response = complete(req, "chat"); |
| | | |
| | | if (response == null || |
| | | response.getChoices() == null || |
| | |
| | | req.setTools(tools); |
| | | req.setTool_choice("auto"); |
| | | } |
| | | return complete(req); |
| | | return complete(req, tools != null && !tools.isEmpty() ? "chat_completion_tools" : "chat_completion"); |
| | | } |
| | | |
| | | public ChatCompletionResponse complete(ChatCompletionRequest req) { |
| | | return complete(req, "completion"); |
| | | } |
| | | |
| | | private ChatCompletionResponse complete(ChatCompletionRequest req, String scene) { |
| | | String traceId = nextTraceId(); |
| | | List<ResolvedRoute> routes = resolveRoutes(); |
| | | if (routes.isEmpty()) { |
| | | log.error("调用 LLM 失败: 未配置可用 LLM 路由"); |
| | | recordCall(traceId, scene, false, 1, null, false, null, 0L, req, null, "none", |
| | | new RuntimeException("未配置可用 LLM 路由"), "no_route"); |
| | | return null; |
| | | } |
| | | |
| | |
| | | for (int i = 0; i < routes.size(); i++) { |
| | | ResolvedRoute route = routes.get(i); |
| | | boolean hasNext = i < routes.size() - 1; |
| | | try { |
| | | ChatCompletionRequest routeReq = applyRoute(cloneRequest(req), route, false); |
| | | ChatCompletionResponse resp = callCompletion(route, routeReq); |
| | | long start = System.currentTimeMillis(); |
| | | try { |
| | | CompletionCallResult callResult = callCompletion(route, routeReq); |
| | | ChatCompletionResponse resp = callResult.response; |
| | | if (!isValidCompletion(resp)) { |
| | | throw new RuntimeException("LLM 响应为空"); |
| | | RuntimeException ex = new RuntimeException("LLM 响应为空"); |
| | | boolean canSwitch = shouldSwitch(route, false); |
| | | markFailure(route, ex, canSwitch); |
| | | recordCall(traceId, scene, false, i + 1, route, false, callResult.statusCode, |
| | | System.currentTimeMillis() - start, routeReq, callResult.payload, "error", ex, |
| | | "invalid_completion"); |
| | | if (hasNext && canSwitch) { |
| | | log.warn("LLM 切换到下一路由, current={}, reason={}", route.tag(), ex.getMessage()); |
| | | continue; |
| | | } |
| | | log.error("调用 LLM 失败, route={}", route.tag(), ex); |
| | | last = ex; |
| | | break; |
| | | } |
| | | markSuccess(route); |
| | | recordCall(traceId, scene, false, i + 1, route, true, callResult.statusCode, |
| | | System.currentTimeMillis() - start, routeReq, buildResponseText(resp, callResult.payload), |
| | | "none", null, null); |
| | | return resp; |
| | | } catch (Throwable ex) { |
| | | last = ex; |
| | | boolean quota = isQuotaExhausted(ex); |
| | | boolean canSwitch = shouldSwitch(route, quota); |
| | | markFailure(route, ex, canSwitch); |
| | | recordCall(traceId, scene, false, i + 1, route, false, statusCodeOf(ex), |
| | | System.currentTimeMillis() - start, routeReq, responseBodyOf(ex), |
| | | quota ? "quota" : "error", ex, null); |
| | | if (hasNext && canSwitch) { |
| | | log.warn("LLM 切换到下一路由, current={}, reason={}", route.tag(), errorText(ex)); |
| | | continue; |
| | |
| | | req.setMax_tokens(maxTokens != null ? maxTokens : 1024); |
| | | req.setStream(true); |
| | | |
| | | streamWithFailover(req, onChunk, onComplete, onError); |
| | | streamWithFailover(req, onChunk, onComplete, onError, "chat_stream"); |
| | | } |
| | | |
| | | public void chatStreamWithTools(List<ChatCompletionRequest.Message> messages, |
| | |
| | | req.setTools(tools); |
| | | req.setTool_choice("auto"); |
| | | } |
| | | streamWithFailover(req, onChunk, onComplete, onError); |
| | | streamWithFailover(req, onChunk, onComplete, onError, tools != null && !tools.isEmpty() ? "chat_stream_tools" : "chat_stream"); |
| | | } |
| | | |
| | | private void streamWithFailover(ChatCompletionRequest req, |
| | | Consumer<String> onChunk, |
| | | Runnable onComplete, |
| | | Consumer<Throwable> onError) { |
| | | Consumer<Throwable> onError, |
| | | String scene) { |
| | | String traceId = nextTraceId(); |
| | | List<ResolvedRoute> routes = resolveRoutes(); |
| | | if (routes.isEmpty()) { |
| | | recordCall(traceId, scene, true, 1, null, false, null, 0L, req, null, "none", |
| | | new RuntimeException("未配置可用 LLM 路由"), "no_route"); |
| | | if (onError != null) onError.accept(new RuntimeException("未配置可用 LLM 路由")); |
| | | return; |
| | | } |
| | | attemptStream(routes, 0, req, onChunk, onComplete, onError); |
| | | attemptStream(routes, 0, req, onChunk, onComplete, onError, traceId, scene); |
| | | } |
| | | |
| | | private void attemptStream(List<ResolvedRoute> routes, |
| | |
| | | ChatCompletionRequest req, |
| | | Consumer<String> onChunk, |
| | | Runnable onComplete, |
| | | Consumer<Throwable> onError) { |
| | | Consumer<Throwable> onError, |
| | | String traceId, |
| | | String scene) { |
| | | if (index >= routes.size()) { |
| | | if (onError != null) onError.accept(new RuntimeException("LLM 路由全部失败")); |
| | | return; |
| | |
| | | |
| | | ResolvedRoute route = routes.get(index); |
| | | ChatCompletionRequest routeReq = applyRoute(cloneRequest(req), route, true); |
| | | long start = System.currentTimeMillis(); |
| | | StringBuilder outputBuffer = new StringBuilder(); |
| | | |
| | | AtomicBoolean doneSeen = new AtomicBoolean(false); |
| | | AtomicBoolean errorSeen = new AtomicBoolean(false); |
| | |
| | | String content = delta.getString("content"); |
| | | if (content != null) { |
| | | queue.offer(content); |
| | | appendLimited(outputBuffer, content); |
| | | } |
| | | } |
| | | } |
| | |
| | | boolean quota = isQuotaExhausted(err); |
| | | boolean canSwitch = shouldSwitch(route, quota); |
| | | markFailure(route, err, canSwitch); |
| | | recordCall(traceId, scene, true, index + 1, route, false, statusCodeOf(err), |
| | | System.currentTimeMillis() - start, routeReq, outputBuffer.toString(), |
| | | quota ? "quota" : "error", err, "emitted=" + emitted.get()); |
| | | if (!emitted.get() && canSwitch && index < routes.size() - 1) { |
| | | log.warn("LLM 路由失败,自动切换,current={}, reason={}", route.tag(), errorText(err)); |
| | | attemptStream(routes, index + 1, req, onChunk, onComplete, onError); |
| | | attemptStream(routes, index + 1, req, onChunk, onComplete, onError, traceId, scene); |
| | | return; |
| | | } |
| | | if (onError != null) onError.accept(err); |
| | |
| | | doneSeen.set(true); |
| | | boolean canSwitch = shouldSwitch(route, false); |
| | | markFailure(route, ex, canSwitch); |
| | | recordCall(traceId, scene, true, index + 1, route, false, 200, |
| | | System.currentTimeMillis() - start, routeReq, outputBuffer.toString(), |
| | | "error", ex, "unexpected_stream_end"); |
| | | if (!emitted.get() && canSwitch && index < routes.size() - 1) { |
| | | log.warn("LLM 路由流异常完成,自动切换,current={}", route.tag()); |
| | | attemptStream(routes, index + 1, req, onChunk, onComplete, onError); |
| | | attemptStream(routes, index + 1, req, onChunk, onComplete, onError, traceId, scene); |
| | | } else { |
| | | if (onError != null) onError.accept(ex); |
| | | } |
| | | } else { |
| | | markSuccess(route); |
| | | recordCall(traceId, scene, true, index + 1, route, true, 200, |
| | | System.currentTimeMillis() - start, routeReq, outputBuffer.toString(), |
| | | "none", null, null); |
| | | doneSeen.set(true); |
| | | } |
| | | }); |
| | |
| | | .doOnError(ex -> log.error("调用 LLM 流式失败, route={}", route.tag(), ex)); |
| | | } |
| | | |
| | | private ChatCompletionResponse callCompletion(ResolvedRoute route, ChatCompletionRequest req) { |
| | | private CompletionCallResult callCompletion(ResolvedRoute route, ChatCompletionRequest req) { |
| | | WebClient client = WebClient.builder().baseUrl(route.baseUrl).build(); |
| | | RawCompletionResult raw = client.post() |
| | | .uri("/chat/completions") |
| | |
| | | if (raw.statusCode < 200 || raw.statusCode >= 300) { |
| | | throw new LlmRouteException(raw.statusCode, raw.payload); |
| | | } |
| | | return parseCompletion(raw.payload); |
| | | return new CompletionCallResult(raw.statusCode, raw.payload, parseCompletion(raw.payload)); |
| | | } |
| | | |
| | | private ChatCompletionRequest applyRoute(ChatCompletionRequest req, ResolvedRoute route, boolean stream) { |
| | |
| | | return r; |
| | | } |
| | | |
| | | private String nextTraceId() { |
| | | return UUID.randomUUID().toString().replace("-", ""); |
| | | } |
| | | |
| | | private void appendLimited(StringBuilder sb, String text) { |
| | | if (sb == null || text == null || text.isEmpty()) { |
| | | return; |
| | | } |
| | | int remain = LOG_TEXT_LIMIT - sb.length(); |
| | | if (remain <= 0) { |
| | | return; |
| | | } |
| | | if (text.length() <= remain) { |
| | | sb.append(text); |
| | | } else { |
| | | sb.append(text, 0, remain); |
| | | } |
| | | } |
| | | |
| | | private Integer statusCodeOf(Throwable ex) { |
| | | if (ex instanceof LlmRouteException) { |
| | | return ((LlmRouteException) ex).statusCode; |
| | | } |
| | | return null; |
| | | } |
| | | |
| | | private String responseBodyOf(Throwable ex) { |
| | | if (ex instanceof LlmRouteException) { |
| | | return cut(((LlmRouteException) ex).body, LOG_TEXT_LIMIT); |
| | | } |
| | | return null; |
| | | } |
| | | |
| | | private String buildResponseText(ChatCompletionResponse resp, String fallbackPayload) { |
| | | if (resp != null && resp.getChoices() != null && !resp.getChoices().isEmpty() |
| | | && resp.getChoices().get(0) != null && resp.getChoices().get(0).getMessage() != null) { |
| | | ChatCompletionRequest.Message m = resp.getChoices().get(0).getMessage(); |
| | | if (!isBlank(m.getContent())) { |
| | | return cut(m.getContent(), LOG_TEXT_LIMIT); |
| | | } |
| | | if (m.getTool_calls() != null && !m.getTool_calls().isEmpty()) { |
| | | return cut(JSON.toJSONString(m), LOG_TEXT_LIMIT); |
| | | } |
| | | } |
| | | return cut(fallbackPayload, LOG_TEXT_LIMIT); |
| | | } |
| | | |
| | | private String safeName(Throwable ex) { |
| | | return ex == null ? null : ex.getClass().getSimpleName(); |
| | | } |
| | | |
| | | private String cut(String text, int maxLen) { |
| | | if (text == null) return null; |
| | | String clean = text.replace("\r", " "); |
| | | return clean.length() > maxLen ? clean.substring(0, maxLen) : clean; |
| | | } |
| | | |
| | | private void recordCall(String traceId, |
| | | String scene, |
| | | boolean stream, |
| | | int attemptNo, |
| | | ResolvedRoute route, |
| | | boolean success, |
| | | Integer httpStatus, |
| | | long latencyMs, |
| | | ChatCompletionRequest req, |
| | | String response, |
| | | String switchMode, |
| | | Throwable err, |
| | | String extra) { |
| | | LlmCallLog item = new LlmCallLog(); |
| | | item.setTraceId(cut(traceId, 64)); |
| | | item.setScene(cut(scene, 64)); |
| | | item.setStream((short) (stream ? 1 : 0)); |
| | | item.setAttemptNo(attemptNo); |
| | | if (route != null) { |
| | | item.setRouteId(route.id); |
| | | item.setRouteName(cut(route.name, 128)); |
| | | item.setBaseUrl(cut(route.baseUrl, 255)); |
| | | item.setModel(cut(route.model, 128)); |
| | | } |
| | | item.setSuccess((short) (success ? 1 : 0)); |
| | | item.setHttpStatus(httpStatus); |
| | | item.setLatencyMs(latencyMs < 0 ? 0 : latencyMs); |
| | | item.setSwitchMode(cut(switchMode, 32)); |
| | | item.setRequestContent(cut(JSON.toJSONString(req), LOG_TEXT_LIMIT)); |
| | | item.setResponseContent(cut(response, LOG_TEXT_LIMIT)); |
| | | item.setErrorType(cut(safeName(err), 128)); |
| | | item.setErrorMessage(err == null ? null : cut(errorText(err), 1024)); |
| | | item.setExtra(cut(extra, 512)); |
| | | item.setCreateTime(new Date()); |
| | | llmCallLogService.saveIgnoreError(item); |
| | | } |
| | | |
| | | private static class CompletionCallResult { |
| | | private final int statusCode; |
| | | private final String payload; |
| | | private final ChatCompletionResponse response; |
| | | |
| | | private CompletionCallResult(int statusCode, String payload, ChatCompletionResponse response) { |
| | | this.statusCode = statusCode; |
| | | this.payload = payload; |
| | | this.response = response; |
| | | } |
| | | } |
| | | |
| | | private static class RawCompletionResult { |
| | | private final int statusCode; |
| | | private final String payload; |