From 83af5944a32527fd8aa83537dd840d428af7f577 Mon Sep 17 00:00:00 2001
From: Junjie <fallin.jie@qq.com>
Date: 星期一, 16 三月 2026 13:25:15 +0800
Subject: [PATCH] Persist streamed reasoning content and support chat reset in WCS diagnosis
---
src/main/java/com/zy/ai/service/WcsDiagnosisService.java | 43 ++++++++++++++++++++++++++++++++++++++-----
1 file changed, 38 insertions(+), 5 deletions(-)
diff --git a/src/main/java/com/zy/ai/service/WcsDiagnosisService.java b/src/main/java/com/zy/ai/service/WcsDiagnosisService.java
index 3f65ed0..6288698 100644
--- a/src/main/java/com/zy/ai/service/WcsDiagnosisService.java
+++ b/src/main/java/com/zy/ai/service/WcsDiagnosisService.java
@@ -35,8 +35,14 @@
@Autowired
private AiChatStoreService aiChatStoreService;
- public void diagnoseStream(WcsDiagnosisRequest request, SseEmitter emitter) {
+ public void diagnoseStream(WcsDiagnosisRequest request,
+ String chatId,
+ boolean reset,
+ SseEmitter emitter) {
List<ChatCompletionRequest.Message> messages = new ArrayList<>();
+ if (chatId != null && !chatId.isEmpty() && reset) {
+ aiChatStoreService.deleteChat(chatId);
+ }
AiPromptTemplate promptTemplate = aiPromptTemplateService.resolvePublished(AiPromptScene.DIAGNOSE_STREAM.getCode());
ChatCompletionRequest.Message mcpSystem = new ChatCompletionRequest.Message();
@@ -47,7 +53,11 @@
mcpUser.setRole("user");
mcpUser.setContent(aiUtils.buildDiagnosisUserContentMcp(request));
- runMcpStreamingDiagnosis(messages, mcpSystem, mcpUser, promptTemplate, 0.3, 2048, emitter, null);
+ ChatCompletionRequest.Message storedUser = new ChatCompletionRequest.Message();
+ storedUser.setRole("user");
+ storedUser.setContent(buildDiagnoseDisplayPrompt(request));
+
+ runMcpStreamingDiagnosis(messages, mcpSystem, mcpUser, storedUser, promptTemplate, 0.3, 2048, emitter, chatId);
}
public void askStream(String prompt,
@@ -77,7 +87,7 @@
mcpUser.setRole("user");
mcpUser.setContent(prompt == null ? "" : prompt);
- runMcpStreamingDiagnosis(messages, mcpSystem, mcpUser, promptTemplate, 0.3, 2048, emitter, finalChatId);
+ runMcpStreamingDiagnosis(messages, mcpSystem, mcpUser, mcpUser, promptTemplate, 0.3, 2048, emitter, finalChatId);
}
public List<Map<String, Object>> listChats() {
@@ -101,6 +111,7 @@
private void runMcpStreamingDiagnosis(List<ChatCompletionRequest.Message> baseMessages,
ChatCompletionRequest.Message systemPrompt,
ChatCompletionRequest.Message userQuestion,
+ ChatCompletionRequest.Message storedUserQuestion,
AiPromptTemplate promptTemplate,
Double temperature,
Integer maxTokens,
@@ -115,6 +126,7 @@
throw new IllegalStateException("No MCP tools registered");
}
AgentUsageStats usageStats = new AgentUsageStats();
+ StringBuilder reasoningBuffer = new StringBuilder();
baseMessages.add(systemPrompt);
baseMessages.add(userQuestion);
@@ -123,11 +135,13 @@
messages.addAll(baseMessages);
sse(emitter, "<think>\\n姝e湪鍒濆鍖栬瘖鏂笌宸ュ叿鐜...\\n");
+ appendReasoning(reasoningBuffer, "姝e湪鍒濆鍖栬瘖鏂笌宸ュ叿鐜...\n");
int maxRound = 10;
int i = 0;
while(true) {
sse(emitter, "\\n姝e湪鍒嗘瀽锛堢" + (i + 1) + "杞級...\\n");
+ appendReasoning(reasoningBuffer, "\n姝e湪鍒嗘瀽锛堢" + (i + 1) + "杞級...\n");
ChatCompletionResponse resp = llmChatService.chatCompletion(messages, temperature, maxTokens, tools);
if (resp == null || resp.getChoices() == null || resp.getChoices().isEmpty() || resp.getChoices().get(0).getMessage() == null) {
throw new IllegalStateException("LLM returned empty response");
@@ -137,6 +151,7 @@
ChatCompletionRequest.Message assistant = resp.getChoices().get(0).getMessage();
messages.add(assistant);
sse(emitter, assistant.getContent());
+ appendReasoning(reasoningBuffer, assistant == null ? null : assistant.getContent());
List<ChatCompletionRequest.ToolCall> toolCalls = assistant.getTool_calls();
if (toolCalls == null || toolCalls.isEmpty()) {
@@ -147,6 +162,7 @@
String toolName = tc != null && tc.getFunction() != null ? tc.getFunction().getName() : null;
if (toolName == null || toolName.trim().isEmpty()) continue;
sse(emitter, "\\n鍑嗗璋冪敤宸ュ叿锛�" + toolName + "\\n");
+ appendReasoning(reasoningBuffer, "\n鍑嗗璋冪敤宸ュ叿锛�" + toolName + "\n");
JSONObject args = new JSONObject();
if (tc.getFunction() != null && tc.getFunction().getArguments() != null && !tc.getFunction().getArguments().trim().isEmpty()) {
try {
@@ -166,6 +182,7 @@
output = err;
}
sse(emitter, "\\n宸ュ叿杩斿洖锛屾鍦ㄧ户缁帹鐞�...\\n");
+ appendReasoning(reasoningBuffer, "\n宸ュ叿杩斿洖锛屾鍦ㄧ户缁帹鐞�...\n");
ChatCompletionRequest.Message toolMsg = new ChatCompletionRequest.Message();
toolMsg.setRole("tool");
toolMsg.setTool_call_id(tc == null ? null : tc.getId());
@@ -177,6 +194,7 @@
}
sse(emitter, "\\n姝e湪鏍规嵁鏁版嵁杩涜鍒嗘瀽...\\n</think>\\n\\n");
+ appendReasoning(reasoningBuffer, "\n姝e湪鏍规嵁鏁版嵁杩涜鍒嗘瀽...\n");
ChatCompletionRequest.Message diagnosisMessage = new ChatCompletionRequest.Message();
diagnosisMessage.setRole("system");
@@ -203,9 +221,10 @@
ChatCompletionRequest.Message a = new ChatCompletionRequest.Message();
a.setRole("assistant");
a.setContent(assistantBuffer.toString());
+ a.setReasoningContent(reasoningBuffer.toString());
aiChatStoreService.saveConversation(chatId,
- buildTitleFromPrompt(userQuestion.getContent()),
- userQuestion,
+ buildTitleFromPrompt(storedUserQuestion == null ? null : storedUserQuestion.getContent()),
+ storedUserQuestion == null ? userQuestion : storedUserQuestion,
a,
promptTemplate,
usageStats.getPromptTokens(),
@@ -260,6 +279,13 @@
payload.put("totalTokens", usageStats.getTotalTokens());
payload.put("llmCallCount", usageStats.getLlmCallCount());
return payload;
+ }
+
+ private void appendReasoning(StringBuilder reasoningBuffer, String text) { // accumulates streamed <think> text so it can be saved as the assistant's reasoningContent
+ if (reasoningBuffer == null || text == null || text.isEmpty()) { // best-effort collector: silently skips a null buffer or empty chunk, never throws mid-stream
+ return;
+ }
+ reasoningBuffer.append(text);
}
private void sendLargeText(SseEmitter emitter, String text) {
@@ -378,6 +404,13 @@
}
}
+ private String buildDiagnoseDisplayPrompt(WcsDiagnosisRequest request) { // builds the human-readable prompt stored with the chat history (distinct from the MCP user content sent to the LLM)
+ if (request == null || request.getAlarmMessage() == null || request.getAlarmMessage().trim().isEmpty()) {
+ return "瀵瑰綋鍓嶇郴缁熻繘琛屽贰妫�"; // NOTE(review): literal looks mojibake-damaged (UTF-8 decoded as GBK; likely 对当前系统进行巡检, "inspect the current system") and ends in U+FFFD — confirm the patch file's encoding before applying
+ }
+ return request.getAlarmMessage().trim(); // otherwise the trimmed alarm message doubles as the display prompt/title source
+ }
+
private static class AgentUsageStats {
private long promptTokens;
private long completionTokens;
--
Gitblit v1.9.1