From 4954d3978cf1967729a5a2d5b90f6baef18974da Mon Sep 17 00:00:00 2001
From: zhou zhou <3272660260@qq.com>
Date: 星期一, 23 三月 2026 09:35:10 +0800
Subject: [PATCH] #ai redis+页面优化
---
rsf-server/src/main/java/com/vincent/rsf/server/ai/service/impl/AiChatServiceImpl.java | 151 ++++++++++++++++++++++++++++++++++++++------------
1 files changed, 115 insertions(+), 36 deletions(-)
diff --git a/rsf-server/src/main/java/com/vincent/rsf/server/ai/service/impl/AiChatServiceImpl.java b/rsf-server/src/main/java/com/vincent/rsf/server/ai/service/impl/AiChatServiceImpl.java
index e7d842e..320421e 100644
--- a/rsf-server/src/main/java/com/vincent/rsf/server/ai/service/impl/AiChatServiceImpl.java
+++ b/rsf-server/src/main/java/com/vincent/rsf/server/ai/service/impl/AiChatServiceImpl.java
@@ -8,6 +8,7 @@
import com.vincent.rsf.server.ai.dto.AiChatErrorDto;
import com.vincent.rsf.server.ai.dto.AiChatMemoryDto;
import com.vincent.rsf.server.ai.dto.AiChatMessageDto;
+import com.vincent.rsf.server.ai.dto.AiChatModelOptionDto;
import com.vincent.rsf.server.ai.dto.AiChatRequest;
import com.vincent.rsf.server.ai.dto.AiChatRuntimeDto;
import com.vincent.rsf.server.ai.dto.AiChatStatusDto;
@@ -27,6 +28,7 @@
import com.vincent.rsf.server.ai.service.AiChatService;
import com.vincent.rsf.server.ai.service.AiChatMemoryService;
import com.vincent.rsf.server.ai.service.AiConfigResolverService;
+import com.vincent.rsf.server.ai.service.AiParamService;
import com.vincent.rsf.server.ai.service.MountedToolCallback;
import com.vincent.rsf.server.ai.service.McpMountRuntimeFactory;
import io.micrometer.observation.ObservationRegistry;
@@ -78,8 +80,10 @@
private final AiConfigResolverService aiConfigResolverService;
private final AiChatMemoryService aiChatMemoryService;
+ private final AiParamService aiParamService;
private final McpMountRuntimeFactory mcpMountRuntimeFactory;
private final AiCallLogService aiCallLogService;
+ private final AiRedisSupport aiRedisSupport;
private final GenericApplicationContext applicationContext;
private final ObservationRegistry observationRegistry;
private final ObjectMapper objectMapper;
@@ -91,24 +95,31 @@
* 该方法不会触发模型调用，而是把配置解析结果和会话记忆聚合成前端一次渲染所需的快照。
*/
@Override
- public AiChatRuntimeDto getRuntime(String promptCode, Long sessionId, Long userId, Long tenantId) {
- AiResolvedConfig config = aiConfigResolverService.resolve(promptCode, tenantId);
+ public AiChatRuntimeDto getRuntime(String promptCode, Long sessionId, Long aiParamId, Long userId, Long tenantId) {
+ AiResolvedConfig config = aiConfigResolverService.resolve(promptCode, tenantId, aiParamId);
+ Long runtimeCacheAiParamId = aiParamId;
+ // runtime 是配置快照和会话记忆的聚合视图，单独缓存能减少一次页面进入时的重复拼装。
+ AiChatRuntimeDto cached = aiRedisSupport.getRuntime(tenantId, userId, config.getPromptCode(), sessionId, runtimeCacheAiParamId);
+ if (cached != null) {
+ return cached;
+ }
AiChatMemoryDto memory = aiChatMemoryService.getMemory(userId, tenantId, config.getPromptCode(), sessionId);
- return AiChatRuntimeDto.builder()
- .requestId(null)
- .sessionId(memory.getSessionId())
- .promptCode(config.getPromptCode())
- .promptName(config.getPrompt().getName())
- .model(config.getAiParam().getModel())
- .configuredMcpCount(config.getMcpMounts().size())
- .mountedMcpCount(config.getMcpMounts().size())
- .mountedMcpNames(config.getMcpMounts().stream().map(item -> item.getName()).toList())
- .mountErrors(List.of())
- .memorySummary(memory.getMemorySummary())
- .memoryFacts(memory.getMemoryFacts())
- .recentMessageCount(memory.getRecentMessageCount())
- .persistedMessages(memory.getPersistedMessages())
- .build();
+ List<AiChatModelOptionDto> modelOptions = aiParamService.listChatModelOptions(tenantId);
+ AiChatRuntimeDto runtime = buildRuntimeSnapshot(
+ null,
+ memory.getSessionId(),
+ config,
+ modelOptions,
+ config.getMcpMounts().size(),
+ config.getMcpMounts().stream().map(item -> item.getName()).toList(),
+ List.of(),
+ memory
+ );
+ aiRedisSupport.cacheRuntime(tenantId, userId, config.getPromptCode(), sessionId, runtimeCacheAiParamId, runtime);
+ if (memory.getSessionId() != null && !Objects.equals(memory.getSessionId(), sessionId)) {
+ aiRedisSupport.cacheRuntime(tenantId, userId, config.getPromptCode(), memory.getSessionId(), runtimeCacheAiParamId, runtime);
+ }
+ return runtime;
}
/**
@@ -174,14 +185,23 @@
Long sessionId = request.getSessionId();
Long callLogId = null;
String model = null;
+ String resolvedPromptCode = request.getPromptCode();
ThinkingTraceEmitter thinkingTraceEmitter = null;
try {
ensureIdentity(userId, tenantId);
AiResolvedConfig config = resolveConfig(request, tenantId);
+ List<AiChatModelOptionDto> modelOptions = aiParamService.listChatModelOptions(tenantId);
+ resolvedPromptCode = config.getPromptCode();
+ if (!aiRedisSupport.allowChatRequest(tenantId, userId, config.getPromptCode())) {
+ throw buildAiException("AI_RATE_LIMITED", AiErrorCategory.REQUEST, "RATE_LIMIT",
"当前提问过于频繁，请稍后再试", null);
+ }
final String resolvedModel = config.getAiParam().getModel();
model = resolvedModel;
AiChatSession session = resolveSession(request, userId, tenantId, config.getPromptCode());
sessionId = session.getId();
+ // 流状态落 Redis 的目标是给多实例和后续运维查询留统一入口，不替代数据库日志。
+ aiRedisSupport.markStreamState(requestId, tenantId, userId, sessionId, config.getPromptCode(), "RUNNING", null);
AiChatMemoryDto memory = loadMemory(userId, tenantId, config.getPromptCode(), session.getId());
List<AiChatMessageDto> mergedMessages = mergeMessages(memory.getShortMemoryMessages(), request.getMessages());
AiCallLog callLog = aiCallLogService.startCallLog(
@@ -198,21 +218,16 @@
);
callLogId = callLog.getId();
try (McpMountRuntimeFactory.McpMountRuntime runtime = createRuntime(config, userId)) {
- emitStrict(emitter, "start", AiChatRuntimeDto.builder()
- .requestId(requestId)
- .sessionId(session.getId())
- .promptCode(config.getPromptCode())
- .promptName(config.getPrompt().getName())
- .model(config.getAiParam().getModel())
- .configuredMcpCount(config.getMcpMounts().size())
- .mountedMcpCount(runtime.getMountedCount())
- .mountedMcpNames(runtime.getMountedNames())
- .mountErrors(runtime.getErrors())
- .memorySummary(memory.getMemorySummary())
- .memoryFacts(memory.getMemoryFacts())
- .recentMessageCount(memory.getRecentMessageCount())
- .persistedMessages(memory.getPersistedMessages())
- .build());
+ emitStrict(emitter, "start", buildRuntimeSnapshot(
+ requestId,
+ session.getId(),
+ config,
+ modelOptions,
+ runtime.getMountedCount(),
+ runtime.getMountedNames(),
+ runtime.getErrors(),
+ memory
+ ));
emitSafely(emitter, "status", AiChatStatusDto.builder()
.requestId(requestId)
.sessionId(session.getId())
@@ -259,6 +274,7 @@
toolSuccessCount.get(),
toolFailureCount.get()
);
+ aiRedisSupport.markStreamState(requestId, tenantId, userId, session.getId(), config.getPromptCode(), "COMPLETED", null);
log.info("AI chat completed, requestId={}, sessionId={}, elapsedMs={}, firstTokenLatencyMs={}",
requestId, session.getId(), System.currentTimeMillis() - startedAt, resolveFirstTokenLatency(startedAt, firstTokenAtRef.get()));
emitter.complete();
@@ -298,18 +314,21 @@
toolSuccessCount.get(),
toolFailureCount.get()
);
+ aiRedisSupport.markStreamState(requestId, tenantId, userId, session.getId(), config.getPromptCode(), "COMPLETED", null);
log.info("AI chat completed, requestId={}, sessionId={}, elapsedMs={}, firstTokenLatencyMs={}",
requestId, session.getId(), System.currentTimeMillis() - startedAt, resolveFirstTokenLatency(startedAt, firstTokenAtRef.get()));
emitter.complete();
}
} catch (AiChatException e) {
handleStreamFailure(emitter, requestId, sessionId, model, startedAt, firstTokenAtRef.get(), e,
- callLogId, toolSuccessCount.get(), toolFailureCount.get(), thinkingTraceEmitter);
+ callLogId, toolSuccessCount.get(), toolFailureCount.get(), thinkingTraceEmitter,
+ tenantId, userId, resolvedPromptCode);
} catch (Exception e) {
handleStreamFailure(emitter, requestId, sessionId, model, startedAt, firstTokenAtRef.get(),
buildAiException("AI_INTERNAL_ERROR", AiErrorCategory.INTERNAL, "INTERNAL",
e == null ? "AI 对话失败" : e.getMessage(), e),
- callLogId, toolSuccessCount.get(), toolFailureCount.get(), thinkingTraceEmitter);
+ callLogId, toolSuccessCount.get(), toolFailureCount.get(), thinkingTraceEmitter,
+ tenantId, userId, resolvedPromptCode);
} finally {
log.debug("AI chat stream finished, requestId={}", requestId);
}
@@ -327,11 +346,34 @@
private AiResolvedConfig resolveConfig(AiChatRequest request, Long tenantId) {
/** 把请求里的 Prompt 场景解析成一份可直接执行的 AI 配置。 */
try {
- return aiConfigResolverService.resolve(request.getPromptCode(), tenantId);
+ return aiConfigResolverService.resolve(request.getPromptCode(), tenantId, request.getAiParamId());
} catch (Exception e) {
throw buildAiException("AI_CONFIG_RESOLVE_ERROR", AiErrorCategory.CONFIG, "CONFIG_RESOLVE",
e == null ? "AI 配置解析失败" : e.getMessage(), e);
}
+ }
+
+ private AiChatRuntimeDto buildRuntimeSnapshot(String requestId, Long sessionId, AiResolvedConfig config,
+ List<AiChatModelOptionDto> modelOptions, Integer mountedMcpCount,
+ List<String> mountedMcpNames, List<String> mountErrors,
+ AiChatMemoryDto memory) {
+ return AiChatRuntimeDto.builder()
+ .requestId(requestId)
+ .sessionId(sessionId)
+ .aiParamId(config.getAiParam().getId())
+ .promptCode(config.getPromptCode())
+ .promptName(config.getPrompt().getName())
+ .model(config.getAiParam().getModel())
+ .modelOptions(modelOptions)
+ .configuredMcpCount(config.getMcpMounts().size())
+ .mountedMcpCount(mountedMcpCount)
+ .mountedMcpNames(mountedMcpNames)
+ .mountErrors(mountErrors)
+ .memorySummary(memory.getMemorySummary())
+ .memoryFacts(memory.getMemoryFacts())
+ .recentMessageCount(memory.getRecentMessageCount())
+ .persistedMessages(memory.getPersistedMessages())
+ .build();
}
private AiChatSession resolveSession(AiChatRequest request, Long userId, Long tenantId, String promptCode) {
@@ -420,7 +462,8 @@
private void handleStreamFailure(SseEmitter emitter, String requestId, Long sessionId, String model, long startedAt,
Long firstTokenAt, AiChatException exception, Long callLogId,
long toolSuccessCount, long toolFailureCount,
- ThinkingTraceEmitter thinkingTraceEmitter) {
+ ThinkingTraceEmitter thinkingTraceEmitter,
+ Long tenantId, Long userId, String promptCode) {
if (isClientAbortException(exception)) {
log.warn("AI chat aborted by client, requestId={}, sessionId={}, stage={}, message={}",
requestId, sessionId, exception.getStage(), exception.getMessage());
@@ -439,6 +482,7 @@
toolSuccessCount,
toolFailureCount
);
+ aiRedisSupport.markStreamState(requestId, tenantId, userId, sessionId, promptCode, "ABORTED", exception.getMessage());
emitter.completeWithError(exception);
return;
}
@@ -468,6 +512,7 @@
toolSuccessCount,
toolFailureCount
);
+ aiRedisSupport.markStreamState(requestId, tenantId, userId, sessionId, promptCode, "FAILED", exception.getMessage());
emitter.completeWithError(exception);
}
@@ -921,6 +966,38 @@
String mountName = delegate instanceof MountedToolCallback ? ((MountedToolCallback) delegate).getMountName() : null;
String toolCallId = requestId + "-tool-" + toolCallSequence.incrementAndGet();
long startedAt = System.currentTimeMillis();
+ // 这里只对同一 request 内的重复工具调用做短期复用，避免把跨请求结果误当成通用缓存。
+ AiRedisSupport.CachedToolResult cachedToolResult = aiRedisSupport.getToolResult(tenantId, requestId, toolName, toolInput);
+ if (cachedToolResult != null) {
+ emitSafely(emitter, "tool_result", AiChatToolEventDto.builder()
+ .requestId(requestId)
+ .sessionId(sessionId)
+ .toolCallId(toolCallId)
+ .toolName(toolName)
+ .mountName(mountName)
+ .status(cachedToolResult.isSuccess() ? "COMPLETED" : "FAILED")
+ .inputSummary(summarizeToolPayload(toolInput, 400))
+ .outputSummary(summarizeToolPayload(cachedToolResult.getOutput(), 600))
+ .errorMessage(cachedToolResult.getErrorMessage())
+ .durationMs(0L)
+ .timestamp(System.currentTimeMillis())
+ .build());
+ if (thinkingTraceEmitter != null) {
+ thinkingTraceEmitter.onToolResult(toolName, toolCallId, !cachedToolResult.isSuccess());
+ }
+ if (cachedToolResult.isSuccess()) {
+ toolSuccessCount.incrementAndGet();
+ aiCallLogService.saveMcpCallLog(callLogId, requestId, sessionId, toolCallId, mountName, toolName,
+ "COMPLETED", summarizeToolPayload(toolInput, 400), summarizeToolPayload(cachedToolResult.getOutput(), 600),
+ null, 0L, userId, tenantId);
+ return cachedToolResult.getOutput();
+ }
+ toolFailureCount.incrementAndGet();
+ aiCallLogService.saveMcpCallLog(callLogId, requestId, sessionId, toolCallId, mountName, toolName,
+ "FAILED", summarizeToolPayload(toolInput, 400), null, cachedToolResult.getErrorMessage(),
+ 0L, userId, tenantId);
+ throw new CoolException(cachedToolResult.getErrorMessage());
+ }
if (thinkingTraceEmitter != null) {
thinkingTraceEmitter.onToolStart(toolName, toolCallId);
}
@@ -952,6 +1029,7 @@
if (thinkingTraceEmitter != null) {
thinkingTraceEmitter.onToolResult(toolName, toolCallId, false);
}
+ aiRedisSupport.cacheToolResult(tenantId, requestId, toolName, toolInput, true, output, null);
toolSuccessCount.incrementAndGet();
aiCallLogService.saveMcpCallLog(callLogId, requestId, sessionId, toolCallId, mountName, toolName,
"COMPLETED", summarizeToolPayload(toolInput, 400), summarizeToolPayload(output, 600),
@@ -974,6 +1052,7 @@
if (thinkingTraceEmitter != null) {
thinkingTraceEmitter.onToolResult(toolName, toolCallId, true);
}
+ aiRedisSupport.cacheToolResult(tenantId, requestId, toolName, toolInput, false, null, e.getMessage());
toolFailureCount.incrementAndGet();
aiCallLogService.saveMcpCallLog(callLogId, requestId, sessionId, toolCallId, mountName, toolName,
"FAILED", summarizeToolPayload(toolInput, 400), null, e.getMessage(),
--
Gitblit v1.9.1