package com.zy.ai.service;

import com.zy.ai.entity.ChatCompletionRequest;
import com.zy.ai.entity.ChatCompletionResponse;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

import java.util.List;

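/**
 * Service wrapper around an OpenAI-compatible chat completions endpoint.
 *
 * <p>The injected {@code llmWebClient} is assumed to be defined elsewhere in this
 * project with the provider's base URL already set, along the lines of the sketch
 * below (the bean method and the {@code llm.base-url} property name are assumptions,
 * not taken from this repo):
 * <pre>{@code
 * WebClient llmWebClient(String baseUrl) {   // annotate with @Bean in a @Configuration class
 *     return WebClient.builder().baseUrl(baseUrl).build();
 * }
 * }</pre>
 */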
@Slf4j
@Service
@RequiredArgsConstructor
public class LlmChatService {

    private final WebClient llmWebClient;

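    // API key for the LLM provider, injected from the llm.api-key application property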
@Value("${llm.api-key}")
|
private String apiKey;
|
|
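    // Model name sent with every chat completion request, from the llm.model property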
@Value("${llm.model}")
|
private String model;
|
|
    /**
     * General-purpose chat call: send the given messages and return the model's text reply.
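     *
     * <p>Usage sketch (caller code assumed for illustration; the {@code Message}
     * accessors are taken to be plain role/content setters on this repo's entity):
     * <pre>{@code
     * ChatCompletionRequest.Message msg = new ChatCompletionRequest.Message();
     * msg.setRole("user");
     * msg.setContent("Summarize the reported symptoms.");
     * String reply = llmChatService.chat(List.of(msg), null, null); // nulls fall back to 0.3 / 1024
     * }</pre>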
     */
    public String chat(List<ChatCompletionRequest.Message> messages,
                       Double temperature,
                       Integer maxTokens) {

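        // Build the request body; default to temperature 0.3 and max_tokens 1024 when the caller passes null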
        ChatCompletionRequest req = new ChatCompletionRequest();
        req.setModel(model);
        req.setMessages(messages);
        req.setTemperature(temperature != null ? temperature : 0.3);
        req.setMax_tokens(maxTokens != null ? maxTokens : 1024);

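        // POST to the OpenAI-compatible /chat/completions endpoint and block for the reply;
        // on error, log it and resume with an empty Mono so block() yields null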
        ChatCompletionResponse response = llmWebClient.post()
                .uri("/chat/completions")
                .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
                .contentType(MediaType.APPLICATION_JSON)
                .bodyValue(req) // bodyValue is supported on this Spring Boot version (2.5.14)
                .retrieve()
                .bodyToMono(ChatCompletionResponse.class)
                .doOnError(ex -> log.error("LLM call failed", ex))
                .onErrorResume(ex -> Mono.empty())
                .block();

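        // Guard against a null response (including the error fallback above) or an empty/incomplete choices list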
        if (response == null ||
                response.getChoices() == null ||
                response.getChoices().isEmpty() ||
                response.getChoices().get(0).getMessage() == null) {

            return "AI diagnosis failed: no valid reply was received.";
        }

        return response.getChoices().get(0).getMessage().getContent();
    }
}