Junjie
3 days ago ba0dae92a0eb14b378fd44c91ed2ccb481c164e3
src/main/java/com/zy/ai/service/LlmChatService.java
@@ -10,8 +10,16 @@
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Flux;
import java.util.List;
import java.util.function.Consumer;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
@Slf4j
@Service
@@ -43,7 +51,7 @@
                .uri("/chat/completions")
                .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
                .contentType(MediaType.APPLICATION_JSON)
                .bodyValue(req)   // bodyValue has been supported since 2.5.14
                .bodyValue(req)
                .retrieve()
                .bodyToMono(ChatCompletionResponse.class)
                .doOnError(ex -> log.error("Failed to call LLM", ex))
@@ -60,4 +68,100 @@
        return response.getChoices().get(0).getMessage().getContent();
    }
}
    public void chatStream(List<ChatCompletionRequest.Message> messages,
                           Double temperature,
                           Integer maxTokens,
                           Consumer<String> onChunk,
                           Runnable onComplete,
                           Consumer<Throwable> onError) {
        ChatCompletionRequest req = new ChatCompletionRequest();
        req.setModel(model);
        req.setMessages(messages);
        req.setTemperature(temperature != null ? temperature : 0.3);
        req.setMax_tokens(maxTokens != null ? maxTokens : 1024);
        req.setStream(true);
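        // stream=true asks the OpenAI-compatible /chat/completions endpoint to return
        // incremental SSE deltas instead of a single JSON body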
        Flux<String> flux = llmWebClient.post()
                .uri("/chat/completions")
                .header(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
                .contentType(MediaType.APPLICATION_JSON)
                .accept(MediaType.TEXT_EVENT_STREAM)
                .bodyValue(req)
                .retrieve()
                .bodyToFlux(String.class)
                .doOnError(ex -> log.error("LLM streaming call failed", ex));
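        // doneSeen: the [DONE] sentinel or a terminal signal has been observed;
        // errorSeen: a failure occurred, so onComplete must not fire afterwards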
        AtomicBoolean doneSeen = new AtomicBoolean(false);
        AtomicBoolean errorSeen = new AtomicBoolean(false);
        LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();
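        // Deliver chunks from a separate daemon thread so a slow onChunk callback
        // cannot block the reactive pipeline that parses the SSE stream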
        Thread drain = new Thread(() -> {
            try {
                while (true) {
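                    // Poll with a timeout so the loop can notice doneSeen even when no new chunks arrive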
                    String s = queue.poll(2, TimeUnit.SECONDS);
                    if (s != null) {
                        try { onChunk.accept(s); } catch (Exception ignore) {}
                    }
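                    // Exit only once the stream has terminated and every queued chunk has been delivered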
                    if (doneSeen.get() && queue.isEmpty()) {
                        if (!errorSeen.get()) {
                            try { if (onComplete != null) onComplete.run(); } catch (Exception ignore) {}
                        }
                        break;
                    }
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();   // restore the interrupt flag and stop draining
                log.warn("LLM stream drain thread interrupted", e);
            }
        });
        drain.setDaemon(true);
        drain.start();
        flux.subscribe(payload -> {
            if (payload == null || payload.isEmpty()) return;
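            // One network payload may contain several SSE events; split on blank lines
            // and strip the leading "data:" field name before parsing each event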
            String[] events = payload.split("\\r?\\n\\r?\\n");
            for (String part : events) {
                String s = part;
                if (s == null || s.isEmpty()) continue;
                if (s.startsWith("data:")) {
                    s = s.substring(5);
                    if (s.startsWith(" ")) s = s.substring(1);
                }
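                // The stream signals completion with a literal [DONE] sentinel rather than a JSON event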
                if ("[DONE]".equals(s.trim())) {
                    doneSeen.set(true);
                    continue;
                }
                try {
                    JSONObject obj = JSON.parseObject(s);
                    JSONArray choices = obj.getJSONArray("choices");
                    if (choices != null && !choices.isEmpty()) {
                        JSONObject c0 = choices.getJSONObject(0);
                        JSONObject delta = c0.getJSONObject("delta");
                        if (delta != null) {
                            String content = delta.getString("content");
                            if (content != null) {
                                queue.offer(content);   // unbounded queue: offer never blocks or fails
                            }
                        }
                    }
                } catch (Exception e) {
                    log.warn("Failed to parse LLM stream chunk: {}", s, e);
                }
            }
        }, err -> {
            errorSeen.set(true);
            doneSeen.set(true);
            if (onError != null) onError.accept(err);
        }, () -> {
            if (!doneSeen.get()) {
                // Completed without the [DONE] sentinel: report it as an error, never as success
                errorSeen.set(true);
                doneSeen.set(true);
                if (onError != null) onError.accept(new RuntimeException("LLM stream completed unexpectedly"));
            }
        });
    }
}
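A minimal usage sketch of the new callback-based chatStream API, assuming the service is exposed through a Spring MVC SseEmitter endpoint. ChatStreamController, the /ai/chat/stream path and the Message role/content setters are illustrative assumptions; only LlmChatService.chatStream(...) and its defaults (temperature 0.3, max_tokens 1024) come from the change above.

// Illustrative only; project-local imports (LlmChatService, ChatCompletionRequest) omitted.
import java.util.Collections;

import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

@RestController
public class ChatStreamController {

    private final LlmChatService llmChatService;

    public ChatStreamController(LlmChatService llmChatService) {
        this.llmChatService = llmChatService;
    }

    @GetMapping(value = "/ai/chat/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public SseEmitter stream(@RequestParam String question) {
        SseEmitter emitter = new SseEmitter(0L);              // no servlet timeout; the callbacks end the stream
        ChatCompletionRequest.Message user = new ChatCompletionRequest.Message();
        user.setRole("user");                                 // assumed setters on Message
        user.setContent(question);
        llmChatService.chatStream(
                Collections.singletonList(user),
                null, null,                                   // null -> service defaults (0.3 / 1024)
                chunk -> {                                    // onChunk: forward each delta to the client
                    try { emitter.send(chunk); } catch (Exception e) { emitter.completeWithError(e); }
                },
                emitter::complete,                            // onComplete: close the SSE connection normally
                emitter::completeWithError);                  // onError: surface the failure to the client
        return emitter;
    }
}

Keeping the service callback-based (Consumer / Runnable / Consumer) rather than returning the Flux keeps it transport-agnostic: the same method can drive an SseEmitter, a WebSocket session, or a test that simply collects chunks.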