
Integrate LangChain4j: implement the AI streaming chat module

xingbo, 3 weeks ago
Parent commit 4c60a0de4c

+ 11 - 1
nb-admin/src/main/resources/application-dev.yml

@@ -186,4 +186,14 @@ oss:
 
 notify:
   wechat:
-    url: https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=c3e093fe-5125-47d5-a171-0f4be2f61a78
+    url: https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=c3e093fe-5125-47d5-a171-0f4be2f61a78
+
+langchain4j:
+  open-ai:
+    chat-model:
+      api-key: sk-2f8229627f104a18a1fb90c98e9ea48f
+      base-url: https://api.deepseek.com/v1
+      model-name: deepseek-chat
+      temperature: 0.7
+      timeout: 60 # seconds
+      max-retries: 3
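
These properties are read by the LangChain4jConfig class added further down via @Value. Below is a minimal startup smoke-test sketch, assuming the ChatLanguageModel bean from that config; the AiSmokeTest class name and its placement are hypothetical and not part of this commit.

package com.nb.core.ai.config;

import dev.langchain4j.model.chat.ChatLanguageModel;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class AiSmokeTest implements CommandLineRunner {

    private final ChatLanguageModel chatModel;

    public AiSmokeTest(ChatLanguageModel chatModel) {
        this.chatModel = chatModel;
    }

    @Override
    public void run(String... args) {
        // Sends one short prompt at startup to confirm the api-key/base-url/model-name above are valid.
        log.info("LangChain4j smoke test: {}", chatModel.generate("ping"));
    }
}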

+ 71 - 0
nb-core/pom.xml

@@ -109,5 +109,76 @@
             <artifactId>org.eclipse.paho.client.mqttv3</artifactId>
             <version>1.2.5</version>
         </dependency>
+
+        <!-- Explicitly declare OkHttp components at a unified version -->
+        <dependency>
+            <groupId>com.squareup.okhttp3</groupId>
+            <artifactId>okhttp</artifactId>
+            <version>${okhttp.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.squareup.okhttp3</groupId>
+            <artifactId>logging-interceptor</artifactId>
+            <version>${okhttp.version}</version>
+        </dependency>
+        <!-- SSE support -->
+        <dependency>
+            <groupId>com.squareup.okhttp3</groupId>
+            <artifactId>okhttp-sse</artifactId>
+            <version>${okhttp.version}</version>
+        </dependency>
+
+        <!-- LangChain4j dependencies -->
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-core</artifactId>
+            <!-- Exclude transitive dependencies that could conflict -->
+            <exclusions>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>okhttp</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>okhttp-bom</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>logging-interceptor</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-open-ai</artifactId>
+            <!-- Exclude transitive OkHttp dependencies -->
+            <exclusions>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>okhttp</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>okhttp-bom</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>logging-interceptor</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.squareup.okhttp3</groupId>
+                    <artifactId>okhttp-sse</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-reactor</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-spring-boot-starter</artifactId>
+        </dependency>
     </dependencies>
 </project>
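
To confirm that the exclusions above leave exactly one OkHttp version on the runtime classpath, a quick hedged check is sketched below; OkHttp 4.x exposes the resolved version as okhttp3.OkHttp.VERSION, and the class name here is hypothetical.

import okhttp3.OkHttp;

// Hypothetical one-off check of the OkHttp version resolved at runtime (OkHttp 4.x only).
public class OkHttpVersionCheck {
    public static void main(String[] args) {
        // Expected to print 4.11.0 if the unified version from the parent pom wins.
        System.out.println("OkHttp version: " + OkHttp.VERSION);
    }
}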

+ 181 - 0
nb-core/src/main/java/com/nb/core/ai/Utils/ReactiveStreamConverter.java

@@ -0,0 +1,181 @@
+package com.nb.core.ai.Utils;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
+import reactor.core.publisher.Flux;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.function.Consumer;
+
+/**
+ * Reactive stream conversion utility.
+ * Converts a Reactor Flux into a Spring MVC SseEmitter.
+ */
+@Slf4j
+public class ReactiveStreamConverter {
+
+    // Thread pool for asynchronously sending SSE events (cached pool: new threads are created on demand, idle threads are reclaimed)
+    private static final ExecutorService executor = Executors.newCachedThreadPool();
+
+    /**
+     * Converts a Flux into an SseEmitter.
+     *
+     * @param flux data stream
+     * @param dataConsumer data callback (optional)
+     * @param requestId request ID used for log correlation
+     * @return SseEmitter
+     */
+    public static SseEmitter toSseEmitter(Flux<String> flux, Consumer<String> dataConsumer, String requestId) {
+        // Create the SSE emitter with a 60-second timeout.
+        // Adjust the timeout to the actual use case; long generations may need a larger value.
+        SseEmitter emitter = new SseEmitter(60_000L);
+
+        // Process the stream asynchronously on the thread pool
+        executor.execute(() -> {
+            try {
+                log.info("Start processing SSE stream, requestId: {}", requestId);
+
+                // Subscribe to the Flux and handle three kinds of events: data, error, completion
+                flux.subscribe(
+                        // Handle incoming data
+                        data -> {
+                            try {
+                                // Invoke the data callback if one was provided
+                                // (useful for data cleansing, metrics, logging, etc.)
+                                if (dataConsumer != null) {
+                                    dataConsumer.accept(data);
+                                }
+
+                                // Build the SSE event and send it to the client.
+                                // Each event carries data, an ID, an event name and a timestamp comment.
+                                SseEmitter.SseEventBuilder event = SseEmitter.event()
+                                        .data(data)                                      // event payload
+                                        .id(String.valueOf(System.currentTimeMillis()))  // unique event ID
+                                        .name("message")                                 // event name
+                                        .comment("timestamp: " + LocalDateTime.now());   // timestamp comment
+                                emitter.send(event);
+
+                                // Debug log for troubleshooting
+                                log.debug("Sent SSE data, requestId: {}, data length: {}", requestId, data.length());
+                            } catch (IOException e) {
+                                // Handle send failures
+                                log.error("Failed to send SSE data, requestId: {}", requestId, e);
+                                emitter.completeWithError(e);
+                            }
+                        },
+                        // Handle stream errors
+                        error -> {
+                            log.error("SSE stream error, requestId: {}", requestId, error);
+                            try {
+                                // Build an error event to notify the client
+                                SseEmitter.SseEventBuilder errorEvent = SseEmitter.event()
+                                        .data("{\"error\": \"" + error.getMessage() + "\"}")  // error message as JSON
+                                        .name("error")                                        // error event name
+                                        .comment("timestamp: " + LocalDateTime.now());        // time the error occurred
+                                emitter.send(errorEvent);
+                                emitter.completeWithError(error);
+                            } catch (IOException ex) {
+                                // Sending the error event itself failed
+                                log.error("Failed to send SSE error event, requestId: {}", requestId, ex);
+                                emitter.completeWithError(ex);
+                            }
+                        },
+                        // Handle normal completion
+                        () -> {
+                            log.info("SSE stream completed, requestId: {}", requestId);
+                            try {
+                                // Build a completion event to notify the client
+                                SseEmitter.SseEventBuilder completeEvent = SseEmitter.event()
+                                        .data("{\"status\": \"completed\"}")           // completion status
+                                        .name("complete")                              // completion event name
+                                        .comment("timestamp: " + LocalDateTime.now()); // completion time
+                                emitter.send(completeEvent);
+                                emitter.complete();
+                            } catch (IOException e) {
+                                // Sending the completion event failed
+                                log.error("Failed to send SSE completion event, requestId: {}", requestId, e);
+                                emitter.completeWithError(e);
+                            }
+                        }
+                );
+            } catch (Exception e) {
+                // Unexpected exception while processing the stream
+                log.error("SSE stream processing exception, requestId: {}", requestId, e);
+                emitter.completeWithError(e);
+            }
+        });
+        // Completion callback (fires whether the stream ends normally or with an error)
+        emitter.onCompletion(() ->
+                log.info("SSE completion callback, requestId: {}", requestId));
+
+        // Timeout callback
+        emitter.onTimeout(() -> {
+            log.warn("SSE stream timed out, requestId: {}", requestId);
+            try {
+                // Notify the client with a timeout event
+                SseEmitter.SseEventBuilder timeoutEvent = SseEmitter.event()
+                        .data("{\"error\": \"request timed out\"}")  // timeout message
+                        .name("timeout");                            // timeout event name
+                emitter.send(timeoutEvent);
+            } catch (IOException e) {
+                log.error("Failed to send SSE timeout event, requestId: {}", requestId, e);
+            }
+        });
+        // Error callback
+        emitter.onError(throwable ->
+                log.error("SSE error callback, requestId: {}", requestId, throwable));
+
+        return emitter;
+    }
+
+    /**
+     * Converts a Flux data stream into an SseEmitter - simplified version.
+     * Takes a data callback and auto-generates the request ID.
+     *
+     * @param flux data stream
+     * @param dataConsumer data callback
+     * @return SseEmitter Spring MVC server-sent events emitter
+     *
+     * @example
+     * // Usage:
+     * Flux<String> flux = streamAIService.generateStream(message);
+     * SseEmitter emitter = ReactiveStreamConverter.toSseEmitter(
+     *     flux,
+     *     data -> System.out.println("received data: " + data)
+     * );
+     */
+    public static SseEmitter toSseEmitter(Flux<String> flux, Consumer<String> dataConsumer) {
+        // Auto-generate a request ID: "req_" + current timestamp
+        return toSseEmitter(flux, dataConsumer, "req_" + System.currentTimeMillis());
+    }
+
+    /**
+     * Converts a Flux data stream into an SseEmitter - minimal version.
+     * No data callback; the request ID is auto-generated.
+     * Suitable for simple stream-conversion scenarios.
+     * @param flux data stream
+     * @return SseEmitter Spring MVC server-sent events emitter
+     */
+    public static SseEmitter toSseEmitter(Flux<String> flux) {
+        // Delegate to the full version with a null dataConsumer
+        return toSseEmitter(flux, null, "req_" + System.currentTimeMillis());
+    }
+
+    /**
+     * Shuts down the utility's thread pool.
+     * Note: call this on application shutdown to release thread resources.
+     *
+     * @example
+     * // Usage (in a Spring bean's destroy method):
+     * @PreDestroy
+     * public void destroy() {
+     *     ReactiveStreamConverter.shutdown();
+     * }
+     */
+    public static void shutdown() {
+        log.info("Shutting down the ReactiveStreamConverter thread pool");
+        executor.shutdown();
+    }
+}
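
Note that flux.subscribe(...) above returns a Disposable that is never disposed, so a timed-out emitter leaves the upstream model call running. A minimal hedged sketch of the same bridge with cancellation wired in is shown below; the class name is hypothetical and the error/complete events are omitted for brevity.

import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;

import java.io.IOException;

// Hedged sketch: same Flux-to-SseEmitter bridge, but the upstream subscription is disposed
// once the emitter times out or completes, so the streaming model call is not left running.
public final class CancellableSseBridge {

    public static SseEmitter bridge(Flux<String> flux) {
        SseEmitter emitter = new SseEmitter(60_000L);
        Disposable subscription = flux.subscribe(
                data -> {
                    try {
                        emitter.send(SseEmitter.event().data(data).name("message"));
                    } catch (IOException e) {
                        emitter.completeWithError(e);
                    }
                },
                emitter::completeWithError, // propagate stream errors to the emitter
                emitter::complete           // complete the emitter when the flux completes
        );
        // Cancel the upstream subscription once the emitter is done or has timed out.
        emitter.onTimeout(subscription::dispose);
        emitter.onCompletion(subscription::dispose);
        return emitter;
    }

    private CancellableSseBridge() {
    }
}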

+ 72 - 0
nb-core/src/main/java/com/nb/core/ai/config/LangChain4jConfig.java

@@ -0,0 +1,72 @@
+package com.nb.core.ai.config;
+
+
+
+import dev.langchain4j.model.chat.ChatLanguageModel;
+import dev.langchain4j.model.chat.StreamingChatLanguageModel;
+import dev.langchain4j.model.openai.OpenAiChatModel;
+import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.time.Duration;
+
+@Configuration
+public class LangChain4jConfig {
+    // API key
+    @Value("${langchain4j.open-ai.chat-model.api-key}")
+    private String apiKey;
+    // Base URL
+    @Value("${langchain4j.open-ai.chat-model.base-url}")
+    private String baseUrl;
+    // Model name
+    @Value("${langchain4j.open-ai.chat-model.model-name}")
+    private String modelName;
+    // Sampling temperature (output randomness)
+    @Value("${langchain4j.open-ai.chat-model.temperature}")
+    private Double temperature;
+    // Timeout in seconds
+    @Value("${langchain4j.open-ai.chat-model.timeout}")
+    private Integer timeout;
+    // Maximum number of retries
+    @Value("${langchain4j.open-ai.chat-model.max-retries}")
+    private Integer maxRetries;
+
+    /**
+     * Configures the blocking (non-streaming) chat model bean.
+     *
+     * @return ChatLanguageModel instance
+     */
+    @Bean
+    public ChatLanguageModel chatLanguageModel() {
+        return OpenAiChatModel.builder()
+                .apiKey(apiKey)
+                .baseUrl(baseUrl)
+                .modelName(modelName)
+                .temperature(temperature)
+                .timeout(java.time.Duration.ofSeconds(timeout))
+                .maxRetries(maxRetries)
+                .logRequests(true)
+                .logResponses(true)
+                .build();
+    }
+
+    /**
+     * Configures the streaming chat model bean.
+     *
+     * @return StreamingChatLanguageModel instance
+     */
+    @Bean
+    public StreamingChatLanguageModel streamingChatLanguageModel() {
+        return OpenAiStreamingChatModel.builder()
+                .apiKey(apiKey)
+                .baseUrl(baseUrl)
+                .modelName(modelName)
+                .temperature(temperature)
+                .timeout(Duration.ofSeconds(timeout))
+                .logRequests(true)
+                .logResponses(true)
+                .build();
+    }
+}
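
The six @Value fields above could also be bound in one step with @ConfigurationProperties. A hedged sketch under the same langchain4j.open-ai.chat-model prefix follows; the class name is hypothetical and it relies on Spring Boot's relaxed binding (api-key to apiKey, and so on).

package com.nb.core.ai.config;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

// Hypothetical alternative to the @Value fields above: bind the whole
// langchain4j.open-ai.chat-model block from application-dev.yml into one bean.
@Data
@Component
@ConfigurationProperties(prefix = "langchain4j.open-ai.chat-model")
public class ChatModelProperties {
    private String apiKey;      // api-key
    private String baseUrl;     // base-url
    private String modelName;   // model-name
    private Double temperature; // temperature
    private Integer timeout;    // timeout, in seconds
    private Integer maxRetries; // max-retries
}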

+ 103 - 0
nb-core/src/main/java/com/nb/core/ai/controller/AIController.java

@@ -0,0 +1,103 @@
+package com.nb.core.ai.controller;
+
+
+import com.nb.core.ai.entity.ChatParam;
+import com.nb.core.ai.service.DeepSeekAIService;
+import com.nb.core.ai.service.StreamAIService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.http.codec.ServerSentEvent;
+import com.nb.core.ai.Utils.ReactiveStreamConverter;
+import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.MediaType;
+import org.springframework.web.bind.annotation.*;
+import reactor.core.publisher.Flux;
+import java.time.Duration;
+
+import java.util.List;
+
+@RestController
+@RequestMapping("/ai")
+@Slf4j
+public class AIController {
+    /**
+     * Chat service
+     */
+    @Autowired
+    private DeepSeekAIService deepSeekAIService;
+
+    @Autowired
+    private StreamAIService streamAIService;
+
+
+    /**
+     * Connectivity test
+     */
+    @GetMapping("/test")
+    public String testConnection() {
+        return deepSeekAIService.testConnection();
+    }
+
+    /**
+     * Simple (non-streaming) chat generation
+     */
+    @PostMapping("/chat")
+    public String chat(@RequestBody ChatParam chatParam) {
+        return deepSeekAIService.generate(chatParam.getMessage());
+    }
+
+    /**
+     * Standard streaming chat endpoint using SSE (Server-Sent Events)
+     */
+    @GetMapping(value = "/sseStreamChat", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
+    public Flux<ServerSentEvent<String>> sseStreamChat(@RequestParam String message) {
+        log.info("SSE streaming request: {}", message);
+
+        return streamAIService.generateStream(message)
+                .map(token -> ServerSentEvent.<String>builder()
+                        .data(token)
+                        .id(String.valueOf(System.currentTimeMillis()))
+                        .event("message")
+                        .build())
+                .doOnSubscribe(subscription -> log.info("SSE stream subscribed: {}", message))
+                .doOnComplete(() -> log.info("SSE stream completed: {}", message))
+                .doOnError(error -> log.error("SSE stream error: {}", message, error))
+                // Heartbeat to keep the connection from timing out
+                .mergeWith(Flux.interval(Duration.ofSeconds(30))
+                        .map(sequence -> ServerSentEvent.<String>builder()
+                                .comment("heartbeat")
+                                .build()))
+                .onErrorResume(throwable -> {
+                    log.error("SSE stream exception: {}", throwable.getMessage());
+                    return Flux.just(ServerSentEvent.<String>builder()
+                            .event("error")
+                            .data("Stream processing error: " + throwable.getMessage())
+                            .build());
+                });
+    }
+
+
+    /**
+     * Streaming chat endpoint using SseEmitter
+     * Compatible with Spring MVC asynchronous processing
+     */
+    @GetMapping(value = "/SseEmitterChat", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
+    public SseEmitter SseEmitterChat(@RequestParam(value = "message", defaultValue = "Can you introduce the Tuoren Medical Device Group for me? At least 200 words.") String message) {
+        log.info("Received streaming chat request: {}", message);
+
+        Flux<String> flux = streamAIService.generateStream(message);
+
+        return ReactiveStreamConverter.toSseEmitter(flux,
+                data -> {
+                    // Data callback - business logic can be added here
+                    System.out.println(data);
+                    log.info("Processing data: {}", data);
+                    // Additional processing, such as cleansing or metrics, could go here
+                });
+    }
+
+
+
+
+}
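
For reference, the /ai/sseStreamChat endpoint can be exercised from Java with a reactive client. The sketch below assumes spring-webflux's WebClient is available and that the service listens on localhost:8080; both are assumptions, not part of this commit.

import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;

import java.time.Duration;

// Hypothetical client-side check of the SSE endpoint added in AIController.
public class SseClientExample {
    public static void main(String[] args) {
        WebClient client = WebClient.create("http://localhost:8080"); // assumed host/port
        Flux<String> tokens = client.get()
                .uri(uriBuilder -> uriBuilder.path("/ai/sseStreamChat")
                        .queryParam("message", "hello")
                        .build())
                .accept(MediaType.TEXT_EVENT_STREAM)
                .retrieve()
                .bodyToFlux(String.class); // each element is one SSE data payload (token)

        // The endpoint keeps the connection open with heartbeats, so bound the demo to 30 seconds.
        tokens.take(Duration.ofSeconds(30))
                .doOnNext(System.out::print)
                .blockLast(); // blocking only for this demo; real callers would subscribe reactively
    }
}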

+ 14 - 0
nb-core/src/main/java/com/nb/core/ai/service/DeepSeekAIService.java

@@ -0,0 +1,14 @@
+package com.nb.core.ai.service;
+
+
+public interface DeepSeekAIService {
+    /**
+     * Simple (non-streaming) chat generation
+     */
+    String generate(String message);
+
+    /**
+     * Connectivity test
+     */
+    String testConnection();
+}

+ 15 - 0
nb-core/src/main/java/com/nb/core/ai/service/StreamAIService.java

@@ -0,0 +1,15 @@
+package com.nb.core.ai.service;
+
+import reactor.core.publisher.Flux;
+
+
+public interface StreamAIService {
+
+    /**
+     * Streaming chat generation
+     * @param message user message
+     * @return streaming response
+     */
+    Flux<String> generateStream(String message);
+
+}

+ 40 - 0
nb-core/src/main/java/com/nb/core/ai/service/impl/DeepSeekAIServiceImpl.java

@@ -0,0 +1,40 @@
+package com.nb.core.ai.service.impl;
+
+
+import com.nb.core.ai.service.DeepSeekAIService;
+import dev.langchain4j.model.chat.ChatLanguageModel;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+@Service
+@Slf4j
+public class DeepSeekAIServiceImpl implements DeepSeekAIService {
+
+    @Autowired
+    private ChatLanguageModel chatModel;
+    /**
+     * Simple (non-streaming) chat generation
+     */
+    @Override
+    public String generate(String message) {
+        return chatModel.generate(message);
+    }
+
+    /**
+     * Connectivity test
+     */
+    @Override
+    public String testConnection() {
+        try {
+            String response = chatModel.generate("Reply in one sentence: 'AI service connection OK'");
+            log.info("DeepSeek connectivity test succeeded: {}", response);
+            return response;
+        } catch (Exception e) {
+            log.error("DeepSeek connectivity test failed: {}", e.getMessage());
+            return "Connection failed: " + e.getMessage();
+        }
+    }
+}

+ 152 - 0
nb-core/src/main/java/com/nb/core/ai/service/impl/StreamAIServiceImpl.java

@@ -0,0 +1,152 @@
+package com.nb.core.ai.service.impl;
+
+
+import com.nb.core.ai.service.StreamAIService;
+import dev.langchain4j.model.chat.StreamingChatLanguageModel;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import reactor.core.publisher.Flux;
+import java.util.concurrent.atomic.AtomicReference;
+import dev.langchain4j.data.message.AiMessage;
+import dev.langchain4j.model.StreamingResponseHandler;
+import dev.langchain4j.model.output.Response;
+import reactor.core.publisher.Sinks;
+
+
+
+
+
+/**
+ * Streaming AI service implementation.
+ * Provides streaming chat generation based on the LangChain4j streaming chat model.
+ */
+@Service
+@Slf4j
+public class StreamAIServiceImpl implements StreamAIService {
+
+    @Autowired
+    private  StreamingChatLanguageModel streamingChatModel;
+
+    /**
+     * Streaming chat generation.
+     *
+     * @param message user input message
+     * @return Flux carrying the streamed response
+     */
+    @Override
+    public Flux<String> generateStream(String message) {
+        // Use Sinks.Many for finer control over emission
+        Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
+        try {
+            // Track completion state with an AtomicReference
+            AtomicReference<Boolean> isCompleted = new AtomicReference<>(false);
+            // Create the streaming response handler
+            StreamingResponseHandler<AiMessage> handler = new StreamingResponseHandler<AiMessage>() {
+                /**
+                 * Called when a new token arrives.
+                 * @param token a single token of the streamed response
+                 */
+                @Override
+                public void onNext(String token) {
+                    // Skip if the stream has already completed or failed
+                    if (!isCompleted.get()) {
+                        try {
+                            // Emit the token to the response stream
+                            sink.tryEmitNext(token);
+                            log.debug("Emitted token: {}", token);
+                        } catch (Exception e) {
+                            log.warn("Failed to emit token: {}", token, e);
+                        }
+                    }
+                }
+                /**
+                 * Called when the streamed response completes.
+                 * @param response the complete response object
+                 */
+                @Override
+                public void onComplete(Response<AiMessage> response) {
+                    // Mark the stream as completed
+                    if (!isCompleted.getAndSet(true)) {
+                        // Log the full response
+                        AiMessage aiMessage = response.content();
+                        String fullResponse = aiMessage.text();
+                        log.info("Stream completed for message: {}. Full response: {}", message, fullResponse);
+                        // Complete the Flux
+                        sink.tryEmitComplete();
+                    }
+                }
+                /**
+                 * Called when an error occurs; handles failures during the AI service call.
+                 * @param error the exception
+                 */
+                @Override
+                public void onError(Throwable error) {
+                    // Mark the stream as failed
+                    if (!isCompleted.getAndSet(true)) {
+                        log.error("Stream error for message: {}", message, error);
+                        // Propagate the error to the Flux
+                        sink.tryEmitError(error);
+                    }
+                }
+            };
+            // Start the streaming call asynchronously
+            new Thread(() -> {
+                try {
+                    streamingChatModel.generate(message, handler);
+                    log.info("Started streaming for message: {}", message);
+                } catch (Exception e) {
+                    log.error("Failed to start streaming for message: {}", message, e);
+                    if (!isCompleted.getAndSet(true)) {
+                        sink.tryEmitError(e);
+                    }
+                }
+            }).start();
+        } catch (Exception e) {
+            log.error("Failed to create the streaming handler for message: {}", message, e);
+            sink.tryEmitError(e);
+        }
+        return sink.asFlux()
+                .doOnSubscribe(subscription -> log.debug("Stream subscribed: {}", message))
+                .doOnCancel(() -> log.info("Stream cancelled: {}", message))
+                .doOnTerminate(() -> log.debug("Stream terminated: {}", message));
+    }
+
+    /**
+     * Simplified streaming generation method.
+     * Suitable for simple use cases; uses the basic String-parameter API.
+     *
+     * @param message user message text
+     * @return Flux carrying the streamed response tokens
+     */
+    public Flux<String> generateStreamSimple(String message) {
+        return Flux.create(sink -> {
+            // Collect the full response content (for debugging)
+            StringBuilder fullResponse = new StringBuilder();
+            // Create the response handler
+            StreamingResponseHandler<AiMessage> handler = new StreamingResponseHandler<AiMessage>() {
+                @Override
+                public void onNext(String token) {
+                    // Collect the token for debugging
+                    fullResponse.append(token);
+                    // Emit the token to the sink
+                    sink.next(token);
+                }
+                @Override
+                public void onComplete(Response<AiMessage> response) {
+                    // Log the full response
+                    log.debug("Simple chat completed - full response: {}", fullResponse.toString());
+                    sink.complete();
+                }
+                @Override
+                public void onError(Throwable error) {
+                    log.error("Simple stream error for message: {}", message, error);
+                    sink.error(error);
+                }
+            };
+            // Start streaming generation
+            streamingChatModel.generate(message, handler);
+        });
+    }
+
+}
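
generateStream() starts a new Thread per request just to kick off the streaming call. A hedged alternative that reuses a shared pool is sketched below; the class and method names are hypothetical, and the handler/sink logic built in generateStream() stays unchanged.

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Hedged sketch: a shared executor for launching streaming calls, instead of an unpooled
// "new Thread(...)" per request. Only the thread management changes; the handler is the same.
public final class StreamingLauncher {

    private static final ExecutorService STREAM_EXECUTOR = Executors.newCachedThreadPool();

    public static void launch(StreamingChatLanguageModel model,
                              String message,
                              StreamingResponseHandler<AiMessage> handler) {
        // Same call as in generateStream(), submitted to the shared pool.
        STREAM_EXECUTOR.execute(() -> model.generate(message, handler));
    }

    private StreamingLauncher() {
    }
}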

+ 47 - 1
pom.xml

@@ -30,7 +30,7 @@
         <fastjson.version>1.2.79</fastjson.version>
         <easyexcel.version>3.0.5</easyexcel.version>
         <minio.version>8.3.7</minio.version>
-        <okhttp.version>4.9.3</okhttp.version>
+        <okhttp.version>4.11.0</okhttp.version>
         <velocity.version>2.3</velocity.version>
         <log4j2.version>2.17.1</log4j2.version>
         <satoken.version>1.31.0</satoken.version>
@@ -47,6 +47,7 @@
         <aliyun.oss.version>3.15.0</aliyun.oss.version>
         <poi.tl.verison>1.12.0</poi.tl.verison>
         <poi.version>5.2.2</poi.version>
+        <langchain4j.version>0.35.0</langchain4j.version>
     </properties>
 
     <modules>
@@ -274,6 +275,25 @@
                 <artifactId>okhttp</artifactId>
                 <version>${okhttp.version}</version>
             </dependency>
+            <!-- Import the OkHttp BOM to force a single version -->
+            <dependency>
+                <groupId>com.squareup.okhttp3</groupId>
+                <artifactId>okhttp-bom</artifactId>
+                <version>${okhttp.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <!-- Explicitly declare the other OkHttp components -->
+            <dependency>
+                <groupId>com.squareup.okhttp3</groupId>
+                <artifactId>logging-interceptor</artifactId>
+                <version>${okhttp.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>com.squareup.okhttp3</groupId>
+                <artifactId>okhttp-sse</artifactId>
+                <version>${okhttp.version}</version>
+            </dependency>
             <!-- log4j -->
             <dependency>
                 <groupId>org.apache.logging.log4j</groupId>
@@ -379,6 +399,32 @@
                 <artifactId>pinyin4j</artifactId>
                 <version>${pinyin.verison}</version>
             </dependency>
+
+            <!-- LangChain4j core dependency -->
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-core</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <!-- OpenAI integration, used as the example provider here -->
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-open-ai</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-reactor</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+
+
         </dependencies>
     </dependencyManagement>