LangChain4j Prompt对话机器人
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven build for the LangChain4j prompt-chatbot demo (Spring Boot 3 / Java 21). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>langchain4j-test</artifactId>
    <version>1.0-SNAPSHOT</version>

    <!-- Inherit dependency/plugin management from Spring Boot. -->
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.5.4</version>
    </parent>

    <properties>
        <maven.compiler.source>21</maven.compiler.source>
        <maven.compiler.target>21</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <!-- OpenAI-compatible chat models; pointed at DeepSeek via application config. -->
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-open-ai-spring-boot-starter</artifactId>
            <version>1.8.0-beta15</version>
        </dependency>
        <!-- Reactive web stack: Flux is required by the streaming endpoint. -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-webflux</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
</project>
langchain4j:
open-ai:
streaming-chat-model:
base-url: https://api.deepseek.com
api-key: ${DEEPSEEK_KEY}
model-name: deepseek-reasoner
log-requests: true
log-responses: true
return-thinking: true
chat-model:
base-url: https://api.deepseek.com
api-key: ${DEEPSEEK_KEY}
model-name: deepseek-reasoner
log-requests: true
log-responses: true
return-thinking: true
server:
port: 8080
阻塞式和类Spring AI Flux的“流式”
package org.example.controller;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.response.*;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import java.util.Arrays;
import java.util.List;
/**
 * REST endpoints demonstrating blocking and streaming chat against a
 * LangChain4j model (configured elsewhere for DeepSeek's OpenAI-compatible API).
 */
@RestController
@RequestMapping("chat")
public class ChatController {

    /** Shared system prompt: answer as a math teacher, in simple terms. */
    private static final String SYSTEM_PROMPT = "你是一个数学老师,用简单易懂的方式解释数学概念。";

    @Resource
    private StreamingChatModel streamingChatModel;

    @Resource
    private ChatModel chatModel;

    /** Builds the (system, user) message pair sent to the model. */
    private static List<ChatMessage> buildMessages(String msg) {
        return Arrays.asList(
                new SystemMessage(SYSTEM_PROMPT),
                new UserMessage(msg)
        );
    }

    /**
     * Blocking chat: waits for the full model response before returning.
     *
     * @param msg the user's question
     * @return the model's complete answer text
     */
    @GetMapping("chat")
    public String chat(String msg) {
        ChatResponse chatResponse = chatModel.chat(buildMessages(msg));
        return chatResponse.aiMessage().text();
    }

    /**
     * Streaming chat: emits partial tokens as they arrive. "Thinking" tokens
     * are wrapped in {@code <thinking>} tags so clients can distinguish them
     * from the final answer.
     *
     * @param msg the user's question
     * @return a Flux of partial response fragments
     */
    @GetMapping("streaming")
    public Flux<String> streaming(String msg) {
        List<ChatMessage> messages = buildMessages(msg);
        return Flux.create(sink -> {
            streamingChatModel.chat(messages, new StreamingChatResponseHandler() {
                @Override
                public void onPartialResponse(PartialResponse partialResponse, PartialResponseContext context) {
                    sink.next(partialResponse.text());
                }

                @Override
                public void onPartialThinking(PartialThinking partialThinking) {
                    sink.next("<thinking>" + partialThinking.text() + "</thinking>");
                }

                @Override
                public void onCompleteResponse(ChatResponse completeResponse) {
                    sink.complete();
                }

                @Override
                public void onError(Throwable error) {
                    // Propagate the failure to the subscriber. The original empty
                    // override swallowed errors, leaving the Flux hanging forever.
                    sink.error(error);
                }
            });
        });
    }
}
"如果文章对您有帮助,可以请作者喝杯咖啡吗?"
微信支付
支付宝
LangChain4j Prompt对话机器人
https://blog.liuzijian.com/post/langchain4j/2025/11/04/langchain4j-prompt.html