feat: [AI LLM] ERNIE Bot (文心一言) cannot be used due to a Spring AI QianFan integration issue https://github.com/spring-ai-community/qianfan/issues/6

YunaiV 2025-07-14 22:42:56 +08:00
parent 750709d706
commit 39ecf5ebe5
5 changed files with 59 additions and 67 deletions


@@ -357,7 +357,7 @@ public class AiModelFactoryImpl implements AiModelFactory {
* See the qianFanChatModel method in {@link QianFanChatAutoConfiguration}
*/
private static QianFanChatModel buildYiYanChatModel(String key) {
// TODO @芋艿: not tested yet
// TODO spring ai qianfan bug, unusable: https://github.com/spring-ai-community/qianfan/issues/6
List<String> keys = StrUtil.split(key, '|');
Assert.equals(keys.size(), 2, "The YiYanChatClient key must be in (appKey|secretKey) format");
String appKey = keys.get(0);
@@ -370,7 +370,7 @@ public class AiModelFactoryImpl implements AiModelFactory {
* See the qianFanImageModel method in {@link QianFanEmbeddingAutoConfiguration}
*/
private QianFanImageModel buildQianFanImageModel(String key) {
// TODO @芋艿: not tested yet
// TODO spring ai qianfan bug, unusable: https://github.com/spring-ai-community/qianfan/issues/6
List<String> keys = StrUtil.split(key, '|');
Assert.equals(keys.size(), 2, "The YiYanChatClient key must be in (appKey|secretKey) format");
String appKey = keys.get(0);
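The rest of these two build methods is truncated above. As a minimal sketch of how the "(appKey|secretKey)" string is expected to be wired into the community QianFan chat client, assembled only from constructors and options that appear elsewhere in this diff (the model name is the one used in YiYanChatModelTests; treat this as an illustration, not the commit's exact method body):

// Illustrative sketch, not part of the commit: turning the "(appKey|secretKey)" string
// into a QianFanChatModel, using only calls visible elsewhere in this diff.
private static QianFanChatModel buildYiYanChatModelSketch(String key) {
    List<String> keys = StrUtil.split(key, '|');
    Assert.equals(keys.size(), 2, "The YiYanChatClient key must be in (appKey|secretKey) format");
    QianFanApi api = new QianFanApi(keys.get(0), keys.get(1));
    return new QianFanChatModel(api, QianFanChatOptions.builder()
            .model("ERNIE-4.5-8K-Preview") // model name taken from YiYanChatModelTests below
            .build());
}

Per the TODO above, this path is currently blocked by spring-ai-community/qianfan issue #6, so the sketch cannot be exercised until that upstream bug is fixed.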
@@ -525,7 +525,6 @@ public class AiModelFactoryImpl implements AiModelFactory {
* Create a SiliconFlowImageModel instance
*/
private SiliconFlowImageModel buildSiliconFlowImageModel(String apiToken, String url) {
// TODO @芋艿: not tested yet
url = StrUtil.blankToDefault(url, SiliconFlowApiConstants.DEFAULT_BASE_URL);
SiliconFlowImageApi openAiApi = new SiliconFlowImageApi(url, apiToken);
return new SiliconFlowImageModel(openAiApi);
@@ -535,9 +534,11 @@ public class AiModelFactoryImpl implements AiModelFactory {
* See the ollamaChatModel method in {@link OllamaChatAutoConfiguration}
*/
private static OllamaChatModel buildOllamaChatModel(String url) {
// TODO @芋艿: not tested yet
OllamaApi ollamaApi = OllamaApi.builder().baseUrl(url).build();
return OllamaChatModel.builder().ollamaApi(ollamaApi).toolCallingManager(getToolCallingManager()).build();
return OllamaChatModel.builder()
.ollamaApi(ollamaApi)
.toolCallingManager(getToolCallingManager())
.build();
}
/**
@@ -596,7 +597,10 @@ public class AiModelFactoryImpl implements AiModelFactory {
private OllamaEmbeddingModel buildOllamaEmbeddingModel(String url, String model) {
OllamaApi ollamaApi = OllamaApi.builder().baseUrl(url).build();
OllamaOptions ollamaOptions = OllamaOptions.builder().model(model).build();
return OllamaEmbeddingModel.builder().ollamaApi(ollamaApi).defaultOptions(ollamaOptions).build();
return OllamaEmbeddingModel.builder()
.ollamaApi(ollamaApi)
.defaultOptions(ollamaOptions)
.build();
}
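For reference, a minimal usage sketch of the fluent builder chain these two hunks switch to, assuming the local Ollama endpoint and the deepseek-r1 model used by the test classes later in this commit (the factory's toolCallingManager wiring is omitted):

// Illustrative only: mirrors the builder chain used by the factory above.
// The base URL and model name are assumptions taken from the tests below.
private static void ollamaBuilderSketch() {
    OllamaApi ollamaApi = OllamaApi.builder()
            .baseUrl("http://127.0.0.1:11434")
            .build();
    OllamaChatModel chatModel = OllamaChatModel.builder()
            .ollamaApi(ollamaApi)
            .defaultOptions(OllamaOptions.builder().model("deepseek-r1").build())
            .build();
    ChatResponse response = chatModel.call(new Prompt(List.of(new UserMessage("1 + 1 = "))));
    System.out.println(response.getResult().getOutput());
}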
/**


@@ -1,20 +1,6 @@
package cn.iocoder.yudao.module.ai.framework.ai.core.model.chat;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaApi;
import org.springframework.ai.ollama.api.OllamaModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import reactor.core.publisher.Flux;
import java.util.ArrayList;
import java.util.List;
/**
* {@link OllamaChatModel} integration tests
@@ -23,43 +9,43 @@ import java.util.List;
*/
public class LlamaChatModelTests {
private final OllamaChatModel chatModel = OllamaChatModel.builder()
.ollamaApi(new OllamaApi("http://127.0.0.1:11434")) // Ollama server address
.defaultOptions(OllamaOptions.builder()
.model(OllamaModel.LLAMA3.getName()) // model
.build())
.build();
@Test
@Disabled
public void testCall() {
// prepare parameters
List<Message> messages = new ArrayList<>();
messages.add(new SystemMessage("你是一个优质的文言文作者,用文言文描述着各城市的人文风景。"));
messages.add(new UserMessage("1 + 1 = "));
// invoke
ChatResponse response = chatModel.call(new Prompt(messages));
// print the result
System.out.println(response);
System.out.println(response.getResult().getOutput());
}
@Test
@Disabled
public void testStream() {
// prepare parameters
List<Message> messages = new ArrayList<>();
messages.add(new SystemMessage("你是一个优质的文言文作者,用文言文描述着各城市的人文风景。"));
messages.add(new UserMessage("1 + 1 = "));
// invoke
Flux<ChatResponse> flux = chatModel.stream(new Prompt(messages));
// print the result
flux.doOnNext(response -> {
// System.out.println(response);
System.out.println(response.getResult().getOutput());
}).then().block();
}
// private final OllamaChatModel chatModel = OllamaChatModel.builder()
// .ollamaApi(new OllamaApi("http://127.0.0.1:11434")) // Ollama server address
// .defaultOptions(OllamaOptions.builder()
// .model(OllamaModel.LLAMA3.getName()) // model
// .build())
// .build();
//
// @Test
// @Disabled
// public void testCall() {
// // prepare parameters
// List<Message> messages = new ArrayList<>();
// messages.add(new SystemMessage("你是一个优质的文言文作者,用文言文描述着各城市的人文风景。"));
// messages.add(new UserMessage("1 + 1 = "));
//
// // invoke
// ChatResponse response = chatModel.call(new Prompt(messages));
// // print the result
// System.out.println(response);
// System.out.println(response.getResult().getOutput());
// }
//
// @Test
// @Disabled
// public void testStream() {
// // prepare parameters
// List<Message> messages = new ArrayList<>();
// messages.add(new SystemMessage("你是一个优质的文言文作者,用文言文描述着各城市的人文风景。"));
// messages.add(new UserMessage("1 + 1 = "));
//
// // invoke
// Flux<ChatResponse> flux = chatModel.stream(new Prompt(messages));
// // print the result
// flux.doOnNext(response -> {
//// System.out.println(response);
// System.out.println(response.getResult().getOutput());
// }).then().block();
// }
}


@@ -23,7 +23,9 @@ import java.util.List;
public class OllamaChatModelTests {
private final OllamaChatModel chatModel = OllamaChatModel.builder()
.ollamaApi(new OllamaApi("http://127.0.0.1:11434")) // Ollama server address
.ollamaApi(OllamaApi.builder()
.baseUrl("http://127.0.0.1:11434") // Ollama server address
.build())
.defaultOptions(OllamaOptions.builder()
// .model("qwen") // model: https://ollama.com/library/qwen
.model("deepseek-r1") // model: https://ollama.com/library/deepseek-r1


@@ -2,13 +2,13 @@ package cn.iocoder.yudao.module.ai.framework.ai.core.model.chat;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springaicommunity.qianfan.QianFanChatModel;
import org.springaicommunity.qianfan.QianFanChatOptions;
import org.springaicommunity.qianfan.api.QianFanApi;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.qianfan.QianFanChatModel;
import org.springframework.ai.qianfan.QianFanChatOptions;
import org.springframework.ai.qianfan.api.QianFanApi;
import reactor.core.publisher.Flux;
import java.util.ArrayList;
@@ -23,9 +23,9 @@ import java.util.List;
public class YiYanChatModelTests {
private final QianFanChatModel chatModel = new QianFanChatModel(
new QianFanApi("qS8k8dYr2nXunagK4SSU8Xjj", "pHGbx51ql2f0hOyabQvSZezahVC3hh3e"), // 密钥
new QianFanApi("DGnyzREuaY7av7c38bOM9Ji2", "9aR8myflEOPDrEeLhoXv0FdqANOAyIZW"), // 密钥
QianFanChatOptions.builder()
.model(QianFanApi.ChatModel.ERNIE_4_0_8K_Preview.getValue())
.model("ERNIE-4.5-8K-Preview")
.build()
);
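The hunk cuts off before the test methods, so here is a hedged sketch of how the chatModel field above would typically be exercised, mirroring the testCall pattern of the Ollama tests in this same commit (the prompt text is an arbitrary placeholder, and the method itself is not part of the commit):

// Illustrative only: follows the testCall style used elsewhere in this diff.
@Test
@Disabled // requires valid QianFan credentials and the upstream bug to be fixed
public void testCallSketch() {
    List<Message> messages = new ArrayList<>();
    messages.add(new UserMessage("1 + 1 = "));
    ChatResponse response = chatModel.call(new Prompt(messages));
    System.out.println(response.getResult().getOutput());
}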


@@ -2,11 +2,11 @@ package cn.iocoder.yudao.module.ai.framework.ai.core.model.image;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springaicommunity.qianfan.QianFanImageModel;
import org.springaicommunity.qianfan.QianFanImageOptions;
import org.springaicommunity.qianfan.api.QianFanImageApi;
import org.springframework.ai.image.ImagePrompt;
import org.springframework.ai.image.ImageResponse;
import org.springframework.ai.qianfan.QianFanImageModel;
import org.springframework.ai.qianfan.QianFanImageOptions;
import org.springframework.ai.qianfan.api.QianFanImageApi;
import static cn.iocoder.yudao.module.ai.framework.ai.core.model.image.StabilityAiImageModelTests.viewImage;
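Only the import block of this image test is visible in the diff, so as a rough sketch of how these types usually fit together (every constructor shape below is an assumption, not something confirmed by this commit):

// Illustrative only: assumed constructors for the community QianFan image client.
QianFanImageApi imageApi = new QianFanImageApi("yourApiKey", "yourSecretKey"); // assumed (apiKey, secretKey) constructor
QianFanImageModel imageModel = new QianFanImageModel(imageApi);                // assumed single-argument constructor
ImageResponse response = imageModel.call(new ImagePrompt("a watercolor painting of West Lake"));
System.out.println(response.getResult().getOutput().getUrl());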