
Commit 7d200c8

fix: resolve LM Studio not supporting HTTP/2

1 parent b6a258f commit 7d200c8
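
For context: LM Studio's local OpenAI-compatible server only speaks HTTP/1.1, so a client that tries to negotiate HTTP/2 fails against it. The sketch below is a minimal, hypothetical probe showing how to check which protocol version a JDK HttpClient actually negotiated once it is pinned to HTTP/1.1, as this commit does; the localhost:1234 host/port and the /v1/models path are assumptions about a default LM Studio setup and are not taken from this repository.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

public class Http11Probe {
    public static void main(String[] args) throws Exception {
        // Pin the JDK client to HTTP/1.1 so no HTTP/2 upgrade is attempted
        // against servers that cannot handle it.
        HttpClient client = HttpClient.newBuilder()
                .version(HttpClient.Version.HTTP_1_1)
                .connectTimeout(Duration.ofSeconds(30))
                .build();

        // Assumed default LM Studio endpoint; adjust host, port and path as needed.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:1234/v1/models"))
                .GET()
                .build();

        HttpResponse<String> response =
                client.send(request, HttpResponse.BodyHandlers.ofString());
        // Prints the negotiated protocol (HTTP_1_1 here) and the status code.
        System.out.println(response.version() + " " + response.statusCode());
    }
}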

File tree

2 files changed (+35 / -10 lines)


src/main/java/com/xiaozhi/dialogue/llm/factory/ChatModelFactory.java

Lines changed: 33 additions & 8 deletions
@@ -4,6 +4,10 @@
 import com.xiaozhi.dialogue.llm.providers.DifyChatModel;
 import com.xiaozhi.entity.SysConfig;
 import com.xiaozhi.service.SysConfigService;
+
+import java.net.http.HttpClient;
+import java.time.Duration;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.ai.chat.model.ChatModel;
@@ -20,29 +24,36 @@
 import org.springframework.ai.zhipuai.ZhiPuAiChatOptions;
 import org.springframework.ai.zhipuai.api.ZhiPuAiApi;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.client.JdkClientHttpRequestFactory;
+import org.springframework.http.client.reactive.JdkClientHttpConnector;
 import org.springframework.stereotype.Component;
 import org.springframework.util.Assert;
 import org.springframework.util.LinkedMultiValueMap;
 import org.springframework.util.MultiValueMap;
 import org.springframework.util.StringUtils;
+import org.springframework.web.client.RestClient;
+import org.springframework.web.reactive.function.client.WebClient;
 
 /**
  * ChatModel factory.
  * Creates the matching ChatModel for the configured model ID.
  */
 @Component
 public class ChatModelFactory {
-    @Autowired private SysConfigService configService;
-    @Autowired private ToolCallingManager toolCallingManager;
+    @Autowired
+    private SysConfigService configService;
+    @Autowired
+    private ToolCallingManager toolCallingManager;
     private final Logger logger = LoggerFactory.getLogger(ChatModelFactory.class);
 
     /**
      * Creates a ChatModel from a config ID; cached on first call, keyed by the config ID.
-     * @see SysConfigService#selectConfigById(Integer) is already Cacheable, so there is no need to cache here
+     *
+     * @see SysConfigService#selectConfigById(Integer) is already Cacheable, so there is no need to cache here
      * @param configId the config ID (which is in fact the model ID).
      * @return
      */
-    public ChatModel takeChatModel(Integer configId){
+    public ChatModel takeChatModel(Integer configId) {
         Assert.notNull(configId, "配置ID不能为空");
         // Look up the configuration by config ID
         SysConfig config = configService.selectConfigById(configId);
@@ -51,6 +62,7 @@ public ChatModel takeChatModel(Integer configId){
 
     /**
      * Creates a ChatModel
+     *
     * @param config
     * @return
     */
@@ -87,19 +99,32 @@ private ChatModel newOllamaChatModel(String endpoint, String appId, String apiKe
                 .model(model)
                 .build())
             .build();
-        logger.info( "Using Ollama model: {}" , model);
+        logger.info("Using Ollama model: {}", model);
         return chatModel;
     }
 
     private ChatModel newOpenAiChatModel(String endpoint, String appId, String apiKey, String apiSecret, String model) {
         MultiValueMap<String, String> headers = new LinkedMultiValueMap<>();
         headers.add("Content-Type", "application/json");
 
+        // LM Studio does not support HTTP/2, so HTTP/1.1 must be forced
         var openAiApi = OpenAiApi.builder()
             .apiKey(StringUtils.hasText(apiKey) ? new SimpleApiKey(apiKey) : new NoopApiKey())
             .baseUrl(endpoint)
             .completionsPath("/chat/completions")
             .headers(headers)
+            .webClientBuilder(WebClient.builder()
+                // Force HTTP/1.1 for streaming
+                .clientConnector(new JdkClientHttpConnector(HttpClient.newBuilder()
+                    .version(HttpClient.Version.HTTP_1_1)
+                    .connectTimeout(Duration.ofSeconds(30))
+                    .build())))
+            .restClientBuilder(RestClient.builder()
+                // Force HTTP/1.1 for non-streaming
+                .requestFactory(new JdkClientHttpRequestFactory(HttpClient.newBuilder()
+                    .version(HttpClient.Version.HTTP_1_1)
+                    .connectTimeout(Duration.ofSeconds(30))
+                    .build())))
             .build();
         var openAiChatOptions = OpenAiChatOptions.builder()
             .model(model)
@@ -110,17 +135,17 @@ private ChatModel newOpenAiChatModel(String endpoint, String appId, String apiKe
             .defaultOptions(openAiChatOptions)
             .toolCallingManager(toolCallingManager)
             .build();
-        logger.info( "Using OpenAi model: {}" , model);
+        logger.info("Using OpenAi model: {}", model);
         return chatModel;
     }
 
     private ChatModel newZhipuChatModel(String endpoint, String appId, String apiKey, String apiSecret, String model) {
-        var zhiPuAiApi = new ZhiPuAiApi(endpoint,apiKey);
+        var zhiPuAiApi = new ZhiPuAiApi(endpoint, apiKey);
 
         var chatModel = new ZhiPuAiChatModel(zhiPuAiApi, ZhiPuAiChatOptions.builder()
             .model(model)
             .build());
-        logger.info( "Using zhiPu model: {}" , model);
+        logger.info("Using zhiPu model: {}", model);
         return chatModel;
     }
 }
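
Distilled from the diff above, here is a minimal standalone sketch of the same technique: a single JDK HttpClient pinned to HTTP/1.1 is plugged into both Spring's reactive WebClient (the streaming path) and the blocking RestClient (the non-streaming path). The class and method names (Http11Clients, http11Client, and so on) are illustrative only and do not appear in this repository.

import java.net.http.HttpClient;
import java.time.Duration;

import org.springframework.http.client.JdkClientHttpRequestFactory;
import org.springframework.http.client.reactive.JdkClientHttpConnector;
import org.springframework.web.client.RestClient;
import org.springframework.web.reactive.function.client.WebClient;

public class Http11Clients {

    // A JDK HttpClient locked to HTTP/1.1; LM Studio cannot negotiate HTTP/2.
    private static HttpClient http11Client() {
        return HttpClient.newBuilder()
                .version(HttpClient.Version.HTTP_1_1)
                .connectTimeout(Duration.ofSeconds(30))
                .build();
    }

    // Builder for the reactive WebClient used for streaming chat completions.
    public static WebClient.Builder webClientBuilder() {
        return WebClient.builder()
                .clientConnector(new JdkClientHttpConnector(http11Client()));
    }

    // Builder for the blocking RestClient used for non-streaming completions.
    public static RestClient.Builder restClientBuilder() {
        return RestClient.builder()
                .requestFactory(new JdkClientHttpRequestFactory(http11Client()));
    }
}

Both builders are overridden because, as the diff's own comments note, streaming and non-streaming requests go through separate clients; pinning only one of them would leave the other path still attempting an HTTP/2 upgrade.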

web/src/views/page/Chat.vue

Lines changed: 2 additions & 2 deletions
@@ -484,7 +484,7 @@ export default {
 
       // If already connected but the configuration changed, prompt to reconnect
       if (isConnected) {
-        message.info('配置已保存,需要重新连接才能生效');
+        this.$message.info('配置已保存,需要重新连接才能生效');
       } else {
         // Try to connect
         this.connect();
@@ -518,7 +518,7 @@ export default {
       // Update local state immediately
       this.localIsConnected = false;
       this.localConnectionStatus = '已断开';
-      message.info('已断开连接');
+      this.message.info('已断开连接');
     },
   }
 };
