Commit 100d60e

feat: add model
1 parent e550c89 commit 100d60e

3 files changed: 10 additions, 4 deletions

backend/src/main/java/ch/xxx/aidoclibchat/usecase/service/LocalMcpClient.java

Lines changed: 3 additions & 2 deletions
@@ -16,6 +16,7 @@
 import java.util.Optional;
 
 import org.springframework.ai.chat.client.ChatClient;
+import org.springframework.ai.chat.client.ChatClient.Builder;
 import org.springframework.ai.mcp.SyncMcpToolCallbackProvider;
 import org.springframework.stereotype.Service;
 
@@ -28,9 +29,9 @@ public class LocalMcpClient {
 	private final List<McpSyncClient> mcpSyncClients;
 	private final ChatClient chatClient;
 
-	public LocalMcpClient(List<McpSyncClient> mcpSyncClients, ChatClient.Builder chatClientBuilder) {
+	public LocalMcpClient(List<McpSyncClient> mcpSyncClients, Builder builder) {
 		this.mcpSyncClients = mcpSyncClients;
-		this.chatClient = chatClientBuilder.build();
+		this.chatClient = builder.build();
 	}
 
 	public McpResponseDto createResponse(McpRequestDto requestDto) {
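
The constructor now takes the imported Builder type instead of the fully qualified ChatClient.Builder; behavior is unchanged, since the service still builds its ChatClient from Spring AI's auto-configured builder bean. Below is a minimal sketch of that injection pattern, assuming the auto-configured ChatClient.Builder bean; the class name ExampleMcpService and the ask method are illustrative only, and the real LocalMcpClient additionally wires the MCP tool callbacks.

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.ChatClient.Builder;
import org.springframework.stereotype.Service;

// Illustrative sketch only, not part of this repository.
@Service
public class ExampleMcpService {

	private final ChatClient chatClient;

	// Spring AI auto-configuration provides a ChatClient.Builder bean;
	// the service builds its own ChatClient instance from it.
	public ExampleMcpService(Builder builder) {
		this.chatClient = builder.build();
	}

	public String ask(String question) {
		// Plain prompt/call round trip; MCP tool wiring omitted for brevity.
		return this.chatClient.prompt().user(question).call().content();
	}
}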

backend/src/main/resources/application-ollama.properties

Lines changed: 6 additions & 2 deletions
@@ -13,7 +13,7 @@ spring.liquibase.change-log=classpath:/dbchangelog/db.changelog-master-ollama.xm
 #spring.ai.ollama.chat.options.model=qwen2.5:32b
 #spring.ai.ollama.chat.options.model=deepseek-r1:14b
 #spring.ai.ollama.chat.options.model=llama3.1:8b
-spring.ai.ollama.chat.options.num-ctx=12288
+#spring.ai.ollama.chat.options.num-ctx=12288
 spring.ai.embedding.transformer.onnx.modelUri=https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1/resolve/main/onnx/model_quantized.onnx
 spring.ai.embedding.transformer.tokenizer.uri=https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1/resolve/main/tokenizer.json
 
@@ -28,7 +28,7 @@ spring.ai.embedding.transformer.tokenizer.uri=https://huggingface.co/mixedbread-
 
 # image processing
 # best model for vision currently
-spring.ai.ollama.chat.options.model=llama3.2-vision
+#spring.ai.ollama.chat.options.model=llama3.2-vision
 #spring.ai.ollama.chat.options.num-thread=8
 #spring.ai.ollama.chat.options.keep_alive=1s
 
@@ -57,6 +57,10 @@ spring.ai.ollama.chat.options.model=llama3.2-vision
 spring.ai.ollama.chat.options.num-thread=8
 spring.ai.ollama.chat.options.keep_alive=1s
 
+# mcp-server
+spring.ai.ollama.chat.options.model=qwen3:32b
+spring.ai.ollama.chat.options.num-ctx=32768
+
 #spring.ai.ollama.chat.options.model=llama3.1:70b
 #spring.ai.ollama.chat.options.num-ctx=131072
 
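
This change comments out the previously active num-ctx=12288 and llama3.2-vision entries and adds an "# mcp-server" block setting qwen3:32b with a 32768-token context, so the ollama profile is left with a single active model/num-ctx pair (duplicate keys in a properties file resolve to the last occurrence, so removing the older entries keeps the intent explicit). A small sketch for verifying which values the active profile resolves, assuming a Spring Boot 3 / Jakarta setup; the OllamaOptionsLogger class is illustrative only and not part of this repository.

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import jakarta.annotation.PostConstruct;

// Illustrative sketch only: logs the Ollama chat options the active profile
// resolved, e.g. qwen3:32b with num-ctx 32768 after this commit.
@Component
public class OllamaOptionsLogger {

	@Value("${spring.ai.ollama.chat.options.model}")
	private String model;

	@Value("${spring.ai.ollama.chat.options.num-ctx}")
	private int numCtx;

	@PostConstruct
	public void logOptions() {
		System.out.printf("Ollama chat model=%s, num-ctx=%d%n", model, numCtx);
	}
}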

runOllama.sh

Lines changed: 1 addition & 0 deletions
@@ -17,4 +17,5 @@ docker exec -it ollama ollama run stable-beluga:13b
 #docker exec -it ollama ollama run qwen2.5:14b
 #docker exec -it ollama ollama run llama3.2-vision:11b
 #docker exec -it ollama ollama run devstral:24b
+#docker exec -it ollama ollama run qwen3:32b
 #docker exec -it ollama bash
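
runOllama.sh gains a commented-out command to pull and run qwen3:32b, matching the model the ollama profile now configures. Below is a hedged sketch for checking that the model is actually available before starting the backend, assuming Ollama's default port 11434 and its /api/tags endpoint, which lists locally pulled models; the CheckOllamaModel class is illustrative only and not part of this repository.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Illustrative sketch only: queries the local Ollama instance and reports
// whether qwen3:32b has been pulled.
public class CheckOllamaModel {

	public static void main(String[] args) throws Exception {
		// GET /api/tags returns the locally available models as JSON.
		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:11434/api/tags"))
				.GET()
				.build();
		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());
		// Naive substring check; a JSON parser would be more robust.
		boolean present = response.body().contains("qwen3:32b");
		System.out.println("qwen3:32b available: " + present);
	}
}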
