File tree — Expand file tree / Collapse file tree — 3 files changed: +10 −4 lines changed

java/ch/xxx/aidoclibchat/usecase/service

Original file line number | Diff line number | Diff line change
16 16  import java.util.Optional;
17 17
18 18  import org.springframework.ai.chat.client.ChatClient;
   19 + import org.springframework.ai.chat.client.ChatClient.Builder;
19 20  import org.springframework.ai.mcp.SyncMcpToolCallbackProvider;
20 21  import org.springframework.stereotype.Service;
21 22
@@ -28,9 +29,9 @@ public class LocalMcpClient {
28 29      private final List<McpSyncClient> mcpSyncClients;
29 30      private final ChatClient chatClient;
30 31
31    -    public LocalMcpClient(List<McpSyncClient> mcpSyncClients, ChatClient.Builder chatClientBuilder) {
   32 +    public LocalMcpClient(List<McpSyncClient> mcpSyncClients, Builder builder) {
32 33          this.mcpSyncClients = mcpSyncClients;
33    -        this.chatClient = chatClientBuilder.build();
   34 +        this.chatClient = builder.build();
34 35      }
35 36
36 37      public McpResponseDto createResponse(McpRequestDto requestDto) {
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,7 @@ spring.liquibase.change-log=classpath:/dbchangelog/db.changelog-master-ollama.xml
13 13  # spring.ai.ollama.chat.options.model=qwen2.5:32b
14 14  # spring.ai.ollama.chat.options.model=deepseek-r1:14b
15 15  # spring.ai.ollama.chat.options.model=llama3.1:8b
16    - spring.ai.ollama.chat.options.num-ctx=12288
   16 + # spring.ai.ollama.chat.options.num-ctx=12288
17 17  spring.ai.embedding.transformer.onnx.modelUri=https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1/resolve/main/onnx/model_quantized.onnx
18 18  spring.ai.embedding.transformer.tokenizer.uri=https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1/resolve/main/tokenizer.json
19 19
@@ -28,7 +28,7 @@ spring.ai.embedding.transformer.tokenizer.uri=https://huggingface.co/mixedbread-
28 28
29 29  # image processing
30 30  # best model for vision currently
31    - spring.ai.ollama.chat.options.model=llama3.2-vision
   31 + # spring.ai.ollama.chat.options.model=llama3.2-vision
32 32  # spring.ai.ollama.chat.options.num-thread=8
33 33  # spring.ai.ollama.chat.options.keep_alive=1s
34 34
@@ -57,6 +57,10 @@ spring.ai.ollama.chat.options.model=llama3.2-vision
57 57  spring.ai.ollama.chat.options.num-thread=8
58 58  spring.ai.ollama.chat.options.keep_alive=1s
59 59
   60 + # mcp-server
   61 + spring.ai.ollama.chat.options.model=qwen3:32b
   62 + spring.ai.ollama.chat.options.num-ctx=32768
   63 +
60 64  # spring.ai.ollama.chat.options.model=llama3.1:70b
61 65  # spring.ai.ollama.chat.options.num-ctx=131072
62 66
Original file line number | Diff line number | Diff line change
@@ -17,4 +17,5 @@ docker exec -it ollama ollama run stable-beluga:13b
17 17  # docker exec -it ollama ollama run qwen2.5:14b
18 18  # docker exec -it ollama ollama run llama3.2-vision:11b
19 19  # docker exec -it ollama ollama run devstral:24b
   20 + # docker exec -it ollama ollama run qwen3:32b
20 21  # docker exec -it ollama bash
You can’t perform that action at this time.
0 commit comments