Skip to content

Add documentation how to use the OpenAI client to access DeepSeek #2120

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,343 @@
/*
* Copyright 2024-2025 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.ai.openai.chat.proxy;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Flux;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.model.Generation;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.converter.BeanOutputConverter;
import org.springframework.ai.converter.ListOutputConverter;
import org.springframework.ai.converter.MapOutputConverter;
import org.springframework.ai.model.function.FunctionCallback;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import org.springframework.ai.openai.api.tool.MockWeatherService;
import org.springframework.ai.openai.chat.ActorsFilms;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.io.Resource;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests that access DeepSeek chat models through the Spring AI OpenAI
 * client.
 *
 * <p>
 * The DeepSeek API uses an API format compatible with OpenAI, allowing developers to
 * easily integrate it into existing systems that use the OpenAI SDK.
 *
 * <p>
 * For more information on DeepSeek behavior, refer to its API documentation:
 * <a href="https://api-docs.deepseek.com/">DeepSeek API</a>
 *
 * @author Alexandros Pappas
 */
@SpringBootTest(classes = DeepSeekWithOpenAiChatModelIT.Config.class)
@EnabledIfEnvironmentVariable(named = "DEEPSEEK_API_KEY", matches = ".+")
@Disabled("Requires DeepSeek credits")
class DeepSeekWithOpenAiChatModelIT {

	private static final Logger logger = LoggerFactory.getLogger(DeepSeekWithOpenAiChatModelIT.class);

	// DeepSeek exposes an OpenAI-compatible endpoint at this base URL.
	private static final String DEEPSEEK_BASE_URL = "https://api.deepseek.com";

	private static final String DEFAULT_DEEPSEEK_MODEL = "deepseek-chat";

	@Value("classpath:/prompts/system-message.st")
	private Resource systemResource;

	@Autowired
	private OpenAiChatModel chatModel;

	@Test
	void roleTest() {
		// Verify the model honors a templated system message (persona + name).
		UserMessage userMessage = new UserMessage(
				"Tell me about 3 famous pirates from the Golden Age of Piracy and what they did.");
		SystemPromptTemplate systemPromptTemplate = new SystemPromptTemplate(this.systemResource);
		Message systemMessage = systemPromptTemplate.createMessage(Map.of("name", "Bob", "voice", "pirate"));
		Prompt prompt = new Prompt(List.of(userMessage, systemMessage));
		ChatResponse response = this.chatModel.call(prompt);
		assertThat(response.getResults()).hasSize(1);
		assertThat(response.getResults().get(0).getOutput().getText()).contains("Blackbeard");
	}

	@Test
	void streamRoleTest() {
		// Same scenario as roleTest, but consumed as a reactive stream of chunks.
		UserMessage userMessage = new UserMessage(
				"Tell me about 3 famous pirates from the Golden Age of Piracy and what they did.");
		SystemPromptTemplate systemPromptTemplate = new SystemPromptTemplate(this.systemResource);
		Message systemMessage = systemPromptTemplate.createMessage(Map.of("name", "Bob", "voice", "pirate"));
		Prompt prompt = new Prompt(List.of(userMessage, systemMessage));
		Flux<ChatResponse> flux = this.chatModel.stream(prompt);

		List<ChatResponse> responses = flux.collectList().block();
		assertThat(responses).hasSizeGreaterThan(1);

		// Concatenate the partial chunks back into the full response text.
		String stitchedResponseContent = responses.stream()
			.map(ChatResponse::getResults)
			.flatMap(List::stream)
			.map(Generation::getOutput)
			.map(AssistantMessage::getText)
			.collect(Collectors.joining());

		assertThat(stitchedResponseContent).contains("Blackbeard");
	}

	@Test
	void streamingWithTokenUsage() {
		// streamUsage(true) requests usage metadata on the final streamed chunk;
		// a fixed seed keeps the streaming and blocking calls comparable.
		var promptOptions = OpenAiChatOptions.builder().streamUsage(true).seed(1).build();

		var prompt = new Prompt("List two colors of the Polish flag. Be brief.", promptOptions);

		var streamingTokenUsage = this.chatModel.stream(prompt).blockLast().getMetadata().getUsage();
		var referenceTokenUsage = this.chatModel.call(prompt).getMetadata().getUsage();

		assertThat(streamingTokenUsage.getPromptTokens()).isGreaterThan(0);
		assertThat(streamingTokenUsage.getGenerationTokens()).isGreaterThan(0);
		assertThat(streamingTokenUsage.getTotalTokens()).isGreaterThan(0);

		// Streamed usage must match the equivalent blocking call.
		assertThat(streamingTokenUsage.getPromptTokens()).isEqualTo(referenceTokenUsage.getPromptTokens());
		assertThat(streamingTokenUsage.getGenerationTokens()).isEqualTo(referenceTokenUsage.getGenerationTokens());
		assertThat(streamingTokenUsage.getTotalTokens()).isEqualTo(referenceTokenUsage.getTotalTokens());

	}

	@Test
	void listOutputConverter() {
		// Verify structured output conversion into a List<String>.
		DefaultConversionService conversionService = new DefaultConversionService();
		ListOutputConverter outputConverter = new ListOutputConverter(conversionService);

		String format = outputConverter.getFormat();
		String template = """
				List five {subject}
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template,
				Map.of("subject", "ice cream flavors", "format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		List<String> list = outputConverter.convert(generation.getOutput().getText());
		assertThat(list).hasSize(5);

	}

	@Test
	void mapOutputConverter() {
		// Verify structured output conversion into a Map<String, Object>.
		MapOutputConverter outputConverter = new MapOutputConverter();

		String format = outputConverter.getFormat();
		String template = """
				Provide me a List of {subject}
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template,
				Map.of("subject", "numbers from 1 to 9 under the key name 'numbers'", "format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		Map<String, Object> result = outputConverter.convert(generation.getOutput().getText());
		assertThat(result.get("numbers")).isEqualTo(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));

	}

	@Test
	void beanOutputConverter() {
		// Verify structured output conversion into a JavaBean (ActorsFilms).
		BeanOutputConverter<ActorsFilms> outputConverter = new BeanOutputConverter<>(ActorsFilms.class);

		String format = outputConverter.getFormat();
		String template = """
				Generate the filmography for a random actor.
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template, Map.of("format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		ActorsFilms actorsFilms = outputConverter.convert(generation.getOutput().getText());
		assertThat(actorsFilms.getActor()).isNotEmpty();
	}

	@Test
	void beanOutputConverterRecords() {
		// Verify structured output conversion into a Java record.
		BeanOutputConverter<ActorsFilmsRecord> outputConverter = new BeanOutputConverter<>(ActorsFilmsRecord.class);

		String format = outputConverter.getFormat();
		String template = """
				Generate the filmography of 5 movies for Tom Hanks.
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template, Map.of("format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		ActorsFilmsRecord actorsFilms = outputConverter.convert(generation.getOutput().getText());
		logger.info("{}", actorsFilms);
		assertThat(actorsFilms.actor()).isEqualTo("Tom Hanks");
		assertThat(actorsFilms.movies()).hasSize(5);
	}

	@Test
	void beanStreamOutputConverterRecords() {
		// Same record conversion as above, but assembled from a streamed response.
		BeanOutputConverter<ActorsFilmsRecord> outputConverter = new BeanOutputConverter<>(ActorsFilmsRecord.class);

		String format = outputConverter.getFormat();
		String template = """
				Generate the filmography of 5 movies for Tom Hanks.
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template, Map.of("format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());

		String generationTextFromStream = this.chatModel.stream(prompt)
			.collectList()
			.block()
			.stream()
			.map(ChatResponse::getResults)
			.flatMap(List::stream)
			.map(Generation::getOutput)
			.map(AssistantMessage::getText)
			.collect(Collectors.joining());

		ActorsFilmsRecord actorsFilms = outputConverter.convert(generationTextFromStream);
		logger.info("{}", actorsFilms);
		assertThat(actorsFilms.actor()).isEqualTo("Tom Hanks");
		assertThat(actorsFilms.movies()).hasSize(5);
	}

	@Test
	@Disabled("The current version of the deepseek-chat model's Function Calling capability is unstable, which may result in looped calls or empty responses.")
	void functionCallTest() {
		// Verify the model can invoke a registered function callback (mock weather).
		UserMessage userMessage = new UserMessage("What's the weather like in San Francisco, Tokyo, and Paris?");

		List<Message> messages = new ArrayList<>(List.of(userMessage));

		var promptOptions = OpenAiChatOptions.builder()
			.functionCallbacks(List.of(FunctionCallback.builder()
				.function("getCurrentWeather", new MockWeatherService())
				.description("Get the weather in location")
				.inputType(MockWeatherService.Request.class)
				.build()))
			.build();

		ChatResponse response = this.chatModel.call(new Prompt(messages, promptOptions));

		logger.info("Response: {}", response);

		// The mock service returns 30, 10 and 15 degrees for the three cities.
		assertThat(response.getResult().getOutput().getText()).contains("30", "10", "15");
	}

	@Test
	@Disabled("The current version of the deepseek-chat model's Function Calling capability is unstable, which may result in looped calls or empty responses.")
	void streamFunctionCallTest() {
		// Same function-calling scenario as above, but over the streaming API.
		UserMessage userMessage = new UserMessage(
				"What's the weather like in San Francisco, Tokyo, and Paris? Return the temperature in Celsius.");

		List<Message> messages = new ArrayList<>(List.of(userMessage));

		var promptOptions = OpenAiChatOptions.builder()
			.functionCallbacks(List.of(FunctionCallback.builder()
				.function("getCurrentWeather", new MockWeatherService())
				.description("Get the weather in location")
				.inputType(MockWeatherService.Request.class)
				.build()))
			.build();

		Flux<ChatResponse> response = this.chatModel.stream(new Prompt(messages, promptOptions));

		String content = response.collectList()
			.block()
			.stream()
			.map(ChatResponse::getResults)
			.flatMap(List::stream)
			.map(Generation::getOutput)
			.map(AssistantMessage::getText)
			.collect(Collectors.joining());
		logger.info("Response: {}", content);

		assertThat(content).contains("30", "10", "15");
	}

	@ParameterizedTest(name = "{0} : {displayName} ")
	@ValueSource(strings = { "deepseek-chat", "deepseek-reasoner" })
	void validateCallResponseMetadata(String model) {
		// Verify response metadata (id, model name, token usage) for each model.
		// @formatter:off
		ChatResponse response = ChatClient.create(this.chatModel).prompt()
				.options(OpenAiChatOptions.builder().model(model).build())
				.user("Tell me about 3 famous pirates from the Golden Age of Piracy and what they did")
				.call()
				.chatResponse();
		// @formatter:on

		logger.info("{}", response);
		assertThat(response.getMetadata().getId()).isNotEmpty();
		assertThat(response.getMetadata().getModel()).containsIgnoringCase(model);
		assertThat(response.getMetadata().getUsage().getPromptTokens()).isPositive();
		assertThat(response.getMetadata().getUsage().getGenerationTokens()).isPositive();
		assertThat(response.getMetadata().getUsage().getTotalTokens()).isPositive();
	}

	// Target type for the record-based structured output tests.
	record ActorsFilmsRecord(String actor, List<String> movies) {

	}

	/**
	 * Test configuration that points the standard OpenAI client at the DeepSeek
	 * endpoint, authenticating with the DEEPSEEK_API_KEY environment variable.
	 */
	@SpringBootConfiguration
	static class Config {

		@Bean
		public OpenAiApi chatCompletionApi() {
			return new OpenAiApi(DEEPSEEK_BASE_URL, System.getenv("DEEPSEEK_API_KEY"));
		}

		@Bean
		public OpenAiChatModel openAiClient(OpenAiApi openAiApi) {
			return new OpenAiChatModel(openAiApi, OpenAiChatOptions.builder().model(DEFAULT_DEEPSEEK_MODEL).build());
		}

	}

}
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
1 change: 1 addition & 0 deletions spring-ai-docs/src/main/antora/modules/ROOT/nav.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
**** xref:api/chat/functions/anthropic-chat-functions.adoc[Anthropic Function Calling]
*** xref:api/chat/azure-openai-chat.adoc[Azure OpenAI]
**** xref:api/chat/functions/azure-open-ai-chat-functions.adoc[Azure OpenAI Function Calling]
*** xref:api/chat/deepseek-chat.adoc[DeepSeek AI]
*** xref:api/chat/google-vertexai.adoc[Google VertexAI]
**** xref:api/chat/vertexai-gemini-chat.adoc[VertexAI Gemini]
***** xref:api/chat/functions/vertexai-gemini-chat-functions.adoc[Gemini Function Calling]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ This table compares various Chat Models supported by Spring AI, detailing their

| xref::api/chat/anthropic-chat.adoc[Anthropic Claude] | text, pdf, image ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12]
| xref::api/chat/azure-openai-chat.adoc[Azure OpenAI] | text, image ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16]
| xref::api/chat/deepseek-chat.adoc[DeepSeek (OpenAI-proxy)] | text ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16]
| xref::api/chat/vertexai-gemini-chat.adoc[Google VertexAI Gemini] | text, pdf, image, audio, video ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16]
| xref::api/chat/groq-chat.adoc[Groq (OpenAI-proxy)] | text, image ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16]
| xref::api/chat/huggingface.adoc[HuggingFace] | text ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12]
Expand Down
Loading
Loading