
Commit 3dfea48

tzolov authored and markpollack committed
feat(anthropic): update to Claude 3.7 Sonnet and refactor API

- Add support for the Claude 3.7 Sonnet model and make it the default
- Rename function-related APIs to tool-related APIs for consistency:
  - Change functionCallbacks to toolCallbacks
  - Change function to toolNames
  - Replace FunctionCallingOptions with ToolCallingChatOptions
- Refactor AnthropicChatModel instantiation to use the builder pattern
- Update tests to use latest model versions instead of dated versions

Signed-off-by: Christian Tzolov <[email protected]>
1 parent c91163b commit 3dfea48
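For orientation, here is a minimal before/after sketch of the refactor described in the commit message, assembled from the diffs below rather than taken verbatim from the repository; the API-key lookup is a placeholder and not part of this commit.

// Before this commit: constructor-based instantiation with function-oriented options, e.g.
//   new AnthropicChatModel(anthropicApi, AnthropicChatOptions.builder().functionCallbacks(...).build())
//
// After this commit: builder-based instantiation with tool-oriented options (sketch):
AnthropicApi anthropicApi = new AnthropicApi(System.getenv("ANTHROPIC_API_KEY")); // placeholder key source

AnthropicChatModel chatModel = AnthropicChatModel.builder()
    .anthropicApi(anthropicApi)
    .defaultOptions(AnthropicChatOptions.builder()
        .model(AnthropicApi.ChatModel.CLAUDE_3_7_SONNET.getValue()) // new default model
        .build())
    .build();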

8 files changed, +26 -20 lines changed

Diff for: models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java (+1 -1)

@@ -93,7 +93,7 @@
  */
 public class AnthropicChatModel extends AbstractToolCallSupport implements ChatModel {

-	public static final String DEFAULT_MODEL_NAME = AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getValue();
+	public static final String DEFAULT_MODEL_NAME = AnthropicApi.ChatModel.CLAUDE_3_7_SONNET.getValue();

 	public static final Integer DEFAULT_MAX_TOKENS = 500;

Diff for: models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java (+5)

@@ -227,6 +227,11 @@ public Flux<ChatCompletionResponse> chatCompletionStream(ChatCompletionRequest c
 	public enum ChatModel implements ChatModelDescription {

 		// @formatter:off
+		/**
+		 * The claude-3-7-sonnet-latest model.
+		 */
+		CLAUDE_3_7_SONNET("claude-3-7-sonnet-latest"),
+
 		/**
 		 * The claude-3-5-sonnet-20241022 model.
 		 */

Diff for: models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java (+9 -9)

@@ -89,8 +89,8 @@ private static void validateChatResponseMetadata(ChatResponse response, String m
 	}

 	@ParameterizedTest(name = "{0} : {displayName} ")
-	@ValueSource(strings = { "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307",
-			"claude-3-5-sonnet-20241022" })
+	@ValueSource(strings = { "claude-3-7-sonnet-latest", "claude-3-5-sonnet-latest", "claude-3-5-haiku-latest",
+			"claude-3-opus-latest" })
 	void roleTest(String modelName) {
 		UserMessage userMessage = new UserMessage(
 				"Tell me about 3 famous pirates from the Golden Age of Piracy and why they did.");

@@ -275,11 +275,11 @@ void functionCallTest() {

 		var promptOptions = AnthropicChatOptions.builder()
 			.model(AnthropicApi.ChatModel.CLAUDE_3_OPUS.getName())
-			.functionCallbacks(List.of(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
+			.toolCallbacks(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
 				.description(
 						"Get the weather in location. Return temperature in 36°F or 36°C format. Use multi-turn if needed.")
 				.inputType(MockWeatherService.Request.class)
-				.build()))
+				.build())
 			.build();

 		ChatResponse response = this.chatModel.call(new Prompt(messages, promptOptions));

@@ -307,11 +307,11 @@ void streamFunctionCallTest() {

 		var promptOptions = AnthropicChatOptions.builder()
 			.model(AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getName())
-			.functionCallbacks(List.of(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
+			.toolCallbacks(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
 				.description(
 						"Get the weather in location. Return temperature in 36°F or 36°C format. Use multi-turn if needed.")
 				.inputType(MockWeatherService.Request.class)
-				.build()))
+				.build())
 			.build();

 		Flux<ChatResponse> response = this.chatModel.stream(new Prompt(messages, promptOptions));

@@ -337,11 +337,11 @@ void streamFunctionCallUsageTest() {

 		var promptOptions = AnthropicChatOptions.builder()
 			.model(AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getName())
-			.functionCallbacks(List.of(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
+			.toolCallbacks(FunctionToolCallback.builder("getCurrentWeather", new MockWeatherService())
 				.description(
 						"Get the weather in location. Return temperature in 36°F or 36°C format. Use multi-turn if needed.")
 				.inputType(MockWeatherService.Request.class)
-				.build()))
+				.build())
 			.build();

 		Flux<ChatResponse> responseFlux = this.chatModel.stream(new Prompt(messages, promptOptions));

@@ -410,7 +410,7 @@ private String getApiKey() {

 	@Bean
 	public AnthropicChatModel openAiChatModel(AnthropicApi api) {
-		return new AnthropicChatModel(api);
+		return AnthropicChatModel.builder().anthropicApi(api).build();
 	}

 }

Diff for: models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicTestConfiguration.java (+1 -2)

@@ -40,8 +40,7 @@ private String getApiKey() {

 	@Bean
 	public AnthropicChatModel anthropicChatModel(AnthropicApi api) {
-		AnthropicChatModel anthropicChatModel = new AnthropicChatModel(api);
-		return anthropicChatModel;
+		return AnthropicChatModel.builder().anthropicApi(api).build();
 	}

 }

Diff for: models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/ChatCompletionRequestTests.java (+4 -2)

@@ -33,8 +33,10 @@ public class ChatCompletionRequestTests {
 	@Test
 	public void createRequestWithChatOptions() {

-		var client = new AnthropicChatModel(new AnthropicApi("TEST"),
-				AnthropicChatOptions.builder().model("DEFAULT_MODEL").temperature(66.6).build());
+		var client = AnthropicChatModel.builder()
+			.anthropicApi(new AnthropicApi("TEST"))
+			.defaultOptions(AnthropicChatOptions.builder().model("DEFAULT_MODEL").temperature(66.6).build())
+			.build();

 		var prompt = client.buildRequestPrompt(new Prompt("Test message content"));

Diff for: spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/anthropic-chat.adoc (+1 -1)

@@ -102,7 +102,7 @@ The prefix `spring.ai.anthropic.chat` is the property prefix that lets you confi
 | Property | Description | Default

 | spring.ai.anthropic.chat.enabled | Enable Anthropic chat model. | true
-| spring.ai.anthropic.chat.options.model | This is the Anthropic Chat model to use. Supports: `claude-3-5-sonnet-20241022`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`, `claude-3-haiku-20240307` and the legacy `claude-2.1`, `claude-2.0` and `claude-instant-1.2` models. | `claude-3-opus-20240229`
+| spring.ai.anthropic.chat.options.model | This is the Anthropic Chat model to use. Supports: `claude-3-7-sonnet-latest`, `claude-3-5-sonnet-latest`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`, `claude-3-haiku-20240307` and the legacy `claude-2.1`, `claude-2.0` and `claude-instant-1.2` models. | `claude-3-7-sonnet-latest`
 | spring.ai.anthropic.chat.options.temperature | The sampling temperature to use that controls the apparent creativity of generated completions. Higher values will make output more random while lower values will make results more focused and deterministic. It is not recommended to modify temperature and top_p for the same completions request as the interaction of these two settings is difficult to predict. | 0.8
 | spring.ai.anthropic.chat.options.max-tokens | The maximum number of tokens to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. | 500
 | spring.ai.anthropic.chat.options.stop-sequence | Custom text sequences that will cause the model to stop generating. Our models will normally stop when they have naturally completed their turn, which will result in a response stop_reason of "end_turn". If you want the model to stop generating when it encounters custom strings of text, you can use the stop_sequences parameter. If the model encounters one of the custom sequences, the response stop_reason value will be "stop_sequence" and the response stop_sequence value will contain the matched stop sequence. | -
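As context for the property change above, the documented default model can also be overridden per request through AnthropicChatOptions. A minimal sketch follows, using only builder methods that appear elsewhere in this diff; the prompt text and temperature value are illustrative and not part of this commit.

// Sketch: overriding spring.ai.anthropic.chat.options.model for a single request.
ChatResponse response = chatModel.call(new Prompt(
		"Name three famous lighthouses.", // illustrative prompt
		AnthropicChatOptions.builder()
			.model("claude-3-5-sonnet-latest") // overrides the configured default model
			.temperature(0.4) // illustrative value
			.build()));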

Diff for: spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/anthropic/tool/FunctionCallWithFunctionBeanIT.java (+4 -4)

@@ -33,7 +33,7 @@
 import org.springframework.ai.chat.messages.UserMessage;
 import org.springframework.ai.chat.model.ChatResponse;
 import org.springframework.ai.chat.prompt.Prompt;
-import org.springframework.ai.model.function.FunctionCallingOptions;
+import org.springframework.ai.model.tool.ToolCallingChatOptions;
 import org.springframework.boot.autoconfigure.AutoConfigurations;
 import org.springframework.boot.test.context.runner.ApplicationContextRunner;
 import org.springframework.context.annotation.Bean;

@@ -66,14 +66,14 @@ void functionCallTest() {
 				"What's the weather like in San Francisco, in Paris, France and in Tokyo, Japan? Return the temperature in Celsius.");

 		ChatResponse response = chatModel.call(new Prompt(List.of(userMessage),
-				AnthropicChatOptions.builder().function("weatherFunction").build()));
+				AnthropicChatOptions.builder().toolNames("weatherFunction").build()));

 		logger.info("Response: {}", response);

 		assertThat(response.getResult().getOutput().getText()).contains("30", "10", "15");

 		response = chatModel.call(new Prompt(List.of(userMessage),
-				AnthropicChatOptions.builder().function("weatherFunction3").build()));
+				AnthropicChatOptions.builder().toolNames("weatherFunction3").build()));

 		logger.info("Response: {}", response);

@@ -96,7 +96,7 @@ void functionCallWithPortableFunctionCallingOptions() {
 				"What's the weather like in San Francisco, in Paris, France and in Tokyo, Japan? Return the temperature in Celsius.");

 		ChatResponse response = chatModel.call(new Prompt(List.of(userMessage),
-				FunctionCallingOptions.builder().function("weatherFunction").build()));
+				ToolCallingChatOptions.builder().toolNames("weatherFunction").build()));

 		logger.info("Response: {}", response);

Diff for: spring-ai-spring-boot-autoconfigure/src/test/java/org/springframework/ai/autoconfigure/anthropic/tool/FunctionCallWithPromptFunctionIT.java (+1 -1)

@@ -58,7 +58,7 @@ void functionCallTest() {
 				"What's the weather like in San Francisco, in Paris and in Tokyo? Return the temperature in Celsius.");

 		var promptOptions = AnthropicChatOptions.builder()
-			.functionCallbacks(
+			.toolCallbacks(
 					List.of(FunctionToolCallback.builder("CurrentWeatherService", new MockWeatherService())
 						.description("Get the weather in location. Return temperature in 36°F or 36°C format.")
 						.inputType(MockWeatherService.Request.class)

0 commit comments