
Commit a1980ec

Update Claude 3.5 Sonnet model version to 20241022
- Update Claude 3.5 Sonnet model version from 20240620 to 20241022 across:
  - AnthropicApi model definitions
  - Integration tests
  - Sample events JSON
  - Documentation pages
- Upgrade Ollama container to 0.3.14 in tests
- Add llama3.2:1b model to Ollama tests
- Convert Ollama functionCallTest to parameterized test
1 parent 7b06fcf commit a1980ec

7 files changed: +13 -11 lines changed

models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/api/AnthropicApi.java

Lines changed: 1 addition & 1 deletion
@@ -150,7 +150,7 @@ public AnthropicApi(String baseUrl, String anthropicApiKey, String anthropicVers
 	public enum ChatModel implements ChatModelDescription {

 		// @formatter:off
-		CLAUDE_3_5_SONNET("claude-3-5-sonnet-20240620"),
+		CLAUDE_3_5_SONNET("claude-3-5-sonnet-20241022"),

 		CLAUDE_3_OPUS("claude-3-opus-20240229"),
 		CLAUDE_3_SONNET("claude-3-sonnet-20240229"),
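For orientation, here is a minimal, self-contained sketch (plain Java, not the actual Spring AI classes) of the enum-backed model-id pattern this hunk touches. The real AnthropicApi.ChatModel implements ChatModelDescription and carries more constants, but the point is the same: callers reference the constant, so the snapshot bump stays confined to a single string literal.

// Simplified stand-in for AnthropicApi.ChatModel; the constant names and model
// ids mirror the diff above, the surrounding plumbing is invented for this sketch.
public class ChatModelSketch {

    enum ChatModel {

        // Only this literal changed in the commit.
        CLAUDE_3_5_SONNET("claude-3-5-sonnet-20241022"),
        CLAUDE_3_OPUS("claude-3-opus-20240229"),
        CLAUDE_3_SONNET("claude-3-sonnet-20240229");

        private final String value;

        ChatModel(String value) {
            this.value = value;
        }

        String value() {
            return this.value;
        }
    }

    public static void main(String[] args) {
        // Call sites pick up the new model id without any further changes.
        System.out.println(ChatModel.CLAUDE_3_5_SONNET.value());
    }
}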

models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java

Lines changed: 1 addition & 1 deletion
@@ -78,7 +78,7 @@ class AnthropicChatModelIT {

 	@ParameterizedTest(name = "{0} : {displayName} ")
 	@ValueSource(strings = { "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307",
-			"claude-3-5-sonnet-20240620" })
+			"claude-3-5-sonnet-20241022" })
 	void roleTest(String modelName) {
 		UserMessage userMessage = new UserMessage(
 				"Tell me about 3 famous pirates from the Golden Age of Piracy and why they did.");

models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/client/AnthropicChatClientIT.java

Lines changed: 2 additions & 2 deletions
@@ -260,7 +260,7 @@ void streamFunctionCallTest() {

 	@ParameterizedTest(name = "{0} : {displayName} ")
 	@ValueSource(strings = { "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307",
-			"claude-3-5-sonnet-20240620" })
+			"claude-3-5-sonnet-20241022" })
 	void multiModalityEmbeddedImage(String modelName) throws IOException {

 		// @formatter:off
@@ -280,7 +280,7 @@ void multiModalityEmbeddedImage(String modelName) throws IOException {
 	@Disabled("Currently Anthropic API does not support external image URLs")
 	@ParameterizedTest(name = "{0} : {displayName} ")
 	@ValueSource(strings = { "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307",
-			"claude-3-5-sonnet-20240620" })
+			"claude-3-5-sonnet-20241022" })
 	void multiModalityImageUrl(String modelName) throws IOException {

 		// TODO: add url method that wrapps the checked exception.

models/spring-ai-anthropic/src/test/resources/sample_events.json

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@
     "type": "message",
     "role": "assistant",
     "content": [],
-    "model": "claude-3-5-sonnet-20240620",
+    "model": "claude-3-5-sonnet-20241022",
     "stop_reason": null,
     "stop_sequence": null,
     "usage": {

models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java

Lines changed: 5 additions & 3 deletions
@@ -77,7 +77,7 @@ class OllamaWithOpenAiChatModelIT {
 	private static final String DEFAULT_OLLAMA_MODEL = "mistral";

 	@Container
-	static OllamaContainer ollamaContainer = new OllamaContainer("ollama/ollama:0.3.9");
+	static OllamaContainer ollamaContainer = new OllamaContainer("ollama/ollama:0.3.14");

 	static String baseUrl = "http://localhost:11434";

@@ -86,6 +86,7 @@ public static void beforeAll() throws IOException, InterruptedException {
 		logger.info("Start pulling the '" + DEFAULT_OLLAMA_MODEL + " ' generative ... would take several minutes ...");
 		ollamaContainer.execInContainer("ollama", "pull", DEFAULT_OLLAMA_MODEL);
 		ollamaContainer.execInContainer("ollama", "pull", "llava");
+		ollamaContainer.execInContainer("ollama", "pull", "llama3.2:1b");
 		logger.info(DEFAULT_OLLAMA_MODEL + " pulling competed!");

 		baseUrl = "http://" + ollamaContainer.getHost() + ":" + ollamaContainer.getMappedPort(11434);
@@ -260,8 +261,9 @@ void beanStreamOutputConverterRecords() {
 		assertThat(actorsFilms.movies()).hasSize(5);
 	}

-	@Test
-	void functionCallTest() {
+	@ParameterizedTest(name = "{0} : {displayName} ")
+	@ValueSource(strings = { "llama3.2:1b" })
+	void functionCallTest(String modelName) {

 		UserMessage userMessage = new UserMessage(
 				"What's the weather like in San Francisco, Tokyo, and Paris? Return the temperature in Celsius.");

spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/anthropic-chat.adoc

Lines changed: 2 additions & 2 deletions
@@ -102,7 +102,7 @@ The prefix `spring.ai.anthropic.chat` is the property prefix that lets you confi
 | Property | Description | Default

 | spring.ai.anthropic.chat.enabled | Enable Anthropic chat model. | true
-| spring.ai.anthropic.chat.options.model | This is the Anthropic Chat model to use. Supports: `claude-3-5-sonnet-20240620`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`, `claude-3-haiku-20240307` and the legacy `claude-2.1`, `claude-2.0` and `claude-instant-1.2` models. | `claude-3-opus-20240229`
+| spring.ai.anthropic.chat.options.model | This is the Anthropic Chat model to use. Supports: `claude-3-5-sonnet-20241022`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`, `claude-3-haiku-20240307` and the legacy `claude-2.1`, `claude-2.0` and `claude-instant-1.2` models. | `claude-3-opus-20240229`
 | spring.ai.anthropic.chat.options.temperature | The sampling temperature to use that controls the apparent creativity of generated completions. Higher values will make output more random while lower values will make results more focused and deterministic. It is not recommended to modify temperature and top_p for the same completions request as the interaction of these two settings is difficult to predict. | 0.8
 | spring.ai.anthropic.chat.options.max-tokens | The maximum number of tokens to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. | 500
 | spring.ai.anthropic.chat.options.stop-sequence | Custom text sequences that will cause the model to stop generating. Our models will normally stop when they have naturally completed their turn, which will result in a response stop_reason of "end_turn". If you want the model to stop generating when it encounters custom strings of text, you can use the stop_sequences parameter. If the model encounters one of the custom sequences, the response stop_reason value will be "stop_sequence" and the response stop_sequence value will contain the matched stop sequence. | -
@@ -195,7 +195,7 @@ Add a `application.properties` file, under the `src/main/resources` directory, t
 [source,application.properties]
 ----
 spring.ai.anthropic.api-key=YOUR_API_KEY
-spring.ai.anthropic.chat.options.model=claude-3-5-sonnet-20240620
+spring.ai.anthropic.chat.options.model=claude-3-5-sonnet-20241022
 spring.ai.anthropic.chat.options.temperature=0.7
 spring.ai.anthropic.chat.options.max-tokens=450
 ----
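To show where the bumped model id ends up at runtime, here is a short usage sketch. It assumes the Anthropic starter is on the classpath and the properties above are set, so Spring AI auto-configures a ChatClient.Builder that can be injected and used directly; the controller class, route, and prompt text are invented for this sketch, and the fluent ChatClient API is the one documented in the Spring AI chat client pages.

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
class ChatController {

    private final ChatClient chatClient;

    ChatController(ChatClient.Builder builder) {
        this.chatClient = builder.build();
    }

    @GetMapping("/ai/joke")
    String joke() {
        // Uses the model configured via spring.ai.anthropic.chat.options.model
        // (claude-3-5-sonnet-20241022 in the snippet above).
        return this.chatClient.prompt()
            .user("Tell me a joke")
            .call()
            .content();
    }

}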

spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/functions/anthropic-chat-functions.adoc

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ TIP: Starting of Jul 1st, 2024, streaming function calling and Tool use is suppo

 You can register custom Java functions with the `AnthropicChatModel` and have the Anthropic models intelligently choose to output a JSON object containing arguments to call one or many of the registered functions.
 This allows you to connect the LLM capabilities with external tools and APIs.
-The `claude-3-5-sonnet-20240620`, `claude-3-opus`, `claude-3-sonnet` and `claude-3-haiku` link:https://docs.anthropic.com/claude/docs/tool-use#tool-use-best-practices-and-limitations[models are trained to detect when a function should be called] and to respond with JSON that adheres to the function signature.
+The `claude-3-5-sonnet-20241022`, `claude-3-opus`, `claude-3-sonnet` and `claude-3-haiku` link:https://docs.anthropic.com/claude/docs/tool-use#tool-use-best-practices-and-limitations[models are trained to detect when a function should be called] and to respond with JSON that adheres to the function signature.

 The Anthropic API does not call the function directly; instead, the model generates JSON that you can use to call the function in your code and return the result back to the model to complete the conversation.

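As a companion to the documentation paragraph above, an illustrative sketch of what registering a custom Java function can look like: a plain java.util.function.Function exposed as a Spring bean, with @Description giving the model a hint about when to call it. The bean and record names are invented for this sketch; how the function name is then attached to a chat request is covered in the remainder of that documentation page.

import java.util.function.Function;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Description;

@Configuration
class WeatherFunctionConfiguration {

    record WeatherRequest(String location, String unit) {
    }

    record WeatherResponse(double temperature, String unit) {
    }

    @Bean
    @Description("Get the current weather for a location")
    Function<WeatherRequest, WeatherResponse> currentWeather() {
        // The model emits JSON matching WeatherRequest; the application runs this
        // function and returns the WeatherResponse to the model.
        return request -> new WeatherResponse(22.0, "celsius");
    }

}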
