1
1
/*
 * Copyright 2023-2024 the original author or authors.
3
3
*
4
4
* Licensed under the Apache License, Version 2.0 (the "License");
5
5
* you may not use this file except in compliance with the License.
16
16
17
17
package org .springframework .ai .ollama ;
18
18
19
- import java .util .HashMap ;
20
19
import java .util .List ;
21
- import java .util .Map ;
22
- import java .util .stream .Collectors ;
23
20
24
- import com .fasterxml .jackson .core .JsonProcessingException ;
25
- import com .fasterxml .jackson .core .type .TypeReference ;
26
- import com .fasterxml .jackson .databind .ObjectMapper ;
27
- import org .springframework .ai .chat .metadata .ChatGenerationMetadata ;
28
21
import reactor .core .publisher .Flux ;
29
22
30
23
import org .springframework .ai .chat .ChatClient ;
24
+ import org .springframework .ai .chat .ChatOptions ;
31
25
import org .springframework .ai .chat .ChatResponse ;
32
26
import org .springframework .ai .chat .Generation ;
33
27
import org .springframework .ai .chat .StreamingChatClient ;
28
+ import org .springframework .ai .chat .messages .Message ;
29
+ import org .springframework .ai .chat .messages .MessageType ;
30
+ import org .springframework .ai .chat .metadata .ChatGenerationMetadata ;
34
31
import org .springframework .ai .chat .metadata .Usage ;
32
+ import org .springframework .ai .chat .prompt .Prompt ;
33
+ import org .springframework .ai .model .ModelOptionsUtils ;
35
34
import org .springframework .ai .ollama .api .OllamaApi ;
36
- import org .springframework .ai .ollama .api .OllamaOptions ;
37
- import org .springframework .ai .ollama .api .OllamaApi .ChatRequest ;
38
35
import org .springframework .ai .ollama .api .OllamaApi .Message .Role ;
39
-
40
- import org .springframework .ai .chat .prompt .Prompt ;
41
- import org .springframework .ai .chat .messages .Message ;
42
- import org .springframework .ai .chat .messages .MessageType ;
36
+ import org .springframework .ai .ollama .api .OllamaOptions ;
37
+ import org .springframework .util .StringUtils ;
43
38
44
39
/**
 * {@link ChatClient} implementation for {@literal Ollama}.
46
41
*
47
42
* Ollama allows developers to run large language models and generate embeddings locally.
48
43
* It supports open-source models available on [Ollama AI
57
52
*/
58
53
public class OllamaChatClient implements ChatClient , StreamingChatClient {
59
54
55
+ /**
56
+ * Low-level Ollama API library.
57
+ */
60
58
private final OllamaApi chatApi ;
61
59
62
- private String model = "orca-mini" ;
63
-
64
- private Map <String , Object > clientOptions ;
65
-
66
- private final static ObjectMapper OBJECT_MAPPER = new ObjectMapper ();
60
+ /**
61
+ * Default options to be used for all chat requests.
62
+ */
63
+ private OllamaOptions defaultOptions = OllamaOptions .create ().withModel (OllamaOptions .DEFAULT_MODEL );
67
64
68
65
public OllamaChatClient (OllamaApi chatApi ) {
69
66
this .chatApi = chatApi ;
70
67
}
71
68
69
+ /**
70
+ * @deprecated Use {@link OllamaOptions#setModel} instead.
71
+ */
72
+ @ Deprecated
72
73
public OllamaChatClient withModel (String model ) {
73
- this .model = model ;
74
+ this .defaultOptions . setModel ( model ) ;
74
75
return this ;
75
76
}
76
77
77
- public OllamaChatClient withOptions (Map <String , Object > options ) {
78
- this .clientOptions = options ;
79
- return this ;
80
- }
81
-
82
- public OllamaChatClient withOptions (OllamaOptions options ) {
83
- this .clientOptions = options .toMap ();
78
+ public OllamaChatClient withDefaultOptions (OllamaOptions options ) {
79
+ this .defaultOptions = options ;
84
80
return this ;
85
81
}
86
82
87
83
@ Override
88
84
public ChatResponse call (Prompt prompt ) {
89
85
90
- OllamaApi .ChatResponse response = this .chatApi .chat (request (prompt , this . model , false ));
86
+ OllamaApi .ChatResponse response = this .chatApi .chat (ollamaChatRequest (prompt , false ));
91
87
var generator = new Generation (response .message ().content ());
92
88
if (response .promptEvalCount () != null && response .evalCount () != null ) {
93
89
generator = generator
@@ -99,7 +95,7 @@ public ChatResponse call(Prompt prompt) {
99
95
@ Override
100
96
public Flux <ChatResponse > stream (Prompt prompt ) {
101
97
102
- Flux <OllamaApi .ChatResponse > response = this .chatApi .streamingChat (request (prompt , this . model , true ));
98
+ Flux <OllamaApi .ChatResponse > response = this .chatApi .streamingChat (ollamaChatRequest (prompt , true ));
103
99
104
100
return response .map (chunk -> {
105
101
Generation generation = (chunk .message () != null ) ? new Generation (chunk .message ().content ())
@@ -127,7 +123,10 @@ public Long getGenerationTokens() {
127
123
};
128
124
}
129
125
130
- private OllamaApi .ChatRequest request (Prompt prompt , String model , boolean stream ) {
126
+ /**
127
+ * Package access for testing.
128
+ */
129
+ OllamaApi .ChatRequest ollamaChatRequest (Prompt prompt , boolean stream ) {
131
130
132
131
List <OllamaApi .Message > ollamaMessages = prompt .getInstructions ()
133
132
.stream ()
@@ -138,49 +137,31 @@ private OllamaApi.ChatRequest request(Prompt prompt, String model, boolean strea
138
137
.toList ();
139
138
140
139
// runtime options
141
- Map <String , Object > clientOptionsToUse = merge (prompt .getOptions (), this .clientOptions , HashMap .class );
142
-
143
- return ChatRequest .builder (model )
144
- .withStream (stream )
145
- .withMessages (ollamaMessages )
146
- .withOptions (clientOptionsToUse )
147
- .build ();
148
- }
149
-
150
- public static Map <String , Object > objectToMap (Object source ) {
151
- try {
152
- String json = OBJECT_MAPPER .writeValueAsString (source );
153
- return OBJECT_MAPPER .readValue (json , new TypeReference <Map <String , Object >>() {
154
- });
155
- }
156
- catch (JsonProcessingException e ) {
157
- throw new RuntimeException (e );
140
+ OllamaOptions runtimeOptions = null ;
141
+ if (prompt .getOptions () != null ) {
142
+ if (prompt .getOptions () instanceof ChatOptions runtimeChatOptions ) {
143
+ runtimeOptions = ModelOptionsUtils .copyToTarget (runtimeChatOptions , ChatOptions .class ,
144
+ OllamaOptions .class );
145
+ }
146
+ else {
147
+ throw new IllegalArgumentException ("Prompt options are not of type ChatOptions: "
148
+ + prompt .getOptions ().getClass ().getSimpleName ());
149
+ }
158
150
}
159
- }
160
151
161
- public static <T > T mapToClass (Map <String , Object > source , Class <T > clazz ) {
162
- try {
163
- String json = OBJECT_MAPPER .writeValueAsString (source );
164
- return OBJECT_MAPPER .readValue (json , clazz );
165
- }
166
- catch (JsonProcessingException e ) {
167
- throw new RuntimeException (e );
168
- }
169
- }
152
+ OllamaOptions mergedOptions = ModelOptionsUtils .merge (runtimeOptions , this .defaultOptions , OllamaOptions .class );
170
153
171
- public static < T > T merge ( Object source , Object target , Class < T > clazz ) {
172
- if (source == null ) {
173
- source = Map . of ( );
154
+ // Override the model.
155
+ if (! StringUtils . hasText ( mergedOptions . getModel ()) ) {
156
+ throw new IllegalArgumentException ( "Model is not set!" );
174
157
}
175
- Map <String , Object > sourceMap = objectToMap (source );
176
- Map <String , Object > targetMap = objectToMap (target );
177
-
178
- targetMap .putAll (sourceMap .entrySet ()
179
- .stream ()
180
- .filter (e -> e .getValue () != null )
181
- .collect (Collectors .toMap (e -> e .getKey (), e -> e .getValue ())));
182
158
183
- return mapToClass (targetMap , clazz );
159
+ String model = mergedOptions .getModel ();
160
+ return OllamaApi .ChatRequest .builder (model )
161
+ .withStream (stream )
162
+ .withMessages (ollamaMessages )
163
+ .withOptions (mergedOptions )
164
+ .build ();
184
165
}
185
166
186
167
private OllamaApi .Message .Role toRole (Message message ) {
0 commit comments