@@ -20,7 +20,7 @@ type Response struct {
 	CreatedAt time.Time `json:"created_at"`
 	Done      bool      `json:"done"`
 	Reason    string    `json:"done_reason,omitempty"`
-	Response  *string   `json:"response"` // For completion
+	Response  *string   `json:"response,omitempty"` // For completion
 	Message   `json:"message"` // For chat
 	Metrics
 }
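The added `omitempty` makes a nil `Response` pointer disappear from the marshaled JSON instead of serializing as `"response": null`, which matters now that the same struct carries both completion and chat responses. A minimal standalone sketch of the effect, with the struct cut down to the one relevant field:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	type before struct {
		Response *string `json:"response"`
	}

	type after struct {
		Response *string `json:"response,omitempty"`
	}

	func main() {
		b, _ := json.Marshal(before{}) // nil pointer still emitted
		a, _ := json.Marshal(after{})  // nil pointer dropped
		fmt.Println(string(b))         // {"response":null}
		fmt.Println(string(a))         // {}
	}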
@@ -90,6 +90,7 @@ type reqCompletion struct {
 	Options map[string]any `json:"options,omitempty"`
 }
 
+// Create a completion from a prompt
 func (model *model) Completion(ctx context.Context, prompt string, opts ...llm.Opt) (llm.Completion, error) {
 	// Apply options - including prompt options
 	opt, err := llm.ApplyPromptOpts(opts...)
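For orientation, a caller-side sketch of the method above with a deadline attached. It assumes nothing beyond the `Completion` signature shown in this hunk; the interface literal stands in for the package's unexported model type, and the `go-llm` import path is an assumption from the repository name:

	package main

	import (
		"context"
		"time"

		// Import path assumed from the repository name.
		llm "github.com/mutablelogic/go-llm"
	)

	// completeWithTimeout issues a one-shot completion and gives up after
	// 30 seconds, using only the Completion method shown in the diff.
	func completeWithTimeout(parent context.Context, m interface {
		Completion(context.Context, string, ...llm.Opt) (llm.Completion, error)
	}, prompt string) (llm.Completion, error) {
		ctx, cancel := context.WithTimeout(parent, 30*time.Second)
		defer cancel()
		return m.Completion(ctx, prompt)
	}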
@@ -125,126 +126,86 @@ func (model *model) Completion(ctx context.Context, prompt string, opts ...llm.O
 	return model.request(ctx, req, opt.StreamFn(), client.OptPath("generate"))
 }
 
-func (model *model) request(ctx context.Context, req client.Payload, streamfn func(llm.Completion), opts ...client.RequestOpt) (*Response, error) {
-	var delta, response Response
-	if streamfn != nil {
-		opts = append(opts, client.OptJsonStreamCallback(func(v any) error {
-			if v, ok := v.(*Response); !ok || v == nil {
-				return llm.ErrConflict.Withf("Invalid stream response: %v", delta)
-			} else if err := streamEvent(&response, v); err != nil {
-				return err
-			}
-			if fn := streamfn; fn != nil {
-				fn(&response)
-			}
-			return nil
-		}))
-	}
-
-	// Response
-	if err := model.DoWithContext(ctx, req, &delta, opts...); err != nil {
-		return nil, err
-	}
-
-	// Return success
-	if streamfn != nil {
-		return &response, nil
-	} else if delta.Response != nil {
-		delta.Message = Message{
-			RoleContent: RoleContent{
-				Role:    "user",
-				Content: *delta.Response,
-			},
-		}
-		return &delta, nil
-	} else {
-		return nil, llm.ErrInternalServerError.Withf("No response")
-	}
-}
-
-// Create a completion from a chat session
-func (model *model) Chat(context.Context, []llm.Completion, ...llm.Opt) (llm.Completion, error) {
-	return nil, llm.ErrNotImplemented
-}
-
-/*
 type reqChat struct {
-	Model     string                 `json:"model"`
-	Messages  []*Message             `json:"messages"`
-	Tools     []llm.Tool             `json:"tools,omitempty"`
-	Format    string                 `json:"format,omitempty"`
-	Options   map[string]interface{} `json:"options,omitempty"`
-	Stream    bool                   `json:"stream"`
-	KeepAlive *time.Duration         `json:"keep_alive,omitempty"`
+	Model     string           `json:"model"`
+	Messages  []llm.Completion `json:"messages"`
+	Tools     []llm.Tool       `json:"tools,omitempty"`
+	Format    string           `json:"format,omitempty"`
+	Options   map[string]any   `json:"options,omitempty"`
+	Stream    *bool            `json:"stream"`
+	KeepAlive *time.Duration   `json:"keep_alive,omitempty"`
 }
 
-func (ollama *Client) Chat(ctx context.Context, context llm.Context, opts ...llm.Opt) (*Response, error) {
+// Create a completion from a chat session
+func (model *model) Chat(ctx context.Context, completions []llm.Completion, opts ...llm.Opt) (llm.Completion, error) {
 	// Apply options
 	opt, err := llm.ApplyOpts(opts...)
 	if err != nil {
 		return nil, err
 	}
 
-	// Append the system prompt at the beginning
-	messages := make([]*Message, 0, len(context.(*session).seq)+1)
+	// Create the completions including the system prompt
+	messages := make([]llm.Completion, 0, len(completions)+1)
 	if system := opt.SystemPrompt(); system != "" {
-		messages = append(messages, systemPrompt(system))
-	}
-
-	// Always append the first message of each completion
-	for _, message := range context.(*session).seq {
-		messages = append(messages, message)
+		messages = append(messages, messagefactory{}.SystemPrompt(system))
 	}
+	messages = append(messages, completions...)
 
 	// Request
 	req, err := client.NewJSONRequest(reqChat{
-		Model:     context.(*session).model.Name(),
+		Model:     model.Name(),
 		Messages:  messages,
-		Tools:     optTools(ollama, opt),
+		Tools:     optTools(model.Client, opt),
 		Format:    optFormat(opt),
 		Options:   optOptions(opt),
-		Stream:    optStream(ollama, opt),
+		Stream:    optStream(model.Client, opt),
 		KeepAlive: optKeepAlive(opt),
 	})
 	if err != nil {
 		return nil, err
 	}
 
-	// Response
-	var response, delta Response
-	reqopts := []client.RequestOpt{
-		client.OptPath("chat"),
-	}
-	if optStream(ollama, opt) {
-		reqopts = append(reqopts, client.OptJsonStreamCallback(func(v any) error {
+	// Make the request
+	return model.request(ctx, req, opt.StreamFn(), client.OptPath("chat"))
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// PRIVATE METHODS
+
+func (model *model) request(ctx context.Context, req client.Payload, streamfn func(llm.Completion), opts ...client.RequestOpt) (*Response, error) {
+	var delta, response Response
+	if streamfn != nil {
+		opts = append(opts, client.OptJsonStreamCallback(func(v any) error {
 			if v, ok := v.(*Response); !ok || v == nil {
-				return llm.ErrConflict.Withf("Invalid stream response: %v", v)
+				return llm.ErrConflict.Withf("Invalid stream response: %v", delta)
 			} else if err := streamEvent(&response, v); err != nil {
 				return err
 			}
-			if fn := opt.StreamFn(); fn != nil {
+			if fn := streamfn; fn != nil {
 				fn(&response)
 			}
 			return nil
 		}))
 	}
 
 	// Response
-	if err := ollama.DoWithContext(ctx, req, &delta, reqopts...); err != nil {
+	if err := model.DoWithContext(ctx, req, &delta, opts...); err != nil {
 		return nil, err
 	}
 
 	// Return success
-	if optStream(ollama, opt) {
+	if streamfn != nil {
 		return &response, nil
-	} else {
-		return &delta, nil
+	} else if delta.Response != nil {
+		delta.Message = Message{
+			RoleContent: RoleContent{
+				Role:    "user",
+				Content: *delta.Response,
+			},
+		}
 	}
+	return &delta, nil
 }
-*/
-
-///////////////////////////////////////////////////////////////////////////////
-// PRIVATE METHODS
 
 func streamEvent(response, delta *Response) error {
 	// Completion instead of chat
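Taken together, `Completion` and `Chat` now share the private `request` helper, and `Chat` accepts the conversation as a plain `[]llm.Completion` slice rather than a session type. A caller-side sketch of one chat turn under those signatures, with imports as in the previous sketch; only the `Chat` method shown in the diff is assumed, and how the caller builds its `llm.Completion` history values is left open:

	// chatTurn sends the accumulated history to any value exposing the
	// Chat method from the diff and returns the history with the model's
	// reply appended, ready for the next turn.
	func chatTurn(ctx context.Context, m interface {
		Chat(context.Context, []llm.Completion, ...llm.Opt) (llm.Completion, error)
	}, history []llm.Completion) ([]llm.Completion, error) {
		reply, err := m.Chat(ctx, history)
		if err != nil {
			return nil, err
		}
		return append(history, reply), nil
	}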