@@ -83,22 +83,24 @@ VALIDATE-COMMAND, and GROUNDING-SEARCH handler."
     (:key . chatgpt-shell-google-key)
     (:validate-command . chatgpt-shell-google--validate-command)))
 
-(defun chatgpt-shell--google-current-generative-model-p (model)
-  "a predicate that looks at a model description returned from Google and
-returns non-nil if the model is current and supports \"generateContent\".
+(defun chatgpt-shell-google--current-generative-model-p (api-response)
+  "This is a predicate that looks at a model description within
+API-RESPONSE.
+
+It returns non-nil if the model described in API-RESPONSE is current and
+supports \"generateContent\".
+
 This is used to filter the list of models returned from
 https://generativelanguage.googleapis.com"
-  (let ((description (gethash "description" model))
-        (supported-methods
-         (gethash "supportedGenerationMethods" model)))
-    (and
-     (not (string-match-p (rx (or "discontinued" "deprecated")) description))
-     (seq-contains-p supported-methods "generateContent"))))
-
-(defun chatgpt-shell--google-get-generative-models ()
-  "Retrieves the list of Generative models from
-generativelanguage.googleapis.com"
-  (let ((url (concat chatgpt-shell-google-api-url-base "/v1beta/models?key=" (chatgpt-shell-google-key))))
+  (when-let* ((description (gethash "description" api-response))
+              ((not (string-match-p (rx (or "discontinued" "deprecated")) description)))
+              (supported-methods (gethash "supportedGenerationMethods" api-response)))
+    (seq-contains-p supported-methods "generateContent")))
+
+(defun chatgpt-shell-google--fetch-model-versions ()
+  "Retrieves the list of generative models from the Google API."
+  (let ((url (concat chatgpt-shell-google-api-url-base "/v1beta/models?key="
+                     (chatgpt-shell-google-key))))
   (with-current-buffer (url-retrieve-synchronously url)
     (goto-char (if (boundp 'url-http-end-of-headers)
                    url-http-end-of-headers
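[Editorial sketch, not part of the diff: how the renamed predicate behaves, assuming chatgpt-shell-google.el from this commit is loaded. The model entry is hand-built as a hash table, mirroring the fields the gethash calls above read; its values are invented for illustration.]

;; Hand-built stand-in for one entry of the "models" array, shaped the
;; way the predicate's gethash calls expect.
(let ((entry (make-hash-table :test #'equal)))
  (puthash "description" "Mid-size multimodal model." entry)
  (puthash "supportedGenerationMethods" ["generateContent" "countTokens"] entry)
  ;; => non-nil: not deprecated, and "generateContent" is supported.
  (chatgpt-shell-google--current-generative-model-p entry)
  (puthash "description" "This model was deprecated." entry)
  ;; => nil: the (not (string-match-p ...)) clause fails, so when-let* stops.
  (chatgpt-shell-google--current-generative-model-p entry))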
@@ -109,12 +111,12 @@ generativelanguage.googleapis.com"
       (let ((parsed-response
              (json-read-from-string
               (buffer-substring-no-properties (point) (point-max)))))
-        (seq-filter #'chatgpt-shell--google-current-generative-model-p
+        (seq-filter #'chatgpt-shell-google--current-generative-model-p
                     (gethash "models" parsed-response)))))))
 
-(defun chatgpt-shell--google-convert-model (model)
-  "converts between the model returned by Gemini, and
-the model description needed by chatgpt-shell."
+(defun chatgpt-shell-google--convert-model (api-response)
+  "Convert the API-RESPONSE returned by Gemini into
+the model description needed by `chatgpt-shell'."
   (let ((model-name (gethash "name" model))
         (model-cwindow (gethash "inputTokenLimit" model)))
     (let ((model-version (string-remove-prefix "models/" model-name)))
@@ -133,13 +135,17 @@ the model description needed by chatgpt-shell."
         :context-window model-cwindow)))))
 
 (cl-defun chatgpt-shell-google-load-models (&key override)
-  "Query Google for the list of Gemini LLM models available (see
-https://ai.google.dev/gemini-api/docs/models/gemini) and add them to
-`chatgpt-shell-models' unless a model with the same name is already
-present. By default, replace the existing Google models in
-`chatgpt-shell-models' with the newly retrieved models. When OVERRIDE is
-non-nil (interactively with a prefix argument), replace all the Google
-models with those retrieved."
+  "Query Google for the list of Gemini LLM models available.
+
+The data is retrieved from
+https://ai.google.dev/gemini-api/docs/models/gemini. This function then
+adds the retrieved models to `chatgpt-shell-models' unless a model with
+the same name is already present.
+
+By default, replace the existing Google models in `chatgpt-shell-models'
+with the newly retrieved models. When OVERRIDE is non-nil (interactively
+with a prefix argument), replace all the Google models with those
+retrieved."
   (interactive (list :override current-prefix-arg))
   (let* ((goog-predicate (lambda (model)
                            (string= (map-elt model :provider) "Google")))
@@ -152,7 +158,7 @@ models with those retrieved."
              (cl-remove-if-not goog-predicate
                                chatgpt-shell-models)))
          (new-gemini-models
-          (mapcar #'chatgpt-shell--google-convert-model (chatgpt-shell--google-get-generative-models))))
+          (mapcar #'chatgpt-shell-google--convert-model (chatgpt-shell-google--fetch-model-versions))))
     (setq chatgpt-shell-models
           (append (seq-take chatgpt-shell-models goog-index)
                   new-gemini-models
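[Editorial sketch, not part of the diff: the seq-take/append splice above replaces the contiguous block of Google entries inside chatgpt-shell-models. Below is a self-contained toy version of the same idea, using placeholder symbols instead of real model alists; the seq-drop tail is an assumption based on the (length existing-gemini-models) fragment visible in the next hunk.]

;; Toy illustration of splicing a replacement block into a list.
(require 'seq)
(let* ((models '(openai-1 google-a google-b anthropic-1))
       (goog-index 1)                    ; position of the first Google entry
       (existing '(google-a google-b))   ; Google entries currently present
       (new '(google-x google-y)))       ; freshly retrieved Google models
  (append (seq-take models goog-index)   ; everything before the Google block
          new
          (seq-drop models (+ goog-index (length existing)))))
;; => (openai-1 google-x google-y anthropic-1)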
@@ -162,30 +168,43 @@ models with those retrieved."
                   (length existing-gemini-models)))))
 
 (defun chatgpt-shell-google-toggle-grounding ()
-  "Toggles the :grounding for the currently-selected model. Google's
-documentation states that All Gemini 1.5 and 2.0 models support
-grounding, some of the experimental or short-lived models do not. If
-chatgpt-shell tries to use a model that does nto support grounding, the
-API returns an error. In that case, the user can toggle grounding on the
-model, using this function."
+  "Toggle the `:grounding-search' boolean for the currently-selected model.
+
+Google's documentation states that all Gemini 1.5 and 2.0 models support
+grounding, and `:grounding-search' will be `t' for those models. For
+models that support grounding, this package will include a
+
+  (tools . ((google_search . ())))
+
+in the request payload for 2.0+ models, or
+
+  (tools . ((google_search_retrieval . ())))
+
+for 1.5-era models.
+
+But some of the experimental models may not support grounding. If
+`chatgpt-shell' tries to send a tools parameter as above to a model that
+does not support grounding, the API returns an error. In that case, the
+user can use this function to toggle grounding on the model, so that
+this package does not send the tools parameter in subsequent outbound
+requests to that model.
+
+Returns the newly toggled value of `:grounding-search'."
   (interactive)
-  (let ((current-model (chatgpt-shell--resolved-model)))
-    (when (and current-model
-               (string= (map-elt current-model :provider) "Google"))
-      (let ((current-grounding-cons
-             (assq :grounding-search current-model)))
-        (when current-grounding
-          (setf (cdr current-grounding-cons) (not (cdr current-grounding-cons))))))))
-
-(defun chatgpt-shell-google-get-grounding-tool-keyword ()
-  "retrieves the keyword for the grounding tool. This gets set
-once for each model, based on a heuristic."
-  (let ((current-model (chatgpt-shell--resolved-model)))
-    (when (and current-model
-               (string= (map-elt current-model :provider) "Google"))
-      (save-match-data
-        (let ((version (map-elt current-model :version)))
-          (if (string-match "1\\.5" version) "google_search_retrieval" "google_search"))))))
+  (when-let* ((current-model (chatgpt-shell--resolved-model))
+              (is-google (string= (map-elt current-model :provider) "Google"))
+              (current-grounding-cons (assq :grounding-search current-model)))
+    (setf (cdr current-grounding-cons) (not (cdr current-grounding-cons)))))
+
+(defun chatgpt-shell-google--get-grounding-tool-keyword (model)
+  "Retrieves the keyword for the grounding tool.
+
+This gets set once for each model, based on a heuristic."
+  (when-let* ((current-model model)
+              (is-google (string= (map-elt current-model :provider) "Google"))
+              (version (map-elt current-model :version)))
+    (save-match-data
+      (if (string-match "1\\.5" version) "google_search_retrieval" "google_search"))))
 
 (defun chatgpt-shell-google-models ()
   "Build a list of Google LLM models available."
@@ -295,7 +314,7 @@
       (when (map-elt model :grounding-search)
        ;; Google's docs say that grounding is supported for all Gemini 1.5 and 2.0 models.
        ;; But the API is slightly different between them. This uses the correct tool name.
-        `((tools . ((,(intern (chatgpt-shell-google-get-grounding-tool-keyword)) . ())))))
+        `((tools . ((,(intern (chatgpt-shell-google--get-grounding-tool-keyword model)) . ())))))
      `((generation_config . ((temperature . ,(or (map-elt settings :temperature) 1))
                              ;; 1 is most diverse output.
                              (topP . 1))))))
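[Editorial sketch, not part of the diff: how the :grounding-search flag and the new keyword helper combine into the tools entry of the payload, assuming this file is loaded. The model alist below is invented for illustration and carries only the keys the helper reads.]

(require 'map)
(let ((model '((:provider . "Google")
               (:version . "gemini-1.5-pro")
               (:grounding-search . t))))
  (when (map-elt model :grounding-search)
    ;; A 1.5-era version string selects google_search_retrieval;
    ;; anything else gets google_search.
    `((tools . ((,(intern (chatgpt-shell-google--get-grounding-tool-keyword model)) . ()))))))
;; => an alist whose tools entry names google_search_retrieval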
@@ -338,58 +357,58 @@ For example:
                .choices "")))))
       response
     (if-let ((chunks (shell-maker--split-text raw-response)))
-      (let ((response)
-            (pending)
-            (result))
-        (mapc (lambda (chunk)
-                ;; Response chunks come in the form:
-                ;;   data: {...}
-                ;;   data: {...}
-                (if-let* ((is-data (equal (map-elt chunk :key) "data:"))
-                          (obj (shell-maker--json-parse-string (map-elt chunk :value)))
-                          (text (let-alist obj
-                                  (or (let-alist (seq-first .candidates)
-                                        (cond ((seq-first .content.parts)
-                                               (let-alist (seq-first .content.parts)
-                                                 .text))
-                                              ((equal .finishReason "RECITATION")
-                                               "")
-                                              ((equal .finishReason "STOP")
-                                               "")
-                                              ((equal .finishReason "CANCELLED")
-                                               "Error: Request cancellled.")
-                                              ((equal .finishReason "CRASHED")
-                                               "Error: An error occurred. Try again.")
-                                              ((equal .finishReason "END_OF_PROMPT")
-                                               "Error: Couldn't generate a response. Try rephrasing.")
-                                              ((equal .finishReason "LENGTH")
-                                               "Error: Response is too big. Try rephrasing.")
-                                              ((equal .finishReason "TIME")
-                                               "Error: Timed out.")
-                                              ((equal .finishReason "SAFETY")
-                                               "Error: Flagged for safety.")
-                                              ((equal .finishReason "LANGUAGE")
-                                               "Error: Flagged for language.")
-                                              ((equal .finishReason "BLOCKLIST")
-                                               "Error: Flagged for forbidden terms.")
-                                              ((equal .finishReason "PROHIBITED_CONTENT")
-                                               "Error: Flagged for prohibited content.")
-                                              ((equal .finishReason "SPII")
-                                               "Error: Flagged for sensitive personally identifiable information.")
-                                              (.finishReason
-                                               (format "\n\nError: Something's up (%s)" .finishReason))))
-                                      .error.message))))
-                    (unless (string-empty-p text)
-                      (setq response (concat response text)))
-                  (setq pending (concat pending
-                                        (or (map-elt chunk :key) "")
-                                        (map-elt chunk :value)))))
-              chunks)
-        (setq result
-              (list (cons :filtered (unless (string-empty-p response)
-                                      response))
-                    (cons :pending pending)))
-        result)
+      (let ((response)
+            (pending)
+            (result))
+        (mapc (lambda (chunk)
+                ;; Response chunks come in the form:
+                ;;   data: {...}
+                ;;   data: {...}
+                (if-let* ((is-data (equal (map-elt chunk :key) "data:"))
+                          (obj (shell-maker--json-parse-string (map-elt chunk :value)))
+                          (text (let-alist obj
+                                  (or (let-alist (seq-first .candidates)
+                                        (cond ((seq-first .content.parts)
+                                               (let-alist (seq-first .content.parts)
+                                                 .text))
+                                              ((equal .finishReason "RECITATION")
+                                               "")
+                                              ((equal .finishReason "STOP")
+                                               "")
+                                              ((equal .finishReason "CANCELLED")
+                                               "Error: Request cancellled.")
+                                              ((equal .finishReason "CRASHED")
+                                               "Error: An error occurred. Try again.")
+                                              ((equal .finishReason "END_OF_PROMPT")
+                                               "Error: Couldn't generate a response. Try rephrasing.")
+                                              ((equal .finishReason "LENGTH")
+                                               "Error: Response is too big. Try rephrasing.")
+                                              ((equal .finishReason "TIME")
+                                               "Error: Timed out.")
+                                              ((equal .finishReason "SAFETY")
+                                               "Error: Flagged for safety.")
+                                              ((equal .finishReason "LANGUAGE")
+                                               "Error: Flagged for language.")
+                                              ((equal .finishReason "BLOCKLIST")
+                                               "Error: Flagged for forbidden terms.")
+                                              ((equal .finishReason "PROHIBITED_CONTENT")
+                                               "Error: Flagged for prohibited content.")
+                                              ((equal .finishReason "SPII")
+                                               "Error: Flagged for sensitive personally identifiable information.")
+                                              (.finishReason
+                                               (format "\n\nError: Something's up (%s)" .finishReason))))
+                                      .error.message))))
+                    (unless (string-empty-p text)
+                      (setq response (concat response text)))
+                  (setq pending (concat pending
+                                        (or (map-elt chunk :key) "")
+                                        (map-elt chunk :value)))))
+              chunks)
+        (setq result
+              (list (cons :filtered (unless (string-empty-p response)
+                                      response))
+                    (cons :pending pending)))
+        result)
       (list (cons :filtered nil)
             (cons :pending raw-response)))))
 
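[Editorial usage note, not part of the commit: a typical flow once this change is applied, assuming chatgpt-shell is installed and chatgpt-shell-google-key is already configured.]

;; Refresh the Gemini model list, replacing the Google entries already
;; present in `chatgpt-shell-models'.
(require 'chatgpt-shell)
(chatgpt-shell-google-load-models :override t)

;; If the selected Gemini model rejects the grounding tool, stop sending it:
;;   M-x chatgpt-shell-google-toggle-grounding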