@@ -86,44 +86,43 @@ VALIDATE-COMMAND, and GROUNDING-SEARCH handler."
    (:validate-command . chatgpt-shell-google--validate-command)))

(defun chatgpt-shell-google--current-generative-model-p (api-response)
-  "This is a predicate that looks at a model description within
-API-RESPONSE.
+  "Determine if model in API-RESPONSE is generative.

It returns non-nil if the model described in API-RESPONSE is current and
supports \"generateContent\".

This is used to filter the list of models returned from
https://generativelanguage.googleapis.com"
  (let-alist api-response
-    (if (not (string-match-p (rx (or "discontinued" "deprecated")) .description))
-        (seq-contains-p .supportedGenerationMethods "generateContent"))))
+    (unless (string-match-p (rx (or "discontinued" "deprecated")) .description)
+      (seq-contains-p .supportedGenerationMethods "generateContent"))))

(defun chatgpt-shell-google--fetch-model-versions ()
  "Retrieves the list of generative models from the Google API."
-  (if-let* ((api-key (chatgpt-shell-google-key)))
-      (let ((url (concat chatgpt-shell-google-api-url-base "/v1beta/models?key=" api-key)))
-        (with-current-buffer (url-retrieve-synchronously url)
-          (goto-char (if (boundp 'url-http-end-of-headers)
-                         url-http-end-of-headers
-                       (error "`url-http-end-of-headers' marker is not defined")))
-          (if-let* ((parsed-response
-                     (shell-maker--json-parse-string
-                      (buffer-substring-no-properties (point) (point-max)))))
-              (let-alist parsed-response
-                (seq-filter #'chatgpt-shell-google--current-generative-model-p .models)))))
-    (error "Set your Google API Key.")))
+  (unless (chatgpt-shell-google-key)
+    (user-error "Please set your `chatgpt-shell-google-key'"))
+  (with-current-buffer (url-retrieve-synchronously
+                        (concat chatgpt-shell-google-api-url-base "/v1beta/models?key="
+                                (chatgpt-shell-google-key)))
+    (goto-char (if (boundp 'url-http-end-of-headers)
+                   url-http-end-of-headers
+                 (error "`url-http-end-of-headers' marker is not defined")))
+    (if-let* ((parsed-response
+               (shell-maker--json-parse-string
+                (buffer-substring-no-properties (point) (point-max)))))
+        (let-alist parsed-response
+          (seq-filter #'chatgpt-shell-google--current-generative-model-p .models))
+      (error "No response from Google"))))

(defun chatgpt-shell-google--parse-model (api-response)
-  "Convert the API-RESPONSE returned by Gemini into a
-the model description needed by `chatgpt-shell'."
+  "Parse Google API-RESPONSE and return a `chatgpt-shell' model."
  (let-alist api-response
    (let* ((model-version (string-remove-prefix "models/" .name))
           (model-shortversion (string-remove-prefix "gemini-" model-version))
           (model-urlpath (concat "/v1beta/" .name))
           ;; The api-response descriptor does not stipulate whether grounding is supported.
           ;; This logic applies a heuristic based on the model name (aka version).
-           (model-supports-grounding
-            (if (string-match-p (rx bol (or "gemini-1.5" "gemini-2.0")) model-version) t nil)))
+           (model-supports-grounding (string-match-p (rx bol (or "gemini-1.5" "gemini-2.0")) model-version)))
      (chatgpt-shell-google-make-model :version model-version
                                       :short-version model-shortversion
                                       :grounding-search model-supports-grounding
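A quick illustration of the alist shape the rewritten predicate expects; the entry below is made up, but the field names are the ones the code destructures:

    (chatgpt-shell-google--current-generative-model-p
     '((name . "models/gemini-2.0-flash")
       (description . "Fast multimodal model.")
       (supportedGenerationMethods . ["generateContent" "countTokens"])))
    ;; => non-nil: the description mentions neither "discontinued" nor
    ;;    "deprecated", and "generateContent" is a supported method.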
@@ -135,7 +134,7 @@ the model description needed by `chatgpt-shell'."
  "Query Google for the list of Gemini LLM models available.

By default, this package uses a static list of models as returned from
-`chatgpt-shell-google-models'. But some users may want to choose from
+`chatgpt-shell-google-models'.  But some users may want to choose from
a fresher set of available models.

This function retrieves data from
@@ -172,8 +171,8 @@ argument, replace all the Google models with those retrieved."
  "Toggle the `:grounding-search' boolean for the currently-selected model.

Google's documentation states that All Gemini 1.5 and 2.0 models support
-grounding with Google search, and `:grounding-search' will be `t' for
-those models. For models that support grounding, this package will
+grounding with Google search, and `:grounding-search' will be t for
+those models.  For models that support grounding, this package will
include a

  (tools .((google_search . ())))
@@ -192,7 +191,7 @@ And in some cases users may wish to not _use_ grounding in Search, even
though it is available.

In either case, the user can invoke this function to toggle
-grounding-in-google-search on the model. This package will send the
+grounding-in-google-search on the model.  This package will send the
tools parameter in subsequent outbound requests to that model, when
grounding is enabled.

@@ -209,11 +208,13 @@ Returns the new boolean value of `:grounding-search'."
(defun chatgpt-shell-google--get-grounding-in-search-tool-keyword (model)
  "Retrieves the keyword for the grounding tool.

-This gets set once for each model, based on a heuristic."
+This gets set once for each MODEL, based on a heuristic."
  (when-let* ((current-model model)
              (is-google (string= (map-elt current-model :provider) "Google"))
              (version (map-elt current-model :version)))
-    (if (string-match "1\\.5" version) "google_search_retrieval" "google_search")))
+    (if (string-match "1\\.5" version)
+        "google_search_retrieval"
+      "google_search")))

(defun chatgpt-shell-google-models ()
  "Build a list of Google LLM models available."
@@ -366,58 +367,58 @@ For example:
                                       .choices "")))))
      response
    (if-let ((chunks (shell-maker--split-text raw-response)))
-        (let ((response)
-              (pending)
-              (result))
-          (mapc (lambda (chunk)
-                  ;; Response chunks come in the form:
-                  ;;   data: {...}
-                  ;;   data: {...}
-                  (if-let* ((is-data (equal (map-elt chunk :key) "data:"))
-                            (obj (shell-maker--json-parse-string (map-elt chunk :value)))
-                            (text (let-alist obj
-                                    (or (let-alist (seq-first .candidates)
-                                          (cond ((seq-first .content.parts)
-                                                 (let-alist (seq-first .content.parts)
-                                                   .text))
-                                                ((equal .finishReason "RECITATION")
-                                                 "")
-                                                ((equal .finishReason "STOP")
-                                                 "")
-                                                ((equal .finishReason "CANCELLED")
-                                                 "Error: Request cancellled.")
-                                                ((equal .finishReason "CRASHED")
-                                                 "Error: An error occurred. Try again.")
-                                                ((equal .finishReason "END_OF_PROMPT")
-                                                 "Error: Couldn't generate a response. Try rephrasing.")
-                                                ((equal .finishReason "LENGTH")
-                                                 "Error: Response is too big. Try rephrasing.")
-                                                ((equal .finishReason "TIME")
-                                                 "Error: Timed out.")
-                                                ((equal .finishReason "SAFETY")
-                                                 "Error: Flagged for safety.")
-                                                ((equal .finishReason "LANGUAGE")
-                                                 "Error: Flagged for language.")
-                                                ((equal .finishReason "BLOCKLIST")
-                                                 "Error: Flagged for forbidden terms.")
-                                                ((equal .finishReason "PROHIBITED_CONTENT")
-                                                 "Error: Flagged for prohibited content.")
-                                                ((equal .finishReason "SPII")
-                                                 "Error: Flagged for sensitive personally identifiable information.")
-                                                (.finishReason
-                                                 (format "\n\nError: Something's up (%s)" .finishReason))))
-                                        .error.message))))
-                      (unless (string-empty-p text)
-                        (setq response (concat response text)))
-                    (setq pending (concat pending
-                                          (or (map-elt chunk :key) "")
-                                          (map-elt chunk :value)))))
-                chunks)
-          (setq result
-                (list (cons :filtered (unless (string-empty-p response)
-                                        response))
-                      (cons :pending pending)))
-          result)
+        (let ((response)
+              (pending)
+              (result))
+          (mapc (lambda (chunk)
+                  ;; Response chunks come in the form:
+                  ;;   data: {...}
+                  ;;   data: {...}
+                  (if-let* ((is-data (equal (map-elt chunk :key) "data:"))
+                            (obj (shell-maker--json-parse-string (map-elt chunk :value)))
+                            (text (let-alist obj
+                                    (or (let-alist (seq-first .candidates)
+                                          (cond ((seq-first .content.parts)
+                                                 (let-alist (seq-first .content.parts)
+                                                   .text))
+                                                ((equal .finishReason "RECITATION")
+                                                 "")
+                                                ((equal .finishReason "STOP")
+                                                 "")
+                                                ((equal .finishReason "CANCELLED")
+                                                 "Error: Request cancellled.")
+                                                ((equal .finishReason "CRASHED")
+                                                 "Error: An error occurred. Try again.")
+                                                ((equal .finishReason "END_OF_PROMPT")
+                                                 "Error: Couldn't generate a response. Try rephrasing.")
+                                                ((equal .finishReason "LENGTH")
+                                                 "Error: Response is too big. Try rephrasing.")
+                                                ((equal .finishReason "TIME")
+                                                 "Error: Timed out.")
+                                                ((equal .finishReason "SAFETY")
+                                                 "Error: Flagged for safety.")
+                                                ((equal .finishReason "LANGUAGE")
+                                                 "Error: Flagged for language.")
+                                                ((equal .finishReason "BLOCKLIST")
+                                                 "Error: Flagged for forbidden terms.")
+                                                ((equal .finishReason "PROHIBITED_CONTENT")
+                                                 "Error: Flagged for prohibited content.")
+                                                ((equal .finishReason "SPII")
+                                                 "Error: Flagged for sensitive personally identifiable information.")
+                                                (.finishReason
+                                                 (format "\n\nError: Something's up (%s)" .finishReason))))
+                                        .error.message))))
+                      (unless (string-empty-p text)
+                        (setq response (concat response text)))
+                    (setq pending (concat pending
+                                          (or (map-elt chunk :key) "")
+                                          (map-elt chunk :value)))))
+                chunks)
+          (setq result
+                (list (cons :filtered (unless (string-empty-p response)
+                                        response))
+                      (cons :pending pending)))
+          result)
      (list (cons :filtered nil)
            (cons :pending raw-response)))))
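For orientation, a minimal sketch of how one streamed "data:" chunk decodes once `shell-maker--json-parse-string' has parsed its :value (the JSON payload here is invented):

    (let-alist (shell-maker--json-parse-string
                "{\"candidates\": [{\"content\": {\"parts\": [{\"text\": \"Hello\"}]}, \"finishReason\": \"STOP\"}]}")
      (let-alist (seq-first .candidates)
        (let-alist (seq-first .content.parts)
          .text)))
    ;; => "Hello".  A candidate carrying only a finishReason such as "SAFETY"
    ;; maps instead to one of the "Error: ..." strings above, and anything
    ;; that fails to parse is accumulated under :pending.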