Skip to content

Commit e5f3401

Browse files
author
xenodium
committed
Add experimental basic function calling (OpenAI only for now)
1 parent 1e9d534 commit e5f3401

9 files changed

+422
-147
lines changed

chatgpt-shell-anthropic.el

Lines changed: 44 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ If you use Claude through a proxy service, change the URL base."
5858
"The token budget allocated for Anthropic model thinking.
5959
6060
Needs `chatgpt-shell-anthropic-thinking-budget-tokens' set to
61-
non-nil. nil means to use the maximum number of thinking tokens
61+
non-nil. nil means to use the maximum number of thinking tokens
6262
allowed."
6363
:type '(choice integer (const nil))
6464
:group 'chatgpt-shell)
@@ -239,9 +239,19 @@ or
239239
:filter #'chatgpt-shell-anthropic--extract-claude-response
240240
:shell shell))
241241

242-
(defun chatgpt-shell-anthropic--extract-claude-response (raw-response)
243-
"Extract Claude response from RAW-RESPONSE."
244-
(if-let* ((whole (shell-maker--json-parse-string raw-response))
242+
(defun chatgpt-shell-anthropic--extract-claude-response (output)
243+
"Process pending OUTPUT to extract Claude response.
244+
245+
OUTPUT is always of the form:
246+
247+
((:function-calls . ...)
248+
(:pending . ...)
249+
(:filtered . ...))
250+
251+
and must be returned in the same form."
252+
(when (stringp output)
253+
(error "Please upgrade shell-maker to 0.79.1 or newer"))
254+
(if-let* ((whole (shell-maker--json-parse-string (map-elt output :pending)))
245255
(response (or (let-alist whole
246256
.error.message)
247257
(let-alist whole
@@ -250,37 +260,36 @@ or
250260
.text))
251261
.content "")))))
252262
response
253-
(if-let ((chunks (shell-maker--split-text raw-response)))
254-
(let ((response)
255-
(pending)
256-
(result))
257-
(mapc (lambda (chunk)
258-
;; Response chunks come in the form:
259-
;; event: message_start
260-
;; data: {...}
261-
;; event: content_block_start
262-
;; data: {...}
263-
(if-let* ((is-data (equal (map-elt chunk :key) "data:"))
264-
(obj (shell-maker--json-parse-string (map-elt chunk :value)))
265-
(text (let-alist obj
266-
(or .text
267-
.content_block.text
268-
.delta.text
269-
.error.message
270-
""))))
271-
(unless (string-empty-p text)
272-
(setq response (concat response text)))
273-
(setq pending (concat pending
274-
(or (map-elt chunk :key) "")
275-
(map-elt chunk :value)))))
276-
chunks)
277-
(setq result
278-
(list (cons :filtered (unless (string-empty-p response)
279-
response))
280-
(cons :pending pending)))
281-
result)
282-
(list (cons :filtered nil)
283-
(cons :pending raw-response)))))
263+
(if-let ((chunks (shell-maker--split-text (map-elt output :pending))))
264+
(let ((response)
265+
(pending)
266+
(result))
267+
(mapc (lambda (chunk)
268+
;; Response chunks come in the form:
269+
;; event: message_start
270+
;; data: {...}
271+
;; event: content_block_start
272+
;; data: {...}
273+
(if-let* ((is-data (equal (map-elt chunk :key) "data:"))
274+
(obj (shell-maker--json-parse-string (map-elt chunk :value)))
275+
(text (let-alist obj
276+
(or .text
277+
.content_block.text
278+
.delta.text
279+
.error.message
280+
""))))
281+
(unless (string-empty-p text)
282+
(setq response (concat response text)))
283+
(setq pending (concat pending
284+
(or (map-elt chunk :key) "")
285+
(map-elt chunk :value)))))
286+
chunks)
287+
(setq result
288+
(list (cons :filtered (unless (string-empty-p response)
289+
response))
290+
(cons :pending pending)))
291+
result)
292+
output)))
284293

285294
(provide 'chatgpt-shell-anthropic)
286295

chatgpt-shell-deepseek.el

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -103,9 +103,9 @@ If you use DeepSeek through a proxy service, change the URL base."
103103
:filter #'chatgpt-shell-deepseek--filter-output
104104
:missing-key-msg "Your chatgpt-shell-deepseek-key is missing"))
105105

106-
(defun chatgpt-shell-deepseek--filter-output (raw-response)
107-
"Filter RAW-RESPONSE when processing responses are sent."
108-
(chatgpt-shell-openai--filter-output raw-response))
106+
(defun chatgpt-shell-deepseek--filter-output (object)
107+
"Process OBJECT to extract response output."
108+
(chatgpt-shell-openai--filter-output object))
109109

110110
(defun chatgpt-shell-deepseek--make-headers (&rest args)
111111
"Create the API headers.

chatgpt-shell-google.el

Lines changed: 18 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -369,9 +369,21 @@ For example:
369369
context)
370370
(nreverse result)))
371371

372-
(defun chatgpt-shell-google--extract-gemini-response (raw-response)
373-
"Extract Gemini response from RAW-RESPONSE."
374-
(if-let* ((whole (shell-maker--json-parse-string raw-response))
372+
(defun chatgpt-shell-google--extract-gemini-response (output)
373+
"Process pending OUTPUT to extract Gemini response.
374+
375+
OUTPUT is always of the form:
376+
377+
((:function-calls . ...)
378+
(:pending . ...)
379+
(:filtered . ...))
380+
381+
and must be returned in the same form.
382+
383+
Processing means processing :pending content into :filtered."
384+
(when (stringp output)
385+
(error "Please upgrade shell-maker to 0.79.1 or newer"))
386+
(if-let* ((whole (shell-maker--json-parse-string (map-elt output :pending)))
375387
(response (or (let-alist whole
376388
.error.message)
377389
(let-alist whole
@@ -380,8 +392,8 @@ For example:
380392
(or .delta.content
381393
.message.content)))
382394
.choices "")))))
383-
response
384-
(if-let ((chunks (shell-maker--split-text raw-response)))
395+
(list (cons :filtered response))
396+
(if-let ((chunks (shell-maker--split-text (map-elt output :pending))))
385397
(let ((response)
386398
(pending)
387399
(result))
@@ -434,8 +446,7 @@ For example:
434446
response))
435447
(cons :pending pending)))
436448
result)
437-
(list (cons :filtered nil)
438-
(cons :pending raw-response)))))
449+
output)))
439450

440451
(provide 'chatgpt-shell-google)
441452

chatgpt-shell-kagi.el

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -107,18 +107,25 @@ If you use Kagi through a proxy service, change the URL base."
107107
"&url="
108108
(url-hexify-string (string-trim command))))
109109

110-
(defun chatgpt-shell-kagi--extract-summarizer-response (raw-response)
111-
"Extract Kagi summarizer response from RAW-RESPONSE.
110+
(defun chatgpt-shell-kagi--extract-summarizer-response (object)
111+
"Extract Kagi summarizer response from OBJECT.
112112
113113
Responses are never streamed."
114+
(when (stringp object)
115+
(error "Please upgrade shell-maker to 0.79.1 or newer"))
114116
;; Non-streamed
115-
(if-let* ((whole (shell-maker--json-parse-string raw-response))
116-
(response (let-alist whole
117-
(or (let-alist (seq-first .error)
118-
.msg)
119-
.data.output))))
120-
response
121-
(list (cons :pending raw-response))))
117+
(if-let* ((whole (shell-maker--json-parse-string (map-elt object :pending)))
118+
(response (cond ((and (map-elt whole 'error)
119+
(seq-first (map-elt whole 'error)))
120+
(map-elt (seq-first (map-elt whole 'error)) 'msg))
121+
((and (map-elt whole 'data)
122+
(not (eq (map-elt whole 'data) :null)))
123+
(map-elt (map-elt whole 'data) 'output)))))
124+
(progn
125+
(setf (map-elt object :filtered) response)
126+
(setf (map-elt object :pending) nil)
127+
object)
128+
object))
122129

123130
(cl-defun chatgpt-shell-kagi--extract-url (&key text fail)
124131
"Trim TEXT URL found.

chatgpt-shell-ollama.el

Lines changed: 27 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,7 @@ If you use Ollama through a proxy service, change the URL base."
103103
:context-window 32768)))
104104

105105
(defun chatgpt-shell-ollama--fetch-model-versions ()
106+
"Fetch available Ollama model versions (installed locally)."
106107
(mapcar (lambda (model)
107108
(string-remove-suffix ":latest" (map-elt model 'name)))
108109
(map-elt (shell-maker--json-parse-string
@@ -113,10 +114,12 @@ If you use Ollama through a proxy service, change the URL base."
113114
'models)))
114115

115116
(defun chatgpt-shell-ollama--parse-token-width (quantization)
117+
"Parse token using QUANTIZATION."
116118
(when (string-match "^[FQ]\\([1-9][0-9]*\\)" quantization)
117119
(string-to-number (match-string 1 quantization))))
118120

119121
(defun chatgpt-shell-ollama--fetch-model (version)
122+
"Fetch Ollama model details with VERSION."
120123
(let* ((data (shell-maker--json-parse-string
121124
(map-elt (shell-maker-make-http-request
122125
:async nil
@@ -137,12 +140,15 @@ If you use Ollama through a proxy service, change the URL base."
137140
:context-window context-window)))
138141

139142
(cl-defun chatgpt-shell-ollama-load-models (&key override)
140-
"Query ollama for the locally installed models and add them to
141-
`chatgpt-shell-models' unless a model with the same name is
142-
already present. By default, replace the ollama models in
143-
`chatgpt-shell-models' locally installed ollama models. When
144-
OVERRIDE is non-nil (interactively with a prefix argument),
145-
replace all models with locally installed ollama models."
143+
"Query ollama for the locally installed models.
144+
145+
Queried models are added to `chatgpt-shell-models' unless a model
146+
with the same name is already present.
147+
148+
By default, replace the ollama models in `chatgpt-shell-models'
149+
with locally installed ollama models. When OVERRIDE is non-nil (interactively
150+
with a prefix argument), replace all models with
151+
locally installed ollama models."
146152
(interactive (list :override current-prefix-arg))
147153
(let* ((ollama-predicate (lambda (model)
148154
(string= (map-elt model :provider) "Ollama")))
@@ -189,21 +195,28 @@ replace all models with locally installed ollama models."
189195
(concat chatgpt-shell-ollama-api-url-base
190196
"/api/chat"))
191197

192-
(defun chatgpt-shell-ollama--extract-ollama-response (raw-response)
193-
"Extract Claude response from RAW-RESPONSE."
194-
(if-let* ((whole (shell-maker--json-parse-string raw-response))
198+
(defun chatgpt-shell-ollama--extract-ollama-response (object)
199+
"Process Ollama response from OBJECT."
200+
(when (stringp object)
201+
(error "Please upgrade shell-maker to 0.79.1 or newer"))
202+
(if-let* ((whole (shell-maker--json-parse-string (map-elt object :pending)))
195203
(response (let-alist whole
196204
.response)))
197-
response
198-
(if-let ((chunks (string-split raw-response "\n")))
205+
(progn
206+
(setf (map-elt object :filtered) response)
207+
(setf (map-elt object :pending) nil)
208+
object)
209+
(if-let ((chunks (string-split (map-elt object :pending) "\n")))
199210
(let ((response))
200211
(mapc (lambda (chunk)
201212
(let-alist (shell-maker--json-parse-string chunk)
202213
(unless (string-empty-p .message.content)
203214
(setq response (concat response .message.content)))))
204215
chunks)
205-
(or response raw-response))
206-
raw-response)))
216+
(setf (map-elt object :filtered) response)
217+
(setf (map-elt object :pending) nil)
218+
object)
219+
object)))
207220

208221
(cl-defun chatgpt-shell-ollama-make-payload (&key model context settings)
209222
"Create the API payload using MODEL CONTEXT and SETTINGS."
@@ -256,7 +269,7 @@ CONTEXT: Excludes PROMPT."
256269
(image_url . ,prompt-url))))))))))))
257270

258271
(defun chatgpt-shell-ollama--validate-command (_command model _settings)
259-
"Return error string if command/setup isn't valid."
272+
"Return error string if MODEL isn't valid."
260273
(unless (seq-contains-p (chatgpt-shell-ollama--fetch-model-versions)
261274
(map-elt model :version))
262275
(format " Local model \"%s\" not found.

0 commit comments

Comments
 (0)