[Looking for feedback] Add support for Function Calls #209
base: master
Changes from all commits:
9c6ae50
9a72918
5433bf7
6067168
ba6da17
717b607
861edf8
1c6e4a3
74d5853
63937cb
@@ -33,6 +33,7 @@
 (defvar gptel-backend)
 (defvar gptel-temperature)
 (defvar gptel-max-tokens)
+(defvar gptel-callable-functions)
 (defvar gptel--system-message)
 (defvar gptel--known-backends)
 (defvar json-object-type)

@@ -73,16 +74,24 @@
     (apply #'concat (nreverse content-strs))))

 (cl-defmethod gptel--parse-response ((_backend gptel-openai) response _info)
-  (map-nested-elt response '(:choices 0 :message :content)))
+  ;; If the reply specifies a function call, parse and return it instead of the message
+  (let* ((choices-path '(:choices 0 :message))
+         (tool-calls (map-nested-elt response (append choices-path '(:tool_calls))))
+         (content (map-nested-elt response (append choices-path '(:content)))))
+    (if tool-calls
+        (prin1-to-string tool-calls)
+      content)))
 (cl-defmethod gptel--request-data ((_backend gptel-openai) prompts)
   "JSON encode PROMPTS for sending to ChatGPT."
   (let ((prompts-plist
          `(:model ,gptel-model
            :messages [,@prompts]
            :stream ,(or (and gptel-stream gptel-use-curl
-                             (gptel-backend-stream gptel-backend))
-                        :json-false))))
+                             (gptel-backend-stream gptel-backend))
+                        :json-false))))
+    (when gptel-callable-functions
+      (plist-put prompts-plist :tools gptel-callable-functions))

[Review comment on lines +93 to +94] This is the main important change. This could be toggled on or off based on a setting in the transient. A certain function call can also be forced by using the

     (when gptel-temperature
       (plist-put prompts-plist :temperature gptel-temperature))
     (when gptel-max-tokens
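With gptel-callable-functions set, the request plist now carries a :tools key (the two added lines above). For forcing one particular function, the OpenAI chat completions API also accepts a tool_choice field, which a transient toggle could drive. A hedged sketch of that extension, not part of this diff (the forced function name is illustrative):

    ;; Sketch only: force a single function call inside gptel--request-data by
    ;; sending :tool_choice alongside :tools (OpenAI chat completions format).
    (when gptel-callable-functions
      (plist-put prompts-plist :tools gptel-callable-functions)
      (plist-put prompts-plist :tool_choice
                 '(:type "function" :function (:name "my-create-file"))))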
@@ -118,7 +127,7 @@
         (name &key models stream key
               (header
                (lambda () (when-let (key (gptel--get-api-key))
-                            `(("Authorization" . ,(concat "Bearer " key))))))
+                            `(("Authorization" . ,(concat "Bearer " key))))))
               (host "api.openai.com")
               (protocol "https")
               (endpoint "/v1/chat/completions"))

@@ -386,6 +386,9 @@ To set the model for a chat session interactively call
   (const :tag "GPT 4 32k" "gpt-4-32k")
   (const :tag "GPT 4 1106 (preview)" "gpt-4-1106-preview")))

+(defcustom gptel-callable-functions nil
+  "Vector defining callable functions according to JSON Schema")
+
[Review comment] I wasn't sure where to put this.

 (defcustom gptel-temperature 1.0
   "\"Temperature\" of the LLM response.

@@ -0,0 +1,93 @@
;;; test-user-config.el --- Description -*- lexical-binding: t; -*-

;;; Example functions
(cl-defun my-cowsay (&key term)
  (let* ((bubble-top (concat " " (make-string (+ (length term) 2) ?_) "\n"))
         (bubble-middle (format "< %s >\n" term))
         (bubble-bottom (concat " " (make-string (+ (length term) 2) ?-) "\n"))
         (cow "        \\   ^__^\n         \\  (oo)\\_______\n            (__)\\       )\\/\\\n                ||----w |\n                ||     ||"))
    (format "%s%s%s%s" bubble-top bubble-middle bubble-bottom cow)))

(cl-defun my-create-file (&key filename contents)
  "Create a file with FILENAME and CONTENTS, and open it in a new buffer."
  (interactive "sEnter filename: \nMEnter contents: ")
  (if (not (file-exists-p filename))
      (with-temp-buffer
        (insert contents)
        (write-file filename)))
  (split-window)
  (other-window 1)
  (find-file filename))
;;; Callable function schema

[Review comment] Given a list of emacs functions, we should be able to introspect them to generate this schema data if that's preferable. (A sketch of that idea follows the schema below.)

(setq! gptel-callable-functions
       ;; Hard coded variable specifying callable functions. This could be defined in a user's configuration
       (vector
        (list
         :type "function"
         :function (list
                    :name "my-cowsay"
                    :description "Have a cow say something"
                    :parameters (list
                                 :type "object"
                                 :properties (list
                                              :term (list
                                                     :type "string"
                                                     :description "term to say"))
                                 :required ["term"])))
        (list
         :type "function"
         :function (list
                    :name "my-create-file"
                    :description "Create a new file"
                    :parameters (list
                                 :type "object"
                                 :properties (list
                                              :filename (list
                                                         :type "string"
                                                         :description "local path to file including file extension")
                                              :contents (list
                                                         :type "string"
                                                         :description "file contents"))
                                 ;; Note: :required moved inside :parameters to match the
                                 ;; first entry and the JSON Schema layout OpenAI expects.
                                 :required ["filename" "contents"])))))
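On the introspection idea above, a hedged sketch (not part of this PR): the schema boilerplate can be generated from a function symbol, pulling the description from its docstring via documentation. Recovering keyword argument names from a cl-defun is less direct (the advertised arglist lives in the docstring), so this sketch takes the parameter names explicitly and assumes every parameter is a string; my-gptel-make-tool-schema is a hypothetical helper name.

    (defun my-gptel-make-tool-schema (fn params)
      "Return a tool-schema plist for FN, a function symbol taking string keyword PARAMS."
      (let (props)
        (dolist (p params)
          (setq props (plist-put props
                                 (intern (concat ":" p))
                                 ;; Types and per-parameter descriptions aren't introspectable,
                                 ;; so default everything to a plain string.
                                 (list :type "string" :description p))))
        (list :type "function"
              :function (list :name (symbol-name fn)
                              :description (or (documentation fn) (symbol-name fn))
                              :parameters (list :type "object"
                                                :properties props
                                                :required (vconcat params))))))

    ;; e.g. (setq gptel-callable-functions
    ;;            (vector (my-gptel-make-tool-schema 'my-create-file '("filename" "contents"))))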

;;; Backend to use for testing. Stream must be turned off (for now)
(setq gptel-backend (gptel-make-openai
                     "OpenAI with function calls"
                     :key #'gptel-api-key
                     :models '("gpt-3.5-turbo" "gpt-3.5-turbo-16k" "gpt-4" "gpt-4-turbo-preview" "gpt-4-32k" "gpt-4-1106-preview")
                     :stream nil))

;;; Hook to call function after gptel query
(defun cons-list-to-plist (cons-list)
  (let ((plist '()))
    (dolist (item cons-list)
      (setq plist (plist-put plist (if (keywordp (car item))
                                       (car item)
                                     (intern (concat ":" (symbol-name (car item)))))
                             (cdr item))))
    plist))
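By default json-read-from-string returns the :arguments object as an alist keyed by symbols, and cons-list-to-plist converts that into the keyword plist the cl-defun &key functions above expect. For example:

    ;; (json-read-from-string "{\"term\": \"moo\"}")             ;=> ((term . "moo"))
    ;; (cons-list-to-plist '((term . "moo")))                     ;=> (:term "moo")
    ;; (apply #'my-cowsay (cons-list-to-plist '((term . "moo")))) ;=> the cow speech bubble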

[Review comment] For now this is just a post-response hook function, but this would likely be more appropriate in the core gptel source. I'm not sure exactly where though.

(cl-defun gptel-run-function-on-region (beg end)
  (when (and beg end)
    (save-excursion
      (let* ((contents (buffer-substring-no-properties beg end))
             (parsed (ignore-errors (read contents)))
             (is-vector (vectorp parsed)))
        (if is-vector
            ;; Function call data is in a vector. If the contents are just a string, do nothing
            (let* ((plist (aref parsed 0))
                   (is-function-call (string= (plist-get plist :type) "function")))
              (if is-function-call
                  (let* ((function-data (plist-get plist :function))
                         (function-name (plist-get function-data :name))
                         (arguments-json (plist-get function-data :arguments))
                         (arguments (json-read-from-string arguments-json)))
                    (when (yes-or-no-p (format "Call function `%s` with arguments %s?" function-name arguments-json))
                      (when (fboundp (intern function-name))
                        ;; Call the function and insert return value
                        (goto-char end)
                        (insert "\n")
                        (insert (apply (intern function-name) (cons-list-to-plist arguments)))))))))))))

(add-hook 'gptel-post-response-functions #'gptel-run-function-on-region)
[Review comment] If GPT decides a function should be called, the structured data is included in tool_calls instead of content, so the parsing needs to be updated. For now I'm only parsing the full response; streaming is not supported yet (but could be).
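For reference, the parsed response that gptel--parse-response inspects looks roughly like the sketch below when the model requests a tool call. Field names follow the OpenAI chat completions format; the concrete values are made up, and the exact representation of the null content depends on the JSON parser settings.

    ;; Relevant slice of the parsed response for a tool call (illustrative values):
    (:choices [(:index 0
                :finish_reason "tool_calls"
                :message (:role "assistant"
                          :content nil
                          :tool_calls [(:id "call_abc123"
                                        :type "function"
                                        :function (:name "my-cowsay"
                                                   :arguments "{\"term\": \"moo\"}"))]))])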