-
-
Notifications
You must be signed in to change notification settings - Fork 17
/
openai-chat.el
123 lines (104 loc) · 4.33 KB
/
openai-chat.el
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
;;; openai-chat.el --- Create chat with OpenAI API -*- lexical-binding: t; -*-
;; Copyright (C) 2023-2024 Shen, Jen-Chieh
;; This file is not part of GNU Emacs.
;; This program is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;; You should have received a copy of the GNU General Public License
;; along with this program. If not, see <https://www.gnu.org/licenses/>.
;;; Commentary:
;;
;; Create chat with OpenAI API.
;;
;; See https://platform.openai.com/docs/api-reference/chat
;;
;;; Code:
(require 'openai)
;;
;;; API
;;;###autoload
(cl-defun openai-chat ( messages callback
                        &key
                        (base-url openai-base-url)
                        (parameters openai-parameters)
                        (content-type "application/json")
                        (key openai-key)
                        org-id
                        (model "gpt-3.5-turbo")
                        temperature
                        top-p
                        n
                        stream
                        stop
                        max-tokens
                        presence-penalty
                        frequency-penalty
                        logit-bias
                        (user openai-user))
  "Send a chat completion request to the OpenAI API.

Arguments MESSAGES and CALLBACK are required for this type of request.
MESSAGES is the conversation data.  CALLBACK is the function executed with
the parsed JSON response once the request completes.

Arguments BASE-URL, PARAMETERS, CONTENT-TYPE, KEY, ORG-ID and USER are global
options; however, you can overwrite the value by passing it in.

The rest of the arguments are optional, please see OpenAI API reference page
for more information.  Arguments here refer to MODEL, TEMPERATURE, TOP-P, N,
STREAM, STOP, MAX-TOKENS, PRESENCE-PENALTY, FREQUENCY-PENALTY, and LOGIT-BIAS."
  (openai-request (concat base-url "/chat/completions")
    :type "POST"
    :params parameters
    :headers (openai--headers content-type key org-id)
    ;; NOTE(review): optional keys whose argument is nil are still present in
    ;; this alist; whether they are dropped or serialized as JSON null depends
    ;; on `openai--json-encode' -- confirm against that helper.
    :data (openai--json-encode
           `(("model"             . ,model)
             ("messages"          . ,messages)
             ("temperature"       . ,temperature)
             ("top_p"             . ,top-p)
             ("n"                 . ,n)
             ("stream"            . ,stream)
             ("stop"              . ,stop)
             ("max_tokens"        . ,max-tokens)
             ("presence_penalty"  . ,presence-penalty)
             ("frequency_penalty" . ,frequency-penalty)
             ("logit_bias"        . ,logit-bias)
             ("user"              . ,user)))
    :parser 'json-read
    ;; Hand the raw parsed payload to the caller; no unwrapping is done here.
    :complete (cl-function
               (lambda (&key data &allow-other-keys)
                 (funcall callback data)))))
;;
;;; Application
(defcustom openai-chat-max-tokens 4000
  "Upper bound on the number of tokens a chat completion may generate."
  :group 'openai
  :type 'integer)
(defcustom openai-chat-temperature 1.0
  "Sampling temperature sent along with each chat request."
  :group 'openai
  :type 'number)
;;;###autoload
(defun openai-chat-say ()
  "Start making a conversation to OpenAI.

This is a ping pong message, so you will only get one response."
  (interactive)
  (let* ((name   (read-string "What is your name? " "user"))
         (prompt (and name (read-string "Start the conversation: "))))
    (if (and name prompt)
        ;; NOTE(review): NAME is used as the message "role" below; the OpenAI
        ;; chat API only accepts a fixed set of roles (system / user /
        ;; assistant), so an arbitrary name here is likely rejected by the
        ;; server -- confirm intended usage before changing.
        (openai-chat `[(("role"    . ,name)
                        ("content" . ,prompt))]
                     (lambda (data)
                       ;; Echo every returned choice as "role: content".
                       (let-alist data
                         (mapc (lambda (choice)
                                 (let-alist choice
                                   (let-alist .message
                                     (message "%s: %s" .role (string-trim .content)))))
                               .choices)))
                     :max-tokens openai-chat-max-tokens
                     :temperature openai-chat-temperature
                     ;; Only forward a non-default name as the API `user' id.
                     :user (unless (string= name "user") name))
      (user-error "Abort, cancel chat operation"))))
(provide 'openai-chat)
;;; openai-chat.el ends here