feat(chat): Add support for chat request (#8)
* feat(chat): Add support for chat request

* Impls example

* Update changelog
jcs090218 authored Mar 18, 2023
1 parent ff4a7eb commit 6e00213
Showing 4 changed files with 136 additions and 2 deletions.
7 changes: 6 additions & 1 deletion CHANGELOG.md
@@ -5,7 +5,12 @@ All notable changes to this project will be documented in this file.
Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.


## 0.1.0 (Unreleased)
## 0.2.0 (Unreleased)
> Released N/A
* N/A

## 0.1.0
> Released N/A
* Initial release
118 changes: 118 additions & 0 deletions openai-chat.el
@@ -0,0 +1,118 @@
;;; openai-chat.el --- Create chat with OpenAI API  -*- lexical-binding: t; -*-

;; Copyright (C) 2023 Shen, Jen-Chieh

;; This file is not part of GNU Emacs.

;; This program is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program. If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:
;;
;; Create chat with OpenAI API.
;;
;; See https://platform.openai.com/docs/api-reference/chat
;;

;;; Code:

(require 'openai)

;;
;;; API

;;;###autoload
(cl-defun openai-chat ( messages callback
&key
(key openai-key)
(model "gpt-3.5-turbo")
temperature
top-p
n
stream
stop
max-tokens
presence-penalty
frequency-penalty
logit-bias
(user openai-user))
"Send chat request.
Arguments MESSAGES and CALLBACK are required for this type of request. MESSAGES
is the conversation data. CALLBACK is the function called after the request is
made.
Arguments KEY and USER are global options; however, you can overwrite the value
by passing it in.
The rest of the arguments are optional; please see the OpenAI API reference page
for more information. Arguments here refer to MODEL, TEMPERATURE, TOP-P, N,
STREAM, STOP, MAX-TOKENS, PRESENCE-PENALTY, FREQUENCY-PENALTY, and LOGIT-BIAS."
(openai-request "https://api.openai.com/v1/chat/completions"
:type "POST"
:headers `(("Content-Type" . "application/json")
("Authorization" . ,(concat "Bearer " key)))
:data (openai--json-encode
`(("model" . ,model)
("messages" . ,messages)
("temperature" . ,temperature)
("top-p" . ,top-p)
("n" . ,n)
("stream" . ,stream)
("stop" . ,stop)
("max_tokens" . ,max-tokens)
("presence_penalty" . ,presence-penalty)
("frequency_penalty" . ,frequency-penalty)
("logit_bias" . ,logit-bias)
("user" . ,user)))
:parser 'json-read
:success (cl-function
(lambda (&key data &allow-other-keys)
(funcall callback data)))))
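
;; A minimal usage sketch, assuming `openai-key' is already set; evaluate the
;; form interactively rather than at load time:
;;
;;   (openai-chat [(("role"    . "user")
;;                  ("content" . "Hello!"))]
;;                (lambda (data)
;;                  (let-alist data
;;                    (let-alist (elt .choices 0)
;;                      (let-alist .message
;;                        (message "%s" (string-trim .content)))))))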

;;
;;; Application

(defcustom openai-chat-max-tokens 4000
"The maximum number of tokens to generate in the completion."
:type 'integer
:group 'openai)

(defcustom openai-chat-temperature 1.0
"What sampling temperature to use."
:type 'number
:group 'openai)

;;;###autoload
(defun openai-chat-say ()
"Start making a conversation to OpenAI.
This is a ping pong message, so you will only get one response."
(interactive)
(if-let* ((user (read-string "What is your name? " "user"))
(say (read-string "Start the conversation: ")))
(openai-chat `[(("role" . ,user)
("content" . ,say))]
(lambda (data)
(let ((choices (let-alist data .choices)))
(mapc (lambda (choice)
(let-alist choice
(let-alist .message
(message "%s: %s" .role (string-trim .content)))))
choices)))
:max-tokens openai-chat-max-tokens
:temperature openai-chat-temperature
:user (unless (string= user "user") user))
(user-error "Abort, cancel the chat operation")))
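
;; To try it out, run `M-x openai-chat-say', answer the two prompts, and the
;; model's reply is printed in the echo area.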

(provide 'openai-chat)
;;; openai-chat.el ends here
2 changes: 1 addition & 1 deletion openai-completion.el
@@ -52,7 +52,7 @@
(user openai-user))
"Send completion request.
Arguments PROMPT and CALLBACK are required for this type of request. PROMPT is
Arguments PROMPT and CALLBACK are required for this type of request. PROMPT is
either the question or instruction to OpenAI. CALLBACK is the function called
after the request is made.
11 changes: 11 additions & 0 deletions openai.el
@@ -173,5 +173,16 @@ Argument OPTIONS is an alist used to calculate the frame offset.
(t
(completing-read "Response: " choices nil t))))

;;
;;; Testing

;; The module here is for users to test requests and inspect the results.

(defun openai-print-json-encode (object)
"Encode OBJECT to JSON format then print out the result."
(let ((encoded (openai--json-encode object)))
(message "%s" encoded) ; don't pretty it, show the raw!
encoded))
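
;; For example, to inspect the payload a chat request would send (the model
;; name here is only an illustration):
;;
;;   (openai-print-json-encode
;;    '(("model"    . "gpt-3.5-turbo")
;;      ("messages" . [(("role" . "user") ("content" . "Hi"))])))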

(provide 'openai)
;;; openai.el ends here
