From 6e002136153608a855d061d402a3cf570a059de1 Mon Sep 17 00:00:00 2001 From: Jen-Chieh Shen Date: Fri, 17 Mar 2023 17:32:13 -0700 Subject: [PATCH] feat(chat): Add support for chat request (#8) * feat(chat): Add support for chat request * Impls example * Update changelog --- CHANGELOG.md | 7 ++- openai-chat.el | 118 +++++++++++++++++++++++++++++++++++++++++++ openai-completion.el | 2 +- openai.el | 11 ++++ 4 files changed, 136 insertions(+), 2 deletions(-) create mode 100644 openai-chat.el diff --git a/CHANGELOG.md b/CHANGELOG.md index 1bbe4d2..a82ce44 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,12 @@ All notable changes to this project will be documented in this file. Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file. -## 0.1.0 (Unreleased) +## 0.2.0 (Unreleased) +> Released N/A + +* N/A + +## 0.1.0 > Released N/A * Initial release diff --git a/openai-chat.el b/openai-chat.el new file mode 100644 index 0000000..f63306e --- /dev/null +++ b/openai-chat.el @@ -0,0 +1,118 @@ +;;; openai-chat.el --- -*- lexical-binding: t; -*- + +;; Copyright (C) 2023 Shen, Jen-Chieh + +;; This file is not part of GNU Emacs. + +;; This program is free software: you can redistribute it and/or modify +;; it under the terms of the GNU General Public License as published by +;; the Free Software Foundation, either version 3 of the License, or +;; (at your option) any later version. + +;; This program is distributed in the hope that it will be useful, +;; but WITHOUT ANY WARRANTY; without even the implied warranty of +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +;; GNU General Public License for more details. + +;; You should have received a copy of the GNU General Public License +;; along with this program. If not, see . + +;;; Commentary: +;; +;; Create chat with OpenAI API. 
;;;###autoload
(cl-defun openai-chat ( messages callback
                        &key
                        (key openai-key)
                        (model "gpt-3.5-turbo")
                        temperature
                        top-p
                        n
                        stream
                        stop
                        max-tokens
                        presence-penalty
                        frequency-penalty
                        logit-bias
                        (user openai-user))
  "Send chat request.

Arguments MESSAGES and CALLBACK are required for this type of request.
MESSAGES is the conversation data.  CALLBACK is the execution after the
request is made.

Arguments KEY and USER are global options; however, you can overwrite the
value by passing it in.

The rest of the arguments are optional, please see OpenAI API reference page
for more information.  Arguments here refer to MODEL, TEMPERATURE, TOP-P, N,
STREAM, STOP, MAX-TOKENS, PRESENCE-PENALTY, FREQUENCY-PENALTY, and LOGIT-BIAS."
  (openai-request "https://api.openai.com/v1/chat/completions"
    :type "POST"
    :headers `(("Content-Type"  . "application/json")
               ("Authorization" . ,(concat "Bearer " key)))
    :data (openai--json-encode
           `(("model"             . ,model)
             ("messages"          . ,messages)
             ("temperature"       . ,temperature)
             ;; NOTE: the API expects the snake_case key `top_p' (matching
             ;; the other keys below); the hyphenated `top-p' would be
             ;; silently ignored by the server.
             ("top_p"             . ,top-p)
             ("n"                 . ,n)
             ("stream"            . ,stream)
             ("stop"              . ,stop)
             ("max_tokens"        . ,max-tokens)
             ("presence_penalty"  . ,presence-penalty)
             ("frequency_penalty" . ,frequency-penalty)
             ("logit_bias"        . ,logit-bias)
             ("user"              . ,user)))
    :parser 'json-read
    :success (cl-function
              (lambda (&key data &allow-other-keys)
                (funcall callback data)))))

(defcustom openai-chat-max-tokens 4000
  "The maximum number of tokens to generate in the completion."
  :type 'integer
  :group 'openai)

(defcustom openai-chat-temperature 1.0
  "What sampling temperature to use."
  :type 'number
  :group 'openai)
;;;###autoload
(defun openai-chat-say ()
  "Start making a conversation to OpenAI.

This is a ping pong message, so you will only get one response."
  (interactive)
  (if-let* ((user (read-string "What is your name? " "user"))
            (say  (read-string "Start the conversation: ")))
      ;; NOTE(review): the entered name is sent as the message `role';
      ;; the chat API only accepts roles like "system"/"user"/"assistant",
      ;; so a custom name likely fails server-side — confirm intent.
      (openai-chat `[(("role"    . ,user)
                      ("content" . ,say))]
                   (lambda (data)
                     ;; Print every returned choice as "role: content".
                     (let ((choices (let-alist data .choices)))
                       (mapc (lambda (choice)
                               (let-alist choice
                                 (let-alist .message
                                   (message "%s: %s" .role (string-trim .content)))))
                             choices)))
                   :max-tokens openai-chat-max-tokens
                   :temperature openai-chat-temperature
                   :user (unless (string= user "user") user))
    (user-error "Abort, cancel chat operation")))

(defun openai-print-json-encode (object)
  "Encode OBJECT to JSON format then print out the result."
  (let ((encoded (openai--json-encode object)))
    (message "%s" encoded)  ; don't pretty it, show the raw!
    encoded))