From af315f2c6418ccdd77cd1ce2af9247f8594c5088 Mon Sep 17 00:00:00 2001 From: Sebastian Mueller Date: Mon, 31 Jul 2023 16:42:15 +0200 Subject: [PATCH] Better logging with more verbose trace options and channels --- package.json | 2 +- src/botservice.ts | 30 ++++++-------- src/logging.ts | 7 ++++ src/openai-wrapper.ts | 93 ++++++++++++++++++++++++++++++------------- 4 files changed, 86 insertions(+), 46 deletions(-) create mode 100644 src/logging.ts diff --git a/package.json b/package.json index bef7eb1..99c0aba 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "chatgpt-mattermost-bot", - "version": "2.1.1", + "version": "2.1.2", "private": true, "scripts": { "start": "ts-node ./src/botservice.ts", diff --git a/src/botservice.ts b/src/botservice.ts index afc498a..3c43d7c 100644 --- a/src/botservice.ts +++ b/src/botservice.ts @@ -1,5 +1,4 @@ import {continueThread, registerChatPlugin} from "./openai-wrapper"; -import {Log} from "debug-level" import {mmClient, wsClient} from "./mm-client"; import 'babel-polyfill' import 'isomorphic-fetch' @@ -13,6 +12,8 @@ import {JSONMessageData, MessageData} from "./types"; import {ExitPlugin} from "./plugins/ExitPlugin"; import {MessageCollectPlugin} from "./plugins/MessageCollectPlugin"; +import {botLog, matterMostLog} from "./logging"; + if (!global.FormData) { global.FormData = require('form-data') } @@ -33,9 +34,9 @@ const botInstructions = "Your name is " + name + " and you are a helpful assista "provide them with succinct answers formatted using Markdown. You know the user's name as it is provided within the " + "meta data of the messages." 
-async function onClientMessage(msg: WebSocketMessage, meId: string, log: Log) { +async function onClientMessage(msg: WebSocketMessage, meId: string) { if (msg.event !== 'posted' || !meId) { - log.debug({msg: msg}) + matterMostLog.debug({msg: msg}) return } @@ -55,7 +56,7 @@ async function onClientMessage(msg: WebSocketMessage, meId: str // create the context for (const threadPost of posts.slice(-contextMsgCount)) { - log.trace({msg: threadPost}) + matterMostLog.trace({msg: threadPost}) if (threadPost.user_id === meId) { chatmessages.push({ role: ChatCompletionRequestMessageRoleEnum.Assistant, @@ -76,9 +77,8 @@ async function onClientMessage(msg: WebSocketMessage, meId: str const typingInterval = setInterval(typing, 2000) try { - log.trace({chatmessages}) const {message, fileId, props} = await continueThread(chatmessages, msgData) - log.trace({message}) + botLog.trace({message}) // create answer response const newPost = await mmClient.createPost({ @@ -88,9 +88,9 @@ async function onClientMessage(msg: WebSocketMessage, meId: str root_id: msgData.post.root_id || msgData.post.id, file_ids: fileId ? 
[fileId] : undefined }) - log.trace({msg: newPost}) + botLog.trace({msg: newPost}) } catch (e) { - log.error(e) + botLog.error(e) await mmClient.createPost({ message: "Sorry, but I encountered an internal error when trying to process your message", channel_id: msgData.post.channel_id, @@ -208,28 +208,24 @@ async function userIdToName(userId: string): Promise { return username } -Log.options({json: true, colors: true}) -Log.wrapConsole('bot-ws', {level4log: 'INFO'}) -const log = new Log('bot') - /* Entry point */ async function main(): Promise { const meId = (await mmClient.getMe()).id - log.log("Connected to Mattermost.") + botLog.log("Connected to Mattermost.") for (const plugin of plugins) { if (plugin.setup()) { registerChatPlugin(plugin) - log.trace("Registered plugin " + plugin.key) + botLog.trace("Registered plugin " + plugin.key) } } - wsClient.addMessageListener((e) => onClientMessage(e, meId, log)) - log.trace("Listening to MM messages...") + wsClient.addMessageListener((e) => onClientMessage(e, meId)) + botLog.trace("Listening to MM messages...") } main().catch(reason => { - log.error(reason); + botLog.error(reason); process.exit(-1) }) diff --git a/src/logging.ts b/src/logging.ts new file mode 100644 index 0000000..bf56502 --- /dev/null +++ b/src/logging.ts @@ -0,0 +1,7 @@ +import {Log} from "debug-level"; + +Log.options({json: true, colors: true}) +Log.wrapConsole('bot-ws', {level4log: 'INFO'}) +export const botLog = new Log('bot') +export const openAILog = new Log('open-ai') +export const matterMostLog = new Log('mattermost') \ No newline at end of file diff --git a/src/openai-wrapper.ts b/src/openai-wrapper.ts index 07e8305..e40e916 100644 --- a/src/openai-wrapper.ts +++ b/src/openai-wrapper.ts @@ -2,22 +2,28 @@ import { ChatCompletionFunctions, ChatCompletionRequestMessage, ChatCompletionResponseMessage, ChatCompletionResponseMessageRoleEnum, - Configuration, + Configuration, CreateChatCompletionRequest, CreateImageRequest, OpenAIApi } from "openai"; 
+import {openAILog as log} from "./logging" + import {PluginBase} from "./plugins/PluginBase"; import {AiResponse, MessageData} from "./types"; -const configuration = new Configuration({ - apiKey: process.env['OPENAI_API_KEY'] -}) +const apiKey = process.env['OPENAI_API_KEY']; +log.trace({apiKey: apiKey ? '<redacted>' : undefined}) + +const configuration = new Configuration({ apiKey }) + const openai = new OpenAIApi(configuration) const model = process.env['OPENAI_MODEL_NAME'] ?? 'gpt-3.5-turbo' const max_tokens = Number(process.env['OPENAI_MAX_TOKENS'] ?? 2000) const temperature = Number(process.env['OPENAI_TEMPERATURE'] ?? 1) -const plugins: Record<string, PluginBase<any>> = {} +log.debug({model, max_tokens, temperature}) + +const plugins: Map<string, PluginBase<any>> = new Map() const functions: ChatCompletionFunctions[] = [] /** @@ -25,7 +31,7 @@ const functions: ChatCompletionFunctions[] = [] * @param plugin */ export function registerChatPlugin(plugin: PluginBase<any>) { - plugins[plugin.key] = plugin + plugins.set(plugin.key, plugin) functions.push({ name: plugin.key, description: plugin.description, @@ -48,27 +54,52 @@ export async function continueThread(messages: ChatCompletionRequestMessage[], m message: 'Sorry, but it seems I found no valid response.' } + // the number of rounds we're going to run at maximum + let maxChainLength = 7; + + // check whether ChatGPT hallucinates a plugin name. 
+ const missingPlugins = new Set<string>() + let isIntermediateResponse = true - while(isIntermediateResponse) { + while(isIntermediateResponse && maxChainLength-- > 0) { const responseMessage = await createChatCompletion(messages, functions) + log.trace(responseMessage) if(responseMessage) { // if the function_call is set, we have a plugin call if(responseMessage.function_call && responseMessage.function_call.name) { + const pluginName = responseMessage.function_call.name; + log.trace({pluginName}) try { - const pluginResponse = await plugins[responseMessage.function_call!.name!].runPlugin((JSON.parse(responseMessage.function_call!.arguments!)), msgData) - - if(pluginResponse.intermediate) { - messages.push({ - role: ChatCompletionResponseMessageRoleEnum.Function, - name: responseMessage.function_call!.name!, - content: pluginResponse.message - }) - continue - } + const plugin = plugins.get(pluginName); + if (plugin){ + const pluginArguments = JSON.parse(responseMessage.function_call.arguments ?? '{}'); + log.trace({plugin, pluginArguments}) + const pluginResponse = await plugin.runPlugin(pluginArguments, msgData) + log.trace({pluginResponse}) - aiResponse = pluginResponse + if(pluginResponse.intermediate) { + messages.push({ + role: ChatCompletionResponseMessageRoleEnum.Function, + name: pluginName, + content: pluginResponse.message + }) + continue + } + aiResponse = pluginResponse + } else { + if (!missingPlugins.has(pluginName)){ + missingPlugins.add(pluginName) + log.debug({ error: 'Missing plugin ' + pluginName, pluginArguments: responseMessage.function_call.arguments}) + messages.push({ role: 'system', content: `There is no plugin named '${pluginName}' available. 
Try without using that plugin.`}) + continue + } else { + log.debug({ messages }) + aiResponse.message = `Sorry, but it seems there was an error when using the plugin \`\`\`${pluginName}\`\`\`.` + } + } } catch (e) { - aiResponse.message = `Sorry, but it seems there was an error when using the plugin \`\`\`${responseMessage.function_call!.name!}\`\`\`.` + log.debug({ messages, error: e }) + aiResponse.message = `Sorry, but it seems there was an error when using the plugin \`\`\`${pluginName}\`\`\`.` } } else if(responseMessage.content) { aiResponse.message = responseMessage.content @@ -87,18 +118,22 @@ export async function continueThread(messages: ChatCompletionRequestMessage[], m * @param functions Function calls which can be called by the openAI model */ export async function createChatCompletion(messages: ChatCompletionRequestMessage[], functions: ChatCompletionFunctions[] | undefined = undefined): Promise { - const options: any = { + const chatCompletionOptions: CreateChatCompletionRequest = { model: model, messages: messages, max_tokens: max_tokens, temperature: temperature, } if(functions) { - options.functions = functions - options.function_call = 'auto' + chatCompletionOptions.functions = functions + chatCompletionOptions.function_call = 'auto' } - const chatCompletion = await openai.createChatCompletion(options) + log.trace({chatCompletionOptions}) + + const chatCompletion = await openai.createChatCompletion(chatCompletionOptions) + + log.trace({chatCompletion}) return chatCompletion.data?.choices?.[0]?.message } @@ -108,12 +143,14 @@ export async function createChatCompletion(messages: ChatCompletionRequestMessag * @param prompt The image description provided to DALL-E. 
*/ export async function createImage(prompt: string): Promise<string | undefined> { - const image = await openai.createImage({ - prompt: prompt, + const createImageOptions: CreateImageRequest = { + prompt, n: 1, size: '512x512', - response_format: "b64_json" - }) - + response_format: 'b64_json' + }; + log.trace({createImageOptions}) + const image = await openai.createImage(createImageOptions) + log.trace({image}) return image.data?.data[0]?.b64_json } \ No newline at end of file