Skip to content

Commit

Permalink
Better logging with more verbose trace options and channels
Browse files Browse the repository at this point in the history
  • Loading branch information
yGuy committed Jul 31, 2023
1 parent 8ef5c69 commit af315f2
Show file tree
Hide file tree
Showing 4 changed files with 86 additions and 46 deletions.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "chatgpt-mattermost-bot",
"version": "2.1.1",
"version": "2.1.2",
"private": true,
"scripts": {
"start": "ts-node ./src/botservice.ts",
Expand Down
30 changes: 13 additions & 17 deletions src/botservice.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import {continueThread, registerChatPlugin} from "./openai-wrapper";
import {Log} from "debug-level"
import {mmClient, wsClient} from "./mm-client";
import 'babel-polyfill'
import 'isomorphic-fetch'
Expand All @@ -13,6 +12,8 @@ import {JSONMessageData, MessageData} from "./types";
import {ExitPlugin} from "./plugins/ExitPlugin";
import {MessageCollectPlugin} from "./plugins/MessageCollectPlugin";

import {botLog, matterMostLog} from "./logging";

if (!global.FormData) {
global.FormData = require('form-data')
}
Expand All @@ -33,9 +34,9 @@ const botInstructions = "Your name is " + name + " and you are a helpful assista
"provide them with succinct answers formatted using Markdown. You know the user's name as it is provided within the " +
"meta data of the messages."

async function onClientMessage(msg: WebSocketMessage<JSONMessageData>, meId: string, log: Log) {
async function onClientMessage(msg: WebSocketMessage<JSONMessageData>, meId: string) {
if (msg.event !== 'posted' || !meId) {
log.debug({msg: msg})
matterMostLog.debug({msg: msg})
return
}

Expand All @@ -55,7 +56,7 @@ async function onClientMessage(msg: WebSocketMessage<JSONMessageData>, meId: str

// create the context
for (const threadPost of posts.slice(-contextMsgCount)) {
log.trace({msg: threadPost})
matterMostLog.trace({msg: threadPost})
if (threadPost.user_id === meId) {
chatmessages.push({
role: ChatCompletionRequestMessageRoleEnum.Assistant,
Expand All @@ -76,9 +77,8 @@ async function onClientMessage(msg: WebSocketMessage<JSONMessageData>, meId: str
const typingInterval = setInterval(typing, 2000)

try {
log.trace({chatmessages})
const {message, fileId, props} = await continueThread(chatmessages, msgData)
log.trace({message})
botLog.trace({message})

// create answer response
const newPost = await mmClient.createPost({
Expand All @@ -88,9 +88,9 @@ async function onClientMessage(msg: WebSocketMessage<JSONMessageData>, meId: str
root_id: msgData.post.root_id || msgData.post.id,
file_ids: fileId ? [fileId] : undefined
})
log.trace({msg: newPost})
botLog.trace({msg: newPost})
} catch (e) {
log.error(e)
botLog.error(e)
await mmClient.createPost({
message: "Sorry, but I encountered an internal error when trying to process your message",
channel_id: msgData.post.channel_id,
Expand Down Expand Up @@ -208,28 +208,24 @@ async function userIdToName(userId: string): Promise<string> {
return username
}

Log.options({json: true, colors: true})
Log.wrapConsole('bot-ws', {level4log: 'INFO'})
const log = new Log('bot')

/* Entry point */
async function main(): Promise<void> {
const meId = (await mmClient.getMe()).id

log.log("Connected to Mattermost.")
botLog.log("Connected to Mattermost.")

for (const plugin of plugins) {
if (plugin.setup()) {
registerChatPlugin(plugin)
log.trace("Registered plugin " + plugin.key)
botLog.trace("Registered plugin " + plugin.key)
}
}

wsClient.addMessageListener((e) => onClientMessage(e, meId, log))
log.trace("Listening to MM messages...")
wsClient.addMessageListener((e) => onClientMessage(e, meId))
botLog.trace("Listening to MM messages...")
}

main().catch(reason => {
log.error(reason);
botLog.error(reason);
process.exit(-1)
})
7 changes: 7 additions & 0 deletions src/logging.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import {Log} from "debug-level";

// Module-wide logger setup: structured JSON output with ANSI colors.
// Applies globally to every Log instance created below (Log.options is static).
Log.options({json: true, colors: true})
// Route console.* calls through a 'bot-ws' logger; plain console.log()
// calls are emitted at INFO level (level4log maps log() -> INFO).
Log.wrapConsole('bot-ws', {level4log: 'INFO'})
// Dedicated channels so verbosity can be tuned per concern via the
// DEBUG/DEBUG_LEVEL environment variables that debug-level reads.
export const botLog = new Log('bot')            // bot lifecycle and message handling
export const openAILog = new Log('open-ai')     // OpenAI API requests/responses
export const matterMostLog = new Log('mattermost') // Mattermost websocket/post traffic
93 changes: 65 additions & 28 deletions src/openai-wrapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,30 +2,36 @@ import {
ChatCompletionFunctions,
ChatCompletionRequestMessage,
ChatCompletionResponseMessage, ChatCompletionResponseMessageRoleEnum,
Configuration,
Configuration, CreateChatCompletionRequest, CreateImageRequest,
OpenAIApi
} from "openai";
import {openAILog as log} from "./logging"

import {PluginBase} from "./plugins/PluginBase";
import {AiResponse, MessageData} from "./types";

const configuration = new Configuration({
apiKey: process.env['OPENAI_API_KEY']
})
const apiKey = process.env['OPENAI_API_KEY'];
log.trace({apiKey})

const configuration = new Configuration({ apiKey })

const openai = new OpenAIApi(configuration)

const model = process.env['OPENAI_MODEL_NAME'] ?? 'gpt-3.5-turbo'
const max_tokens = Number(process.env['OPENAI_MAX_TOKENS'] ?? 2000)
const temperature = Number(process.env['OPENAI_TEMPERATURE'] ?? 1)

const plugins: Record<string, PluginBase<any>> = {}
log.debug({model, max_tokens, temperature})

const plugins: Map<string, PluginBase<any>> = new Map()
const functions: ChatCompletionFunctions[] = []

/**
* Registers a plugin as a GPT function. These functions are sent to openAI when the user interacts with chatGPT.
* @param plugin
*/
export function registerChatPlugin(plugin: PluginBase<any>) {
plugins[plugin.key] = plugin
plugins.set(plugin.key, plugin)
functions.push({
name: plugin.key,
description: plugin.description,
Expand All @@ -48,27 +54,52 @@ export async function continueThread(messages: ChatCompletionRequestMessage[], m
message: 'Sorry, but it seems I found no valid response.'
}

// the number of rounds we're going to run at maximum
let maxChainLength = 7;

// check whether ChatGPT hallucinates a plugin name.
const missingPlugins = new Set<string>()

let isIntermediateResponse = true
while(isIntermediateResponse) {
while(isIntermediateResponse && maxChainLength-- > 0) {
const responseMessage = await createChatCompletion(messages, functions)
log.trace(responseMessage)
if(responseMessage) {
// if the function_call is set, we have a plugin call
if(responseMessage.function_call && responseMessage.function_call.name) {
const pluginName = responseMessage.function_call.name;
log.trace({pluginName})
try {
const pluginResponse = await plugins[responseMessage.function_call!.name!].runPlugin((JSON.parse(responseMessage.function_call!.arguments!)), msgData)

if(pluginResponse.intermediate) {
messages.push({
role: ChatCompletionResponseMessageRoleEnum.Function,
name: responseMessage.function_call!.name!,
content: pluginResponse.message
})
continue
}
const plugin = plugins.get(pluginName);
if (plugin){
const pluginArguments = JSON.parse(responseMessage.function_call.arguments ?? '[]');
log.trace({plugin, pluginArguments})
const pluginResponse = await plugin.runPlugin(pluginArguments, msgData)
log.trace({pluginResponse})

aiResponse = pluginResponse
if(pluginResponse.intermediate) {
messages.push({
role: ChatCompletionResponseMessageRoleEnum.Function,
name: pluginName,
content: pluginResponse.message
})
continue
}
aiResponse = pluginResponse
} else {
if (!missingPlugins.has(pluginName)){
missingPlugins.add(pluginName)
log.debug({ error: 'Missing plugin ' + pluginName, pluginArguments: responseMessage.function_call.arguments})
messages.push({ role: 'system', content: `There is no plugin named '${pluginName}' available. Try without using that plugin.`})
continue
} else {
log.debug({ messages })
aiResponse.message = `Sorry, but it seems there was an error when using the plugin \`\`\`${pluginName}\`\`\`.`
}
}
} catch (e) {
aiResponse.message = `Sorry, but it seems there was an error when using the plugin \`\`\`${responseMessage.function_call!.name!}\`\`\`.`
log.debug({ messages, error: e })
aiResponse.message = `Sorry, but it seems there was an error when using the plugin \`\`\`${pluginName}\`\`\`.`
}
} else if(responseMessage.content) {
aiResponse.message = responseMessage.content
Expand All @@ -87,18 +118,22 @@ export async function continueThread(messages: ChatCompletionRequestMessage[], m
* @param functions Function calls which can be called by the openAI model
*/
export async function createChatCompletion(messages: ChatCompletionRequestMessage[], functions: ChatCompletionFunctions[] | undefined = undefined): Promise<ChatCompletionResponseMessage | undefined> {
const options: any = {
const chatCompletionOptions: CreateChatCompletionRequest = {
model: model,
messages: messages,
max_tokens: max_tokens,
temperature: temperature,
}
if(functions) {
options.functions = functions
options.function_call = 'auto'
chatCompletionOptions.functions = functions
chatCompletionOptions.function_call = 'auto'
}

const chatCompletion = await openai.createChatCompletion(options)
log.trace({chatCompletionOptions})

const chatCompletion = await openai.createChatCompletion(chatCompletionOptions)

log.trace({chatCompletion})

return chatCompletion.data?.choices?.[0]?.message
}
Expand All @@ -108,12 +143,14 @@ export async function createChatCompletion(messages: ChatCompletionRequestMessag
* @param prompt The image description provided to DALL-E.
*/
export async function createImage(prompt: string): Promise<string | undefined> {
const image = await openai.createImage({
prompt: prompt,
const createImageOptions: CreateImageRequest = {
prompt,
n: 1,
size: '512x512',
response_format: "b64_json"
})

response_format: 'b64_json'
};
log.trace({createImageOptions})
const image = await openai.createImage(createImageOptions)
log.trace({image})
return image.data?.data[0]?.b64_json
}

0 comments on commit af315f2

Please sign in to comment.