IMPROVE: add additional info to logged chat completion errors
merefield committed May 17, 2024
Parent: 7f67633 · Commit: c463cf6
Showing 3 changed files with 62 additions and 56 deletions.
lib/discourse_chatbot/bots/open_ai_bot_basic.rb (25 additions, 28 deletions)
@@ -6,42 +6,39 @@ module ::DiscourseChatbot
   class OpenAiBotBasic < OpenAIBotBase
 
     def get_response(prompt, opts)
-      private_discussion = opts[:private] || false
+      begin
+        private_discussion = opts[:private] || false
 
-      if private_discussion
-        system_message = { "role": "system", "content": I18n.t("chatbot.prompt.system.basic.private", current_date_time: DateTime.current) }
-      else
-        system_message = { "role": "system", "content": I18n.t("chatbot.prompt.system.basic.open", current_date_time: DateTime.current) }
-      end
+        if private_discussion
+          system_message = { "role": "system", "content": I18n.t("chatbot.prompt.system.basic.private", current_date_time: DateTime.current) }
+        else
+          system_message = { "role": "system", "content": I18n.t("chatbot.prompt.system.basic.open", current_date_time: DateTime.current) }
+        end
 
-      prompt.unshift(system_message)
+        prompt.unshift(system_message)
 
-      response = @client.chat(
-        parameters: {
-          model: @model_name,
-          messages: prompt,
-          max_tokens: SiteSetting.chatbot_max_response_tokens,
-          temperature: SiteSetting.chatbot_request_temperature / 100.0,
-          top_p: SiteSetting.chatbot_request_top_p / 100.0,
-          frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
-          presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
-        })
+        response = @client.chat(
+          parameters: {
+            model: @model_name,
+            messages: prompt,
+            max_tokens: SiteSetting.chatbot_max_response_tokens,
+            temperature: SiteSetting.chatbot_request_temperature / 100.0,
+            top_p: SiteSetting.chatbot_request_top_p / 100.0,
+            frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
+            presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
+          })
 
-      if response["error"]
-        begin
-          raise StandardError, response["error"]["message"]
-        rescue => e
-          Rails.logger.error("Chatbot: There was a problem: #{e}")
-          {
-            reply: I18n.t('chatbot.errors.general'),
-            inner_thoughts: nil
-          }
-        end
-      else
         {
           reply: response.dig("choices", 0, "message", "content"),
           inner_thoughts: nil
         }
+      rescue => e
+        if e.respond_to?(:response)
+          status = e.response[:status]
+          message = e.response[:body]["error"]["message"]
+          Rails.logger.error("Chatbot: There was a problem with Chat Completion: status: #{status}, message: #{message}")
+        end
+        raise e
       end
     end
   end
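The change replaces the old "check the response hash for an error key and return a generic fallback" logic with a single rescue: let the client call raise, pull the HTTP status and the API's own error message out of the exception, log them, and re-raise. The snippet below is a minimal standalone sketch of that shape, not code from the plugin; it assumes, as the rescue itself does, that the OpenAI client surfaces HTTP failures as exceptions exposing a Faraday-style #response hash with :status and :body keys, and the method name and logger fallback are illustrative.

    require "logger"

    # Sketch of the error-handling pattern this commit introduces. Assumes the
    # chat client raises exceptions that expose a Faraday-style #response hash,
    # e.g. { status: 429, body: { "error" => { "message" => "..." } } }.
    # `chat_with_error_logging` is an illustrative name, not a plugin method.
    def chat_with_error_logging(client, parameters)
      client.chat(parameters: parameters)
    rescue => e
      logger = defined?(Rails) ? Rails.logger : Logger.new($stdout)
      if e.respond_to?(:response) && e.response.is_a?(Hash)
        status  = e.response[:status]
        # The plugin indexes the body directly; dig keeps this sketch nil-safe.
        message = e.response.dig(:body, "error", "message")
        logger.error("Chatbot: There was a problem with Chat Completion: status: #{status}, message: #{message}")
      end
      raise e # callers still see the failure; only the logging is richer
    end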
lib/discourse_chatbot/bots/open_ai_bot_rag.rb (36 additions, 27 deletions)
@@ -99,37 +99,46 @@ def create_func_mapping(functions)
     end
 
     def create_chat_completion(messages, use_functions = true, force_search = false)
-      ::DiscourseChatbot.progress_debug_message <<~EOS
-        I called the LLM to help me
-        ------------------------------
-        value of messages is: #{messages}
-        +++++++++++++++++++++++++++++++
-      EOS
-      parameters = {
-        model: @model_name,
-        messages: messages,
-        max_tokens: SiteSetting.chatbot_max_response_tokens,
-        temperature: SiteSetting.chatbot_request_temperature / 100.0,
-        top_p: SiteSetting.chatbot_request_top_p / 100.0,
-        frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
-        presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
-      }
+      begin
+        ::DiscourseChatbot.progress_debug_message <<~EOS
+          I called the LLM to help me
+          ------------------------------
+          value of messages is: #{messages}
+          +++++++++++++++++++++++++++++++
+        EOS
+        parameters = {
+          model: @model_name,
+          messages: messages,
+          max_tokens: SiteSetting.chatbot_max_response_tokens,
+          temperature: SiteSetting.chatbot_request_temperature / 100.0,
+          top_p: SiteSetting.chatbot_request_top_p / 100.0,
+          frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
+          presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
+        }
 
-      parameters.merge!(tools: @tools) if use_functions && @tools
+        parameters.merge!(tools: @tools) if use_functions && @tools
 
-      parameters.merge!(tool_choice: {"type": "function", "function": {"name": "local_forum_search"}}) if use_functions && @tools && force_search
+        parameters.merge!(tool_choice: {"type": "function", "function": {"name": "local_forum_search"}}) if use_functions && @tools && force_search
 
-      res = @client.chat(
-        parameters: parameters
-      )
+        res = @client.chat(
+          parameters: parameters
+        )
 
-      ::DiscourseChatbot.progress_debug_message <<~EOS
-        +++++++++++++++++++++++++++++++++++++++
-        The llm responded with
-        #{res}
-        +++++++++++++++++++++++++++++++++++++++
-      EOS
-      res
+        ::DiscourseChatbot.progress_debug_message <<~EOS
+          +++++++++++++++++++++++++++++++++++++++
+          The llm responded with
+          #{res}
+          +++++++++++++++++++++++++++++++++++++++
+        EOS
+        res
+      rescue => e
+        if e.respond_to?(:response)
+          status = e.response[:status]
+          message = e.response[:body]["error"]["message"]
+          Rails.logger.error("Chatbot: There was a problem with Chat Completion: status: #{status}, message: #{message}")
+        end
+        raise e
+      end
     end
 
     def generate_response(opts)
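The RAG bot gets the identical rescue around create_chat_completion. To see what the new log line buys, here is a self-contained illustration, again a sketch rather than plugin code: a stand-in error class mimics the #response hash shape of the client's exceptions, and the status and message are invented example values. An exception that does not respond to #response skips the detailed log and is simply re-raised, which is what the respond_to?(:response) guard is for.

    require "logger"

    # Stand-in for the client exception; it only mimics the #response shape the
    # rescue relies on. The 429 status and message are invented example values.
    class FakeCompletionError < StandardError
      attr_reader :response

      def initialize(message, response)
        super(message)
        @response = response
      end
    end

    logger = Logger.new($stdout)

    begin
      raise FakeCompletionError.new(
        "the server responded with status 429",
        { status: 429, body: { "error" => { "message" => "Rate limit reached for requests" } } }
      )
    rescue => e
      if e.respond_to?(:response)
        status  = e.response[:status]
        message = e.response.dig(:body, "error", "message")
        # Same log line format the commit adds; the plugin then re-raises,
        # which is omitted here so the example runs to completion.
        logger.error("Chatbot: There was a problem with Chat Completion: status: #{status}, message: #{message}")
      end
    end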
plugin.rb (1 addition, 1 deletion)
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 # name: discourse-chatbot
 # about: a plugin that allows you to have a conversation with a configurable chatbot in Discourse Chat, Topics and Private Messages
-# version: 0.9.19
+# version: 0.9.20
 # authors: merefield
 # url: https://github.com/merefield/discourse-chatbot
 