From 7ebbcd2de3d4c206e42e4d52e9d3ec231a5984f9 Mon Sep 17 00:00:00 2001
From: Roman Rizzi
Date: Thu, 5 Dec 2024 21:04:35 -0300
Subject: [PATCH] FIX: Make sure prompt uploads get included in the prompt
 when triaging (#1008)

---
 lib/automation/llm_triage.rb                  |  2 +-
 lib/completions/llm.rb                        |  4 ++++
 .../lib/modules/automation/llm_triage_spec.rb | 19 +++++++++++++++++++
 3 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/lib/automation/llm_triage.rb b/lib/automation/llm_triage.rb
index 145e203e8..5c58b9d27 100644
--- a/lib/automation/llm_triage.rb
+++ b/lib/automation/llm_triage.rb
@@ -32,7 +32,7 @@ def self.handle(
 
     content = llm.tokenizer.truncate(content, max_post_tokens) if max_post_tokens.present?
 
-    prompt.push(type: :user, content: content)
+    prompt.push(type: :user, content: content, upload_ids: post.upload_ids)
 
     result = nil
 
diff --git a/lib/completions/llm.rb b/lib/completions/llm.rb
index 51ba0464d..9de8f9fee 100644
--- a/lib/completions/llm.rb
+++ b/lib/completions/llm.rb
@@ -164,6 +164,10 @@ def record_prompt(prompt)
           @prompts << prompt.dup if @prompts
         end
 
+        def prompts
+          @prompts
+        end
+
         def proxy(model)
           llm_model =
             if model.is_a?(LlmModel)
diff --git a/spec/lib/modules/automation/llm_triage_spec.rb b/spec/lib/modules/automation/llm_triage_spec.rb
index 3148d5441..a966cfb51 100644
--- a/spec/lib/modules/automation/llm_triage_spec.rb
+++ b/spec/lib/modules/automation/llm_triage_spec.rb
@@ -161,4 +161,23 @@ def triage(**args)
 
     expect(reviewable.target).to eq(post)
   end
+
+  it "includes post uploads when triaging" do
+    post_upload = Fabricate(:image_upload, posts: [post])
+
+    DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do
+      triage(
+        post: post,
+        model: "custom:#{llm_model.id}",
+        system_prompt: "test %%POST%%",
+        search_for_text: "bad",
+        flag_post: true,
+        automation: nil,
+      )
+
+      triage_prompt = DiscourseAi::Completions::Llm.prompts.last
+
+      expect(triage_prompt.messages.last[:upload_ids]).to contain_exactly(post_upload.id)
+    end
+  end
 end