From 9ac98d543b11be6ac55d2e2ad37f62e0705cea3c Mon Sep 17 00:00:00 2001
From: Andreea Popescu
Date: Thu, 5 Sep 2024 20:09:51 +0800
Subject: [PATCH] fix: pass chat-formatted messages, not raw prompts, to
 apply_chat_template in tokenize_phi3

---
 src/compute_horde_prompt_gen/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/compute_horde_prompt_gen/model.py b/src/compute_horde_prompt_gen/model.py
index 83485fc..8666772 100644
--- a/src/compute_horde_prompt_gen/model.py
+++ b/src/compute_horde_prompt_gen/model.py
@@ -80,7 +80,7 @@ def tokenize(prompt: str) -> str:
 
     def tokenize_phi3(self, prompts: list[str], role: str) -> str:
         inputs = [{"role": role, "content": prompt} for prompt in prompts]
         inputs = self.tokenizer.apply_chat_template(
-            prompts, add_generation_prompt=True, return_tensors="pt"
+            inputs, add_generation_prompt=True, return_tensors="pt"
         )
         return inputs