Commit

wip
andreea-popescu-reef committed Sep 5, 2024
1 parent f8d2ba9 commit 6b04b41
Showing 3 changed files with 4 additions and 4 deletions.
5 changes: 2 additions & 3 deletions src/compute_horde_prompt_gen/model.py
@@ -78,7 +78,7 @@ def tokenize(prompt: str) -> str:
         return inputs
 
     def tokenize_phi3(self, prompts: list[str], role: str) -> str:
-        inputs = [{"role": role, "content": prompt} for prompt in prompts]
+        inputs = [{"role": "user", "content": prompt} for prompt in prompts]
         inputs = self.tokenizer.apply_chat_template(
             inputs, add_generation_prompt=True, return_tensors="pt"
         ).to("cuda")
@@ -99,10 +99,9 @@ def generate(
             inputs = self.tokenize_phi3(prompts, role)
         else:
             raise ValueError(f"Unknown model {self.model_name}")
-        print(inputs)
 
         return self.model.generate(
-            **inputs,
+            inputs,
             max_new_tokens=max_new_tokens,
             temperature=temperature,
             num_return_sequences=num_return_sequences,
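
Note on the model.py change above: in the Hugging Face transformers API, apply_chat_template called with return_tensors="pt" (and without return_dict=True) returns a plain tensor of token ids rather than a dict, so it cannot be unpacked with ** into model.generate; it has to be passed positionally, which is what this commit does. A minimal sketch of the same call pattern, assuming the transformers library; the checkpoint name and prompt below are illustrative, not taken from this repository:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Illustrative checkpoint; the repository's actual model is not shown in this diff.
model_name = "microsoft/Phi-3-mini-4k-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

messages = [{"role": "user", "content": "Write a short question about astronomy."}]

# apply_chat_template with return_tensors="pt" returns a tensor of token ids,
# not a mapping, so it is passed to generate() positionally rather than with **.
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)
output = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
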
2 changes: 1 addition & 1 deletion src/compute_horde_prompt_gen/prompt.py
@@ -16,7 +16,7 @@ def generate_prompt(self) -> str:
         formats = self.random_select(FORMATS, num=5)
 
         prompt = (
-            f"Generate a list of 5 complex prompts (questions or instruct tasks) that cover a wide range of skills and knowledge areas related to the themes of {themes}. "
+            f"Generate a list of 5 concise prompts (questions or instruct tasks) that cover a wide range of skills and knowledge areas related to the themes of {themes}. "
             f"Each of these prompts should: "
             f"\n- have a complexity level of {complexity_level} out of 20 and a relevance level to the theme of {relevance_level} out of 20"
             f"\n- test various cognitive abilities ({abilities}) and require different types of writting formats ({formats})"
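
Note on the prompt.py change above: the adjacent f-string literals are concatenated into a single prompt string, and only the first fragment changes ("complex" becomes "concise"). A rough sketch of the assembled text, using made-up placeholder values, since the real selections come from random_select and are not part of this diff (the "writting" spelling matches the source string):

# Placeholder values for illustration only; the repository draws these elsewhere.
themes = "astronomy, cooking"
complexity_level, relevance_level = 12, 18
abilities = "reasoning, creativity"
formats = "essay, dialogue"

prompt = (
    f"Generate a list of 5 concise prompts (questions or instruct tasks) that cover a wide range of skills and knowledge areas related to the themes of {themes}. "
    f"Each of these prompts should: "
    f"\n- have a complexity level of {complexity_level} out of 20 and a relevance level to the theme of {relevance_level} out of 20"
    f"\n- test various cognitive abilities ({abilities}) and require different types of writting formats ({formats})"
)
print(prompt)
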
1 change: 1 addition & 0 deletions src/compute_horde_prompt_gen/run.py
@@ -42,6 +42,7 @@ def generate_prompts(
             new_prompts = []
             for j, sequence in enumerate(sequences):
                 output = model.decode(sequence)
+                log.info(f"{i=} output={output}")
                 generated_prompts = parse_output(output)
                 log.debug(f"{i=} sequence={j} {generated_prompts=} from {output=}")
 
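Note on the run.py change above: the added log line uses the self-documenting f-string syntax introduced in Python 3.8, where {i=} expands to the expression name followed by its value. A standalone illustration; the logger name and values below are made up:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("compute_horde_prompt_gen")

i = 3
output = "1. Example generated prompt ..."
# "{i=}" renders as "i=3"; output is interpolated normally.
log.info(f"{i=} output={output}")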
