Merge pull request #24 from atomiechen/dev
Bump version to 0.7.3
atomiechen authored May 22, 2024
2 parents eb82a7b + 2cf6e60 commit eb4f69c
Showing 6 changed files with 107 additions and 97 deletions.
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "HandyLLM"
-version = "0.7.2"
+version = "0.7.3"
 authors = [
     { name="Atomie CHEN", email="atomic_cwh@163.com" },
 ]
@@ -13,7 +13,7 @@ readme = "README.md"
 requires-python = ">=3.7"
 classifiers = [
     "Programming Language :: Python :: 3",
-    # "License :: OSI Approved :: MIT License",
+    "License :: OSI Approved :: MIT License",
     "Operating System :: OS Independent",
 ]
 keywords = ["LLM", "Large Language Model", "Prompt", "OpenAI", "API"]
6 changes: 3 additions & 3 deletions src/handyllm/_utils.py
@@ -80,9 +80,9 @@ def exception2err_msg(exception: Exception):
 def _chat_log_response_final(logger, log_marks, kwargs, messages, start_time, role, content, err_msg=None):
     end_time = time.perf_counter()
     duration = end_time - start_time
-    input_content = PromptConverter.chat2raw(messages)
+    input_content = PromptConverter.msgs2raw(messages)
     if not err_msg:
-        output_content = PromptConverter.chat2raw([{'role': role, 'content': content}])
+        output_content = PromptConverter.msgs2raw([{'role': role, 'content': content}])
         log_result(logger, "Chat request", duration, log_marks, kwargs, input_content, output_content)
     else:
         log_exception(logger, "Chat request", duration, log_marks, kwargs, input_content, err_msg)
@@ -137,7 +137,7 @@ def _chat_log_exception(logger, log_marks, kwargs, messages, start_time, excepti
     if logger is not None:
         end_time = time.perf_counter()
         duration = end_time - start_time
-        input_content = PromptConverter.chat2raw(messages)
+        input_content = PromptConverter.msgs2raw(messages)
         err_msg = exception2err_msg(exception)
         log_exception(logger, "Chat request", duration, log_marks, kwargs, input_content, err_msg)
 
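For reference, a minimal sketch of the renamed converter call as it appears in this diff; the sample messages and the top-level import are illustrative assumptions rather than part of the commit:

from handyllm import PromptConverter  # top-level export assumed

# hypothetical chat messages, shaped like the dicts passed above
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# msgs2raw (formerly chat2raw) serializes the message list to raw prompt text
raw_text = PromptConverter.msgs2raw(messages)
print(raw_text)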
20 changes: 18 additions & 2 deletions src/handyllm/cli.py
@@ -1,6 +1,18 @@
 import argparse
+import sys
 
 
+def get_version():
+    package_name = __name__.split(".")[0]
+    if sys.version_info >= (3, 8):
+        # if python 3.8 or later, use importlib.metadata
+        import importlib.metadata
+        return importlib.metadata.version(package_name)
+    else:
+        # if older python, use pkg_resources
+        import pkg_resources
+        return pkg_resources.get_distribution(package_name).version
+
 def register_hprompt_command(subparsers: argparse._SubParsersAction):
     parser_hprompt = subparsers.add_parser(
         'hprompt',
@@ -15,7 +27,6 @@ def register_hprompt_command(subparsers: argparse._SubParsersAction):
     parser_hprompt.add_argument("-vmp", "--var-map-path", help="Variable map file path")
 
 def hprompt_command(args):
-    import sys
     from handyllm import hprompt
 
     run_config = hprompt.RunConfig()
@@ -45,9 +56,14 @@ def cli():
     """Main entry point for the handyllm CLI."""
     parser = argparse.ArgumentParser(
         prog="handyllm",
-        description="HandyLLM CLI",
+        description="HandyLLM CLI" + f" v{get_version()}",
         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
     )
+    parser.add_argument(
+        "-v", "--version",
+        action="version",
+        version=get_version(),
+    )
     subparsers = parser.add_subparsers(dest="command")
     register_hprompt_command(subparsers)
     args = parser.parse_args()
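The version lookup added above is a standard pattern; below is a self-contained sketch of the same idea outside the package. The distribution name "HandyLLM" comes from pyproject.toml above; everything else uses the stdlib and setuptools APIs shown in the diff.

import sys

def lookup_version(dist_name: str) -> str:
    # Python 3.8+ ships importlib.metadata in the standard library
    if sys.version_info >= (3, 8):
        import importlib.metadata
        return importlib.metadata.version(dist_name)
    # older interpreters fall back to setuptools' pkg_resources
    import pkg_resources
    return pkg_resources.get_distribution(dist_name).version

if __name__ == "__main__":
    # prints the installed version, e.g. 0.7.3 once this release is installed
    print(lookup_version("HandyLLM"))

With the new argument wired into the parser, running "handyllm -v" or "handyllm --version" should print the same string, and the CLI description now carries it as well.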
93 changes: 48 additions & 45 deletions src/handyllm/hprompt.py
@@ -43,7 +43,10 @@
 
 
 PromptType = TypeVar('PromptType', bound='HandyPrompt')
-PathType = Union[str, os.PathLike[str]]
+if sys.version_info >= (3, 9):
+    PathType = Union[str, os.PathLike[str]]
+else:
+    PathType = Union[str, os.PathLike]
 
 converter = PromptConverter()
 handler = frontmatter.YAMLHandler()
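The new conditional is needed because os.PathLike only became subscriptable at runtime in Python 3.9; on the 3.7/3.8 interpreters this package still supports, os.PathLike[str] raises a TypeError at import time. A small sketch of the guarded alias in isolation, with the helper function added purely for illustration:

import os
import sys
from typing import Union

if sys.version_info >= (3, 9):
    # os.PathLike supports subscription (os.PathLike[str]) from 3.9 on
    PathType = Union[str, os.PathLike[str]]
else:
    # on 3.7/3.8, subscripting os.PathLike raises TypeError, so fall back
    PathType = Union[str, os.PathLike]

def to_path_str(p: PathType) -> str:
    # os.fspath accepts both str and PathLike objects
    return os.fspath(p)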
@@ -84,8 +87,8 @@ def loads(
     if api == "completions":
         return CompletionsPrompt(content, request, meta, base_path)
     else:
-        chat = converter.raw2chat(content)
-        return ChatPrompt(chat, request, meta, base_path)
+        messages = converter.raw2msgs(content)
+        return ChatPrompt(messages, request, meta, base_path)
 
 def load(
     fd: io.IOBase,
@@ -558,36 +561,36 @@ def _parse_var_map(self, run_config: RunConfig):
 class ChatPrompt(HandyPrompt):
 
     def __init__(
-        self, chat: list, request: dict, meta: Union[dict, RunConfig],
+        self, messages: list, request: dict, meta: Union[dict, RunConfig],
         base_path: Optional[PathType] = None,
         response: Optional[dict] = None,
     ):
-        super().__init__(chat, request, meta, base_path, response)
+        super().__init__(messages, request, meta, base_path, response)
 
     @property
-    def chat(self) -> list:
+    def messages(self) -> list:
         return self.data
 
-    @chat.setter
-    def chat(self, value: list):
+    @messages.setter
+    def messages(self, value: list):
         self.data = value
 
     @property
     def result_str(self) -> str:
-        if len(self.chat) == 0:
+        if len(self.messages) == 0:
             return ""
-        return self.chat[-1]['content']
+        return self.messages[-1]['content']
 
     def _serialize_data(self, data) -> str:
-        return converter.chat2raw(data)
+        return converter.msgs2raw(data)
 
     def _eval_data(self, run_config: RunConfig) -> list:
         var_map = self._parse_var_map(run_config)
         if var_map:
-            return converter.chat_replace_variables(
-                self.chat, var_map, inplace=False)
+            return converter.msgs_replace_variables(
+                self.messages, var_map, inplace=False)
         else:
-            return self.chat
+            return self.messages
 
     def _run_with_client(
         self, client: OpenAIClient,
@@ -602,19 +605,19 @@ def _run_with_client(
         new_request = self._filter_request(new_request, run_config)
         base_path = Path(run_config.output_path).parent.resolve() if run_config.output_path else None
         if stream:
-            if run_config.output_path:
+            if run_config.output_fd:
+                # dump frontmatter, no base_path
+                run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
+                # stream response to a file descriptor
+                role, content, tool_calls = converter.stream_msgs2raw(stream_chat_all(response), run_config.output_fd)
+            elif run_config.output_path:
                 # stream response to a file
                 with open(run_config.output_path, 'w', encoding='utf-8') as fout:
                     # dump frontmatter
                     fout.write(self._dumps_frontmatter(new_request, run_config, base_path))
-                    role, content, tool_calls = converter.stream_chat2raw(stream_chat_all(response), fout)
-            elif run_config.output_fd:
-                # dump frontmatter, no base_path
-                run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
-                # stream response to a file descriptor
-                role, content, tool_calls = converter.stream_chat2raw(stream_chat_all(response), run_config.output_fd)
+                    role, content, tool_calls = converter.stream_msgs2raw(stream_chat_all(response), fout)
             else:
-                role, content, tool_calls = converter.stream_chat2raw(stream_chat_all(response))
+                role, content, tool_calls = converter.stream_msgs2raw(stream_chat_all(response))
         else:
             role = response['choices'][0]['message']['role']
             content = response['choices'][0]['message'].get('content')
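Functionally, the reordered branches above mean an explicitly provided output_fd now takes precedence over output_path when both are set on the run config. A hedged usage sketch; the load() call mirrors the signature shown earlier in this file, while the public run() method and its keyword are assumptions about the library's API rather than part of this diff:

import sys
from handyllm import hprompt

# load an existing hprompt file; the file name is a placeholder
with open("chat.hprompt", "r", encoding="utf-8") as fd:
    prompt = hprompt.load(fd)

# with both sinks configured, a streamed response is written to output_fd
# (stdout here) and output_path is not written under the new ordering
run_config = hprompt.RunConfig(
    output_fd=sys.stdout,
    output_path="out.hprompt",
)
result = prompt.run(run_config=run_config)  # assumed public entry point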
@@ -638,17 +641,17 @@ async def _arun_with_client(
         new_request = self._filter_request(new_request, run_config)
         base_path = Path(run_config.output_path).parent.resolve() if run_config.output_path else None
         if stream:
-            if run_config.output_path:
+            if run_config.output_fd:
+                # stream response to a file descriptor
+                run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
+                role, content, tool_calls = await converter.astream_msgs2raw(astream_chat_all(response), run_config.output_fd)
+            elif run_config.output_path:
                 # stream response to a file
                 with open(run_config.output_path, 'w', encoding='utf-8') as fout:
                     fout.write(self._dumps_frontmatter(new_request, run_config, base_path))
-                    role, content, tool_calls = await converter.astream_chat2raw(astream_chat_all(response), fout)
-            elif run_config.output_fd:
-                # stream response to a file descriptor
-                run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
-                role, content, tool_calls = await converter.astream_chat2raw(astream_chat_all(response), run_config.output_fd)
+                    role, content, tool_calls = await converter.astream_msgs2raw(astream_chat_all(response), fout)
             else:
-                role, content, tool_calls = await converter.astream_chat2raw(astream_chat_all(response))
+                role, content, tool_calls = await converter.astream_msgs2raw(astream_chat_all(response))
         else:
             role = response['choices'][0]['message']['role']
             content = response['choices'][0]['message'].get('content')
@@ -663,14 +666,14 @@ def __add__(self, other: Union[str, list, ChatPrompt]):
         # support concatenation with string, list or another ChatPrompt
         if isinstance(other, str):
             return ChatPrompt(
-                self.chat + [{"role": "user", "content": other}],
+                self.messages + [{"role": "user", "content": other}],
                 copy.deepcopy(self.request),
                 replace(self.run_config),
                 self.base_path
             )
         elif isinstance(other, list):
             return ChatPrompt(
-                self.chat + [{"role": msg['role'], "content": msg['content']} for msg in other],
+                self.messages + other,
                 copy.deepcopy(self.request),
                 replace(self.run_config),
                 self.base_path
@@ -679,7 +682,7 @@ def __add__(self, other: Union[str, list, ChatPrompt]):
             # merge two ChatPrompt objects
             merged_request, merged_run_config = self._merge_non_data(other)
             return ChatPrompt(
-                self.chat + other.chat, merged_request, merged_run_config,
+                self.messages + other.messages, merged_request, merged_run_config,
                 self.base_path
             )
         else:
@@ -688,12 +691,12 @@ def __add__(self, other: Union[str, list, ChatPrompt]):
     def __iadd__(self, other: Union[str, list, ChatPrompt]):
         # support concatenation with string, list or another ChatPrompt
        if isinstance(other, str):
-            self.chat.append({"role": "user", "content": other})
+            self.messages.append({"role": "user", "content": other})
         elif isinstance(other, list):
-            self.chat += [{"role": msg['role'], "content": msg['content']} for msg in other]
+            self.messages += other
         elif isinstance(other, ChatPrompt):
             # merge two ChatPrompt objects
-            self.chat += other.chat
+            self.messages += other.messages
             self._merge_non_data(other, inplace=True)
         else:
             raise TypeError(f"unsupported operand type(s) for +: 'ChatPrompt' and '{type(other)}'")
@@ -749,15 +752,15 @@ def _run_with_client(
         new_request = self._filter_request(new_request, run_config)
         base_path = Path(run_config.output_path).parent.resolve() if run_config.output_path else None
         if stream:
-            if run_config.output_path:
+            if run_config.output_fd:
+                # stream response to a file descriptor
+                run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
+                content = self._stream_completions_proc(response, run_config.output_fd)
+            elif run_config.output_path:
                 # stream response to a file
                 with open(run_config.output_path, 'w', encoding='utf-8') as fout:
                     fout.write(self._dumps_frontmatter(new_request, run_config, base_path))
                     content = self._stream_completions_proc(response, fout)
-            elif run_config.output_fd:
-                # stream response to a file descriptor
-                run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
-                content = self._stream_completions_proc(response, run_config.output_fd)
             else:
                 content = self._stream_completions_proc(response)
         else:
Expand Down Expand Up @@ -788,15 +791,15 @@ async def _arun_with_client(
new_request = self._filter_request(new_request, run_config)
base_path = Path(run_config.output_path).parent.resolve() if run_config.output_path else None
if stream:
if run_config.output_path:
if run_config.output_fd:
# stream response to a file descriptor
run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
content = await self._astream_completions_proc(response, run_config.output_fd)
elif run_config.output_path:
# stream response to a file
with open(run_config.output_path, 'w', encoding='utf-8') as fout:
fout.write(self._dumps_frontmatter(new_request, run_config, base_path))
content = await self._astream_completions_proc(response, fout)
elif run_config.output_fd:
# stream response to a file descriptor
run_config.output_fd.write(self._dumps_frontmatter(new_request, run_config))
content = await self._astream_completions_proc(response, run_config.output_fd)
else:
content = await self._astream_completions_proc(response)
else:
Expand Down
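Taken together, the hprompt.py changes rename ChatPrompt.chat to ChatPrompt.messages and let list concatenation pass message dicts through unchanged, so extra keys (for example tool_calls) are no longer stripped down to role and content. A hedged sketch of the renamed surface; the constructor keywords mirror __init__ above, while the model name and message contents are placeholders:

from handyllm.hprompt import ChatPrompt

# construct a prompt directly; messages / request / meta follow __init__ above
prompt = ChatPrompt(
    messages=[{"role": "system", "content": "You are a helpful assistant."}],
    request={"model": "gpt-3.5-turbo"},  # placeholder request
    meta={},
)

# += str appends a user message; += list now keeps the dicts as-is
prompt += "Hello!"
prompt += [{"role": "assistant", "content": "Hi, how can I help?"}]

print(prompt.messages)    # renamed accessor (was prompt.chat before 0.7.3)
print(prompt.result_str)  # content of the last message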