From d09b1b2fd965133776f410c17077617e6bc673fe Mon Sep 17 00:00:00 2001
From: Atomie CHEN <atomic_cwh@163.com>
Date: Wed, 22 May 2024 15:53:48 +0800
Subject: [PATCH 1/6] build: raise min supported python version to 3.8

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 2ef961e..af790f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,7 +10,7 @@ authors = [
 ]
 description = "A handy toolkit for using LLM."
 readme = "README.md"
-requires-python = ">=3.7"
+requires-python = ">=3.8"
 classifiers = [
     "Programming Language :: Python :: 3",
     "License :: OSI Approved :: MIT License",

From 66f86c8fc553dce64b8da925258973bc287eb636 Mon Sep 17 00:00:00 2001
From: Atomie CHEN <atomic_cwh@163.com>
Date: Wed, 22 May 2024 16:04:09 +0800
Subject: [PATCH 2/6] feat(OpenAIClient): add type hint for api_type

---
 src/handyllm/_constants.py    | 4 ++++
 src/handyllm/openai_client.py | 8 ++++----
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/src/handyllm/_constants.py b/src/handyllm/_constants.py
index 0bde872..c35b4dc 100644
--- a/src/handyllm/_constants.py
+++ b/src/handyllm/_constants.py
@@ -1,3 +1,5 @@
+from typing import Literal
+
 _API_BASE_OPENAI = 'https://api.openai.com/v1'
 _API_TYPE_OPENAI = 'openai'
 _API_TYPES_AZURE = (
@@ -5,3 +7,5 @@
     'azure_ad',
     'azuread'
 )
+
+TYPE_API_TYPES = Literal['openai', 'azure', 'azure_ad', 'azuread']
diff --git a/src/handyllm/openai_client.py b/src/handyllm/openai_client.py
index a5a6280..d605db9 100644
--- a/src/handyllm/openai_client.py
+++ b/src/handyllm/openai_client.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import Union, TYPE_CHECKING
+from typing import Union
 import os
 import json
 import time
@@ -9,7 +9,7 @@
 from .endpoint_manager import Endpoint, EndpointManager
 from .requestor import Requestor
 from ._utils import get_request_url, join_url, _chat_log_response, _chat_log_exception, _completions_log_response, _completions_log_exception
-from ._constants import _API_BASE_OPENAI, _API_TYPE_OPENAI, _API_TYPES_AZURE
+from ._constants import _API_BASE_OPENAI, _API_TYPE_OPENAI, _API_TYPES_AZURE, TYPE_API_TYPES
 
 
 def api(func):
@@ -27,7 +27,7 @@ class OpenAIClient:
     # set this to your API type;
     # or environment variable OPENAI_API_TYPE will be used;
     # can be None (roll back to default).
-    api_type: Union[str, None]
+    api_type: Union[TYPE_API_TYPES, None]
 
     # set this to your API base;
     # or environment variable OPENAI_API_BASE will be used.
@@ -60,7 +60,7 @@ def __init__(
         api_base=None,
         api_key=None,
         organization=None,
-        api_type=None,
+        api_type: Union[TYPE_API_TYPES, None] = None,
         api_version=None,
         model_engine_map=None,
     ) -> None:

From 72742b0cc2f2b7221679bc50c045e7729f36468c Mon Sep 17 00:00:00 2001
From: Atomie CHEN <atomic_cwh@163.com>
Date: Wed, 22 May 2024 16:05:38 +0800
Subject: [PATCH 3/6] feat(OpenAIClient): add more type hints to constructor

---
 src/handyllm/openai_client.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/handyllm/openai_client.py b/src/handyllm/openai_client.py
index d605db9..4ea85a4 100644
--- a/src/handyllm/openai_client.py
+++ b/src/handyllm/openai_client.py
@@ -57,12 +57,12 @@ def __init__(
         self,
         mode: Union[str, ClientMode] = ClientMode.SYNC,
         *,
-        api_base=None,
-        api_key=None,
-        organization=None,
+        api_base: Union[str, None] = None,
+        api_key: Union[str, None] = None,
+        organization: Union[str, None] = None,
         api_type: Union[TYPE_API_TYPES, None] = None,
-        api_version=None,
-        model_engine_map=None,
+        api_version: Union[str, None] = None,
+        model_engine_map: Union[dict, None] = None,
     ) -> None:
         self._sync_client = None
         self._async_client = None

From 26fe75f2312d68fcd935e3848057985432d2800f Mon Sep 17 00:00:00 2001
From: Atomie CHEN <atomic_cwh@163.com>
Date: Wed, 22 May 2024 17:37:39 +0800
Subject: [PATCH 4/6] fix(PromptConverter): should return None tool_calls if
 no tool calls instead of []

---
 src/handyllm/prompt_converter.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/handyllm/prompt_converter.py b/src/handyllm/prompt_converter.py
index bee80cb..2fde49b 100644
--- a/src/handyllm/prompt_converter.py
+++ b/src/handyllm/prompt_converter.py
@@ -130,6 +130,9 @@ def stream_msgs2raw(gen_sync, fd: Optional[io.IOBase] = None) -> Tuple[str, str]
         if tool_calls and fd:
             # dump tool calls
             fd.write(yaml.dump(tool_calls))
+        if not tool_calls:
+            # should return None if no tool calls
+            tool_calls = None
         return role, content, tool_calls
 
     @staticmethod
@@ -161,6 +164,9 @@ async def astream_msgs2raw(gen_async, fd: Optional[io.IOBase] = None) -> Tuple[s
         if tool_calls and fd:
             # dump tool calls
             fd.write(yaml.dump(tool_calls))
+        if not tool_calls:
+            # should return None if no tool calls
+            tool_calls = None
         return role, content, tool_calls
 
     @classmethod

From a6db2ef3e77145deda7df9c1042c9409868c82a0 Mon Sep 17 00:00:00 2001
From: Atomie CHEN <atomic_cwh@163.com>
Date: Wed, 22 May 2024 17:42:39 +0800
Subject: [PATCH 5/6] fix(cli): do not output to stderr if already set
 output_path

---
 src/handyllm/cli.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/handyllm/cli.py b/src/handyllm/cli.py
index 82c3a5a..6ee25ca 100644
--- a/src/handyllm/cli.py
+++ b/src/handyllm/cli.py
@@ -38,14 +38,16 @@ def hprompt_command(args):
         run_config.var_map = var_map
     if args.var_map_path:
         run_config.var_map_path = args.var_map_path
+    prompt = hprompt.load_from(args.path[0])
     if args.output:
         run_config.output_path = args.output
     else:
-        run_config.output_fd = sys.stderr
+        # check if prompt has output_path set
+        if not prompt.run_config.output_path:
+            run_config.output_fd = sys.stderr
     if args.verbose:
         run_config.verbose = True
         print(f"Input paths: {args.path}", file=sys.stderr)
-    prompt = hprompt.load_from(args.path[0])
     result_prompt = prompt.run(run_config=run_config)
     for next_path in args.path[1:]:
         prompt += result_prompt

From 2c2fa2fc7bdc43ad409f5c2930d0e32679779d93 Mon Sep 17 00:00:00 2001
From: Atomie CHEN <atomic_cwh@163.com>
Date: Wed, 22 May 2024 18:12:07 +0800
Subject: [PATCH 6/6] Bump version to 0.7.4

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index af790f7..ec1a2f3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "HandyLLM"
-version = "0.7.3"
+version = "0.7.4"
 authors = [
     { name="Atomie CHEN", email="atomic_cwh@163.com" },
 ]