Merge pull request #25 from atomiechen/dev
Bump version to 0.7.4
atomiechen authored May 22, 2024
2 parents eb4f69c + 2c2fa2f commit 4aa2d1b
Showing 5 changed files with 25 additions and 13 deletions.
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -4,13 +4,13 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "HandyLLM"
-version = "0.7.3"
+version = "0.7.4"
 authors = [
   { name="Atomie CHEN", email="atomic_cwh@163.com" },
 ]
 description = "A handy toolkit for using LLM."
 readme = "README.md"
-requires-python = ">=3.7"
+requires-python = ">=3.8"
 classifiers = [
     "Programming Language :: Python :: 3",
     "License :: OSI Approved :: MIT License",
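Note that alongside the version bump, the Python floor rises from 3.7 to 3.8. A quick sketch to verify the installed package's metadata after upgrading (importlib.metadata is in the standard library from Python 3.8, the new minimum):

from importlib.metadata import metadata, version

print(version("HandyLLM"))                      # expected: 0.7.4
print(metadata("HandyLLM")["Requires-Python"])  # expected: >=3.8
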
4 changes: 4 additions & 0 deletions src/handyllm/_constants.py
@@ -1,7 +1,11 @@
+from typing import Literal
+
 _API_BASE_OPENAI = 'https://api.openai.com/v1'
 _API_TYPE_OPENAI = 'openai'
 _API_TYPES_AZURE = (
     'azure',
     'azure_ad',
     'azuread'
 )
+
+TYPE_API_TYPES = Literal['openai', 'azure', 'azure_ad', 'azuread']
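The new TYPE_API_TYPES literal type lets static checkers catch unsupported api_type strings; typing.Literal itself only landed in Python 3.8, which is likely why requires-python was raised above. A minimal standalone sketch of the effect:

from typing import Literal, Optional

TYPE_API_TYPES = Literal['openai', 'azure', 'azure_ad', 'azuread']

def set_api_type(api_type: Optional[TYPE_API_TYPES]) -> None:
    # a type checker (e.g. mypy) validates the literal at call sites
    print(f"api_type = {api_type}")

set_api_type('azure')    # accepted
set_api_type(None)       # accepted: rolls back to the default
# set_api_type('aws')    # rejected by the checker: not a permitted literal
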
6 changes: 4 additions & 2 deletions src/handyllm/cli.py
@@ -38,14 +38,16 @@ def hprompt_command(args):
         run_config.var_map = var_map
     if args.var_map_path:
         run_config.var_map_path = args.var_map_path
+    prompt = hprompt.load_from(args.path[0])
     if args.output:
         run_config.output_path = args.output
     else:
-        run_config.output_fd = sys.stderr
+        # check if prompt has output_path set
+        if not prompt.run_config.output_path:
+            run_config.output_fd = sys.stderr
     if args.verbose:
         run_config.verbose = True
         print(f"Input paths: {args.path}", file=sys.stderr)
-    prompt = hprompt.load_from(args.path[0])
     result_prompt = prompt.run(run_config=run_config)
     for next_path in args.path[1:]:
         prompt += result_prompt
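The reordering above loads the prompt before choosing an output sink, so a prompt that sets its own run_config.output_path no longer gets the stderr fallback forced on it. A condensed sketch of the resulting precedence (the helper name is illustrative, not part of the CLI):

import sys

def resolve_output(run_config, prompt, output_arg):
    if output_arg:
        # an explicit --output argument always wins
        run_config.output_path = output_arg
    elif not prompt.run_config.output_path:
        # fall back to stderr only when the prompt sets no output path
        run_config.output_fd = sys.stderr
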
18 changes: 9 additions & 9 deletions src/handyllm/openai_client.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import Union, TYPE_CHECKING
+from typing import Union
 import os
 import json
 import time
@@ -9,7 +9,7 @@
 from .endpoint_manager import Endpoint, EndpointManager
 from .requestor import Requestor
 from ._utils import get_request_url, join_url, _chat_log_response, _chat_log_exception, _completions_log_response, _completions_log_exception
-from ._constants import _API_BASE_OPENAI, _API_TYPE_OPENAI, _API_TYPES_AZURE
+from ._constants import _API_BASE_OPENAI, _API_TYPE_OPENAI, _API_TYPES_AZURE, TYPE_API_TYPES


 def api(func):
@@ -27,7 +27,7 @@ class OpenAIClient:
     # set this to your API type;
     # or environment variable OPENAI_API_TYPE will be used;
     # can be None (roll back to default).
-    api_type: Union[str, None]
+    api_type: Union[TYPE_API_TYPES, None]

     # set this to your API base;
     # or environment variable OPENAI_API_BASE will be used.
@@ -57,12 +57,12 @@ def __init__(
         self,
         mode: Union[str, ClientMode] = ClientMode.SYNC,
         *,
-        api_base=None,
-        api_key=None,
-        organization=None,
-        api_type=None,
-        api_version=None,
-        model_engine_map=None,
+        api_base: Union[str, None] = None,
+        api_key: Union[str, None] = None,
+        organization: Union[str, None] = None,
+        api_type: Union[TYPE_API_TYPES, None] = None,
+        api_version: Union[str, None] = None,
+        model_engine_map: Union[dict, None] = None,
     ) -> None:
         self._sync_client = None
         self._async_client = None
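With the constructor now annotated, type checkers can validate these keyword arguments at construction time. A hedged usage sketch (the top-level import path is assumed from the package name; every value is a placeholder):

from handyllm import OpenAIClient  # import path assumed

client = OpenAIClient(
    api_type="azure",        # checked against the TYPE_API_TYPES literals
    api_base="https://my-resource.openai.azure.com",  # placeholder
    api_key="...",             # placeholder
    api_version="2023-05-15",  # placeholder version string
)
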
6 changes: 6 additions & 0 deletions src/handyllm/prompt_converter.py
@@ -130,6 +130,9 @@ def stream_msgs2raw(gen_sync, fd: Optional[io.IOBase] = None) -> Tuple[str, str]
         if tool_calls and fd:
             # dump tool calls
             fd.write(yaml.dump(tool_calls))
+        if not tool_calls:
+            # should return None if no tool calls
+            tool_calls = None
         return role, content, tool_calls

     @staticmethod
@@ -161,6 +164,9 @@ async def astream_msgs2raw(gen_async, fd: Optional[io.IOBase] = None) -> Tuple[s
         if tool_calls and fd:
             # dump tool calls
             fd.write(yaml.dump(tool_calls))
+        if not tool_calls:
+            # should return None if no tool calls
+            tool_calls = None
         return role, content, tool_calls

     @classmethod
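Coercing an empty tool_calls accumulator to None lets downstream code use a plain None check and omit the key from serialized messages entirely. An illustrative standalone sketch (not the converter itself):

role, content, tool_calls = "assistant", "Hello!", []

if not tool_calls:
    tool_calls = None  # mirrors the new behavior in (a)stream_msgs2raw

msg = {"role": role, "content": content}
if tool_calls is not None:
    msg["tool_calls"] = tool_calls
print(msg)  # {'role': 'assistant', 'content': 'Hello!'}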
