diff --git a/.github/workflows/publish-to-testpypi-pypi.yml b/.github/workflows/publish-to-testpypi-pypi.yml
index d07eefd..5eebb2d 100644
--- a/.github/workflows/publish-to-testpypi-pypi.yml
+++ b/.github/workflows/publish-to-testpypi-pypi.yml
@@ -30,7 +30,10 @@ on:
         required: false
 
 jobs:
+  test:
+    uses: ./.github/workflows/test.yml
   build-n-publish:
+    needs: test
     uses: atomiechen/reusable-workflows/.github/workflows/publish-python-distributions.yml@main
     with:
       publish_testpypi: ${{ inputs.publish_testpypi }}
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..ba1d56a
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,31 @@
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+  workflow_call:
+
+jobs:
+  build:
+    strategy:
+      matrix:
+        os: [macos-latest, ubuntu-latest, windows-latest]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+    runs-on: ${{ matrix.os }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v3
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+      - name: Lint
+        run: |
+          ./scripts/lint.sh
+      - name: Test
+        run: |
+          ./scripts/test.sh
diff --git a/src/handyllm/cache_manager.py b/src/handyllm/cache_manager.py
index 1c8061f..bec53bf 100644
--- a/src/handyllm/cache_manager.py
+++ b/src/handyllm/cache_manager.py
@@ -5,7 +5,7 @@
 import json
 from os import PathLike
 from pathlib import Path
-from typing import Callable, Collection, Iterable, Optional, TypeVar, Union, cast
+from typing import Callable, Collection, Iterable, List, Optional, TypeVar, Union, cast
 from typing_extensions import ParamSpec
 
 import yaml
@@ -67,7 +67,7 @@ def _load_files(
 
 def _dump_files(
     results,
-    files: list[Path],
+    files: List[Path],
     dump_method: Optional[
         Union[Collection[Optional[StringifyHandler]], StringifyHandler]
     ],
diff --git a/src/handyllm/hprompt.py b/src/handyllm/hprompt.py
index d5430de..ccda4e9 100644
--- a/src/handyllm/hprompt.py
+++ b/src/handyllm/hprompt.py
@@ -27,10 +27,12 @@
 from typing import (
     IO,
     AsyncGenerator,
+    Dict,
     Generator,
     Generic,
     MutableMapping,
     Optional,
+    Tuple,
     Type,
     Union,
     TypeVar,
@@ -476,7 +478,7 @@ def _prepare_run(
 
 def _merge_non_data(
     self: PromptType, other: PromptType, inplace=False
-) -> tuple[MutableMapping, RunConfig]:
+) -> Tuple[MutableMapping, RunConfig]:
     if inplace:
         merge_dict(self.request, other.request, strategy=Strategy.ADDITIVE)
         self.run_config.merge(other.run_config, inplace=True)
@@ -1095,7 +1097,7 @@ def dump_to(
 
 def load_var_map(
     path: PathType, format: VarMapFileFormat = VarMapFileFormat.TEXT
-) -> dict[str, str]:
+) -> Dict[str, str]:
     """
     Read all content that needs to be replaced in the prompt from a text file.
     """
diff --git a/src/handyllm/openai_client.py b/src/handyllm/openai_client.py
index 521ee76..28f6d32 100644
--- a/src/handyllm/openai_client.py
+++ b/src/handyllm/openai_client.py
@@ -5,7 +5,7 @@
     "ClientMode",
 ]
 
-from typing import Iterable, Mapping, Optional, TypeVar, Union
+from typing import Dict, Iterable, Mapping, Optional, TypeVar, Union
 import os
 import json
 import time
@@ -81,7 +81,7 @@ class OpenAIClient:
     # set this to your model-engine map;
     # or environment variable MODEL_ENGINE_MAP will be used;
     # can be None.
-    model_engine_map: Optional[dict[str, str]]
+    model_engine_map: Optional[Dict[str, str]]
 
     # set this to your endpoint manager
     endpoint_manager: Optional[EndpointManager] = None
@@ -95,7 +95,7 @@ def __init__(
         organization: Optional[str] = None,
         api_type: Optional[TYPE_API_TYPES] = None,
         api_version: Optional[str] = None,
-        model_engine_map: Optional[dict[str, str]] = None,
+        model_engine_map: Optional[Dict[str, str]] = None,
         endpoint_manager: Optional[EndpointManager] = None,
         endpoints: Optional[Iterable] = None,
         load_path: Optional[PathType] = None,
diff --git a/src/handyllm/response.py b/src/handyllm/response.py
index 24fac44..1d3e861 100644
--- a/src/handyllm/response.py
+++ b/src/handyllm/response.py
@@ -11,7 +11,7 @@
     "CompletionsChunk",
 ]
 
-from typing import MutableMapping, Optional, Sequence, TypedDict
+from typing import List, MutableMapping, Optional, Sequence, TypedDict
 from typing_extensions import NotRequired
 
 
@@ -51,24 +51,24 @@ class ToolCall(DictProxy):
 class Message(TypedDict):
     role: str
     content: Optional[str]
-    tool_calls: NotRequired[list[ToolCall]]
+    tool_calls: NotRequired[List[ToolCall]]
 
 
 class TopLogProbItem(DictProxy):
     token: str
     logprob: float
-    bytes: Optional[list[int]]
+    bytes: Optional[List[int]]
 
 
 class LogProbItem(DictProxy):
     token: str
     logprob: float
-    bytes: Optional[list[int]]
-    top_logprobs: list[TopLogProbItem]
+    bytes: Optional[List[int]]
+    top_logprobs: List[TopLogProbItem]
 
 
 class Logprobs(DictProxy):
-    content: Optional[list[LogProbItem]]
+    content: Optional[List[LogProbItem]]
 
 
 class ChatChoice(DictProxy):
@@ -86,7 +86,7 @@ class Usage(DictProxy):
 
 class ChatResponse(DictProxy):
     id: str
-    choices: list[ChatChoice]
+    choices: List[ChatChoice]
     created: int
     model: str
     service_tier: Optional[str]
@@ -109,7 +109,7 @@
 class ChatChunkDelta(TypedDict):
     role: NotRequired[str]
     content: NotRequired[Optional[str]]
-    tool_calls: NotRequired[list[ToolCallDelta]]
+    tool_calls: NotRequired[List[ToolCallDelta]]
 
 
 class ChatChunkChoice(DictProxy):
@@ -121,7 +121,7 @@
 
 class ChatChunk(DictProxy):
     id: str
-    choices: list[ChatChunkChoice]
+    choices: List[ChatChunkChoice]
     created: int
     model: str
     service_tier: Optional[str]
@@ -146,7 +146,7 @@ class CompletionChoice(DictProxy):
 
 class CompletionsResponse(DictProxy):
     id: str
-    choices: list[CompletionChoice]
+    choices: List[CompletionChoice]
     created: int
     model: str
     system_fingerprint: str
@@ -159,4 +159,4 @@ class CompletionsChunkChoice(DictProxy):
 
 
 class CompletionsChunk(DictProxy):
-    choices: list[CompletionsChunkChoice]
+    choices: List[CompletionsChunkChoice]
diff --git a/src/handyllm/run_config.py b/src/handyllm/run_config.py
index ec5ce8f..b177849 100644
--- a/src/handyllm/run_config.py
+++ b/src/handyllm/run_config.py
@@ -2,7 +2,7 @@
 import sys
 from enum import auto
 from pathlib import Path
-from typing import IO, Mapping, Optional
+from typing import IO, List, Mapping, Optional
 from dataclasses import dataclass, asdict, fields, replace
 
 from mergedeep import merge as merge_dict, Strategy
@@ -38,8 +38,8 @@ class RunConfig:
     record_request: Optional[RecordRequestMode] = (
         None  # default: RecordRequestMode.BLACKLIST
     )
-    record_blacklist: Optional[list[str]] = None  # default: DEFAULT_BLACKLIST
-    record_whitelist: Optional[list[str]] = None
+    record_blacklist: Optional[List[str]] = None  # default: DEFAULT_BLACKLIST
+    record_whitelist: Optional[List[str]] = None
     # variable map
     var_map: Optional[VarMapType] = None
     # variable map file path
diff --git a/src/handyllm/types.py b/src/handyllm/types.py
index 58017ca..98ed667 100644
--- a/src/handyllm/types.py
+++ b/src/handyllm/types.py
@@ -12,7 +12,16 @@
 ]
 
 import sys
-from typing import Any, Awaitable, Callable, Dict, MutableMapping, Optional, Union
+from typing import (
+    Any,
+    Awaitable,
+    Callable,
+    Dict,
+    MutableMapping,
+    Optional,
+    Tuple,
+    Union,
+)
 from os import PathLike
 
 from .response import ToolCallDelta
@@ -36,4 +45,4 @@
 StrHandler = Callable[[str], Any]
 StringifyHandler = Callable[[Any], str]
 
-ShortChatChunk = tuple[str, Optional[str], ToolCallDelta]
+ShortChatChunk = Tuple[str, Optional[str], ToolCallDelta]
diff --git a/src/handyllm/utils.py b/src/handyllm/utils.py
index 489df01..a780a08 100644
--- a/src/handyllm/utils.py
+++ b/src/handyllm/utils.py
@@ -28,6 +28,7 @@
     AsyncIterable,
     Generator,
     Iterable,
+    List,
     Optional,
     TypeVar,
     cast,
@@ -83,7 +84,7 @@ def trans_stream_chat(
             role = cast(str, message["role"])
             content = cast(Optional[str], message.get("content"))
             tool_calls = cast(
-                Optional[list[ToolCallDelta]], message.get("tool_calls")
+                Optional[List[ToolCallDelta]], message.get("tool_calls")
             )
             if tool_calls:
                 for chunk in tool_calls:
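
Note: the `list[...]`/`dict[...]`/`tuple[...]` to `List`/`Dict`/`Tuple` substitutions above are what allow the new test matrix to include Python 3.8. Subscripting builtin types (PEP 585) is only available from Python 3.9, and several of the annotations touched here are evaluated at runtime (the `cast()` argument in `utils.py`, the module-level `ShortChatChunk` alias, dataclass and TypedDict fields), so deferring evaluation with `from __future__ import annotations` would not cover them. A minimal sketch of the failure mode these changes address, with hypothetical names for illustration:

```python
from typing import List, Optional, Tuple

# On Python 3.8, subscripting a builtin type at runtime raises:
#   >>> tuple[str, Optional[str]]
#   TypeError: 'type' object is not subscriptable
# The typing-module aliases work on 3.8 through 3.12, so a
# runtime-evaluated alias must be spelled with them:
ShortChunk = Tuple[str, Optional[str]]  # hypothetical stand-in for ShortChatChunk

def first_tokens(lines: List[str], limit: int = 3) -> List[str]:
    """Return the first whitespace-delimited token of up to `limit` lines."""
    out: List[str] = []
    for line in lines[:limit]:
        parts = line.split()
        if parts:
            out.append(parts[0])
    return out
```

On 3.9+ both spellings behave identically, so the `typing` aliases are the safe common denominator until 3.8 support is dropped.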