Merge pull request #40 from atomiechen/gh_action
add test action
atomiechen authored Jul 28, 2024
2 parents 837ebbc + 7f5b4a2 commit 9ea0599
Showing 9 changed files with 70 additions and 24 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/publish-to-testpypi-pypi.yml
@@ -30,7 +30,10 @@ on:
         required: false
 
 jobs:
+  test:
+    uses: ./.github/workflows/test.yml
   build-n-publish:
+    needs: test
     uses: atomiechen/reusable-workflows/.github/workflows/publish-python-distributions.yml@main
     with:
       publish_testpypi: ${{ inputs.publish_testpypi }}
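With this change, the build-n-publish job declares `needs: test`, so a release is published only after the full test matrix in the reusable workflow below succeeds.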
31 changes: 31 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,31 @@
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+  workflow_call:
+
+jobs:
+  build:
+    strategy:
+      matrix:
+        os: [macos-latest, ubuntu-latest, windows-latest]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+    runs-on: ${{ matrix.os }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v3
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+      - name: Lint
+        run: |
+          ./scripts/lint.sh
+      - name: Test
+        run: |
+          ./scripts/test.sh
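The `workflow_call` trigger is what lets the publish workflow above pull this file in as its `test` job. The matrix fans out to 15 jobs (3 operating systems × 5 Python versions), and the inclusion of Python 3.8 explains the rest of this commit: PEP 585 builtin generics (`list[...]`, `dict[...]`, `tuple[...]`) are only subscriptable at runtime from Python 3.9 onward, so the source diffs below swap them for their `typing` equivalents. A minimal sketch of the failure mode, using the `_dump_files` signature from the cache_manager.py diff below, trimmed to the relevant parameter:

# On Python 3.8 the commented-out signature raises at import time, because
# parameter annotations are evaluated when the def statement runs:
#   TypeError: 'type' object is not subscriptable
from pathlib import Path
from typing import List

# def _dump_files(results, files: list[Path]): ...  # breaks on Python 3.8

def _dump_files(results, files: List[Path]):
    # typing.List is subscriptable on every version in the test matrix
    ...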
4 changes: 2 additions & 2 deletions src/handyllm/cache_manager.py
@@ -5,7 +5,7 @@
 import json
 from os import PathLike
 from pathlib import Path
-from typing import Callable, Collection, Iterable, Optional, TypeVar, Union, cast
+from typing import Callable, Collection, Iterable, List, Optional, TypeVar, Union, cast
 from typing_extensions import ParamSpec
 import yaml
 
@@ -67,7 +67,7 @@ def _load_files(
 
 def _dump_files(
     results,
-    files: list[Path],
+    files: List[Path],
     dump_method: Optional[
         Union[Collection[Optional[StringifyHandler]], StringifyHandler]
     ],
6 changes: 4 additions & 2 deletions src/handyllm/hprompt.py
@@ -27,10 +27,12 @@
 from typing import (
     IO,
     AsyncGenerator,
+    Dict,
     Generator,
     Generic,
     MutableMapping,
     Optional,
+    Tuple,
     Type,
     Union,
     TypeVar,
@@ -476,7 +478,7 @@ def _prepare_run(
 
     def _merge_non_data(
         self: PromptType, other: PromptType, inplace=False
-    ) -> tuple[MutableMapping, RunConfig]:
+    ) -> Tuple[MutableMapping, RunConfig]:
         if inplace:
             merge_dict(self.request, other.request, strategy=Strategy.ADDITIVE)
             self.run_config.merge(other.run_config, inplace=True)
@@ -1095,7 +1097,7 @@ def dump_to(
 
 def load_var_map(
     path: PathType, format: VarMapFileFormat = VarMapFileFormat.TEXT
-) -> dict[str, str]:
+) -> Dict[str, str]:
     """
     Read all content that needs to be replaced in the prompt from a text file.
     """
6 changes: 3 additions & 3 deletions src/handyllm/openai_client.py
@@ -5,7 +5,7 @@
     "ClientMode",
 ]
 
-from typing import Iterable, Mapping, Optional, TypeVar, Union
+from typing import Dict, Iterable, Mapping, Optional, TypeVar, Union
 import os
 import json
 import time
@@ -81,7 +81,7 @@ class OpenAIClient:
     # set this to your model-engine map;
     # or environment variable MODEL_ENGINE_MAP will be used;
     # can be None.
-    model_engine_map: Optional[dict[str, str]]
+    model_engine_map: Optional[Dict[str, str]]
 
     # set this to your endpoint manager
     endpoint_manager: Optional[EndpointManager] = None
@@ -95,7 +95,7 @@ def __init__(
         organization: Optional[str] = None,
         api_type: Optional[TYPE_API_TYPES] = None,
         api_version: Optional[str] = None,
-        model_engine_map: Optional[dict[str, str]] = None,
+        model_engine_map: Optional[Dict[str, str]] = None,
         endpoint_manager: Optional[EndpointManager] = None,
         endpoints: Optional[Iterable] = None,
         load_path: Optional[PathType] = None,
22 changes: 11 additions & 11 deletions src/handyllm/response.py
@@ -11,7 +11,7 @@
     "CompletionsChunk",
 ]
 
-from typing import MutableMapping, Optional, Sequence, TypedDict
+from typing import List, MutableMapping, Optional, Sequence, TypedDict
 from typing_extensions import NotRequired
 
 
@@ -51,24 +51,24 @@ class ToolCall(DictProxy):
 class Message(TypedDict):
     role: str
     content: Optional[str]
-    tool_calls: NotRequired[list[ToolCall]]
+    tool_calls: NotRequired[List[ToolCall]]
 
 
 class TopLogProbItem(DictProxy):
     token: str
     logprob: float
-    bytes: Optional[list[int]]
+    bytes: Optional[List[int]]
 
 
 class LogProbItem(DictProxy):
     token: str
     logprob: float
-    bytes: Optional[list[int]]
-    top_logprobs: list[TopLogProbItem]
+    bytes: Optional[List[int]]
+    top_logprobs: List[TopLogProbItem]
 
 
 class Logprobs(DictProxy):
-    content: Optional[list[LogProbItem]]
+    content: Optional[List[LogProbItem]]
 
 
 class ChatChoice(DictProxy):
@@ -86,7 +86,7 @@ class Usage(DictProxy):
 
 class ChatResponse(DictProxy):
     id: str
-    choices: list[ChatChoice]
+    choices: List[ChatChoice]
     created: int
     model: str
     service_tier: Optional[str]
@@ -109,7 +109,7 @@ class ChatChunkDelta(TypedDict):
 
     role: NotRequired[str]
     content: NotRequired[Optional[str]]
-    tool_calls: NotRequired[list[ToolCallDelta]]
+    tool_calls: NotRequired[List[ToolCallDelta]]
 
 
 class ChatChunkChoice(DictProxy):
@@ -121,7 +121,7 @@ class ChatChunkChoice(DictProxy):
 
 class ChatChunk(DictProxy):
     id: str
-    choices: list[ChatChunkChoice]
+    choices: List[ChatChunkChoice]
     created: int
     model: str
     service_tier: Optional[str]
@@ -146,7 +146,7 @@ class CompletionChoice(DictProxy):
 
 class CompletionsResponse(DictProxy):
     id: str
-    choices: list[CompletionChoice]
+    choices: List[CompletionChoice]
     created: int
     model: str
     system_fingerprint: str
@@ -159,4 +159,4 @@ class CompletionsChunkChoice(DictProxy):
 
 
 class CompletionsChunk(DictProxy):
-    choices: list[CompletionsChunkChoice]
+    choices: List[CompletionsChunkChoice]
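The response.py changes are all in class bodies (TypedDict and DictProxy definitions). Annotations there are no safer than ones in function signatures: they are evaluated when the class object is created, so `list[ToolCall]` would raise on Python 3.8 even though the field is never executed as ordinary code. A hedged sketch, with `ToolCall` simplified to a plain TypedDict (in the source it derives from DictProxy):

# Class-body annotations are evaluated when the class is built, so builtin
# generics in a TypedDict break on Python 3.8; typing.List avoids that.
from typing import List, Optional, TypedDict
from typing_extensions import NotRequired  # same import as in the diff above

class ToolCall(TypedDict):  # simplified stand-in for the DictProxy-based class
    id: str

class Message(TypedDict):
    role: str
    content: Optional[str]
    # with list[ToolCall], this line raises TypeError at class creation on 3.8
    tool_calls: NotRequired[List[ToolCall]]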
6 changes: 3 additions & 3 deletions src/handyllm/run_config.py
@@ -2,7 +2,7 @@
 import sys
 from enum import auto
 from pathlib import Path
-from typing import IO, Mapping, Optional
+from typing import IO, List, Mapping, Optional
 from dataclasses import dataclass, asdict, fields, replace
 
 from mergedeep import merge as merge_dict, Strategy
@@ -38,8 +38,8 @@ class RunConfig:
     record_request: Optional[RecordRequestMode] = (
         None  # default: RecordRequestMode.BLACKLIST
     )
-    record_blacklist: Optional[list[str]] = None  # default: DEFAULT_BLACKLIST
-    record_whitelist: Optional[list[str]] = None
+    record_blacklist: Optional[List[str]] = None  # default: DEFAULT_BLACKLIST
+    record_whitelist: Optional[List[str]] = None
     # variable map
     var_map: Optional[VarMapType] = None
     # variable map file path
13 changes: 11 additions & 2 deletions src/handyllm/types.py
@@ -12,7 +12,16 @@
 ]
 
 import sys
-from typing import Any, Awaitable, Callable, Dict, MutableMapping, Optional, Union
+from typing import (
+    Any,
+    Awaitable,
+    Callable,
+    Dict,
+    MutableMapping,
+    Optional,
+    Tuple,
+    Union,
+)
 from os import PathLike
 
 from .response import ToolCallDelta
@@ -36,4 +45,4 @@
 StrHandler = Callable[[str], Any]
 StringifyHandler = Callable[[Any], str]
 
-ShortChatChunk = tuple[str, Optional[str], ToolCallDelta]
+ShortChatChunk = Tuple[str, Optional[str], ToolCallDelta]
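types.py is the one spot where even PEP 563's `from __future__ import annotations` could not have helped: `ShortChatChunk` is a plain module-level assignment, executed at import time, so the subscripted type has to be valid at runtime. A short sketch with a hypothetical stand-in for `ToolCallDelta`:

# A type alias is an ordinary assignment, evaluated on import, so string
# annotations offer no escape hatch here; Tuple is required for 3.8.
from typing import Optional, Tuple

ToolCallDelta = dict  # hypothetical stand-in; the real class lives in handyllm.response

# ShortChatChunk = tuple[str, Optional[str], ToolCallDelta]  # TypeError on 3.8
ShortChatChunk = Tuple[str, Optional[str], ToolCallDelta]  # fine on 3.8+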
3 changes: 2 additions & 1 deletion src/handyllm/utils.py
@@ -28,6 +28,7 @@
     AsyncIterable,
     Generator,
     Iterable,
+    List,
     Optional,
     TypeVar,
     cast,
@@ -83,7 +84,7 @@ def trans_stream_chat(
             role = cast(str, message["role"])
             content = cast(Optional[str], message.get("content"))
             tool_calls = cast(
-                Optional[list[ToolCallDelta]], message.get("tool_calls")
+                Optional[List[ToolCallDelta]], message.get("tool_calls")
             )
             if tool_calls:
                 for chunk in tool_calls:
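The same runtime constraint applies to `typing.cast`, which receives the target type as an ordinary argument: `Optional[list[ToolCallDelta]]` would be evaluated, and fail on 3.8, before `cast` is even called. A sketch with a hypothetical chunk payload:

# cast() is a no-op at runtime that returns its second argument, but its
# first argument is still evaluated, so the generic must be 3.8-safe.
from typing import List, Optional, cast

message = {"role": "assistant", "tool_calls": [{"index": 0}]}  # hypothetical payload
tool_calls = cast(Optional[List[dict]], message.get("tool_calls"))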
