Skip to content

Commit

Permalink
Bump version to 0.3.1: Merge pull request #7 from atomiechen/dev
Browse files Browse the repository at this point in the history
Bump version to 0.3.1
  • Loading branch information
atomiechen authored Jul 29, 2023
2 parents 33ccc6e + cb65968 commit c12601a
Show file tree
Hide file tree
Showing 7 changed files with 48 additions and 13 deletions.
17 changes: 14 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,13 @@ Example scripts are placed in [tests](./tests) folder.

## OpenAI API Request

### Logs

You can pass custom `logger` and `log_marks` (a string or a collection of strings) to `chat`/`completions` to get input and output logging.

### Timeout control

This toolkit supports client-side `timeout` control, which OpenAI's official python package does not support yet:
This toolkit supports client-side `timeout` control:

```python
from handyllm import OpenAIAPI
Expand Down Expand Up @@ -133,7 +137,8 @@ $user$
"item2": "Indeed."
}
%output_format%
%misc%
%misc1%
%misc2%
```

```python
Expand All @@ -144,7 +149,13 @@ converter = PromptConverter()
chat = converter.rawfile2chat('prompt.txt')

# variables wrapped in %s can be replaced at runtime
new_chat = converter.chat_replace_variables(chat, {r'%misc%': 'Note: do not use any bad word.'})
new_chat = converter.chat_replace_variables(
chat,
{
r'%misc1%': 'Note1: do not use any bad word.',
r'%misc2%': 'Note2: be optimistic.',
}
)
```

### Substitute
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "HandyLLM"
version = "0.3.0"
version = "0.3.1"
authors = [
{ name="Atomie CHEN", email="atomic_cwh@163.com" },
]
Expand Down
13 changes: 11 additions & 2 deletions src/handyllm/openai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import copy

from .prompt_converter import PromptConverter
from . import utils

module_logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -145,7 +146,11 @@ def chat(model, messages, timeout=None, endpoint_manager=None, logger=None, log_
if logger is not None and 'messages' in kwargs:
arguments = copy.deepcopy(kwargs)
arguments.pop('messages', None)
input_lines = [str(item) for item in log_marks]
# check if log_marks is iterable
if utils.isiterable(log_marks):
input_lines = [str(item) for item in log_marks]
else:
input_lines = [str(log_marks)]
input_lines.append(json.dumps(arguments, indent=2, ensure_ascii=False))
input_lines.append(" INPUT START ".center(50, '-'))
input_lines.append(OpenAIAPI.converter.chat2raw(kwargs['messages']))
Expand Down Expand Up @@ -199,7 +204,11 @@ def completions(model, prompt, timeout=None, endpoint_manager=None, logger=None,
if logger is not None and 'prompt' in kwargs:
arguments = copy.deepcopy(kwargs)
arguments.pop('prompt', None)
input_lines = [str(item) for item in log_marks]
# check if log_marks is iterable
if utils.isiterable(log_marks):
input_lines = [str(item) for item in log_marks]
else:
input_lines = [str(log_marks)]
input_lines.append(json.dumps(arguments, indent=2, ensure_ascii=False))
input_lines.append(" INPUT START ".center(50, '-'))
input_lines.append(kwargs['prompt'])
Expand Down
4 changes: 2 additions & 2 deletions src/handyllm/prompt_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ def chat_replace_variables(self, chat, variable_map: dict, inplace=False):
for message in chat:
new_message = {"role": message['role'], "content": message['content']}
for var, value in variable_map.items():
if var in message['content']:
new_message = {"role": new_message['role'], "content": new_message['content'].replace(var, value)}
if var in new_message['content']:
new_message['content'] = new_message['content'].replace(var, value)
new_chat.append(new_message)
return new_chat

6 changes: 6 additions & 0 deletions src/handyllm/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from urllib.parse import urlparse
import os
import time
import collections.abc


def get_filename_from_url(download_url):
Expand All @@ -23,3 +24,8 @@ def download_binary(download_url, file_path=None, dir='.'):
file.write(response.content)
return file_path

def isiterable(arg):
    """Return True if *arg* is a non-string iterable (e.g. list, tuple, set, generator).

    ``str`` and ``bytes`` are technically iterable, but callers (the
    ``log_marks`` handling in ``chat``/``completions``) use this check to
    distinguish a single mark from a collection of marks, so string-like
    scalars are deliberately reported as not iterable. Without the
    ``bytes`` exclusion, a bytes value would be iterated per-byte into
    integer marks.
    """
    return (
        isinstance(arg, collections.abc.Iterable)
        and not isinstance(arg, (str, bytes))
    )
3 changes: 2 additions & 1 deletion tests/prompt.txt
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,5 @@ $user$
"item2": "Indeed."
}
%output_format%
%misc%
%misc1%
%misc2%
16 changes: 12 additions & 4 deletions tests/test_prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,17 @@

# chat can be used as the message parameter for OpenAI API
chat = converter.rawfile2chat('prompt.txt') # variables are substituted according to map
print(json.dumps(chat, indent=2))
print()
# print(json.dumps(chat, indent=2))
print(converter.chat2raw(chat))
print('-----')

# variables wrapped in %s can be replaced at runtime
new_chat = converter.chat_replace_variables(chat, {r'%misc%': 'Note: do not use any bad word.'})
print(json.dumps(new_chat, indent=2))
new_chat = converter.chat_replace_variables(
chat,
{
r'%misc1%': 'Note1: do not use any bad word.',
r'%misc2%': 'Note2: be optimistic.',
}
)
# print(json.dumps(new_chat, indent=2))
print(converter.chat2raw(new_chat))

0 comments on commit c12601a

Please sign in to comment.