Skip to content

Commit

Permalink
support zhipuai
Browse files Browse the repository at this point in the history
  • Loading branch information
JiauZhang committed Jun 10, 2024
1 parent 97e4e22 commit 7724cce
Show file tree
Hide file tree
Showing 6 changed files with 109 additions and 10 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
- ✅ 讯飞星火大模型/Spark
- ✅ 腾讯混元大模型/Hunyuan
- ✅ DeepSeek
- ✅ 智谱/ChatGLM

### Install
```shell
Expand Down
14 changes: 5 additions & 9 deletions chatchat/cli/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
'tencent': ['secret_id', 'secret_key'],
'xunfei': ['app_id', 'api_key', 'api_secret'],
'deepseek': ['api_key'],
'zhipu': ['api_key'],
}

def supported_platforms():
Expand All @@ -20,24 +21,19 @@ def parse_config(args):
if args.list:
supported_platforms()
elif args.cfgs:
cfg = args.cfgs.split('.')
cfg = args.cfgs.split('=')
plat_key = cfg[0].split('.')
usage = 'Usage: chatchat config platform.key=value'
if len(cfg) != 2:
if len(cfg) != 2 and len(plat_key) != 2:
print(usage)
return

plat = cfg[0]
(plat, key), value = plat_key, cfg[1]
if plat not in __platform_config__:
print(f'Platform <{plat}> is currently NOT supported!')
supported_platforms()
return

kv = cfg[1].split('=')
if len(kv) != 2:
print(usage)
return
key, value = kv

if key not in __platform_config__[plat]:
print(f'Platform <{plat}> do NOT has secret key <{key}>!\nYou can set the following keys:')
for key in __platform_config__[plat]:
Expand Down
75 changes: 75 additions & 0 deletions chatchat/zhipu.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
from chatchat.base import Base
import httpx

class Completion(Base):
    """One-shot chat-completion client for the ZhipuAI (ChatGLM) API."""

    def __init__(self, model='glm-4-flash', proxy=None, timeout=None):
        """Verify stored credentials, validate the model name, and build an HTTP client.

        Args:
            model: name of the GLM chat model to use.
            proxy: optional proxy URL passed through to httpx.
            timeout: optional request timeout passed through to httpx.

        Raises:
            RuntimeError: when ``model`` is not one of the supported models.
        """
        super().__init__()

        platform = 'zhipu'
        # Ensure an api_key has been configured for the zhipu platform
        # (see chatchat/cli/config.py) before doing anything else.
        self.verify_secret_data(platform, ('api_key',))
        self.jdata = self.secret_data[platform]

        # Models accepted by the ZhipuAI chat-completions endpoint.
        self.model_type = {
            'glm-4-0520',
            'glm-4',
            'glm-4-air',
            'glm-4-airx',
            'glm-4-flash',
            'glm-4v',
            'glm-3-turbo',
        }
        if model not in self.model_type:
            raise RuntimeError(f'supported chat type: {list(self.model_type)}')
        self.model = model

        self.url = 'https://open.bigmodel.cn/api/paas/v4/chat/completions'
        self.client = httpx.Client(proxy=proxy, timeout=timeout)
        self.headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'Authorization': f'Bearer {self.jdata["api_key"]}',
        }

    def create(self, message, max_tokens=1024, temperature=0.95, top_p=0.7, stream=False):
        """POST a single user message to the API and return the decoded JSON response."""
        payload = {
            'model': self.model,
            'messages': [{'role': 'user', 'content': message}],
            'max_tokens': max_tokens,
            'temperature': temperature,
            'top_p': top_p,
            'stream': stream,
        }
        response = self.client.post(self.url, headers=self.headers, json=payload)
        return response.json()

class Chat(Completion):
    """Multi-turn chat client that keeps and resends the conversation history."""

    def __init__(self, model='glm-4-flash', history=None, proxy=None, timeout=None):
        """
        Args:
            model: GLM model name, validated by ``Completion.__init__``.
            history: optional list of prior messages (``{'role', 'content'}`` dicts).
                A fresh list is created when omitted.
            proxy: optional proxy URL passed through to httpx.
            timeout: optional request timeout passed through to httpx.
        """
        super().__init__(model=model, proxy=proxy, timeout=timeout)
        # Bug fix: the original signature used `history=[]`, a mutable default
        # evaluated once and shared by every Chat instance, so independent
        # chats would silently leak turns into each other's history.  A None
        # sentinel keeps the call signature backward-compatible while giving
        # each instance its own list.
        self.history = [] if history is None else history

    def chat(self, message, max_tokens=1024, temperature=0.95, top_p=0.7, stream=False):
        """Send ``message`` together with the full history; return the raw JSON response.

        On success the assistant's reply is appended to ``self.history`` so the
        next call carries the whole conversation.
        """
        self.history.append({
            'role': 'user',
            'content': message,
        })

        jmsg = {
            'model': self.model,
            'messages': self.history,
            'max_tokens': max_tokens,
            'temperature': temperature,
            'top_p': top_p,
            'stream': stream,
        }
        r = self.client.post(self.url, headers=self.headers, json=jmsg)
        r = r.json()

        # Error responses carry no 'choices' key; only record the assistant
        # turn when the call actually succeeded.
        if 'choices' in r:
            assistant_output = r['choices'][0]['message']
            self.history.append(assistant_output)

        return r
8 changes: 8 additions & 0 deletions examples/zhipu_chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Example: interactive multi-turn chat with a ZhipuAI ChatGLM model.
from chatchat.zhipu import Chat

# Chat keeps the conversation history and resends it on every turn.
chat = Chat(model='glm-3-turbo')
while True:
    user = input('user: ')
    r = chat.chat(user)
    # r is the raw API response; the first choice holds the assistant message.
    message = r['choices'][0]['message']
    print(f"{message['role']}: {message['content']}")
19 changes: 19 additions & 0 deletions examples/zhipu_completion.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Example: single-shot completion against a ZhipuAI ChatGLM model.
from chatchat.zhipu import Completion

completion = Completion(model='glm-3-turbo')
r = completion.create('Hi', max_tokens=64)
# Sample response shape (ids/timestamps elided):
# {
#     'choices': [
#         {
#             'finish_reason': 'stop', 'index': 0, 'message':
#             {
#                 'content': "Hello 👋! I'm ChatGLM(智谱清言).",
#                 'role': 'assistant'
#             }
#         }
#     ],
#     'created': 1111, 'id': '2222', 'model': 'glm-3-turbo', 'request_id': '8730203825971097305', 'usage': {
#         'completion_tokens': 39, 'prompt_tokens': 6, 'total_tokens': 45
#     }
# }
print(r)
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'chatchat',
packages = find_packages(exclude=['examples']),
version = '0.1.6',
version = '0.1.7',
license = 'GPL-2.0',
description = 'Large Language Model API',
author = 'JiauZhang',
Expand Down

0 comments on commit 7724cce

Please sign in to comment.