
Commit

refactor
JiauZhang committed Feb 8, 2025
1 parent 0c809f2 commit eb9f41a
Showing 6 changed files with 44 additions and 59 deletions.
48 changes: 42 additions & 6 deletions README.md
@@ -1,10 +1,46 @@
### Large Language Models Python API
- ✅ Baidu ERNIE (文心一言)
- ✅ Alibaba QWen (通义千问)
- ✅ iFlytek Spark (星火大模型)
- ✅ Tencent Hunyuan (混元大模型)
- ✅ DeepSeek
- ✅ Zhipu ChatGLM (智谱)
<table align="center">
<tr>
<th>Vendor</th>
<th>Model</th>
</tr>
<tr>
<td>DeepSeek</td>
<td>
deepseek-chat, deepseek-reasoner, deepseek-coder
</td>
</tr>
<tr>
<td>Baidu</td>
<td>
ernie-lite-8k, ernie-tiny-8k, ernie-speed-8k, ernie-speed-128k, deepseek-v3, deepseek-r1
</td>
</tr>
<tr>
<td>Alibaba</td>
<td>
deepseek-v3, deepseek-r1, deepseek-r1-distill-qwen-1.5b, deepseek-r1-distill-qwen-7b, deepseek-r1-distill-qwen-14b, deepseek-r1-distill-qwen-32b
</td>
</tr>
<tr>
<td>iFlytek</td>
<td>
lite, generalv3, pro-128k, generalv3.5, max-32k, 4.0Ultra
</td>
</tr>
<tr>
<td>Tencent</td>
<td>
hunyuan-lite, hunyuan-standard, hunyuan-standard-256K, hunyuan-pro
</td>
</tr>
<tr>
<td>Zhipu</td>
<td>
glm-4-plus, glm-4-air, glm-4-long, glm-4-flash
</td>
</tr>
</table>

### Install
```shell
```
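
As a quick orientation, below is a minimal construction sketch based on the `__init__` signatures visible in this commit; the `Completion` class name is only shown for `chatchat/tencent.py`, so reusing it for `chatchat/deepseek.py` is an assumption, and the model names come from the table above.

```python
# Minimal sketch; nothing beyond the constructor signatures is confirmed by this diff.
from chatchat.tencent import Completion as TencentCompletion
from chatchat.deepseek import Completion as DeepSeekCompletion  # assumed class name

# model/proxy/timeout mirror the keyword arguments shown in the diffs below.
hunyuan = TencentCompletion(model='hunyuan-lite', timeout=30)
reasoner = DeepSeekCompletion(model='deepseek-reasoner', proxy=None, timeout=30)
```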
11 changes: 0 additions & 11 deletions chatchat/alibaba.py
@@ -9,18 +9,7 @@ def __init__(self, model='qwen-turbo', proxy=None, timeout=None):
super().__init__(__vendor__, __vendor_keys__)

self.api_key = self.secret_data[__vendor_keys__[0]]

# https://bailian.console.aliyun.com
self.model_type = set([
'qwen-turbo',
'qwen-plus',
'qwen-max',
])

if model not in self.model_type:
raise RuntimeError(f'supported chat type: {list(self.model_type)}')
self.model = model

self.api = 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'
self.client = httpx.Client(proxy=proxy, timeout=timeout)
self.headers = {
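The lines removed above are the DashScope model whitelist and its `RuntimeError`; after this commit the model string is passed through to the service unchecked. A short sketch of the practical effect, assuming the Alibaba module exposes a `Completion` class like `chatchat/tencent.py` does:

```python
from chatchat.alibaba import Completion  # class name assumed, mirroring chatchat/tencent.py

# Before this commit, anything outside {'qwen-turbo', 'qwen-plus', 'qwen-max'}
# raised RuntimeError in __init__. Now newer DashScope names from the README
# table are accepted, and an invalid name is reported by the API instead.
chat = Completion(model='deepseek-r1-distill-qwen-7b', timeout=60)
```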
6 changes: 0 additions & 6 deletions chatchat/baidu.py
@@ -9,12 +9,6 @@ def __init__(self, model='ernie-speed-8k', proxy=None, timeout=None):
super().__init__(__vendor__, __vendor_keys__)

self.app_id = self.secret_data[__vendor_keys__[0]]
# https://console.bce.baidu.com/qianfan/ais/console/onlineService
self.model_set = set([
'ernie-lite-8k', 'ernie-tiny-8k', 'ernie-speed-8k', 'ernie-speed-128k',
'deepseek-v3', 'deepseek-r1',
])

self.model = model
self.api = 'https://qianfan.baidubce.com/v2/chat/completions'
self.client = httpx.Client(proxy=proxy, timeout=timeout)
10 changes: 0 additions & 10 deletions chatchat/deepseek.py
@@ -9,17 +9,7 @@ def __init__(self, model='deepseek-chat', proxy=None, timeout=None):
super().__init__(__vendor__, __vendor_keys__)

self.api_key = self.secret_data[__vendor_keys__[0]]

self.model_type = set([
'deepseek-chat',
'deepseek-reasoner',
'deepseek-coder',
])

if model not in self.model_type:
raise RuntimeError(f'supported chat type: {list(self.model_type)}')
self.model = model

self.host = 'https://api.deepseek.com'
self.model_url = f'{self.host}/models'
self.chat_url = f'{self.host}/chat/completions'
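`chatchat/deepseek.py` keeps `model_url` pointing at the vendor's `/models` endpoint, so valid model names can be discovered at runtime instead of being frozen into the client. A rough sketch with `httpx`; the Bearer header and the OpenAI-style response shape are assumptions based on DeepSeek's public API, not on this diff:

```python
import httpx

host = 'https://api.deepseek.com'   # matches self.host in chatchat/deepseek.py
api_key = 'sk-...'                  # placeholder; load it from your own secret store

# Ask the service which models exist rather than validating against a local set.
resp = httpx.get(f'{host}/models', headers={'Authorization': f'Bearer {api_key}'})
resp.raise_for_status()
print([item['id'] for item in resp.json().get('data', [])])
```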
14 changes: 2 additions & 12 deletions chatchat/tencent.py
@@ -10,19 +10,9 @@ class Completion(Base):
def __init__(self, model='hunyuan-lite', proxy=None, timeout=None):
super().__init__(__vendor__, __vendor_keys__)

self.secret_id = self.secret_data['secret_id']
self.secret_key = self.secret_data['secret_key']

self.model_type = set([
'hunyuan-lite',
'hunyuan-standard',
'hunyuan-standard-256K',
'hunyuan-pro',
])
if model not in self.model_type:
raise RuntimeError(f'supported chat type: {list(self.model_type)}')
self.secret_id = self.secret_data[__vendor_keys__[0]]
self.secret_key = self.secret_data[__vendor_keys__[1]]
self.model = model

self.host = 'hunyuan.tencentcloudapi.com'
self.endpoint = f'https://{self.host}'
self.client = httpx.Client(proxy=proxy, timeout=timeout)
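Here the credential lookup switches from literal dictionary keys to indexing `__vendor_keys__`, matching how the other modules read `secret_data`. A hypothetical reconstruction of the convention (the tuple contents are inferred from the replaced lines; how `Base` fills `secret_data` is an assumption):

```python
# Hypothetical module-level constants this pattern relies on.
__vendor__ = 'tencent'
__vendor_keys__ = ('secret_id', 'secret_key')   # [0] replaces 'secret_id', [1] replaces 'secret_key'

# Base(__vendor__, __vendor_keys__) presumably loads these entries into secret_data.
secret_data = {'secret_id': 'AKID...', 'secret_key': '...'}   # placeholder values

secret_id = secret_data[__vendor_keys__[0]]     # was: secret_data['secret_id']
secret_key = secret_data[__vendor_keys__[1]]    # was: secret_data['secret_key']
```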
14 changes: 0 additions & 14 deletions chatchat/zhipu.py
@@ -9,21 +9,7 @@ def __init__(self, model='glm-4-flash', proxy=None, timeout=None):
super().__init__(__vendor__, __vendor_keys__)

self.api_key = self.secret_data[__vendor_keys__[0]]

self.model_type = set([
'glm-4-0520',
'glm-4',
'glm-4-air',
'glm-4-airx',
'glm-4-flash',
'glm-4v',
'glm-3-turbo',
])

if model not in self.model_type:
raise RuntimeError(f'supported chat type: {list(self.model_type)}')
self.model = model

self.url = 'https://open.bigmodel.cn/api/paas/v4/chat/completions'
self.client = httpx.Client(proxy=proxy, timeout=timeout)
self.headers = {
