fix azure and compat with v0.2
binary-sky committed Nov 11, 2023
1 parent 0002cbb commit 465363f
Showing 12 changed files with 140 additions and 133 deletions.
103 changes: 0 additions & 103 deletions autogen/gradio_gui/README.md

This file was deleted.

File renamed without changes.
Empty file added samples/app/__init__.py
116 changes: 116 additions & 0 deletions samples/app/gradio_gui/README.md
@@ -0,0 +1,116 @@
# AutoGen's Gradio GUI

This is a GUI for AutoGen. It is written in Python and uses the [Gradio](https://gradio.app/) library.

## ⭐Installation

1. Install AutoGen with the GUI extra:

```bash
python -m pip install "pyautogen[gui]"
```

## ⭐Run

1. Create a file named `OAI_CONFIG_LIST` and fill in your OpenAI or Azure OpenAI API key(s). For example:

```json
[
    {
        "model": "gpt-3.5-turbo-16k",
        "api_key": "----------------------------------",
        "api_type": "azure",
        "api_version": "2023-07-01-preview",
        "base_url": "https://your_deploy_url.openai.azure.com/openai/deployments/your_deploy_name/chat/completions?api-version=2023-05-15"
    }
]
```
2. Run the following commands to launch the GUI:
- Linux
```bash
# export OAI_CONFIG_LIST='/path/to/OAI_CONFIG_LIST'
export OAI_CONFIG_LIST='./OAI_CONFIG_LIST'
export AUTOGEN_USE_DOCKER='False'
export WEB_PORT=12345
# for more environment variable options, please refer to the void-terminal project
python -m samples.app.launch_gradio_gui
```
- Windows CMD
```bat
REM set OAI_CONFIG_LIST=/path/to/OAI_CONFIG_LIST
set OAI_CONFIG_LIST=./OAI_CONFIG_LIST
set AUTOGEN_USE_DOCKER=False
set WEB_PORT=12345
REM for more environment variable options, please refer to the void-terminal project
python -m samples.app.launch_gradio_gui
```
> Note:
> When you run `launch_gradio_gui`, the program automatically installs any missing additional dependencies with `pip install --user ...`.
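
Before launching, you can sanity-check the config file. The snippet below is a minimal, optional sketch (it is not part of the GUI; `./OAI_CONFIG_LIST` is just the path exported above): it confirms the file parses as JSON and previews how an Azure `base_url` is split into endpoint and deployment name, mirroring what `init_config` does in this commit.

```python
import json

# Load the config list created in step 1 (path matches the export above).
with open("./OAI_CONFIG_LIST") as f:
    config_list = json.load(f)

for cfg in config_list:
    print(cfg["model"], cfg.get("api_type", "openai"))
    if cfg.get("api_type") == "azure":
        # Same parsing as init_config(): the endpoint is everything before
        # "openai/deployments/", the engine is the deployment-name segment.
        base_url = cfg["base_url"]
        print("  endpoint:", base_url.split("openai/deployments/")[0])
        print("  engine:  ", base_url.split("openai/deployments/")[-1].split("/chat/completions")[0])
```
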
## ⭐Customization
1. Create a Python file `test.py` from the template below.
2. Edit it to define your own group chat.
3. Run it with `python test.py` (see the launch example after the template).
```python
from samples.app.gradio_gui import init_config  # do not remove this line.
llm_config = init_config()  # do not remove this line.

from samples.app.gradio_gui.general import AutoGenGroupChat
from samples.app.gradio_gui.plugin import autogen_terminal
from samples.app.gradio_gui.gradio_service import main
import os


# <------------------- define autogen agents (group chat) ------------------->
class AutoGenGroupChat(AutoGenGroupChat):
    def define_agents(self):
        from autogen import AssistantAgent, UserProxyAgent

        agents = [
            {
                "name": "Engineer",  # name of the agent.
                "cls": AssistantAgent,  # class of the agent.
                "llm_config": llm_config,
                "system_message": "Engineer_Prompt.",
            },
            {
                "name": "user_proxy",  # name of the agent.
                "cls": UserProxyAgent,  # class of the agent.
                "human_input_mode": "NEVER",  # never ask for human input.
                # disables llm-based auto reply.
                "llm_config": False,
                "code_execution_config": False,
                "system_message": "A_Human_Admin.",
            },
        ]
        return agents

    def define_group_chat_manager_config(self):
        llm_config.update({"temperature": 0})
        return {"llm_config": llm_config}


def autogen_terminal_groupchat(*args, **kwargs):
    # the Callback string must point back to this function ("<module>-><function>") so follow-up messages reach it
    return autogen_terminal(
        *args,
        AutoGenFn=AutoGenGroupChat,
        Callback=f"{os.path.basename(__file__).split('.py')[0]}->autogen_terminal_groupchat",
        **kwargs,
    )


if __name__ == "__main__":
    main(
        {
            "AutoGen sci group chat": {
                "Group": "Agent",
                "Color": "stop",
                "AsButton": True,
                "AdvancedArgs": False,
                "Function": autogen_terminal_groupchat,
            },
        }
    )
```
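
To launch your customized group chat, run `test.py` the same way as the bundled entry point. This is only a sketch: the environment variables are the same ones shown in the Run section above, and `test.py` is the file created in step 1.

```bash
export OAI_CONFIG_LIST='./OAI_CONFIG_LIST'
export AUTOGEN_USE_DOCKER='False'
export WEB_PORT=12345
python test.py
```
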
@@ -25,16 +25,12 @@ def try_install_deps(deps, reload_m=[]):
import void_terminal
except Exception:
try_install_deps(deps=["void-terminal>=0.0.9"])
try_install_deps(
deps=["https://github.com/binary-husky/gpt_academic/raw/master/docs/gradio-3.32.6-py3-none-any.whl"]
)
try_install_deps(deps=["gradio-stable-fork>=3.32.6"])
return True

if gr.__version__ not in ["3.32.6"]:
# this is a special version of gradio, which is not available on pypi.org
try_install_deps(
deps=["https://github.com/binary-husky/gpt_academic/raw/master/docs/gradio-3.32.6-py3-none-any.whl"]
)
try_install_deps(deps=["gradio-stable-fork>=3.32.6"])
return True


@@ -50,7 +46,7 @@ def init_config_list():
Please run with
`export OAI_CONFIG_LIST='/path/to/OAI_CONFIG_LIST'`
to set the path to config list file, and then run
`python -m autogen.launch_gui`
`python -m samples.app.launch_gradio_gui`
to start the GUI.
"""
)
@@ -85,14 +81,14 @@ def init_config():
# void_terminal.set_conf(key="LLM_MODEL", value="gpt-3.5-turbo-16k")
if llm_config["config_list"][0].get("api_type", "") == "azure":
model = "azure-" + llm_config["config_list"][0]["model"]
api_base = llm_config["config_list"][0]["api_base"]
if api_base.endswith("/"):
api_base = api_base[:-1]
base_url = llm_config["config_list"][0]["base_url"]
AZURE_ENDPOINT = base_url.split("openai/deployments/")[0]
AZURE_ENGINE = base_url.split("openai/deployments/")[-1].split("/chat/completions")[0]
AZURE_CFG_ARRAY = {
model: {
"AZURE_ENDPOINT": llm_config["config_list"][0]["api_base"] + "/",
"AZURE_ENDPOINT": AZURE_ENDPOINT,
"AZURE_API_KEY": llm_config["config_list"][0]["api_key"],
"AZURE_ENGINE": llm_config["config_list"][0]["deployment_id"],
"AZURE_ENGINE": AZURE_ENGINE,
"AZURE_MODEL_MAX_TOKEN": 8192,
},
}
@@ -1,5 +1,5 @@
from void_terminal.toolbox import trimmed_format_exc, ProxyNetworkActivate
from autogen.gradio_gui.utils.pipe import PluginMultiprocessManager, PipeCom
from samples.app.gradio_gui.pipe import PluginMultiprocessManager, PipeCom
import time


@@ -70,7 +70,7 @@ def exe_autogen(self, input):
# ⭐⭐ run in subprocess
import autogen
from void_terminal.toolbox import trimmed_format_exc, ProxyNetworkActivate
from autogen.gradio_gui.utils.pipe import PipeCom
from samples.app.gradio_gui.pipe import PipeCom

input = input.content
with ProxyNetworkActivate("AutoGen"):
@@ -3,7 +3,6 @@
import importlib
import base64
import gradio as gr
from void_terminal.check_proxy import get_current_version
from void_terminal.themes.theme import adjust_theme, advanced_css, theme_declaration, load_dynamic_theme
from void_terminal.request_llms.bridge_all import predict
from void_terminal.core_functional import get_core_functions
@@ -29,7 +28,7 @@ def main(plugins):
# this is a special version of gradio, which is not available on pypi.org
raise ModuleNotFoundError(
"Use the built-in Gradio for the best experience!"
+ "Please run `pip install -r https://github.com/binary-husky/gpt_academic/raw/master/docs/gradio-3.32.6-py3-none-any.whl` Command to install built-in Gradio and other dependencies, See details in requirements.txt."
+ "Please run `pip uninstall gradio` and `pip install gradio-stable-fork>=3.32.6` Command to install built-in Gradio."
)

proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION = get_conf(
File renamed without changes.
File renamed without changes.
17 changes: 8 additions & 9 deletions autogen/launch_gui.py → samples/app/launch_gradio_gui.py
@@ -1,13 +1,12 @@
from autogen.gradio_gui import install_dependencies, init_config
from samples.app.gradio_gui import install_dependencies, init_config # do not move this line

if install_dependencies():
from autogen.gradio_gui.utils.general import AutoGenGeneral, AutoGenGroupChat
from autogen.gradio_gui.plugin import autogen_terminal
from autogen.gradio_gui.gradio_service import main
if install_dependencies(): # do not move this line
llm_config = init_config() # do not move this line
from samples.app.gradio_gui.general import AutoGenGeneral, AutoGenGroupChat
from samples.app.gradio_gui.plugin import autogen_terminal
from samples.app.gradio_gui.gradio_service import main
from void_terminal.toolbox import CatchException

llm_config = init_config()


class AutoGenAskHuman(AutoGenGeneral):
def define_agents(self):
@@ -90,14 +89,14 @@ def define_group_chat_manager_config(self):
@CatchException
def autogen_terminal_fn_01(*args, **kwargs):
return autogen_terminal(
*args, AutoGenFn=AutoGenAskHuman, Callback="autogen.launch_gui->autogen_terminal_fn_01", **kwargs
*args, AutoGenFn=AutoGenAskHuman, Callback="samples.app.launch_gradio_gui->autogen_terminal_fn_01", **kwargs
)


@CatchException
def autogen_terminal_fn_02(*args, **kwargs):
return autogen_terminal(
*args, AutoGenFn=AutoGenGroupChat, Callback="autogen.launch_gui->autogen_terminal_fn_02", **kwargs
*args, AutoGenFn=AutoGenGroupChat, Callback="samples.app.launch_gradio_gui->autogen_terminal_fn_02", **kwargs
)


2 changes: 1 addition & 1 deletion setup.py
@@ -51,7 +51,7 @@
"mathchat": ["sympy", "pydantic==1.10.9", "wolframalpha"],
"retrievechat": ["chromadb", "sentence_transformers", "pypdf", "ipython"],
"teachable": ["chromadb"],
"gui": ["void-terminal>=0.0.9"],
"gui": ["void-terminal>=0.0.9", "gradio-stable-fork>=3.32.6"],
"lmm": ["replicate", "pillow"],
},
classifiers=[
8 changes: 4 additions & 4 deletions test/test_gui.py
@@ -1,7 +1,7 @@
from autogen.gradio_gui import init_config
from autogen.gradio_gui.utils.general import AutoGenGroupChat
from autogen.gradio_gui.plugin import autogen_terminal
from autogen.gradio_gui.gradio_service import main
from samples.app.gradio_gui import init_config
from samples.app.gradio_gui.general import AutoGenGroupChat
from samples.app.gradio_gui.plugin import autogen_terminal
from samples.app.gradio_gui.gradio_service import main
import os

llm_config = init_config()
