From 3058d21bb1fdf86bc4dce2543e7fcf18927339f8 Mon Sep 17 00:00:00 2001
From: Ajinkya Indulkar <26824103+ajndkr@users.noreply.github.com>
Date: Sun, 14 May 2023 16:50:11 +0200
Subject: [PATCH] :hammer: update examples

---
 examples/app/conversation_chain.py       |  6 +++---
 examples/app/conversational_retrieval.py |  4 ++--
 examples/app/retrieval_qa_w_sources.py   |  6 +++---
 examples/requirements.in                 |  2 +-
 examples/requirements.txt                | 10 +++++-----
 5 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/examples/app/conversation_chain.py b/examples/app/conversation_chain.py
index 42634c1..8fad391 100644
--- a/examples/app/conversation_chain.py
+++ b/examples/app/conversation_chain.py
@@ -8,9 +8,9 @@
 from langchain.chat_models import ChatOpenAI
 from pydantic import BaseModel
 
-from fastapi_async_langchain.responses import StreamingResponse
-from fastapi_async_langchain.testing import mount_gradio_app
-from fastapi_async_langchain.websockets import WebsocketConnection
+from lanarky.responses import StreamingResponse
+from lanarky.testing import mount_gradio_app
+from lanarky.websockets import WebsocketConnection
 
 load_dotenv()
 
diff --git a/examples/app/conversational_retrieval.py b/examples/app/conversational_retrieval.py
index 67fca9a..b367ef9 100644
--- a/examples/app/conversational_retrieval.py
+++ b/examples/app/conversational_retrieval.py
@@ -9,8 +9,8 @@
 from langchain.chat_models import ChatOpenAI
 from pydantic import BaseModel
 
-from fastapi_async_langchain.responses import StreamingResponse
-from fastapi_async_langchain.testing import mount_gradio_app
+from lanarky.responses import StreamingResponse
+from lanarky.testing import mount_gradio_app
 
 load_dotenv()
 
diff --git a/examples/app/retrieval_qa_w_sources.py b/examples/app/retrieval_qa_w_sources.py
index 45eeb49..8611142 100644
--- a/examples/app/retrieval_qa_w_sources.py
+++ b/examples/app/retrieval_qa_w_sources.py
@@ -8,9 +8,9 @@
 from langchain.chat_models import ChatOpenAI
 from pydantic import BaseModel
 
-from fastapi_async_langchain.responses import StreamingResponse
-from fastapi_async_langchain.testing import mount_gradio_app
-from fastapi_async_langchain.websockets import WebsocketConnection
+from lanarky.responses import StreamingResponse
+from lanarky.testing import mount_gradio_app
+from lanarky.websockets import WebsocketConnection
 
 load_dotenv()
 
diff --git a/examples/requirements.in b/examples/requirements.in
index cbe4680..7e96370 100644
--- a/examples/requirements.in
+++ b/examples/requirements.in
@@ -5,4 +5,4 @@ faiss-cpu
 gradio
 fastapi
 uvicorn[standard]
-fastapi-async-langchain
+lanarky
diff --git a/examples/requirements.txt b/examples/requirements.txt
index 6564708..cb089e7 100644
--- a/examples/requirements.txt
+++ b/examples/requirements.txt
@@ -50,9 +50,9 @@ faiss-cpu==1.7.4
 fastapi==0.95.1
     # via
     #   -r requirements.in
-    #   fastapi-async-langchain
+    #   lanarky
     #   gradio
-fastapi-async-langchain==0.5.4
+lanarky==0.6.0
     # via -r requirements.in
 ffmpy==0.3.0
     # via gradio
@@ -109,7 +109,7 @@ kiwisolver==1.4.4
 langchain==0.0.167
     # via
     #   -r requirements.in
-    #   fastapi-async-langchain
+    #   lanarky
 linkify-it-py==2.0.2
     # via markdown-it-py
 markdown-it-py[linkify]==2.2.0
@@ -189,7 +189,7 @@ python-dateutil==2.8.2
     #   pandas
 python-dotenv==1.0.0
     # via
-    #   fastapi-async-langchain
+    #   lanarky
     #   uvicorn
 python-multipart==0.0.6
     # via gradio
@@ -253,7 +253,7 @@ uc-micro-py==1.0.2
     # via linkify-it-py
 urllib3==1.26.15
     # via
-    #   fastapi-async-langchain
+    #   lanarky
     #   requests
 uvicorn[standard]==0.22.0
     # via