Skip to content

Commit

Permalink
Merge branch 'frontier': Windows deps bug fix
Browse files Browse the repository at this point in the history
  • Loading branch information
binary-husky committed Sep 8, 2024
2 parents 8222f63 + 7ef3977 commit 4e041e1
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 5 deletions.
8 changes: 6 additions & 2 deletions crazy_functions/Rag_Interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,14 @@

VECTOR_STORE_TYPE = "Milvus"

# Select the vector-store backend at import time.
# Prefer Milvus, but fall back to the simple llama-index store when the
# Milvus worker cannot be imported (e.g. pymilvus missing or broken on
# Windows — the motivation for this fix).
if VECTOR_STORE_TYPE == "Milvus":
    try:
        from crazy_functions.rag_fns.milvus_worker import MilvusRagWorker as LlamaIndexRagWorker
    except Exception:
        # NOTE(review): deliberately broad (a failed dependency may raise
        # more than ImportError at import time), but no longer a bare
        # `except:` so Ctrl-C / SystemExit still propagate.
        VECTOR_STORE_TYPE = "Simple"

if VECTOR_STORE_TYPE == "Simple":
    from crazy_functions.rag_fns.llama_index_worker import LlamaIndexRagWorker


RAG_WORKER_REGISTER = {}
Expand Down
5 changes: 5 additions & 0 deletions crazy_functions/rag_fns/llama_index_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,11 @@ def load_from_checkpoint(self, checkpoint_dir=None):
def create_new_vs(self):
    """Build and return a fresh, empty vector-store index bound to this
    worker's embedding model."""
    fresh_index = GptacVectorStoreIndex.default_vector_store(embed_model=self.embed_model)
    return fresh_index

def purge(self):
    """Delete this worker's on-disk checkpoint (if any) and replace the
    in-memory index with a brand-new, empty vector store.

    Side effects:
        - Recursively removes ``self.checkpoint_dir`` from disk.
        - Rebinds ``self.vs_index`` to a fresh index via ``create_new_vs``.
    """
    import shutil
    # checkpoint_dir may be None (see the checkpoint_dir=None defaults in
    # this file); shutil.rmtree(None) raises TypeError, which
    # ignore_errors=True does NOT suppress — guard explicitly.
    if self.checkpoint_dir:
        shutil.rmtree(self.checkpoint_dir, ignore_errors=True)
    self.vs_index = self.create_new_vs()


class LlamaIndexRagWorker(SaveLoad):
def __init__(self, user_name, llm_kwargs, auto_load_checkpoint=True, checkpoint_dir=None) -> None:
Expand Down
4 changes: 1 addition & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,7 @@ zhipuai==2.0.1
tiktoken>=0.3.3
requests[socks]
pydantic==2.5.2
llama-index==0.10.47
llama-index-vector-stores-milvus==0.1.16
pymilvus==2.4.2
llama-index==0.10
protobuf==3.20
transformers>=4.27.1,<4.42
scipdf_parser>=0.52
Expand Down

0 comments on commit 4e041e1

Please sign in to comment.