farmax committed · verified
Commit eb99b99 · Parent(s): d306a4a

Update app.py

Files changed (1):
  1. app.py +2 -19
app.py CHANGED
@@ -22,6 +22,7 @@ import tqdm
  import accelerate
  import re
  # from chromadb.utils import get_default_config
+ vector_db = ''
  
  # default_persist_directory = './chroma_HF/'
  list_llm = ["mistralai/Mistral-7B-Instruct-v0.2", "mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.1", \
@@ -72,24 +73,6 @@ def load_db():
          embedding_function=embedding)
      return vectordb
  
- def delete_db(vectordb):
-     # Remove all collections
-     collections = vectordb.collections
-     for collection in collections:
-         print(f"Deleting collection: {collection}")
-         vectordb.delete_collection(collection)
- 
-     # Close the ChromaDB client
-     vectordb.client.close()
- 
-     # Remove the main database file
-     db_path = vectordb.client.path
-     if os.path.exists(db_path):
-         os.remove(db_path)
-         print(f"Removed main database file: {db_path}")
- 
-     print("All collections and the database have been completely deleted.")
- 
  # Initialize langchain LLM chain
  def initialize_llmchain(llm_model, temperature, max_tokens, top_k, vector_db, progress=gr.Progress()):
      progress(0.1, desc="Initializing HF tokenizer...")
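Editor's note: the removed delete_db relied on attributes that the LangChain Chroma wrapper does not obviously expose (vectordb.collections, vectordb.client.close(), vectordb.client.path), which is likely why it was dropped rather than fixed. If an equivalent cleanup helper is still wanted, a minimal sketch against the raw chromadb client API could look like the following; the persist_path argument and the assumption of an on-disk PersistentClient store are illustrative, not part of this commit:

import shutil
import chromadb

def delete_db(persist_path):
    # Hypothetical cleanup helper (persist_path is an assumed argument), not part of this commit.
    # Open the persistent on-disk store and drop every collection by name.
    client = chromadb.PersistentClient(path=persist_path)
    for collection in client.list_collections():
        print(f"Deleting collection: {collection.name}")
        client.delete_collection(name=collection.name)
    # Chroma persists a directory, not a single file, so remove the whole tree.
    shutil.rmtree(persist_path, ignore_errors=True)
    print("All collections and the database have been deleted.")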
@@ -224,7 +207,7 @@ def create_collection_name(filepath):
      return collection_name
  
  # Initialize database
- def initialize_database(list_file_obj, chunk_size, chunk_overlap, progress=gr.Progress()):
+ def initialize_database(vector_db, list_file_obj, chunk_size, chunk_overlap, progress=gr.Progress()):
      # Create list of documents (when valid)
      list_file_path = [x.name for x in list_file_obj if x is not None]
      print(list_file_path)
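Editor's note: with this hunk, initialize_database now receives the vector database explicitly as its first argument instead of relying on the module-level vector_db placeholder added above. The Gradio wiring is not shown in this commit; below is a minimal sketch of how the new signature might be hooked up through gr.State, where the component names and the assumption that initialize_database returns the updated store plus a status message are illustrative only:

import gradio as gr

with gr.Blocks() as demo:
    # Holds the Chroma store between event-handler calls (assumed usage)
    vector_db = gr.State()
    documents = gr.Files(file_types=[".pdf"], label="Upload PDF documents")
    chunk_size = gr.Slider(100, 1000, value=600, step=20, label="Chunk size")
    chunk_overlap = gr.Slider(10, 200, value=40, step=10, label="Chunk overlap")
    db_status = gr.Textbox(label="Vector database status")
    generate_db_btn = gr.Button("Generate vector database")

    # First input matches the new initialize_database(vector_db, list_file_obj, ...) signature;
    # the outputs assume the function returns (vector_db, status); adjust to the real return values.
    generate_db_btn.click(
        fn=initialize_database,  # the function defined in app.py
        inputs=[vector_db, documents, chunk_size, chunk_overlap],
        outputs=[vector_db, db_status],
    )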
 