samim2024 committed
Commit 89478cd · verified · 1 Parent(s): 53b8925

Update app.py

Files changed (1)
  1. app.py +5 -3
app.py CHANGED
@@ -13,6 +13,7 @@ from langchain_chroma import Chroma
 from langchain_community.document_loaders import TextLoader
 from langchain_community.embeddings.sentence_transformer import (SentenceTransformerEmbeddings,)
 from langchain_text_splitters import CharacterTextSplitter
+from langchain.chains import RetrievalQA
 
 #import vertexai
 #from langchain.llms import VertexAI
@@ -25,7 +26,7 @@ from langchain_text_splitters import CharacterTextSplitter
 #Text models can create include document summaries, answers to questions, and labels that classify content.
 
 llm = HuggingFaceEndpoint(repo_id="mistralai/Mistral-7B-Instruct-v0.2", Temperature=0.9)
-model = SentenceTransformer("all-MiniLM-L6-v2")
+#model = SentenceTransformer("all-MiniLM-L6-v2")
 
 #llm = VertexAI(model_name="text-bison@001",max_output_tokens=256,temperature=0.1,top_p=0.8,top_k=40,verbose=True,)
 
@@ -59,7 +60,6 @@ def create_langchain_index(input_text):
     # split it into chunks
     text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
     docs = text_splitter.split_documents(documents)
-    print(docs)
     # create the open-source embedding function
     embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
     # load it into Chroma
@@ -84,7 +84,9 @@ def create_langchain_index(input_text):
 @st.cache_data
 def get_response(input_text,query):
     print(f"--querying---{query}")
-    response = index.query(query,llm=llm)
+    retrieval_chain = RetrievalQA.from_chain_type(llm, chain_type="stuff", retriever=new_db.as_retriever())
+    response = retrieval_chain.run(query)
+    #response = index.query(query,llm=llm)
     return response
 
 #The below code is a simple flow to accept the webpage link and process the queries
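For context, the change above replaces the direct index.query(query, llm=llm) call with a RetrievalQA chain over the Chroma store. Below is a minimal, self-contained sketch of that retrieval pattern; the HuggingFaceEndpoint import path, the WebBaseLoader, the sample URL, and any variable names outside the hunks shown above are assumptions for illustration, not code taken from app.py.

# Sketch of the RetrievalQA flow this commit switches to (illustrative, not the full app.py)
from langchain_chroma import Chroma
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.embeddings.sentence_transformer import SentenceTransformerEmbeddings
from langchain_text_splitters import CharacterTextSplitter
from langchain.chains import RetrievalQA
from langchain_huggingface import HuggingFaceEndpoint  # assumed import path for the endpoint LLM

# Load a webpage and split it into chunks, as create_langchain_index does
documents = WebBaseLoader("https://example.com").load()
docs = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0).split_documents(documents)

# Create the open-source embedding function and load the chunks into Chroma
embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
new_db = Chroma.from_documents(docs, embeddings)

# Hugging Face inference endpoint; the documented keyword is lowercase temperature
llm = HuggingFaceEndpoint(repo_id="mistralai/Mistral-7B-Instruct-v0.2", temperature=0.9)

# "stuff" packs the retrieved chunks directly into the prompt before querying the LLM
retrieval_chain = RetrievalQA.from_chain_type(llm, chain_type="stuff", retriever=new_db.as_retriever())
response = retrieval_chain.run("What is this page about?")
print(response)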