samim2024 committed
Commit 7053770 · verified · 1 Parent(s): d06f77f

Update app.py

Files changed (1)
  1. app.py +15 -8
app.py CHANGED
@@ -14,6 +14,7 @@ from langchain.prompts import PromptTemplate
 import faiss
 import uuid
 from dotenv import load_dotenv
+import requests
 
 # Load environment variables
 load_dotenv()
@@ -85,13 +86,16 @@ def process_input(input_data):
 # Question-answering logic
 def answer_question(vectorstore, query):
     if not HUGGINGFACEHUB_API_TOKEN:
-        raise RuntimeError("Missing Hugging Face API token. Please set it in your secrets.")
+        raise RuntimeError("Missing Hugging Face API token. Please set it in your .env file.")
 
-    llm = HuggingFaceHub(
-        repo_id="mistralai/Mistral-7B-Instruct-v0.1",
-        model_kwargs={"temperature": 0.7, "max_length": 512},
-        huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN
-    )
+    try:
+        llm = HuggingFaceHub(
+            repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
+            model_kwargs={"temperature": 0.7, "max_length": 512},
+            huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN
+        )
+    except requests.exceptions.HTTPError as e:
+        raise RuntimeError(f"Failed to initialize LLM: {str(e)}. Check model availability or API token.")
 
     retriever = vectorstore.as_retriever(search_kwargs={"k": 3})
     prompt_template = PromptTemplate(
@@ -107,8 +111,11 @@ def answer_question(vectorstore, query):
         chain_type_kwargs={"prompt": prompt_template}
     )
 
-    result = qa_chain({"query": query})
-    return result["result"].split("Answer:")[-1].strip()
+    try:
+        result = qa_chain({"query": query})
+        return result["result"].split("Answer:")[-1].strip()
+    except requests.exceptions.HTTPError as e:
+        raise RuntimeError(f"Error querying LLM: {str(e)}. Please try again or check model endpoint.")
 
 # Sidebar with BSNL logo and authentication
 with st.sidebar:
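
For context, a minimal caller-side sketch of how the reworked answer_question might be wired into the Streamlit UI. This is not part of the commit: it assumes process_input (visible in the hunk header above) returns the FAISS vectorstore, and input_data here is a hypothetical name for whatever the upload widget provides.

    import streamlit as st

    query = st.text_input("Ask a question about your documents")
    if query:
        try:
            # input_data is assumed to come from an upload widget upstream
            vectorstore = process_input(input_data)
            answer = answer_question(vectorstore, query)
            st.write(answer)
        except RuntimeError as err:
            # After this commit, missing-token, model-availability, and
            # endpoint failures all surface as RuntimeError with a
            # human-readable message, so one except branch covers them.
            st.error(str(err))

Funneling the HTTP failures into RuntimeError inside answer_question keeps the UI layer down to a single except branch like the one above.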