ankitv42 committed
Commit 38f3a0b · verified · 1 Parent(s): af821c4

Update app.py

Files changed (1)
  1. app.py +15 -24
app.py CHANGED
@@ -1,27 +1,23 @@
  import os
  import tempfile
- import torch
  import gradio as gr
  from langchain_community.vectorstores import FAISS
  from langchain_groq import ChatGroq
- from langchain_community.embeddings import HuggingFaceBgeEmbeddings  # Fixed import
+ from langchain_community.embeddings import HuggingFaceBgeEmbeddings
  from langchain.text_splitter import RecursiveCharacterTextSplitter
  from langchain_core.runnables import RunnablePassthrough
- from langchain_community.document_loaders import PyPDFLoader  # Fixed import
+ from langchain.document_loaders import PyPDFLoader
  from langchain import hub

  # Set API key (Replace with your actual key)
- os.environ["GROQ_API_KEY"] = "your_groq_api_key"
+ os.environ["GROQ_API_KEY"] = "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"

- # Check if GPU is available
- device = "cuda" if torch.cuda.is_available() else "cpu"
-
- # Initialize LLM and Embeddings with GPU if available
+ # Initialize LLM and Embeddings
  llm = ChatGroq(model="llama3-8b-8192")
  model_name = "BAAI/bge-small-en"
- hf_embeddings = HuggingFaceBgeEmbeddings(  # Fixed import
+ hf_embeddings = HuggingFaceBgeEmbeddings(
      model_name=model_name,
-     model_kwargs={'device': device},
+     model_kwargs={'device': 'cpu'},
      encode_kwargs={'normalize_embeddings': True}
  )
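Note on the embedding change in this hunk: the commit drops the torch GPU check and pins the model to CPU, while keeping normalize_embeddings=True, so the BGE vectors stay unit-length for the FAISS similarity search over PDF chunks. A minimal sketch, not part of this commit, of what that configuration produces:

from langchain_community.embeddings import HuggingFaceBgeEmbeddings

# Same settings as the diff: CPU-only, normalized vectors.
hf_embeddings = HuggingFaceBgeEmbeddings(
    model_name="BAAI/bge-small-en",
    model_kwargs={"device": "cpu"},
    encode_kwargs={"normalize_embeddings": True},
)

vec = hf_embeddings.embed_query("What is this PDF about?")
print(len(vec))                 # 384-dimensional for bge-small-en
print(sum(x * x for x in vec))  # ~1.0, since the embeddings are normalized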
 
@@ -68,28 +64,23 @@ def ask_question(query):
      if "rag_chain" not in globals():
          return "Please upload and process a PDF first."

-     response = rag_chain.invoke(query)
+     response = rag_chain.invoke(query).content
      return response

- # Gradio UI with Mobile-Friendly Fixes
+ # Gradio UI
  with gr.Blocks() as demo:
      gr.Markdown("# 📄 PDF Chatbot with RAG")
      gr.Markdown("Upload a PDF and ask questions!")
-
-     with gr.Row():
-         pdf_input = gr.File(label="Upload PDF", type="binary")
-         process_button = gr.Button("Process PDF")
-
+
+     pdf_input = gr.File(label="Upload PDF", type="binary")
+     process_button = gr.Button("Process PDF")
      output_message = gr.Textbox(label="Status", interactive=False)
-
-     with gr.Row():
-         query_input = gr.Textbox(label="Ask a Question")
-         submit_button = gr.Button("Submit")
-
+
+     query_input = gr.Textbox(label="Ask a Question")
+     submit_button = gr.Button("Submit")
      response_output = gr.Textbox(label="AI Response")

      process_button.click(process_pdf, inputs=pdf_input, outputs=output_message)
      submit_button.click(ask_question, inputs=query_input, outputs=response_output)

- # 🚀 Fixed launch (removed invalid 'theme' argument)
- demo.launch(share=True)
+ demo.launch()
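The .content change in ask_question implies that rag_chain ends at the ChatGroq model and therefore returns an AIMessage rather than a plain string. The chain itself is built in the unchanged middle of app.py and is not shown in this diff; below is a hypothetical sketch of the usual LCEL wiring that would behave this way (the hub prompt name and the stand-in retriever are assumptions):

from langchain import hub
from langchain_community.embeddings import HuggingFaceBgeEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.runnables import RunnablePassthrough
from langchain_groq import ChatGroq

llm = ChatGroq(model="llama3-8b-8192")
hf_embeddings = HuggingFaceBgeEmbeddings(
    model_name="BAAI/bge-small-en",
    model_kwargs={"device": "cpu"},
    encode_kwargs={"normalize_embeddings": True},
)

# Stand-in index; in app.py the chunks come from the uploaded PDF.
vectorstore = FAISS.from_texts(["example chunk from the PDF"], hf_embeddings)
retriever = vectorstore.as_retriever()

prompt = hub.pull("rlm/rag-prompt")  # a commonly used RAG prompt from LangChain Hub

def format_docs(docs):
    # Join retrieved chunks into one context string for the prompt.
    return "\n\n".join(doc.page_content for doc in docs)

rag_chain = (
    {"context": retriever | format_docs, "question": RunnablePassthrough()}
    | prompt
    | llm  # the chain ends at the chat model, so invoke() returns an AIMessage
)

# Hence the commit's change: the answer text lives on .content.
print(rag_chain.invoke("What is the PDF about?").content)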
 
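The Gradio side keeps gr.File(type="binary"), so process_pdf receives the upload as raw bytes rather than a path. That function is outside this diff; a hypothetical sketch of the step the imports (tempfile, PyPDFLoader, RecursiveCharacterTextSplitter) point at, with chunk sizes chosen as assumptions:

import tempfile

from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter

def process_pdf(pdf_bytes):
    # gr.File(type="binary") hands over raw bytes, but PyPDFLoader expects a
    # file path, so the upload is written to a temporary file first.
    with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp:
        tmp.write(pdf_bytes)
        tmp_path = tmp.name

    docs = PyPDFLoader(tmp_path).load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
    chunks = splitter.split_documents(docs)
    return f"Processed {len(chunks)} chunks. You can now ask questions."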
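On the key setup changed in the first hunk: the commit writes a literal key into os.environ. An equivalent sketch that instead reads the key from the environment (for example a Space secret named GROQ_API_KEY, which langchain_groq picks up automatically) would look like this:

import os

from langchain_groq import ChatGroq

# ChatGroq reads GROQ_API_KEY from the environment, so no hard-coded key is
# needed when the Space defines it as a secret.
assert os.environ.get("GROQ_API_KEY"), "Set GROQ_API_KEY as a secret or env var"
llm = ChatGroq(model="llama3-8b-8192")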