habibahmad committed
Commit 86e4d35 · verified · 1 Parent(s): 67edaad

Update app.py

Files changed (1):
  1. app.py (+55 -45)
app.py CHANGED
@@ -1,68 +1,78 @@
-import gradio as gr
-from langchain_community.document_loaders import PyPDFLoader
-from langchain_community.embeddings import HuggingFaceEmbeddings
-from langchain_community.vectorstores import Chroma
-from langchain_community.chat_models import ChatGroq
-from langchain.chains import ConversationalRetrievalChain
-from langchain.memory import ConversationBufferMemory
 import os
-import shutil
-
-# PDF upload folder
-UPLOAD_DIR = "pdf_uploads"
-if not os.path.exists(UPLOAD_DIR):
-    os.makedirs(UPLOAD_DIR)
-
-# Initialize LLaMA 3 (no API key directly in code)
-llm = ChatGroq(model_name="llama3-8b-8192")
+import gradio as gr
+from transformers import pipeline
+from PyPDF2 import PdfReader
 
-embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
-memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+# Load a summarization pipeline using a free HF model (e.g., "facebook/bart-large-cnn")
+summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
 
-qa_chain = None
+# Load a QA pipeline
+qa_pipeline = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")
 
-def process_pdf(file_path):
-    loader = PyPDFLoader(file_path)
-    pages = loader.load_and_split()
+# Function to extract text from PDF
+def extract_text_from_pdf(pdf_file):
+    reader = PdfReader(pdf_file)
+    text = ""
+    for page in reader.pages:
+        text += page.extract_text() + "\n"
+    return text
 
-    vectordb = Chroma.from_documents(pages, embedding=embeddings)
-    qa_chain = ConversationalRetrievalChain.from_llm(
-        llm=llm,
-        retriever=vectordb.as_retriever(),
-        memory=memory
-    )
-    return qa_chain
+# Process the uploaded PDF and store the text
+pdf_text_store = ""
 
 def upload_pdf(file):
-    global qa_chain
-    file_path = os.path.join(UPLOAD_DIR, os.path.basename(file))
-    shutil.copy(file, file_path)
-    qa_chain = process_pdf(file_path)
-    return "✅ PDF uploaded and processed! Ask me anything about it."
+    global pdf_text_store
+    pdf_text_store = extract_text_from_pdf(file)
+    # Summarize for quick overview
+    summary = summarizer(pdf_text_store[:1000], max_length=200, min_length=50, do_sample=False)[0]['summary_text']
+    return f"✅ PDF processed! Summary:\n\n{summary}"
 
 def chatbot(user_message, history):
-    if qa_chain is None:
+    if not pdf_text_store:
         return "❌ Please upload a PDF first.", history
-
-    response = qa_chain({"question": user_message, "chat_history": history})
-    answer = response["answer"]
+
+    result = qa_pipeline({
+        'context': pdf_text_store,
+        'question': user_message
+    })
+    answer = result["answer"]
     history.append((user_message, answer))
     return "", history
 
-with gr.Blocks(theme=gr.themes.Soft()) as demo:
-    gr.Markdown("<h1 style='text-align:center;'>📄 LLaMA 3 PDF Chatbot</h1>")
+with gr.Blocks(theme=gr.themes.Soft(primary_hue="cyan", secondary_hue="blue")) as demo:
+    gr.HTML(
+        """
+        <div style="text-align:center;">
+            <h1 style="font-size:2.2rem; margin-bottom:0.3em;">📚 Free PDF Chatbot</h1>
+            <p style="font-size:1rem; color:#4b5563; margin:0;">Upload your PDF and ask questions – powered by Hugging Face models only.</p>
+        </div>
+        """
+    )
 
     with gr.Row():
-        pdf_upload = gr.File(label="Upload PDF", file_types=[".pdf"])
-        upload_btn = gr.Button("Process PDF")
+        with gr.Column(scale=2):
+            pdf_upload = gr.File(label="Upload PDF", file_types=[".pdf"])
+        with gr.Column(scale=1, min_width=100):
+            upload_btn = gr.Button("🚀 Process PDF", variant="primary")
 
-    chatbot_ui = gr.Chatbot(height=400)
-    user_input = gr.Textbox(label="Ask something about the PDF...", placeholder="Type your question here and hit Enter")
+    chatbot_ui = gr.Chatbot(label="💬 Chat with your PDF", height=450, show_copy_button=True)
+
+    user_input = gr.Textbox(
+        label="Ask a question about your PDF...",
+        placeholder="Type your question and press Enter",
+        show_label=False
+    )
 
     upload_btn.click(upload_pdf, inputs=pdf_upload, outputs=chatbot_ui)
     user_input.submit(chatbot, [user_input, chatbot_ui], [user_input, chatbot_ui])
 
-    gr.Markdown("<footer style='text-align:center; font-size:0.85rem; color:#64748b;'>Built with LLaMA 3 + LangChain on Hugging Face Spaces 🚀</footer>")
+    gr.HTML(
+        """
+        <div style="text-align:center; font-size:0.85rem; color:#9ca3af; margin-top:2em;">
+            Free, fast and simple – no API keys needed!
+        </div>
+        """
+    )
 
 if __name__ == "__main__":
     demo.launch()
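
For reviewers who want to sanity-check the two Hugging Face pipelines the new app.py relies on, here is a minimal sketch that exercises them outside Gradio. The model names match the diff; the sample text and the script itself are illustrative and not part of the commit (the first run downloads the model weights).

    # Quick local check of the pipelines used in the new app.py.
    from transformers import pipeline

    summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
    qa_pipeline = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")

    # Made-up sample text standing in for extracted PDF content.
    sample_text = (
        "Gradio is an open-source Python library for building machine learning demos. "
        "It lets developers wrap a model in a web interface with a few lines of code."
    )

    # The summarization pipeline returns a list with one dict per input;
    # the generated text is under the 'summary_text' key.
    summary = summarizer(sample_text, max_length=40, min_length=5, do_sample=False)[0]["summary_text"]
    print("Summary:", summary)

    # The question-answering pipeline accepts a dict (or question=/context= kwargs)
    # and returns a dict with 'answer', 'score', 'start', and 'end'.
    result = qa_pipeline({"question": "What is Gradio?", "context": sample_text})
    print("Answer:", result["answer"], "| score:", round(result["score"], 3))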
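
One detail worth checking in the wiring (unchanged by this commit): upload_pdf returns a plain status string, but its output component is chatbot_ui, a gr.Chatbot, which in the tuple format used here expects a list of (user, assistant) pairs. If that combination misbehaves on the installed Gradio version, a small adapter along these lines (hypothetical, not part of the commit) would surface the status as a chat turn instead:

    # Hypothetical adapter, not in the commit: wrap the status string returned by
    # upload_pdf in the list-of-pairs value that a tuple-format gr.Chatbot accepts.
    def upload_pdf_to_chat(file):
        status = upload_pdf(file)      # reuse upload_pdf from app.py as-is
        return [(None, status)]        # a single assistant-only turn in the chat window

    # Only the click handler would change:
    # upload_btn.click(upload_pdf_to_chat, inputs=pdf_upload, outputs=chatbot_ui)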