mahi2k committed
Commit 1b056ff · verified · 1 parent: 2284eb2

Update app.py

Files changed (1):
  app.py  +1 -14
app.py CHANGED
@@ -7,19 +7,6 @@ Original file is located at
  https://colab.research.google.com/drive/1z-I8OtD-uFaX9KHENLECbxMmPPuPDNSp
  """
 
- pip install transformers==4.37.2 optimum==1.12.0 --quiet
- pip install auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/ --quiet
- pip install langchain==0.1.9 --quiet
- # !pip install chromadb
- pip install sentence_transformers==2.4.0 --quiet
- pip install unstructured --quiet
- pip install pdf2image --quiet
- pip install pdfminer.six==20221105 --quiet
- pip install unstructured-inference --quiet
- pip install faiss-gpu==1.7.2 --quiet
- pip install pikepdf==8.13.0 --quiet
- pip install pypdf==4.0.2 --quiet
- pip install pillow_heif==0.15.0 --quiet
 
  from langchain.llms import HuggingFacePipeline
  from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig, pipeline
@@ -165,7 +152,7 @@ transformers.logging.set_verbosity_error()
  # answer = result_text[answer_start_index:].strip()
  # print(fill(answer, width=100))
 
- pip install gradio transformers
+
 
  import gradio as gr
 
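The removed lines are shell commands carried over from the Colab notebook; as bare `pip install ...` statements (only the chromadb line was commented out) they are not valid Python inside app.py. On a Hugging Face Space, dependencies like these are typically declared once in a requirements.txt instead. A minimal sketch carrying the same pins, covering both hunks, might look like the following; the commit itself does not show such a file, so this is only an assumption about where the packages would live, not part of the change:

    # requirements.txt (hypothetical; not included in this commit)
    --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
    transformers==4.37.2
    optimum==1.12.0
    auto-gptq
    langchain==0.1.9
    sentence_transformers==2.4.0
    unstructured
    pdf2image
    pdfminer.six==20221105
    unstructured-inference
    faiss-gpu==1.7.2
    pikepdf==8.13.0
    pypdf==4.0.2
    pillow_heif==0.15.0
    gradio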
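With the inline `pip install gradio transformers` gone, the `import gradio as gr` that follows relies on the environment already providing the package. For orientation, a minimal sketch of how such an import is typically wired into a Gradio UI is shown here; the `answer_question` function and the labels are placeholders invented for illustration, not taken from this app.py, which builds its pipeline from the LangChain and transformers imports shown above:

    import gradio as gr

    def answer_question(question: str) -> str:
        # Placeholder: the real app would route the question through the
        # HuggingFacePipeline / LangChain chain constructed earlier in app.py.
        return f"(no model loaded) you asked: {question}"

    demo = gr.Interface(
        fn=answer_question,                  # callable invoked on each submit
        inputs=gr.Textbox(label="Question"),
        outputs=gr.Textbox(label="Answer"),
        title="Document Q&A (sketch)",
    )

    if __name__ == "__main__":
        demo.launch()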