Anne31415 committed on
Commit
3237bea
·
1 Parent(s): c0ab49b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -0
app.py CHANGED
@@ -1,5 +1,6 @@
1
  import streamlit as st
2
  from dotenv import load_dotenv
 
3
  import pickle
4
  from huggingface_hub import Repository
5
  from PyPDF2 import PdfReader
@@ -12,6 +13,14 @@ from langchain.chains.question_answering import load_qa_chain
12
  from langchain.callbacks import get_openai_callback
13
  import os
14
 
 
 
 
 
 
 
 
 
15
  # Step 1: Clone the Dataset Repository
16
  repo = Repository(
17
  local_dir="Private_Book", # Local directory to clone the repository
@@ -73,6 +82,10 @@ def load_pdf(file_path):
73
  VectorStore = FAISS.from_texts(chunks, embedding=embeddings)
74
  with open(f"{store_name}.pkl", "wb") as f:
75
  pickle.dump(VectorStore, f)
 
 
 
 
76
 
77
  return VectorStore
78
 
 
1
  import streamlit as st
2
  from dotenv import load_dotenv
3
+ import pinecone
4
  import pickle
5
  from huggingface_hub import Repository
6
  from PyPDF2 import PdfReader
 
13
  from langchain.callbacks import get_openai_callback
14
  import os
15
 
16
# Initialise the Pinecone client and make sure the vector index exists.
# NOTE: the API key must be read from the environment — passing the literal
# string "PINECONE_API_KEY" would send that text as the key and always fail.
pinecone.init(
    api_key=os.getenv("PINECONE_API_KEY"),
    # The v2 client also requires the project environment (region);
    # read it from the env with a common default — TODO confirm region.
    environment=os.getenv("PINECONE_ENVIRONMENT", "us-west1-gcp"),
)

INDEX_NAME = "your_vector_index_name"
if INDEX_NAME not in pinecone.list_indexes():
    # create_index requires the embedding dimensionality; 1536 matches
    # OpenAI's text-embedding-ada-002 (the model behind OpenAIEmbeddings).
    pinecone.create_index(name=INDEX_NAME, dimension=1536, metric="cosine", shards=1)
24
  # Step 1: Clone the Dataset Repository
25
  repo = Repository(
26
  local_dir="Private_Book", # Local directory to clone the repository
 
82
  VectorStore = FAISS.from_texts(chunks, embedding=embeddings)
83
  with open(f"{store_name}.pkl", "wb") as f:
84
  pickle.dump(VectorStore, f)
85
+
86
+ # Add Pinecone integration here
87
+ vector_dict = {str(i): vector for i, vector in enumerate(VectorStore.vectors)}
88
+ pinecone.upsert(items=vector_dict, index_name=INDEX_NAME)
89
 
90
  return VectorStore
91