Update app.py
Changing to multiple PDFs
app.py (CHANGED)
@@ -70,8 +70,8 @@ def create_conversational_chain(vector_store):
     load_dotenv()
     llm = Replicate(
         streaming = True,
-        model = "replicate/llama-2-70b-chat:58d078176e02c219e11eb4da5a02a7830a283b14cf8f94537af893ccff5ee781",
-
+        # model = "replicate/llama-2-70b-chat:58d078176e02c219e11eb4da5a02a7830a283b14cf8f94537af893ccff5ee781",
+        model = "meta/llama-2-7b-chat:8e6975e5ed6174911a6ff3d60540dfd4844201974602551e10e9e87ab143d81e",
         callbacks=[StreamingStdOutCallbackHandler()],
         input = {"temperature": 0.01, "max_length" :500,"top_p":1})
     memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
@@ -95,13 +95,13 @@ def main():
     # Initialize session state
     initialize_session_state()
     st.title("Chat Docs CSA")
-    loader = UnstructuredFileLoader('./Highway Traffic Act, R.S.O. 1990, c. H.8[465] - Copy.pdf')
-    documents = loader.load()
-
-
-
-
-
+    # loader = UnstructuredFileLoader('./Highway Traffic Act, R.S.O. 1990, c. H.8[465] - Copy.pdf')
+    # documents = loader.load()
+    documents = []
+    for file_path in file_paths:
+        loader = UnstructuredFileLoader(file_path)
+        loaded_doc = loader.load()  # Assuming this returns a list of pages
+        documents.extend(loaded_doc)
 
     text_splitter=CharacterTextSplitter(separator='\n',
                                         chunk_size=1500,
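
Note that the new loop reads from a file_paths list that is not defined anywhere in this diff. A minimal sketch of one way main() could build it with Streamlit's multi-file uploader follows; the collect_uploaded_file_paths helper, the sidebar widget label, and the temporary-file approach are assumptions for illustration, not part of the original app.

import tempfile

import streamlit as st


def collect_uploaded_file_paths():
    # Hypothetical helper: persist each uploaded PDF to a temp file so
    # UnstructuredFileLoader can read it from disk, and return the paths.
    uploaded_files = st.sidebar.file_uploader(
        "Upload your PDFs", type="pdf", accept_multiple_files=True
    )
    file_paths = []
    for uploaded in uploaded_files or []:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp:
            tmp.write(uploaded.read())
            file_paths.append(tmp.name)
    return file_paths

Writing each upload to a named temporary file keeps the loop in the hunk above unchanged, since UnstructuredFileLoader expects a filesystem path rather than an in-memory buffer.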