Update app.py
app.py
CHANGED
@@ -4,35 +4,36 @@ from langchain_community.vectorstores import FAISS
 from langchain_huggingface import HuggingFaceEmbeddings
 from groq import Groq
 
-# Path to the FAISS index
+# ✅ Path to the FAISS index
 faiss_path = "faiss_index"
 
-# Make sure
+# ✅ Make sure the FAISS index exists before loading it
 if not os.path.exists(f"{faiss_path}/index.faiss"):
-    raise FileNotFoundError(f"⚠️
+    raise FileNotFoundError(f"⚠️ FAISS index not found at {faiss_path}. Make sure you have uploaded it!")
 
-# Load the FAISS index
+# ✅ Load the FAISS index
 vector_store = FAISS.load_local(
     faiss_path,
     HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2"),
     allow_dangerous_deserialization=True
 )
 
-# Load the API key from
+# ✅ Load the API key from the Hugging Face Spaces Secrets
 GROQ_API_KEY = os.getenv("GROQ_API_KEY")
 if not GROQ_API_KEY:
     raise ValueError("⚠️ Groq API key not found! Set the 'GROQ_API_KEY' environment variable.")
 
-# Initialize the Groq API
+# ✅ Initialize the Groq API client
 client = Groq(api_key=GROQ_API_KEY)
 
 def retrieve_and_generate(query, history=[]):
-    """Retrieve
-
+    """Retrieve documents & generate an answer from the LLM."""
+
+    # Fetch the 3 most relevant documents
     docs = vector_store.similarity_search(query, k=3)
     context = "\n\n".join([doc.page_content for doc in docs])
 
-    # Generate
+    # Generate a response with the Groq model
     response = client.chat.completions.create(
         model="mixtral-8x7b-32768",
         messages=[
@@ -43,18 +44,22 @@ def retrieve_and_generate(query, history=[]):
         max_tokens=200
     )
 
-    # Return the result in chat format
     bot_response = response.choices[0].message.content
-    history.append((query, bot_response))  # Save
-    return history
-
-# UI with Gradio
-
-
-
-
-
-
-)
+    history.append((query, bot_response))  # ✅ Save the chat history
+    return history
+
+# ✅ UI with Gradio
+with gr.Blocks() as demo:
+    gr.Markdown("## 🤖 RoboHome RAG Chatbot")
+    gr.Markdown("This chatbot answers questions based on the RoboHome documentation.")
+
+    chatbot = gr.Chatbot(label="💬 RoboHome answers")
+    input_text = gr.Textbox(label="Ask a question about RoboHome", placeholder="Type your question here...")
+    send_button = gr.Button("Send")
+
+    def process_input(user_input, history):
+        return retrieve_and_generate(user_input, history)
+
+    send_button.click(process_input, inputs=[input_text, chatbot], outputs=chatbot)
 
-
+demo.launch(share=True)
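The updated app only loads faiss_index/index.faiss and aborts if it is missing. For context, here is a minimal sketch of a one-off script that could produce that folder; the script name (build_index.py), the source file name (robohome_docs.txt), and the splitter settings are assumptions, but the embedding model and the faiss_index output path must match what app.py loads.

# build_index.py (hypothetical helper): builds the faiss_index/ folder that
# app.py expects; run it once locally and upload the result to the Space.
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Read the RoboHome documentation from a plain-text file (assumed name).
with open("robohome_docs.txt", encoding="utf-8") as f:
    raw_text = f.read()

# Split into overlapping chunks so similarity_search(k=3) returns focused passages.
chunks = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50).split_text(raw_text)

# Embed with the same model app.py loads and persist to the same path.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
FAISS.from_texts(chunks, embeddings).save_local("faiss_index")  # writes index.faiss and index.pkl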
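The hunk hides the unchanged lines that build the actual chat messages from the retrieved context, so the prompt layout below is an assumption, not the app's exact prompt. With that caveat, this sketch reproduces the same retrieval-plus-Groq-completion flow without the Gradio UI, which can help verify the API key and the index before the Space boots; the example question is made up.

# Headless sanity check (sketch): same retrieval + generation flow as app.py,
# minus the Gradio UI. Assumes GROQ_API_KEY is set and faiss_index/ exists.
import os
from groq import Groq
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
store = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
client = Groq(api_key=os.environ["GROQ_API_KEY"])

query = "How do I connect RoboHome to Wi-Fi?"  # made-up example question
context = "\n\n".join(doc.page_content for doc in store.similarity_search(query, k=3))

response = client.chat.completions.create(
    model="mixtral-8x7b-32768",  # model name taken from app.py
    messages=[  # prompt layout here is an assumption, not the app's exact messages
        {"role": "system", "content": "Answer using only the provided context."},
        {"role": "user", "content": f"Context:\n{context}\n\nQuestion: {query}"},
    ],
    max_tokens=200,
)
print(response.choices[0].message.content)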