Spaces:
Sleeping
Sleeping
syedMohib44
committed on
Commit
·
4be8fe7
1
Parent(s):
286c8c5
- Dockerfile +1 -1
- app.py +2 -2
Dockerfile
CHANGED
@@ -6,7 +6,7 @@ RUN pip install --no-cache-dir -r requirements.txt
|
|
6 |
|
7 |
WORKDIR /app
|
8 |
COPY app.py .
|
9 |
-
COPY dataset
|
10 |
|
11 |
# Hugging Face cache fix
|
12 |
ENV TRANSFORMERS_CACHE=/app/models/.cache
|
|
|
6 |
|
7 |
WORKDIR /app
|
8 |
COPY app.py .
|
9 |
+
COPY dataset /tmp
|
10 |
|
11 |
# Hugging Face cache fix
|
12 |
ENV TRANSFORMERS_CACHE=/app/models/.cache
|
app.py
CHANGED
@@ -11,7 +11,7 @@ import gradio as gr
|
|
11 |
from gradio import mount_gradio_app
|
12 |
|
13 |
# ------------------- Config ------------------- #
|
14 |
-
DATA_PATH = "
|
15 |
EMBEDDING_MODEL = "./models/all-MiniLM-L6-v2"
|
16 |
QA_MODEL = "./models/bart-large-cnn"
|
17 |
DEVICE = "cuda" if os.environ.get("USE_CUDA") == "1" else "cpu"
|
@@ -60,7 +60,7 @@ def upload_knowledge(data: UploadData):
|
|
60 |
return {"message": "Data uploaded and indexed."}
|
61 |
|
62 |
# --------- Ask Endpoint --------- #
|
63 |
-
@app.get("/ask/")
|
64 |
def ask(question: str, top_k: int = 3):
|
65 |
question_embedding = embedder.encode([question], convert_to_numpy=True)
|
66 |
distances, indices = index.search(question_embedding, top_k)
|
|
|
11 |
from gradio import mount_gradio_app
|
12 |
|
13 |
# ------------------- Config ------------------- #
|
14 |
+
DATA_PATH = "/tmp/pentagon_core.json" # Use /tmp for temporary storage
|
15 |
EMBEDDING_MODEL = "./models/all-MiniLM-L6-v2"
|
16 |
QA_MODEL = "./models/bart-large-cnn"
|
17 |
DEVICE = "cuda" if os.environ.get("USE_CUDA") == "1" else "cpu"
|
|
|
60 |
return {"message": "Data uploaded and indexed."}
|
61 |
|
62 |
# --------- Ask Endpoint --------- #
|
63 |
+
@app.get("/ask/")
|
64 |
def ask(question: str, top_k: int = 3):
|
65 |
question_embedding = embedder.encode([question], convert_to_numpy=True)
|
66 |
distances, indices = index.search(question_embedding, top_k)
|