Update app.py
Browse files
app.py
CHANGED
@@ -5,7 +5,7 @@ from huggingface_hub import login
|
|
5 |
from fastapi import FastAPI, Request
|
6 |
from fastapi.responses import JSONResponse
|
7 |
import uvicorn
|
8 |
-import torch
|
9 |
|
10 |
# Cấu hình logging
|
11 |
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
@@ -24,7 +24,7 @@ try:
|
|
24 |
qa_pipeline = pipeline(
|
25 |
"question-answering",
|
26 |
model="nguyenvulebinh/vi-mrc-base",
|
27 |
-    device=0 if torch.cuda.is_available() else -1
|
28 |
)
|
29 |
logging.info("Model loaded successfully")
|
30 |
except Exception as e:
|
@@ -53,4 +53,4 @@ async def api_answer(request: Request):
|
|
53 |
|
54 |
if __name__ == "__main__":
|
55 |
logging.info("Starting FastAPI...")
|
56 |
-    uvicorn.run(app, host="0.0.0.0", port=7860)
|
|
|
5 |
from fastapi import FastAPI, Request
|
6 |
from fastapi.responses import JSONResponse
|
7 |
import uvicorn
|
8 |
+import torch
|
9 |
|
10 |
# Cấu hình logging
|
11 |
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
|
|
24 |
qa_pipeline = pipeline(
|
25 |
"question-answering",
|
26 |
model="nguyenvulebinh/vi-mrc-base",
|
27 |
+    device=0 if torch.cuda.is_available() else -1
|
28 |
)
|
29 |
logging.info("Model loaded successfully")
|
30 |
except Exception as e:
|
|
|
53 |
|
54 |
if __name__ == "__main__":
|
55 |
logging.info("Starting FastAPI...")
|
56 |
+    uvicorn.run(app, host="0.0.0.0") # Bỏ tham số port để Spaces tự xử lý
|