from fastapi import FastAPI, UploadFile, File
from transformers import AutoModelForImageClassification, AutoImageProcessor
from PIL import Image
import torch.nn.functional as F
import torch
import io

app = FastAPI()

# Load the image-classification checkpoint and its matching preprocessor once at startup.
model = AutoModelForImageClassification.from_pretrained("prithivMLmods/Gender-Classifier-Mini")
processor = AutoImageProcessor.from_pretrained("prithivMLmods/Gender-Classifier-Mini")


@app.post("/classify/")
async def classify_gender(image: UploadFile = File(...)):
    # Read the uploaded file and decode it as an RGB image.
    contents = await image.read()
    img = Image.open(io.BytesIO(contents)).convert("RGB")

    # Preprocess into model-ready tensors and run inference without tracking gradients.
    inputs = processor(images=img, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits

    # Convert logits to probabilities and pick the highest-scoring class.
    probs = F.softmax(logits, dim=1)
    pred = torch.argmax(probs).item()
    confidence = probs[0][pred].item()
    label = model.config.id2label[pred]

    return {"label": label, "confidence": confidence}