import torch
import torch.nn.functional as F

from .model_loader import get_model_tokenizer

# Run inference on GPU when available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


def classify_text(text: str):
    """Classify a piece of text as human-written or AI-generated."""
    model, tokenizer = get_model_tokenizer()

    # Tokenize and move the input tensors to the same device as the model.
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=512)
    inputs = {k: v.to(device) for k, v in inputs.items()}

    with torch.no_grad():
        outputs = model(**inputs)

    # Some models return raw logits directly; others return a ModelOutput with a .logits field.
    logits = outputs if isinstance(outputs, torch.Tensor) else outputs.logits

    probs = F.softmax(logits, dim=1)
    pred = torch.argmax(probs, dim=1).item()
    prob_percent = probs[0][pred].item() * 100

    # Label 0 -> Human, label 1 -> AI; confidence is the predicted class probability in percent.
    return {"label": "Human" if pred == 0 else "AI", "confidence": round(prob_percent, 2)}