import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import gradio as gr
# Load the tokenizer and model once at startup
MODEL_NAME = "tabularisai/multilingual-sentiment-analysis"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
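# Optional sketch (an assumption, not part of the original script): move the model
# to a GPU when one is available. The inference function below would then also
# need to move its tensors, e.g. inputs = {k: v.to(device) for k, v in inputs.items()}.
# device = "cuda" if torch.cuda.is_available() else "cpu"
# model = model.to(device)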
# Inference function
def analyze_sentiment(user_input):
    inputs = tokenizer(user_input, return_tensors="pt", truncation=True, padding=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    probs = torch.nn.functional.softmax(outputs.logits, dim=-1)
    prediction = torch.argmax(probs, dim=-1).item()
    # Class names follow the model's 5-point sentiment scale
    labels = ["Very Negative", "Negative", "Neutral", "Positive", "Very Positive"]
    return labels[prediction]
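# A possible variant (a sketch, not used by the Interface below): return the full
# probability distribution instead of a single label, reading class names from the
# model's id2label config rather than a hard-coded list. Wired to a gr.Label output,
# it would display all five scores.
def analyze_sentiment_scores(user_input):
    inputs = tokenizer(user_input, return_tensors="pt", truncation=True, padding=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    probs = torch.nn.functional.softmax(outputs.logits, dim=-1)[0]
    # Map each class index to its label name and probability
    return {model.config.id2label[i]: float(p) for i, p in enumerate(probs)}
# Example wiring (commented out so only the Interface below is launched):
# gr.Interface(fn=analyze_sentiment_scores, inputs="text", outputs=gr.Label(num_top_classes=5)).launch()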
# Define the Gradio interface
demo = gr.Interface(fn=analyze_sentiment, inputs="text", outputs="text")

# Launch the app; this call is required for the Space to serve the interface
demo.launch()
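# Once the Space is running, it can also be queried programmatically with the
# gradio_client package (a sketch; "username/space-name" is a placeholder for the
# actual Space id, and "/predict" is the default endpoint name for a gr.Interface):
# from gradio_client import Client
# client = Client("username/space-name")
# print(client.predict("I love this product!", api_name="/predict"))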