import gradio as gr
from transformers import AutoTokenizer
import torch
from tiny_finbert import TinyFinBERTRegressor, preprocess_texts
import os
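
# Paths and device setup: weights and tokenizer are expected under ./saved_model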
MODEL_DIR = "./saved_model"
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
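
# Load the tokenizer and the trained regressor weights, then switch to inference mode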
tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
model = TinyFinBERTRegressor().to(DEVICE)
model.load_state_dict(torch.load(os.path.join(MODEL_DIR, "regressor_model.pt"), map_location=DEVICE))
model.eval()
def predict_sentiment(text):
    """Score a single financial sentence and map the score to a sentiment label."""
    processed = preprocess_texts([text])[0]
    inputs = tokenizer(processed, return_tensors="pt", truncation=True, padding='max_length', max_length=128)
    # Drop token_type_ids, which the regressor does not use, and move tensors to the target device
    inputs = {k: v.to(DEVICE) for k, v in inputs.items() if k != "token_type_ids"}
    with torch.no_grad():
        score = model(**inputs)["score"].item()
    if score > 0.3:
        interpretation = "positive"
    elif score < -0.3:
        interpretation = "negative"
    else:
        interpretation = "neutral"
    # Return a (score, label) tuple so the values map onto the two Gradio output components
    return round(score, 4), interpretation
iface = gr.Interface(
    fn=predict_sentiment,
    inputs=gr.Textbox(label="Enter financial sentence"),
    outputs=[
        gr.Number(label="Sentiment Score"),
        gr.Textbox(label="Interpretation")
    ],
    title="TinyFinBERT Sentiment Analysis"
)

iface.launch()