import gradio
import json
from transformers import pipeline

# Zero-shot classification pipeline backed by a small long-context NLI model from the Hugging Face Hub.
classifier = pipeline(task='zero-shot-classification', model='tasksource/deberta-small-long-nli')
def zero_shot_classification(data_string):
    # The Textbox delivers the request as a JSON string; log it, then parse it.
    print(data_string)
    data = json.loads(data_string)
    print(data)
    # Run zero-shot classification with the caller-supplied labels, hypothesis template, and multi-label flag.
    results = classifier(data['sequence'], candidate_labels=data['candidate_labels'], hypothesis_template=data['hypothesis_template'], multi_label=data['multi_label'])
    # Serialize the result so the "json" output component can render it.
    return json.dumps(results)
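
# Illustrative request only: the field values below are made up to show the shape the
# Textbox expects; the Space itself does not prescribe these labels or this template.
#
# {
#     "sequence": "I just adopted a puppy and could not be happier.",
#     "candidate_labels": ["joy", "sadness", "anger"],
#     "hypothesis_template": "This text expresses {}.",
#     "multi_label": false
# }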
# Expose the classifier through a minimal Gradio interface: JSON string in, JSON result out.
gradio_interface = gradio.Interface(
    fn=zero_shot_classification,
    inputs=gradio.Textbox(label="JSON Input"),
    outputs="json",
)
gradio_interface.launch()
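
# Client-side sketch (commented out, not part of the Space): assuming the app is published
# under a placeholder id "your-username/your-space", gradio_client could call the single
# /predict endpoint with a JSON payload shaped like the example above.
#
# from gradio_client import Client
#
# client = Client("your-username/your-space")  # placeholder Space id, not the real one
# result = client.predict(
#     '{"sequence": "I love this!", "candidate_labels": ["positive", "negative"], '
#     '"hypothesis_template": "This review is {}.", "multi_label": false}',
#     api_name="/predict",
# )
# print(result)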