# Lord-Raven
# Playing with different models.
# Snapshot: commit 7badb14 (raw / history / blame, 743 bytes)
import gradio
import json
from transformers import pipeline
from transformers import AutoTokenizer
classifier = pipeline(task='zero-shot-classification', model='tasksource/deberta-small-long-nli', device=0)
def zero_shot_classification(data_string):
    """Run zero-shot classification on a JSON request and return a JSON response.

    Args:
        data_string: JSON object string with required keys 'sequence' (str)
            and 'candidate_labels' (list of str), and optional keys
            'hypothesis_template' (str) and 'multi_label' (bool).

    Returns:
        The pipeline's result dict serialized back to a JSON string.

    Raises:
        json.JSONDecodeError: if data_string is not valid JSON.
        KeyError: if 'sequence' or 'candidate_labels' is missing.
    """
    print(data_string)
    data = json.loads(data_string)
    print(data)
    # Optional keys fall back to the transformers pipeline defaults instead of
    # raising KeyError when the caller omits them.
    results = classifier(
        data['sequence'],
        candidate_labels=data['candidate_labels'],
        hypothesis_template=data.get('hypothesis_template', 'This example is {}.'),
        multi_label=data.get('multi_label', False),
    )
    response_string = json.dumps(results)
    return response_string
# Wire the classifier function into a minimal Gradio UI: one textbox in
# (the raw JSON request), one textbox out (the JSON response).
gradio_interface = gradio.Interface(
fn = zero_shot_classification,
inputs = gradio.Textbox(label="JSON Input"),
outputs = gradio.Textbox()
)
# Start the web server (blocks; default host/port since no args are given).
gradio_interface.launch()