akoksal's picture
Create app.py
e8f27bb
raw
history blame
886 Bytes
import gradio as gr
from transformers import AutoTokenizer, pipeline
import torch
# Load the LongForm-OPT-2.7B instruction-tuned model once at module import.
# NOTE(review): this downloads ~2.7B-parameter weights from the Hugging Face
# Hub on first run and loads them into memory before the UI starts.
tokenizer = AutoTokenizer.from_pretrained("akoksal/LongForm-OPT-2.7B")
# `generate` is a callable text-generation pipeline shared by predict() below.
generate = pipeline('text-generation', model='akoksal/LongForm-OPT-2.7B', tokenizer=tokenizer)
def predict(instruction, topp, temperature, max_length=64):
    """Generate a LongForm-OPT completion for an instruction.

    Args:
        instruction: The user prompt. The LongForm models expect an
            "[EOI]" (end-of-instruction) marker; it is appended
            automatically when missing.
        topp: Nucleus-sampling probability mass passed as ``top_p``.
        temperature: Sampling temperature.
        max_length: Maximum total token length of the generated
            sequence (prompt included). Defaults to 64, matching the
            original hard-coded value.

    Returns:
        The generated text of the first (only) returned sequence, as
        produced by the text-generation pipeline.
    """
    # LongForm training data terminates instructions with "[EOI]";
    # append it so the model sees the expected format.
    if "[EOI]" not in instruction:
        instruction = instruction + " [EOI]"
    result = generate(instruction,
                      do_sample=True,
                      max_length=max_length,
                      top_p=topp,
                      num_return_sequences=1,
                      temperature=temperature
                      )[0]["generated_text"]
    return result
# Build the Gradio UI. The inputs list must match predict's positional
# parameters (instruction, topp, temperature), so the top_p slider comes
# second and the temperature slider third — the original file had them
# swapped, feeding the "temperature" slider into top_p and vice versa.
# (Also removed the stray "\\" line continuations, which were a syntax
# error; continuations are unnecessary inside brackets.)
iface = gr.Interface(
    fn=predict,
    inputs=[
        "text",
        gr.inputs.Slider(0, 1, default=0.90, label="top_p"),
        gr.inputs.Slider(0, 2, default=1, label="temperature"),
    ],
    outputs="text",
)
iface.launch()