Spaces:
Runtime error
Runtime error
File size: 494 Bytes
d1b0574 c65a1f7 d1b0574 ea1edb1 c65a1f7 ea1edb1 c65a1f7 ab2ce79 c65a1f7 d1b0574 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 |
import gradio as gr
from transformers import AutoModel, AutoProcessor
from PIL import Image
import torch
# Hugging Face repo id of the VLV image-captioning model.
model_name_or_path = "lyttt/VLV_captioner"
# Load once at import time (model download can be slow; reused by every request).
# trust_remote_code=True executes the repo's custom modeling code — acceptable only
# because the repo is pinned by name; NOTE(review): revision="master" is a moving
# target, consider pinning a commit hash for reproducibility.
model = AutoModel.from_pretrained(model_name_or_path, revision="master", trust_remote_code=True,low_cpu_mem_usage=False)
def greet(image):
    """Generate a caption for a PIL image using the global VLV captioner model.

    Args:
        image: a PIL.Image.Image supplied by the Gradio "image" input.

    Returns:
        The generated caption string (first element of the batch).
    """
    # Normalize to 3-channel RGB (Gradio may hand over RGBA/grayscale images).
    image = image.convert("RGB")
    # Inference only — disable autograd bookkeeping to save memory.
    with torch.no_grad():
        outputs = model([image]).generated_text[0]
    # BUG FIX: original returned undefined name `output` (NameError at runtime);
    # the assigned variable is `outputs`.
    return outputs
# Wire the captioning function into a simple image-in / text-out Gradio UI.
# FIX: removed stray trailing " |" (page-scrape residue) after demo.launch(),
# which made the line a syntax error.
demo = gr.Interface(fn=greet, inputs="image", outputs="text")
demo.launch()