from PIL import Image
import torch
from torchvision import transforms
import gradio as gr
# load the pretrained HybridNets model from torch hub
model = torch.hub.load('datvuthanh/hybridnets', 'hybridnets', pretrained=True)
model.eval()  # inference mode: freezes batch-norm statistics and disables dropout
# ImageNet normalization statistics
normalize = transforms.Normalize(
    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
)
transform = transforms.Compose([
    transforms.ToTensor(),
    # normalize,
])
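# NOTE: `normalize` is defined above but currently left out of the pipeline, so the
# model receives raw [0, 1] tensors; add it back to the Compose above if the
# checkpoint expects ImageNet-normalized input.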
def inference(img):
    # HybridNets was trained at 640x384 input resolution, so resize to match
    img = img.resize((640, 384))
    # (3, 384, 640) -> (1, 3, 384, 640): add a batch dimension
    img = torch.unsqueeze(transform(img), dim=0)
    # forward pass without tracking gradients
    with torch.no_grad():
        features, regression, classification, anchors, segmentation = model(img)
    # take the first item of each head's output, drop the batch index,
    # and convert to numpy for display
    features_out = features[0][0, :, :].detach().numpy()
    regression_out = regression[0][0, :, :].detach().numpy()
    classification_out = classification[0][0, :, :].detach().numpy()
    anchors_out = anchors[0][0, :, :].detach().numpy()
    segmentation_out = segmentation[0][0, :, :].detach().numpy()
    return features_out, regression_out, classification_out, anchors_out, segmentation_out
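# Minimal local smoke test (a hedged sketch: assumes the example frame listed in
# `examples` below is present in the working directory). Uncomment to run:
# preview = inference(Image.open('frame_00_delay-0.13s.jpg'))
# print([o.shape for o in preview])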
title="HybridNets Demo"
description="Gradio demo for HybridNets: End2End Perception Network pretrained on BDD100k Dataset. To use it, simply upload your image or click on one of the examples to load them. Read more at the links below"
article = "<p style='text-align: center'><a href='https://arxiv.org/abs/2203.09035' target='_blank'>ybridNets: End2End Perception Network</a> | <a href='https://github.com/datvuthanh/HybridNets' target='_blank'>Github Repo</a></p>"
examples=[['frame_00_delay-0.13s.jpg']]
# gr.inputs / gr.outputs were removed in Gradio 3+; pass the components directly
gr.Interface(
    fn=inference,
    inputs=gr.Image(type="pil"),
    outputs=[
        gr.Image(label='Features'),
        gr.Image(label='Regression'),
        gr.Image(label='Classification'),
        gr.Image(label='Anchors'),
        gr.Image(label='Segmentation'),
    ],
    title=title,
    description=description,
    article=article,
    examples=examples,
).launch()
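# Running `python app.py` starts the Gradio server and prints a local URL to open
# in a browser.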