# --- Scrape artifacts from the hosting page (not part of the app code) ---
# Spaces: Runtime error
# File size: 1,447 Bytes
# Commit hashes: e74e489 1243447 b213e2f a36a6fa f7173df e74e489 5075641 e74e489
import gradio as gr
import torch
from utils import get_image_from_url, colorize
from PIL import Image
import matplotlib.pyplot as plt
# UI copy shown at the top of the Gradio demo page.
# NOTE: the original description contained a stray closing `</p>` with no
# matching opening tag; it has been removed so the rendered HTML is valid.
title = "Interactive demo: ZoeDepth"
description = "Unofficial Gradio Demo for using ZoeDepth: Zero-shot Transfer by Combining Relative and Metric Depth. ZoeDepth is a technique that lets you perform metric depth estimation from a single image. For more information, please refer to the paper or the <a href='https://github.com/isl-org/ZoeDepth' style='text-decoration: underline;' target='_blank'> Github </a> implementation. To use it, simply upload an image or use one of the examples below and click 'Submit'. Results will show up in a few seconds."

# Example images offered in the demo UI (must exist next to this script).
examples = [["example.png"], ["example_2.png"]]

# torch.hub repository identifier for ZoeDepth.
repo = "isl-org/ZoeDepth"

# Zoe_N: load the pretrained "ZoeD_N" variant from torch.hub.
# This downloads weights on first run (network access required).
model_zoe_n = torch.hub.load(repo, "ZoeD_N", pretrained=True)

# Run inference on GPU when available; fall back to CPU otherwise.
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
zoe = model_zoe_n.to(DEVICE)
def process_image(image):
    """Estimate metric depth for a single PIL image.

    Runs the globally-loaded ZoeDepth model on *image* and returns the
    predicted depth map rendered as a colorized image via ``colorize``.
    """
    depth_map = zoe.infer_pil(image)  # per-pixel depth as a numpy array
    return colorize(depth_map)
# Build the Gradio UI: one image in, one colorized depth map out.
# FIX: the original final line ended with a stray " |" (scrape artifact),
# which is a SyntaxError; it has been removed. Keyword spacing is also
# normalized (PEP 8: no spaces around "=" in keyword arguments).
interface = gr.Interface(
    fn=process_image,
    inputs=[gr.Image(type="pil")],
    outputs=[gr.Image(type="pil", label="Depth")],
    title=title,
    description=description,
    examples=examples,
)

# debug=True surfaces server tracebacks in the browser while the demo runs.
interface.launch(debug=True)