import gradio as gr
import torch
from utils import colorize

title = "Interactive demo: ZoeDepth"
description = "Unofficial Gradio demo for ZoeDepth: Zero-shot Transfer by Combining Relative and Metric Depth. To use it, simply upload an image or use one of the examples below and click 'Submit'. Results will show up in a few seconds."
examples = [["example.png"]]

repo = "isl-org/ZoeDepth"

# Load the ZoeD_N model (trained on NYU Depth v2) via torch.hub
model_zoe_n = torch.hub.load(repo, "ZoeD_N", pretrained=True)

DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
zoe = model_zoe_n.to(DEVICE)


def process_image(image):
    depth = zoe.infer_pil(image)  # metric depth map as a NumPy array
    colored_depth = colorize(depth)  # map depth values to a color image
    return colored_depth


interface = gr.Interface(
    fn=process_image,
    inputs=[gr.Image(type="pil")],
    outputs=[gr.Image(type="pil", label="Depth")],
    title=title,
    description=description,
    examples=examples,
)
interface.launch(debug=True)