import gradio as gr
import kornia as K
from kornia.core import Tensor
from kornia.contrib import ImageStitcher
import kornia.feature as KF
import torch
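
# Panorama stitching demo: LoFTR (pretrained on outdoor scenes) provides local
# feature correspondences between the two input images, and ImageStitcher uses a
# RANSAC-estimated homography to warp and blend them into a single panorama.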
def inference(file_1, file_2):
    # Load both inputs as 1xCxHxW / fp32 / [0, 1] tensors
    img_1: Tensor = K.io.load_image(file_1.name, K.io.ImageLoadType.RGB32)
    img_1 = img_1[None]
    img_2: Tensor = K.io.load_image(file_2.name, K.io.ImageLoadType.RGB32)
    img_2 = img_2[None]

    IS = ImageStitcher(KF.LoFTR(pretrained='outdoor'), estimator='ransac')
    with torch.no_grad():
        result = IS(img_1, img_2)

    # Convert the stitched 1xCxHxW tensor to an HxWxC numpy image for display
    return K.tensor_to_image(result[0])
examples = [
    ['examples/foto1B.jpg', 'examples/foto1A.jpg'],
]
# Gradio I/O components (legacy gr.inputs / gr.outputs API)
inputs = [
    gr.inputs.Image(type='file', label='Input Image'),
    gr.inputs.Image(type='file', label='Input Image'),
]
outputs = [
    # the stitched result is returned as a numpy image, hence type='numpy'
    gr.outputs.Image(type='numpy', label='Output Image'),
]
title = "Image Stitching using Kornia and LoFTR"
demo_app = gr.Interface(
    fn=inference,
    inputs=inputs,
    outputs=outputs,
    title=title,
    examples=examples,
    cache_examples=True,
    live=True,
    theme='huggingface',
)
demo_app.launch()