import gradio as gr
import torch
from huggan.pytorch.pix2pix.modeling_pix2pix import GeneratorUNet
from PIL import Image
from torchvision.transforms import Compose, Resize, ToTensor, Normalize, ToPILImage
from diffusers.utils import load_image
# Preprocessing: resize the input tile to 256x256 and normalize to [-1, 1],
# matching the pix2pix training setup.
transform = Compose(
    [
        Resize((256, 256), Image.BICUBIC),
        ToTensor(),
        Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
    ]
)
# Postprocessing: convert the generated tensor back to a PIL image.
transform2 = ToPILImage()
# Load the pretrained pix2pix generator from the Hugging Face Hub.
generator = GeneratorUNet.from_pretrained("debisoft/stamen-wc-gan")
def greet(coord_zxy):
    # Fetch the CARTO Voyager raster tile for the given "/z/x/y" coordinate.
    image = load_image(
        "https://c.basemaps.cartocdn.com/rastertiles/voyager_labels_under" + coord_zxy + ".png"
    )
    pixel_values = transform(image).unsqueeze(0)
    with torch.no_grad():
        output = generator(pixel_values)
    # The generator ends in Tanh, so rescale from [-1, 1] to [0, 1] before converting to PIL.
    return transform2(output[0].mul(0.5).add(0.5).clamp(0, 1))
iface = gr.Interface(
    fn=greet,
    inputs=[gr.Textbox(label="coord_zxy", value="/18/73237/95677")],
    outputs=[gr.Image(type="pil", width=256, label="Output Image")],
)
iface.queue(api_open=True)
iface.launch()
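
# A minimal usage sketch (not part of the original app): with api_open=True the
# interface can also be called programmatically through the gradio_client
# library once the app is running. "/predict" is Gradio's default endpoint name
# for a single-function Interface; the exact return type of the image output
# may vary with the Gradio version.
#
#   from gradio_client import Client
#   client = Client("http://127.0.0.1:7860")
#   result = client.predict("/18/73237/95677", api_name="/predict")
#   print(result)  # typically a filepath to the generated image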