#!/usr/bin/env python
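"""Gradio demo app for the DeepDanbooru anime image tagger.

Example invocation (assuming a Hugging Face access token is exported as TOKEN):
    TOKEN=<your-token> python app.py --score-threshold 0.5 --share
"""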
from __future__ import annotations
import argparse
import functools
import os
import pathlib
import tarfile
import deepdanbooru as dd
import gradio as gr
import huggingface_hub
import numpy as np
import PIL.Image
import tensorflow as tf
ORIGINAL_REPO_URL = 'https://github.com/KichangKim/DeepDanbooru'
TITLE = 'KichangKim/DeepDanbooru'
DESCRIPTION = f'A demo for {ORIGINAL_REPO_URL}'
ARTICLE = None
TOKEN = os.environ['TOKEN']
MODEL_REPO = 'hysts/DeepDanbooru'
MODEL_FILENAME = 'model-resnet_custom_v3.h5'
LABEL_FILENAME = 'tags.txt'
def parse_args() -> argparse.Namespace:
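    """Parse command-line options for the demo UI and server."""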
    parser = argparse.ArgumentParser()
    parser.add_argument('--score-slider-step', type=float, default=0.05)
    parser.add_argument('--score-threshold', type=float, default=0.5)
    parser.add_argument('--theme', type=str, default='dark-grass')
    parser.add_argument('--live', action='store_true')
    parser.add_argument('--share', action='store_true')
    parser.add_argument('--port', type=int)
    parser.add_argument('--disable-queue',
                        dest='enable_queue',
                        action='store_false')
    parser.add_argument('--allow-flagging', type=str, default='never')
    parser.add_argument('--allow-screenshot', action='store_true')
    return parser.parse_args()
def load_sample_image_paths() -> list[pathlib.Path]:
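    """Download and extract the sample images on first run and return their sorted paths."""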
    image_dir = pathlib.Path('images')
    if not image_dir.exists():
        dataset_repo = 'hysts/sample-images-TADNE'
        path = huggingface_hub.hf_hub_download(dataset_repo,
                                               'images.tar.gz',
                                               repo_type='dataset',
                                               use_auth_token=TOKEN)
        with tarfile.open(path) as f:
            f.extractall()
    return sorted(image_dir.glob('*'))
def load_model() -> tf.keras.Model:
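    """Download the DeepDanbooru model from the Hugging Face Hub and load it."""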
    path = huggingface_hub.hf_hub_download(MODEL_REPO,
                                           MODEL_FILENAME,
                                           use_auth_token=TOKEN)
    model = tf.keras.models.load_model(path)
    return model
def load_labels() -> list[str]:
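    """Download the tag list and return one label per line."""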
    path = huggingface_hub.hf_hub_download(MODEL_REPO,
                                           LABEL_FILENAME,
                                           use_auth_token=TOKEN)
    with open(path) as f:
        labels = [line.strip() for line in f.readlines()]
    return labels
def predict(image: PIL.Image.Image, score_threshold: float,
            model: tf.keras.Model, labels: list[str]) -> dict[str, float]:
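    """Preprocess the image, run the model, and return tags scoring at or above the threshold."""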
    _, height, width, _ = model.input_shape
    image = np.asarray(image)
    # Resize with area interpolation while keeping the aspect ratio, then pad
    # to the model's input size and scale pixel values to [0, 1].
    image = tf.image.resize(image,
                            size=(height, width),
                            method=tf.image.ResizeMethod.AREA,
                            preserve_aspect_ratio=True)
    image = image.numpy()
    image = dd.image.transform_and_pad_image(image, width, height)
    image = image / 255.
    probs = model.predict(image[None, ...])[0]
    probs = probs.astype(float)
    # Keep only the tags whose score meets the threshold.
    res = dict()
    for prob, label in zip(probs.tolist(), labels):
        if prob < score_threshold:
            continue
        res[label] = prob
    return res
def main():
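    """Build the Gradio interface and launch the demo."""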
    gr.close_all()
    args = parse_args()
    image_paths = load_sample_image_paths()
    examples = [[path.as_posix(), args.score_threshold]
                for path in image_paths]
    model = load_model()
    labels = load_labels()
    func = functools.partial(predict, model=model, labels=labels)
    func = functools.update_wrapper(func, predict)
    gr.Interface(
        func,
        [
            gr.inputs.Image(type='pil', label='Input'),
            gr.inputs.Slider(0,
                             1,
                             step=args.score_slider_step,
                             default=args.score_threshold,
                             label='Score Threshold'),
        ],
        gr.outputs.Label(label='Output'),
        examples=examples,
        title=TITLE,
        description=DESCRIPTION,
        article=ARTICLE,
        theme=args.theme,
        allow_screenshot=args.allow_screenshot,
        allow_flagging=args.allow_flagging,
        live=args.live,
    ).launch(
        enable_queue=args.enable_queue,
        server_port=args.port,
        share=args.share,
    )
if __name__ == '__main__':
    main()