#!/usr/bin/env python

from __future__ import annotations

import argparse
import functools
import os
import pathlib
import tarfile
import tempfile

import deepdanbooru as dd
import gradio as gr
import huggingface_hub
import numpy as np
import PIL.Image
import tensorflow as tf

TITLE = 'KichangKim/DeepDanbooru'
DESCRIPTION = 'This is an unofficial demo for https://github.com/KichangKim/DeepDanbooru.'
ARTICLE = '<center><img src="https://visitor-badge.glitch.me/badge?page_id=hysts.deepdanbooru" alt="visitor badge"/></center>'

HF_TOKEN = os.environ['HF_TOKEN']
MODEL_REPO = 'hysts/DeepDanbooru'
MODEL_FILENAME = 'model-resnet_custom_v3.h5'
LABEL_FILENAME = 'tags.txt'
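

# Command-line options for the demo: slider step size, default score
# threshold, and whether to create a public Gradio share link.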
def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument('--score-slider-step', type=float, default=0.05)
    parser.add_argument('--score-threshold', type=float, default=0.5)
    parser.add_argument('--share', action='store_true')
    return parser.parse_args()
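

# Download and extract the sample images (hysts/sample-images-TADNE) on first
# run, then return their paths for use as Gradio examples.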
def load_sample_image_paths() -> list[pathlib.Path]:
    image_dir = pathlib.Path('images')
    if not image_dir.exists():
        dataset_repo = 'hysts/sample-images-TADNE'
        path = huggingface_hub.hf_hub_download(dataset_repo,
                                               'images.tar.gz',
                                               repo_type='dataset',
                                               use_auth_token=HF_TOKEN)
        with tarfile.open(path) as f:
            f.extractall()
    return sorted(image_dir.glob('*'))
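

# Fetch the pretrained DeepDanbooru Keras model from the Hugging Face Hub.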
def load_model() -> tf.keras.Model:
    path = huggingface_hub.hf_hub_download(MODEL_REPO,
                                           MODEL_FILENAME,
                                           use_auth_token=HF_TOKEN)
    model = tf.keras.models.load_model(path)
    return model
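

# Load the tag names from tags.txt, one label per line, in the order expected
# by the model's output.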
def load_labels() -> list[str]:
    path = huggingface_hub.hf_hub_download(MODEL_REPO,
                                           LABEL_FILENAME,
                                           use_auth_token=HF_TOKEN)
    with open(path) as f:
        labels = [line.strip() for line in f.readlines()]
    return labels
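

# Run the classifier on a single image and return both a label->score dict
# (for gr.Label) and the path of a text file listing the tags (for gr.File).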
def predict(image: PIL.Image.Image, score_threshold: float,
            model: tf.keras.Model,
            labels: list[str]) -> tuple[dict[str, float], str]:
    _, height, width, _ = model.input_shape
    image = np.asarray(image)
    image = tf.image.resize(image,
                            size=(height, width),
                            method=tf.image.ResizeMethod.AREA,
                            preserve_aspect_ratio=True)
    image = image.numpy()
    image = dd.image.transform_and_pad_image(image, width, height)
    image = image / 255.
    probs = model.predict(image[None, ...])[0]
    probs = probs.astype(float)
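    # Keep only the tags whose score clears the user-selected threshold,
    # sorted by descending probability.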
    res = dict()
    for prob, label in zip(probs.tolist(), labels):
        if prob < score_threshold:
            continue
        res[label] = prob
    sorted_preds = sorted(res.items(), key=lambda x: -x[1])
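    # Write the selected tag names to a temporary text file so Gradio can
    # offer it as a download.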
    out_path = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
    with open(out_path.name, 'w') as f:
        for key, _ in sorted_preds:
            f.write(f'{key}\n')
    return res, out_path.name
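

# Wire everything together: load the model, labels, and sample images, then
# launch the Gradio interface.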
def main():
    args = parse_args()

    image_paths = load_sample_image_paths()
    examples = [[path.as_posix(), args.score_threshold]
                for path in image_paths]

    model = load_model()
    labels = load_labels()

    func = functools.partial(predict, model=model, labels=labels)
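    # Inputs: the image and a score-threshold slider; outputs: the label/score
    # pairs and the downloadable tag list produced by predict().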
    gr.Interface(
        func,
        [
            gr.Image(type='pil', label='Input'),
            gr.Slider(0,
                      1,
                      step=args.score_slider_step,
                      value=args.score_threshold,
                      label='Score Threshold'),
        ],
        [
            gr.Label(label='Output'),
            gr.File(label='Tag List'),
        ],
        examples=examples,
        title=TITLE,
        description=DESCRIPTION,
        article=ARTICLE,
        allow_flagging='never',
    ).launch(
        enable_queue=True,
        share=args.share,
    )


if __name__ == '__main__':
    main()