# nagisa-demo / app.py
import gradio as gr
import nagisa


def tokenize(text):
    """Tokenize Japanese text with nagisa and return its words and POS tags."""
    tokens = nagisa.tagging(text)
    return tokens.words, tokens.postags
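
# A minimal usage sketch (not part of the app). With the pretrained nagisa model,
# the call is expected to behave roughly like the example below; the exact
# segmentation and tags depend on the installed nagisa version.
#   tokenize("Pythonで簡単に使えるツールです")
#   -> (["Python", "で", "簡単", "に", "使える", "ツール", "です"],
#       ["名詞", "助詞", "形状詞", "助動詞", "動詞", "名詞", "助動詞"])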

# UI settings for the input textbox.
num_input_lines = 3
input_placeholder = "Please enter text here."
title = "Japanese tokenization demo"
# Default input, kept in Japanese because the demo tokenizes Japanese text.
# It reads: "Enter text here and press Enter."
default_text = "ここにテキストを入力し、Enter を押してください。"

description = """
This is a demo page for nagisa's tokenization.
Nagisa is a Python module for tokenizing and part-of-speech (POS) tagging Japanese text.
It is designed to be a simple, easy-to-use tool.
To try it out, enter some text in the box below.
https://github.com/taishi-i/nagisa
"""

# Example Japanese inputs shown below the interface.
examples = ["Pythonで簡単に使えるツールです", "3月に見た「3月のライオン」", "福岡・博多の観光情報"]

# Build the Gradio interface: one input textbox, two output textboxes
# (tokenized words and their POS tags), updated live as the text changes.
iface = gr.Interface(
    fn=tokenize,
    inputs=gr.Textbox(
        label="Input text",
        lines=num_input_lines,
        placeholder=input_placeholder,
        value=default_text,
    ),
    outputs=[
        gr.Textbox(label="Words"),
        gr.Textbox(label="POS tags"),
    ],
    title=title,
    description=description,
    examples=examples,
    live=True,
)

iface.launch()
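
# To run this demo locally (assuming the dependencies are installed,
# e.g. `pip install gradio nagisa`):
#   python app.py
# then open the local URL that launch() prints in a browser.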