from pythainlp.tokenize import word_tokenize
import gradio as gr
# Join the tokens produced by PyThaiNLP's word_tokenize with the chosen separator.
def cut_word(text, separate):
    return str(separate).join(word_tokenize(text))
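# Illustrative check (assumption: PyThaiNLP's default "newmm" engine is used):
# cut_word("ทดสอบตัดคำ", "|") returns the detected words joined by "|",
# e.g. something like "ทดสอบ|ตัด|คำ"; exact boundaries depend on the engine.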
# Build the Gradio UI: Thai text in, tokenized text (joined by the separator) out.
iface = gr.Interface(
    fn=cut_word,
    inputs=[
        gr.Textbox(
            label="Text",
            info="Thai Text",
            lines=3,
            # value="The quick brown fox jumped over the lazy dogs.",
        ),
        gr.Textbox(
            label="Separate",
            info="Word or symbol for word tokenization",
            # lines=3,
            value="|",
        ),
    ],
    outputs="textbox")
iface.launch()
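# To expose the demo outside the local machine, Gradio's launch() also accepts a
# share flag (a standard Gradio option; not something this Space necessarily uses):
# iface.launch(share=True)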