# Hugging Face Space: Thai word tokenization demo (PyThaiNLP + Gradio).
from pythainlp.tokenize import word_tokenize
import gradio as gr
def cut_word(text: str, separate: str) -> str:
    """Tokenize Thai *text* into words and join them with *separate*.

    Parameters
    ----------
    text : str
        Thai text to tokenize (PyThaiNLP's default engine is used).
    separate : str
        Delimiter placed between consecutive tokens, e.g. ``"|"``.

    Returns
    -------
    str
        The tokens of *text* joined by *separate*.
    """
    # str() guards against a non-string delimiter coming from the UI widget.
    return str(separate).join(word_tokenize(text))
# Build the Gradio UI: two text inputs (Thai text + delimiter) feed
# cut_word; the joined token string is shown in a textbox output.
iface = gr.Interface(
    fn=cut_word,
    inputs=[
        gr.Textbox(
            label="Text",
            info="Thai Text",
            lines=3,
        ),
        gr.Textbox(
            label="Separate",
            info="Word or symbol for word tokenization",
            value="|",  # default delimiter between tokens
        ),
    ],
    outputs="textbox",
)
iface.launch()