import gradio as gr
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
# When an import starts with "from transformers import", it is very often
# AutoTokenizer / AutoModel, e.g.:
# tokenizer = AutoTokenizer.from_pretrained("some model name")
# PreTrainedTokenizerFast: https://huggingface.co./docs/transformers/main_classes/tokenizer
# BART is an example of an encoder-decoder model
model_name = "ainize/kobart-news"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
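# The same load could also be written with the Auto* classes mentioned above
# (a sketch, assuming the hub config of this model resolves to the same
# tokenizer/model classes):
# from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# tokenizer = AutoTokenizer.from_pretrained(model_name)
# model = AutoModelForSeq2SeqLM.from_pretrained(model_name)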
# ์›๋ฌธ์„ ๋ฐ›์•„์„œ ์š”์•ฝ๋ฌธ์„ ๋ฐ˜ํ™˜
def summ(txt):
    # Tokenize the input text into model input IDs
    input_ids = tokenizer.encode(txt, return_tensors="pt")
    # Generate the summary with beam search
    summary_text_ids = model.generate(
        input_ids=input_ids,
        bos_token_id=model.config.bos_token_id,
        eos_token_id=model.config.eos_token_id,
        length_penalty=2.0,  # favor somewhat longer summaries
        max_length=142,
        min_length=56,
        num_beams=4)
    # Decode the generated token IDs back into text
    return tokenizer.decode(summary_text_ids[0], skip_special_tokens=True)
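# Quick sanity check without the Gradio UI (hypothetical; uncomment to try):
# sample_article = "..."  # a long Korean news article
# print(summ(sample_article))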
interface = gr.Interface(summ,
                         [gr.Textbox(label="original text")],
                         [gr.Textbox(label="summary")])
interface.launch(share=True)