import gradio as gr
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
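
# Load the pretrained KoBART news summarization model and its tokenizer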
model_name = "ainize/kobart-news"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)

def summ(txt):
    # Tokenize the input article into model input IDs
    input_ids = tokenizer.encode(txt, return_tensors="pt")
    summary_text_ids = model.generate(
        input_ids=input_ids,
        bos_token_id=model.config.bos_token_id,  # BOS = beginning-of-sentence token
        eos_token_id=model.config.eos_token_id,  # EOS = end-of-sentence token
        length_penalty=2.0,  # length penalty applied during beam search (affects summary length)
        max_length=142,
        min_length=56,
        num_beams=4,         # beam search width
    )
    # Decode the generated IDs back into text, dropping special tokens
    return tokenizer.decode(summary_text_ids[0], skip_special_tokens=True)

interface = gr.Interface(summ, [gr.Textbox(label="original_text")], [gr.Textbox(label="summary")])
interface.launch()
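
# A minimal sketch of calling summ() directly without the Gradio UI, e.g. for a
# quick local test; the placeholder input below is an assumption and should be
# replaced with an actual Korean news article:
#   print(summ("<Korean news article text>"))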