import gradio as gr
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration

model_name = "ainize/kobart-news"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)

def summ(txt):
    input_ids = tokenizer.encode(txt, return_tensors="pt")
    summary_text_ids = model.generate(
        input_ids=input_ids,
        bos_token_id=model.config.bos_token_id,  # BOS = Beginning Of Sentence
        eos_token_id=model.config.eos_token_id,  # EOS = End Of Sentence
        length_penalty=2.0,   # length penalty applied during beam search (affects summary length)
        max_length=142,
        min_length=56,
        num_beams=4,          # beam search
    )
    return tokenizer.decode(summary_text_ids[0], skip_special_tokens=True)

interface = gr.Interface(summ,
                         [gr.Textbox(label="original text")],
                         [gr.Textbox(label="summary")])  # Interface class instance
interface.launch()
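As a quick sanity check, the summ function can also be exercised directly (for example in a notebook, or before calling interface.launch()). This is a minimal sketch; the sample_text string below is an illustrative placeholder, not text from the Space itself.

# Hypothetical smoke test: call summ() directly on a short Korean news passage.
sample_text = "여기에 요약할 뉴스 기사 본문을 붙여넣으세요."  # placeholder input, replace with real article text
print(summ(sample_text))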