# kobart-summary / app.py
# Unggi's Space — version2 (commit 51b2870)
# NOTE(review): the lines above/below ("raw", "history", "blame", "1.28 kB")
# were Hugging Face web-page chrome pasted in with the code; they are kept
# here as comments because as bare text they made the file invalid Python.
import gradio as gr
# make function using import pip to install torch
import pip
pip.main(['install', 'torch'])
pip.main(['install', 'transformers'])
import torch
import transformers
# saved_model
def load_model(model_path):
saved_data = torch.load(
model_path,
map_location="cpu"
)
bart_best = saved_data["model"]
train_config = saved_data["config"]
tokenizer = transformers.PreTrainedTokenizerFast.from_pretrained('gogamza/kobart-base-v1')
## Load weights.
model = transformers.BartForConditionalGeneration.from_pretrained('gogamza/kobart-base-v1')
model.load_state_dict(bart_best)
return model, tokenizer
# main
def inference(prompt):
model_path = "./kobart-model-summary-2.pth"
model, tokenizer = load_model(
model_path=model_path
)
input_ids = tokenizer.encode(prompt)
input_ids = torch.tensor(input_ids)
input_ids = input_ids.unsqueeze(0)
output = model.generate(input_ids)
output = tokenizer.decode(output[0], skip_special_tokens=True)
return output
demo = gr.Interface(
fn=inference,
inputs="text",
outputs="text" #return κ°’
).launch() # launch(share=True)λ₯Ό μ„€μ •ν•˜λ©΄ μ™ΈλΆ€μ—μ„œ 접속 κ°€λŠ₯ν•œ 링크가 생성됨
demo.launch()