mGPT / app.py
ai-forever's picture
Update app.py
af0197c
raw
history blame
1.58 kB
import torch
import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the multilingual GPT model and its tokenizer from the Hugging Face hub.
tokenizer = GPT2Tokenizer.from_pretrained("sberbank-ai/mGPT")
model = GPT2LMHeadModel.from_pretrained("sberbank-ai/mGPT")

# Pick the device once; generate() moves its input tensors to `device`, so the
# model must live on the same device. The original left the model on CPU (the
# .cuda() call was commented out), which made generate() fail whenever CUDA
# was available.
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
model.eval()  # inference only: disable dropout and training-mode behavior

description = "Multilingual generation with mGPT"
title = "Generate your own example"
# One example per inner list: with a single text input component, each example
# row must hold exactly one value. The original packed both prompts into a
# single row, mismatching the one-input interface.
examples = [
    ["""English: The vase with flowers is on the table.\nFinnish translation:"""],
    ["In May we celebrate "],
]
article = (
    "<p style='text-align: center'>"
    "<a href='https://github.com/ai-forever/mgpt'>GitHub</a> "
    "</p>"
)
fp16 = device != 'cpu'  # NOTE(review): currently unused; kept so the module-level name survives
def generate(prompt: str) -> str:
    """Generate a text continuation of *prompt* with mGPT.

    The sampling parameters (``top_p``/``top_k``) only take effect when
    ``do_sample=True``; the original call omitted it, so generation silently
    fell back to greedy decoding and those arguments were ignored.
    """
    input_ids = tokenizer.encode(prompt, return_tensors="pt").to(device)
    with torch.no_grad():  # inference only -- skip autograd bookkeeping
        out = model.generate(
            input_ids,
            min_length=100,
            max_length=200,
            do_sample=True,   # enable nucleus sampling so top_p/top_k apply
            top_p=0.8,
            top_k=0,          # 0 disables top-k filtering; top_p alone governs
            no_repeat_ngram_size=5,
        )
    # generate() returns a batch of sequences; decode the single one we asked for.
    return tokenizer.decode(out[0])
# Build the demo around the locally loaded model. The original used
# gr.Interface.load("huggingface/..."), which proxies the hosted Inference API
# and ignores the local `fn=generate` -- leaving the model loaded above (and
# the generate() function) completely unused. Constructing gr.Interface
# directly wires the UI to the local model.
interface = gr.Interface(
    fn=generate,
    inputs="text",
    outputs="text",
    title=title,              # was defined but never passed in the original
    description=description,
    examples=examples,
    article=article,
    thumbnail='https://habrastorage.org/r/w1560/getpro/habr/upload_files/26a/fa1/3e1/26afa13e1d1a56f54c7b0356761af7b8.png',
    theme="peach",
    cache_examples=True,
)
interface.launch(enable_queue=True)