import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# T5 base model fine-tuned for answer-aware question generation.
tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
model = AutoModelForSeq2SeqLM.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")

def get_question(context, answer, max_length=64):
    """Generate a question whose answer is `answer` within `context`."""
    # The model expects the answer and its context packed into a single prompt.
    input_text = "answer: %s  context: %s </s>" % (answer, context)
    features = tokenizer([input_text], return_tensors='pt')

    output = model.generate(input_ids=features['input_ids'],
                            attention_mask=features['attention_mask'],
                            max_length=max_length)

    # Decode without special tokens and drop the leading "question: " marker
    # that the model prepends to its output.
    question = tokenizer.decode(output[0], skip_special_tokens=True)
    return question.replace("question: ", "", 1)
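
# Quick sanity check outside the Gradio UI (kept commented out so the app starts
# directly; the exact wording of the generated question depends on the model):
#
#   print(get_question(
#       "The world's first piece of software was written by a computer "
#       "scientist named Tom Kilburn in 1948.",
#       "Tom Kilburn",
#   ))
#   # expected to be roughly: "Who wrote the world's first piece of software?"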

examples = [
    ["The world's first piece of software was written by a computer scientist named Tom Kilburn in 1948.", "1948"],
    ["The world's first piece of software was written by a computer scientist named Tom Kilburn in 1948.", "Tom Kilburn"],
    ["The world's first piece of software was written by a computer scientist named Tom Kilburn in 1948.", "computer scientist"],
]

demo = gr.Interface(
    fn=get_question,
    inputs=["text", "text"],
    outputs="text",
    title="Question Generator",
    examples=examples,
)
demo.launch()