FahadAlam committed on
Commit
7b02fc1
1 Parent(s): d1872ff

Update app.py

Files changed (1)
  1. app.py +16 -1
app.py CHANGED
@@ -1,5 +1,20 @@
 import gradio as gr
+from transformers import AutoModelWithLMHead, AutoTokenizer
+
+tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
+model = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
+
+def get_question(answer, context, max_length=64):
+    input_text = "answer: %s context: %s </s>" % (answer, context)
+    features = tokenizer([input_text], return_tensors='pt')
+
+    output = model.generate(input_ids=features['input_ids'],
+                            attention_mask=features['attention_mask'],
+                            max_length=max_length)
+
+    return tokenizer.decode(output[0])
 
 examples = [["answer: 1948 context: The world's first piece of software was written by a computer scientist named Tom Kilburn in 1948."], ["answer: Tom Kilburn context: The world's first piece of software was written by a computer scientist named Tom Kilburn in 1948."]]
 
-gr.Interface.load("huggingface/mrm8488/t5-base-finetuned-question-generation-ap", title="Question Generator" ,examples=examples).launch();
+demo = gr.Interface(fn=get_question, inputs=["text", "text"], outputs="text", title="Question Generator", examples=examples)
+demo.launch()
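For reference, a minimal sketch (not part of the commit) of calling the same checkpoint directly with the example answer/context from app.py. The checkpoint name comes from the diff above; AutoModelForSeq2SeqLM is used here as the non-deprecated counterpart of the AutoModelWithLMHead class the commit imports, and the exact wording of the generated question will vary.

# Sketch only: query the question-generation checkpoint outside Gradio.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

checkpoint = "mrm8488/t5-base-finetuned-question-generation-ap"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)

context = ("The world's first piece of software was written by a computer "
           "scientist named Tom Kilburn in 1948.")
# Same prompt format as get_question() in app.py.
prompt = "answer: %s context: %s </s>" % ("Tom Kilburn", context)

features = tokenizer([prompt], return_tensors="pt")
output = model.generate(input_ids=features["input_ids"],
                        attention_mask=features["attention_mask"],
                        max_length=64)
# Prints a generated question about Tom Kilburn; special tokens are stripped here,
# whereas app.py decodes them as-is.
print(tokenizer.decode(output[0], skip_special_tokens=True))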