File size: 3,015 Bytes
82e9de9
374bb44
 
82e9de9
374bb44
47c546d
7180e60
 
e844261
47c546d
82e9de9
374bb44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
82e9de9
374bb44
a4d32fc
 
374bb44
 
a4d32fc
82e9de9
 
47c546d
 
 
 
 
a4d32fc
 
e844261
 
 
 
82e9de9
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import gradio
import torch
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM

def shorten_text(text, min_length, max_length):
  """Summarize *text* with the `facebook/bart-large-cnn` summarization model.

  Args:
    text: Input text to summarize.
    min_length: Minimum length of the generated summary (model tokens).
    max_length: Maximum length of the generated summary (model tokens).

  Returns:
    The summary string produced by the model.
  """
  # NOTE: the pipeline (and model weights) are re-created on every call;
  # acceptable for a demo, but hoist to module level for real traffic.
  summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
  # Crude guard against BART's input-length limit. This truncates by
  # *characters*, not tokens — assumes 1024 chars stays under the token
  # budget; TODO confirm / switch to tokenizer-based truncation.
  short_text = text[:1024]
  # Bug fix: generation options must be keyword arguments — passed
  # positionally they were never applied as max/min length.
  summary = summarizer(short_text, max_length=max_length, min_length=min_length, do_sample=False)
  print("** summary", summary)
  return summary[0]["summary_text"]

def paraphrase_text(text, min_length, max_length):
  """Paraphrase *text* with the `Vamsi/T5_Paraphrase_Paws` seq2seq model.

  Args:
    text: Input text to paraphrase.
    min_length: Minimum length of the generated paraphrase (model tokens).
    max_length: Maximum length of the generated paraphrase (model tokens).

  Returns:
    The first generated paraphrase, decoded to a plain string.
  """
  # NOTE: tokenizer and model are re-loaded on every call; hoist to module
  # level to avoid the per-request load cost in real deployments.
  tokenizer = AutoTokenizer.from_pretrained("Vamsi/T5_Paraphrase_Paws")
  model = AutoModelForSeq2SeqLM.from_pretrained("Vamsi/T5_Paraphrase_Paws")
  device = "cuda" if torch.cuda.is_available() else "cpu"
  # Bug fix: move the model to the same device as the inputs — previously
  # only the tensors were moved, which fails on CUDA machines (inputs on
  # GPU, weights on CPU).
  model = model.to(device)
  # This T5 paraphrase checkpoint expects the "paraphrase:" task prefix
  # and an explicit end-of-sequence marker.
  text_instruction =  "paraphrase: " + text + " </s>"
  encoding = tokenizer.encode_plus(text_instruction, padding="longest", return_tensors="pt")
  input_ids, attention_masks = encoding["input_ids"].to(device), encoding["attention_mask"].to(device)
  outputs = model.generate(
    input_ids=input_ids, attention_mask=attention_masks,
    max_length=max_length,
    # Bug fix: min_length was accepted (and wired to a UI slider) but
    # never forwarded to generation.
    min_length=min_length,
    do_sample=True,
    top_k=120,
    top_p=0.95,
    early_stopping=True,
    num_return_sequences=5  # 5 candidates are sampled; only the first is returned below
  )
  line = tokenizer.decode(outputs[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
  print("** outputs", len(outputs), line)
  return line

# Build and launch the web UI around the paraphraser.
# Bug fix: the visible title/description previously described the
# summarizer (`facebook/bart-large-cnn`), but `fn` is `paraphrase_text` —
# the metadata now matches the wired function. Swap `fn` to `shorten_text`
# (and revert the strings) if summarization is the intended demo.
gradio_interface = gradio.Interface(
  fn=paraphrase_text,
  inputs=[
    "text",
    # Slider values are forwarded positionally as (min_length, max_length).
    gradio.Slider(5, 200, value=30, label="Min length"),
    gradio.Slider(5, 500, value=130, label="Max length")
  ],
  outputs="text",
  examples=[
    ["""A beautiful golden sun is setting. The sky is on fire. A large neon sign rises into shot. It rests on top of a skyscraper and fills the frame. The building is neither past nor future in design but a bit of both.

Slowly we pan downwards revealing the city that spreads below. A glittering conglomeration of elevated transport tubes, smaller square buildings which are merely huge, with, here and there, the comparatively minuscule relics of previous ages of architecture, pavement level awnings suggesting restaurants and shops. Transparent tubes carry whizzing transport cages past us. An elevated highway carrying traffic composed primarily of large transport lorries passes through frame.

As we descend, the sunlight is blocked out and street lights & neon signs take over as illumination. Eventually we reach the upper levels of a plush shopping precinct.
Xmas decorations are everywhere. People are busy buying, ogling, discussing, choosing wisely from the goodies on display. Shoppers are going by laden with superbly packaged goods. The shop windows are full of elaborately boxed and be-ribboned who-knows-what. In one window is a bank of TV sets on the great majority of the screens is the face of Mr. Helpmann the Deputy Minister of Information. He is being interviewed. No-one bothers to listen to Helpmann.""",
    30, 130]
  ],
  title="Text paraphraser",
  description="Paraphrasing texts using `Vamsi/T5_Paraphrase_Paws`.",
  article="© Tom Söderlund 2022"
)
gradio_interface.launch()