nlmaldonadog committed
Commit: 81f640a
1 Parent(s): b8764af

:rocket: Deploy model

Files changed (1)
  1. app.py +11 -19
app.py CHANGED
@@ -1,19 +1,11 @@
- from transformers import AutoModelForSequenceClassification, AutoTokenizer
- import gradio as gr
- import torch
-
- # Load the model
- model_name = "nlmaldonadog/mbart-clarification-P8"
- model = AutoModelForSequenceClassification.from_pretrained(model_name)
- tokenizer = AutoTokenizer.from_pretrained(model_name)
-
- def predict(text):
-     inputs = tokenizer(text, return_tensors='pt')
-     outputs = model(**inputs)
-     probs = torch.nn.functional.softmax(outputs.logits, dim=-1)
-     return {'negative': float(probs[0][0]), 'neutral': float(probs[0][1]), 'positive': float(probs[0][2])}
-
- texto = gr.inputs.Textbox(lines=2, placeholder='Escribe aquí...')
-
- # Create the interface and launch it.
- gr.Interface(fn=predict, inputs=texto, outputs=gr.outputs.Label()).launch(share=False)
+ import gradio as gr
+ from transformers import pipeline
+
+ # Load the model as a text2text-generation (seq2seq) pipeline
+ model = pipeline('text2text-generation', model='nlmaldonadog/mbart-clarification-P8')
+
+ def predict(text):
+     return model(text)[0]['generated_text']
+
+ iface = gr.Interface(fn=predict, inputs=[gr.Textbox(placeholder='Escribe aquí...')], outputs="text")
+ iface.launch(share=True)
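
For reference, a minimal sketch of exercising the updated predict logic locally without launching the Gradio UI. It assumes the standard transformers text2text-generation task for this mBART checkpoint and uses a made-up example sentence:

from transformers import pipeline

# Build the same seq2seq pipeline used in app.py.
model = pipeline('text2text-generation', model='nlmaldonadog/mbart-clarification-P8')

def predict(text):
    # The pipeline returns a list of dicts; take the generated text of the first result.
    return model(text)[0]['generated_text']

# Hypothetical input; any sentence to be clarified works here.
print(predict('This is an example sentence to clarify.'))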