Spaces:
Sleeping
Sleeping
nlmaldonadog
committed on
Commit
•
b8764af
1
Parent(s):
9b19d9b
:rocket: Deploy model
Browse files
- README.md +13 -13
- app.py +19 -0
- requirements.txt +3 -0
README.md
CHANGED
@@ -1,13 +1,13 @@
|
|
1 |
-
---
|
2 |
-
title: Practica 8 Sec2sec
|
3 |
-
emoji:
|
4 |
-
colorFrom: green
|
5 |
-
colorTo: blue
|
6 |
-
sdk: gradio
|
7 |
-
sdk_version:
|
8 |
-
app_file: app.py
|
9 |
-
pinned: false
|
10 |
-
license: apache-2.0
|
11 |
-
---
|
12 |
-
|
13 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
1 |
+
---
|
2 |
+
title: Practica 8 Sec2sec
|
3 |
+
emoji: 🐨
|
4 |
+
colorFrom: green
|
5 |
+
colorTo: blue
|
6 |
+
sdk: gradio
|
7 |
+
sdk_version: 3.18.0
|
8 |
+
app_file: app.py
|
9 |
+
pinned: false
|
10 |
+
license: apache-2.0
|
11 |
+
---
|
12 |
+
|
13 |
+
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
app.py
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from transformers import AutoModelForSequenceClassification, AutoTokenizer
|
2 |
+
import gradio as gr
|
3 |
+
import torch
|
4 |
+
|
5 |
+
# Cargar el modelo
|
6 |
+
model_name = "nlmaldonadog/mbart-clarification-P8"
|
7 |
+
model = AutoModelForSequenceClassification.from_pretrained(model_name)
|
8 |
+
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
9 |
+
|
10 |
+
def predict(text):
|
11 |
+
inputs = tokenizer(text, return_tensors='pt')
|
12 |
+
outputs = model(**inputs)
|
13 |
+
probs = torch.nn.functional.softmax(outputs.logits, dim=-1)
|
14 |
+
return {'negative': float(probs[0][0]), 'neutral': float(probs[0][1]), 'positive': float(probs[0][2])}
|
15 |
+
|
16 |
+
texto = gr.inputs.Textbox(lines=2, placeholder='Escribe aquí...')
|
17 |
+
|
18 |
+
# Creamos la interfaz y la lanzamos.
|
19 |
+
gr.Interface(fn=predict, inputs=texto, outputs=gr.outputs.Label()).launch(share=False)
|
requirements.txt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
fastai
|
2 |
+
toml
|
3 |
+
transformers
|