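"""Gradio demo for Persian sentiment analysis with a fine-tuned ParsBERT model.

Downloads the fine-tuned checkpoint from the Hugging Face Hub, classifies an
input sentence as negative, neutral, or positive, and serves the result
through a Gradio interface.
"""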
import gradio as gr
from transformers import BertModel, BertConfig, BertTokenizer
import torch
import torch.nn as nn
import torch.nn.functional as F
from huggingface_hub import hf_hub_download
from preprocessing import *  # provides the cleaning() helper used before tokenization
from modelFile import *      # model class definition, required so torch.load can unpickle the checkpoint
modelName = 'HooshvareLab/bert-fa-base-uncased'
class_names = ['negative', 'neutral', 'positive']
label2id = {label: i for i, label in enumerate(class_names)}
id2label = {v: k for k, v in label2id.items()}
config = BertConfig.from_pretrained(
    modelName,
    num_labels=len(class_names),
    id2label=id2label,
    label2id=label2id,
)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
path="parsbert-final-v1(epoch3).bin"
downloadedModel = hf_hub_download(repo_id="zArabi/Persian-Sentiment-Analysis", filename=path)
loaded_model = torch.load(downloadedModel,map_location=device)
loaded_model.eval()
tokenizer = BertTokenizer.from_pretrained(modelName)
max_len=512
def predict(text):
    text = cleaning(text)
    encoding = tokenizer.encode_plus(
        text,
        max_length=max_len,
        truncation=True,
        padding="max_length",
        add_special_tokens=True,  # add '[CLS]' and '[SEP]'
        return_token_type_ids=True,
        return_attention_mask=True,
        return_tensors='pt',  # return PyTorch tensors
    )
    input_ids = encoding["input_ids"].to(device)
    attention_mask = encoding["attention_mask"].to(device)

    with torch.no_grad():
        outputs = loaded_model(input_ids, attention_mask)

    probs = F.softmax(outputs, dim=1)
    values, indices = torch.max(probs, dim=1)
    data = {
        'comments': text,
        'preds': indices.cpu().numpy()[0],
        'label': class_names[indices.cpu().numpy()[0]],
        'probabilities': {class_names[i]: round(probs[0][i].item(), 3) for i in range(len(probs[0]))},
    }
    # Gradio's Label component expects a {class_name: probability} mapping.
    return {class_names[i]: round(probs[0][i].item(), 3) for i in range(len(probs[0]))}
gr.Interface(
    fn=predict,
    inputs=gr.Textbox(label="Explore your sentence!", lines=2, placeholder="Type Here..."),
    outputs=gr.Label(num_top_classes=3),
    title="How are you feeling?!",
).launch()