from fastapi import FastAPI
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

app = FastAPI()

# Multilingual Twitter sentiment model (labels: 0 = negative, 1 = neutral, 2 = positive).
model_name = "cardiffnlp/twitter-xlm-roberta-base-sentiment"
sentiment_model = AutoModelForSequenceClassification.from_pretrained(model_name)
sentiment_tokenizer = AutoTokenizer.from_pretrained(model_name)

# Register a fourth label for the extra "mixed" logit appended in /sentiment_score below.
sentiment_model.config.id2label[3] = "mixed"
@app.get("/")
def greet_json():
    return {"Hello": "World!"}
@app.post("/sentiment_score")
async def sentiment_score(text: str):
    # Truncate very long inputs before tokenization.
    inputs = sentiment_tokenizer(text[:2500], return_tensors="pt")

    with torch.no_grad():
        logits = sentiment_model(**inputs).logits

    # Shift all logits by the magnitude of the neutral logit so they share a
    # common scale before computing the "mixed" heuristic.
    logits = logits + logits[0, 1].abs()

    # Append a heuristic "mixed" logit: it is largest when the negative and
    # positive logits are close together and drops as the gap between them
    # widens; the neutral logit, floor-divided by the strongest polar logit,
    # scales the penalty.
    mixed_logit = 1 - torch.abs(logits[0, 0] - logits[0, -1]) * (
        2 + (logits[0, 1] // torch.max(torch.abs(logits[0, ::2])))
    )
    logits = torch.cat((logits, mixed_logit.unsqueeze(0).unsqueeze(0)), dim=-1)

    # Normalize the four logits and return every label with its score,
    # ordered from most to least likely.
    softmax = torch.nn.functional.softmax(logits, dim=-1)
    return [
        {
            "label": sentiment_model.config.id2label[predicted_class_id.tolist()],
            "score": softmax[0, predicted_class_id].tolist(),
        }
        for predicted_class_id in softmax.argsort(dim=-1, descending=True)[0]
    ]
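

# --- Usage sketch (not part of the original app) ---------------------------
# A minimal local smoke test, assuming FastAPI's TestClient (which requires
# the httpx package) is available. On Hugging Face Spaces the app is normally
# served by uvicorn, so this guard is never reached there.
if __name__ == "__main__":
    from fastapi.testclient import TestClient

    client = TestClient(app)
    # `text` is a plain query parameter of the POST endpoint defined above.
    response = client.post(
        "/sentiment_score",
        params={"text": "I love the screen, but the battery life is awful."},
    )
    # Expected shape: a list of {"label": ..., "score": ...} dicts covering
    # negative/neutral/positive/mixed, sorted by descending score.
    print(response.json())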