# Imports
import os
from typing import Union

from src.utils import preprocess

from fastapi import FastAPI
from fastapi.responses import RedirectResponse
from transformers import AutoModelForSequenceClassification, AutoTokenizer, AutoConfig
import numpy as np
from scipy.special import softmax  # converts logits to probabilities
# Config
app = FastAPI()
# The auto-generated API documentation is served at /docs

# Loading ML/DL components
os.environ['SENTENCE_TRANSFORMERS_HOME'] = './.cache'  # cache directory used by sentence-transformers downloads
tokenizer = AutoTokenizer.from_pretrained('bert-base-cased')
model_path = "Junr-syl/tweet_sentiments_analysis"
config = AutoConfig.from_pretrained(model_path)
config.id2label = {0: 'NEGATIVE', 1: 'NEUTRAL', 2: 'POSITIVE'}
model = AutoModelForSequenceClassification.from_pretrained(model_path)
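# A minimal sketch (commented out) of how the loaded components fit together;
# it assumes `preprocess` returns a cleaned string, and the sample text and
# printed label below are illustrative only.
#
#   sample = preprocess("I love this!")
#   inputs = tokenizer(sample, return_tensors='pt')
#   logits = model(**inputs).logits[0].detach().numpy()
#   probs = softmax(logits)
#   print(config.id2label[int(np.argmax(probs))])  # e.g. 'POSITIVE'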
# Endpoints
# @app.get("/")
# def read_root():
#     "Home endpoint"
#     return {"greeting": "Hello World..!",
#             "cohort": "2",
#             "docs": "https://eaedk-tweetsentimentanalysisapi.hf.space/docs",
#             }


@app.get("/", include_in_schema=False)
def read_root():
    """Redirect the root path to the auto-generated docs at /docs."""
    return RedirectResponse(url="/docs")
@app.post("/predict")
def predict(text:str):
"prediction endpoint, classifying tweets"
print(f"\n[Info] Starting prediction")
try:
text = preprocess(text)
# PyTorch-based models
encoded_input = tokenizer(text, return_tensors='pt')
output = model(**encoded_input)
scores = output[0][0].detach().numpy()
scores = softmax(scores)
#Process scores
ranking = np.argsort(scores)
ranking = ranking[::-1]
predicted_label = config.id2label[ranking[0]]
predicted_score = float(scores[ranking[0]])
response = {"text":text,
"predicted_label":predicted_label,
"confidence_score":predicted_score
}
print(f"\n[Info] Prediction done.")
print(f"\n[Info] Have a look at the API response")
print(response)
return response
except Exception as e:
return {
"error": str(e)
} |
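# Usage sketch (assumptions: this file is saved as app.py, and uvicorn plus the
# `requests` package are installed; adjust names and the port to your setup):
#
#   Start the server:
#       uvicorn app:app --host 0.0.0.0 --port 7860
#
#   Call the prediction endpoint (the `text` argument is passed as a query parameter):
#       import requests
#       r = requests.post("http://localhost:7860/predict", params={"text": "I love this!"})
#       print(r.json())  # {"text": ..., "predicted_label": ..., "confidence_score": ...}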