# Sn25 / api.py
# hotfix/24h (#3) — commit 0872630 (verified)
import atexit
import datetime
from apscheduler.schedulers.background import BackgroundScheduler
from fastapi import FastAPI
import utils
import pandas as pd
import uvicorn
from classes import Productivity, ProductivityData, Throughput
# Global variables (saves time on loading data)
# Module-level caches populated by load_data(); endpoints read these directly.
state_vars = None  # NOTE(review): never read or written in this file — confirm it can be removed
reload_timestamp = datetime.datetime.now().strftime('%D %T')  # '%m/%d/%y %H:%M:%S' of last reload
data_all = None  # full run-history DataFrame, set by load_data()
data_24h = None  # subset of data_all updated within the last 24 h, set by load_data()
app = FastAPI()
def load_data():
    """
    Refresh the module-level data caches.

    Fetches new runs via ``utils``, reloads the full DataFrame, and derives
    the trailing-24-hour subset.

    Side effects: rebinds the globals ``data_all``, ``data_24h`` and
    ``reload_timestamp``; prints a confirmation line.
    """
    global data_all, data_24h, reload_timestamp
    utils.fetch_new_runs()
    data_all = utils.preload_data()
    # Vectorized parse with pd.to_datetime instead of a per-row
    # apply(lambda x: pd.Timestamp(x)) — same values, one C-level pass.
    # assumes 'updated_at' values are tz-naive (naive Timestamp.now() is
    # subtracted from them) — TODO confirm against utils.preload_data()
    updated_at = pd.to_datetime(data_all['updated_at'])
    data_24h = data_all[pd.Timestamp.now() - updated_at < pd.Timedelta('1 days')]
    reload_timestamp = datetime.datetime.now().strftime('%D %T')
    print(f'Reloaded data at {reload_timestamp}')
def start_scheduler():
    """Start a background job that re-runs load_data() every 30 minutes."""
    refresh_every_seconds = 30 * 60
    background = BackgroundScheduler()
    background.add_job(func=load_data, trigger="interval", seconds=refresh_every_seconds)
    background.start()
    # Stop the scheduler thread cleanly when the interpreter exits.
    atexit.register(background.shutdown)
@app.get("/")
def home():
    """Landing endpoint: return a static greeting string."""
    greeting = "Welcome to the Bittensor Protein Folding Leaderboard API!"
    return greeting
@app.get("/updated")
def updated():
    """Report when the cached data was last reloaded ('%D %T' format)."""
    return reload_timestamp
@app.get("/productivity", response_model=Productivity)
def productivity_metrics():
    """
    Return productivity metrics for all time and the trailing 24 hours.

    Reads the cached ``data_all`` / ``data_24h`` frames and unpacks the
    per-window dicts from utils.get_productivity() into the response model.
    """
    metrics = utils.get_productivity(df_all=data_all, df_24h=data_24h)
    return Productivity(
        all_time=ProductivityData(**metrics['all_time']),
        last_24h=ProductivityData(**metrics['last_24h']),
    )
@app.get("/throughput", response_model=Throughput)
def throughput_metrics():
    """
    Return data-transfer totals for all time and the trailing 24 hours,
    computed from the cached DataFrames.
    """
    total = utils.get_data_transferred(data_all)
    recent = utils.get_data_transferred(data_24h)
    return Throughput(all_time=total, last_24h=recent)
if __name__ == '__main__':
    # Warm the caches once before serving, then keep them fresh via the
    # background scheduler (every 30 minutes).
    load_data()
    start_scheduler()
    # Blocking call: serves the FastAPI app on all interfaces, port 5001.
    uvicorn.run(app, host='0.0.0.0', port=5001)
    # to test locally
    # curl -X GET http://0.0.0.0:5001/data