Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
File size: 3,686 Bytes
e7abd9e 0fe203b e7abd9e 23c96f8 728ad54 e7abd9e ccd1d98 e7abd9e ccd1d98 e7abd9e ccd1d98 e7abd9e 5f010ab e7abd9e 23c96f8 e7abd9e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 |
"""
ASGI entry point for the Open LLM Leaderboard API.
"""
import os
import uvicorn
import logging
import logging.config
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
import sys
import sentry_sdk
from app.api.router import router
from app.core.fastapi_cache import setup_cache
from app.core.formatting import LogFormatter
from app.config.base import SENTRY_DSN
from app.config import hf_config
# Configure logging before anything else in the module runs, so every
# logger created afterwards (uvicorn's and the app's) uses this setup.
def _quiet_logger() -> dict:
    """Return a logger entry that routes to the default handler at WARNING."""
    return {
        "handlers": ["default"],
        "level": "WARNING",
        "propagate": False,
    }


LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": True,
    "formatters": {
        "default": {"format": "%(name)s - %(levelname)s - %(message)s"},
    },
    "handlers": {
        # Single stdout handler shared by every logger below.
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
    },
    # uvicorn's three loggers and the app logger all get the same quiet config.
    "loggers": {
        name: _quiet_logger()
        for name in ("uvicorn", "uvicorn.error", "uvicorn.access", "app")
    },
    "root": {
        "handlers": ["default"],
        "level": "WARNING",
    },
}

# Apply logging configuration
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("app")
# Initialize Sentry error reporting only when a DSN is configured
# (SENTRY_DSN comes from app.config.base; None disables reporting entirely).
if SENTRY_DSN is not None:
    sentry_sdk.init(
        dsn=SENTRY_DSN,
        # Add data like request headers and IP for users,
        # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
        send_default_pii=False,
        # Set traces_sample_rate to 1.0 to capture 100%
        # of transactions for tracing.
        # NOTE(review): 1.0 traces every request — confirm this is intended
        # for production traffic volumes.
        traces_sample_rate=1.0,
        _experiments={
            # Set continuous_profiling_auto_start to True
            # to automatically start the profiler when
            # possible.
            "continuous_profiling_auto_start": True,
        },
    )
# Create FastAPI application (Swagger UI served at /docs)
app = FastAPI(
    title="Open LLM Leaderboard",
    version="1.0.0",
    docs_url="/docs",
)
# Add CORS middleware
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True makes
# Starlette echo the request's Origin header, effectively granting any site
# credentialed access — confirm this is intended for a public API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Add GZIP compression for responses of at least 500 bytes
app.add_middleware(GZipMiddleware, minimum_size=500)
# Include API router — all endpoints are mounted under the /api prefix
app.include_router(router, prefix="/api")
@app.on_event("startup")
async def startup_event():
    """Log the Hugging Face configuration and initialize the cache on startup.

    NOTE(review): ``@app.on_event`` is deprecated in recent FastAPI releases
    in favor of lifespan handlers — consider migrating when upgrading FastAPI.
    """
    logger.info("\n")  # visual separator in the startup log
    logger.info(LogFormatter.section("APPLICATION STARTUP"))

    # Log HF configuration so a misconfigured deployment is visible immediately.
    logger.info(LogFormatter.section("HUGGING FACE CONFIGURATION"))
    logger.info(LogFormatter.info(f"Organization: {hf_config.HF_ORGANIZATION}"))
    # Never log the token itself — only whether one is present.
    logger.info(LogFormatter.info(f"Token Status: {'Present' if hf_config.HF_TOKEN else 'Missing'}"))
    # Fixed: was an f-string with no placeholders (ruff F541).
    logger.info(LogFormatter.info("Using repositories:"))
    logger.info(LogFormatter.info(f" - Queue: {hf_config.QUEUE_REPO}"))
    logger.info(LogFormatter.info(f" - Aggregated: {hf_config.AGGREGATED_REPO}"))
    logger.info(LogFormatter.info(f" - Votes: {hf_config.VOTES_REPO}"))
    logger.info(LogFormatter.info(f" - Official Providers: {hf_config.OFFICIAL_PROVIDERS_REPO}"))

    # Set up the in-memory cache backend for fastapi-cache.
    setup_cache()
    logger.info(LogFormatter.success("FastAPI Cache initialized with in-memory backend"))