[
  {
    "model": "google/gemma-2b",
    "commit": "f14d06e0bcaecb02c9a0075ce0c7379345d718bc",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.022776783943176268,
      "prefill.throughput.value": 307.3304825415065,
      "decode.latency.mean": 2.443668212890625,
      "decode.throughput.value": 51.97104882326525,
      "per_token.latency.mean": 0.01931815664570322,
      "per_token.throughput.value": 51.764773334231236
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "f14d06e0bcaecb02c9a0075ce0c7379345d718bc",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02206830406188965,
      "prefill.throughput.value": 317.1970070907483,
      "decode.latency.mean": 2.59590380859375,
      "decode.throughput.value": 48.92323035220565,
      "per_token.latency.mean": 0.02052478912413827,
      "per_token.throughput.value": 48.72157243379156
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "f14d06e0bcaecb02c9a0075ce0c7379345d718bc",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.013985567569732666,
      "prefill.throughput.value": 500.51597585136864,
      "decode.latency.mean": 1.5596350097656249,
      "decode.throughput.value": 81.42930827071201,
      "per_token.latency.mean": 0.012332881980262724,
      "per_token.throughput.value": 81.08404844872256
    }
  }
]