benchmark_results/2024-09-07/summaries.json
[
  {
    "model": "google/gemma-2b",
    "commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.020622032165527344,
      "prefill.throughput.value": 339.4427835148805,
      "decode.latency.mean": 2.456687744140625,
      "decode.throughput.value": 51.69562159574574,
      "per_token.latency.mean": 0.019342747207701672,
      "per_token.throughput.value": 51.69896443674927
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.022551328659057616,
      "prefill.throughput.value": 310.402996906725,
      "decode.latency.mean": 2.63321533203125,
      "decode.throughput.value": 48.230009317936336,
      "per_token.latency.mean": 0.020732932023176057,
      "per_token.throughput.value": 48.2324448313515
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.01426094388961792,
      "prefill.throughput.value": 490.85110033257024,
      "decode.latency.mean": 1.5635492553710937,
      "decode.throughput.value": 81.2254552031095,
      "per_token.latency.mean": 0.012310193380971592,
      "per_token.throughput.value": 81.23349236298303
    }
  }
]
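
The three entries benchmark google/gemma-2b at the same transformers commit under a dynamic (null) cache without torch.compile, a static cache without torch.compile, and a static cache with torch.compile. Below is a minimal sketch, not part of the benchmark pipeline, of how such a file could be read to compare configurations; the local file name "summaries.json" is an assumption.

import json

# Assumes the JSON above has been saved locally as "summaries.json".
with open("summaries.json") as f:
    summaries = json.load(f)

# Index each entry by its config string for easy lookup.
by_config = {entry["config"]: entry["metrics"] for entry in summaries}

baseline = by_config["backend.cache_implementation=null,backend.torch_compile=False"]
compiled = by_config["backend.cache_implementation=static,backend.torch_compile=True"]

# Compare decode throughput (tokens/s) of the compiled static-cache run
# against the eager dynamic-cache baseline.
speedup = compiled["decode.throughput.value"] / baseline["decode.throughput.value"]
print(f"decode throughput speedup with static cache + torch.compile: {speedup:.2f}x")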