{
    "base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "base_current_gpu_total_memory": 40339.3125,
    "base_token_generation_latency_sync": 76.91910247802734,
    "base_token_generation_latency_async": 76.82386375963688,
    "base_token_generation_throughput_sync": 0.013000671715919452,
    "base_token_generation_throughput_async": 0.013016788678173697,
    "base_token_generation_CO2_emissions": 3.9930267542867864e-05,
    "base_token_generation_energy_consumption": 0.013695786831045186,
    "base_inference_latency_sync": 75.31489334106445,
    "base_inference_latency_async": 74.6358871459961,
    "base_inference_throughput_sync": 0.013277586352960592,
    "base_inference_throughput_async": 0.013398380299866857,
    "base_inference_CO2_emissions": 4.072261623993283e-05,
    "base_inference_energy_consumption": 0.00011265253741674144,
    "smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "smashed_current_gpu_total_memory": 40339.3125,
    "smashed_token_generation_latency_sync": 49.13712501525879,
    "smashed_token_generation_latency_async": 48.9015968516469,
    "smashed_token_generation_throughput_sync": 0.02035121101793125,
    "smashed_token_generation_throughput_async": 0.02044922997164503,
    "smashed_token_generation_CO2_emissions": 3.524599859739772e-05,
    "smashed_token_generation_energy_consumption": 0.009180102018160505,
    "smashed_inference_latency_sync": 52.06343727111816,
    "smashed_inference_latency_async": 49.8340368270874,
    "smashed_inference_throughput_sync": 0.019207337287250974,
    "smashed_inference_throughput_async": 0.0200666063531993,
    "smashed_inference_CO2_emissions": 3.379301910600281e-05,
    "smashed_inference_energy_consumption": 9.774841757057516e-05
}
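The file pairs `base_*` metrics (the original model) with `smashed_*` metrics (the compressed model) measured on the same NVIDIA A100-PCIE-40GB. Units are not stated in the file; since each `*_throughput_sync` value is approximately the reciprocal of the corresponding `*_latency_sync` value, the latencies appear to be milliseconds per token and the throughputs tokens per millisecond. Below is a minimal sketch of reading the file and computing the base-vs-smashed speedup; the filename `model_result.json` is an assumption, not something stated in this file.

```python
import json

# Load the benchmark report (filename is an assumption; point it at this JSON file).
with open("model_result.json") as f:
    results = json.load(f)

# Compare base vs. smashed synchronous token-generation latency.
# Values appear to be milliseconds per token (throughput ~= 1 / latency).
base_ms = results["base_token_generation_latency_sync"]
smashed_ms = results["smashed_token_generation_latency_sync"]

print(f"Base latency:    {base_ms:.2f} ms/token")
print(f"Smashed latency: {smashed_ms:.2f} ms/token")
print(f"Speedup:         {base_ms / smashed_ms:.2f}x")
```

With the values above this reports roughly a 1.57x token-generation speedup for the smashed model over the base model.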