{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.405904769897461,
"base_token_generation_latency_sync": 38.859296798706055,
"base_token_generation_latency_async": 38.65551482886076,
"base_token_generation_throughput_sync": 0.02573386762967101,
"base_token_generation_throughput_async": 0.02586952998627212,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.89571762084961,
"base_inference_latency_async": 39.14453983306885,
"base_inference_throughput_sync": 0.00841073185822329,
"base_inference_throughput_async": 0.025546347057967246,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.116766929626465,
"smashed_token_generation_latency_sync": 165.78641815185546,
"smashed_token_generation_latency_async": 166.55846014618874,
"smashed_token_generation_throughput_sync": 0.006031857200051391,
"smashed_token_generation_throughput_async": 0.006003897965448875,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 261.7901077270508,
"smashed_inference_latency_async": 194.0150499343872,
"smashed_inference_throughput_sync": 0.0038198540375812295,
"smashed_inference_throughput_async": 0.005154239324929607,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}