{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.711935997009277,
"base_token_generation_latency_sync": 37.35238075256348,
"base_token_generation_latency_async": 36.53927445411682,
"base_token_generation_throughput_sync": 0.02677205521715963,
"base_token_generation_throughput_async": 0.027367812167582096,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.15110473632812,
"base_inference_latency_async": 39.10989761352539,
"base_inference_throughput_sync": 0.008392704391729478,
"base_inference_throughput_async": 0.02556897514490474,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.512746810913086,
"smashed_token_generation_latency_sync": 61.119361114501956,
"smashed_token_generation_latency_async": 60.40619984269142,
"smashed_token_generation_throughput_sync": 0.016361427570006574,
"smashed_token_generation_throughput_async": 0.016554592121407724,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 194.49446563720704,
"smashed_inference_latency_async": 109.34836864471436,
"smashed_inference_throughput_sync": 0.0051415344736097145,
"smashed_inference_throughput_async": 0.009145083848933467,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}