Falcon3-1B-lora-FP16 / Falcon3-1B-NF4-lora-FP16-Evaluation_Results.json
HabibAhmed's picture
Upload folder using huggingface_hub
c65e5a8 verified
{
  "eval_loss": 2.746601107561381,
  "perplexity": 15.589554514190564,
  "performance_metrics": {
    "accuracy": 0.9939919893190922,
    "precision": 1.0,
    "recall": 1.0,
    "f1": 1.0,
    "bleu": 0.8631491900537053,
    "rouge": {
      "rouge1": 0.9227783862741349,
      "rouge2": 0.9216706649447948,
      "rougeL": 0.9227142207671568
    },
    "semantic_similarity_avg": 0.9864102005958557
  },
  "mauve": 0.6804962052299759,
  "inference_performance": {
    "min_latency_ms": 46.35119438171387,
    "max_latency_ms": 2166.898727416992,
    "lower_quartile_ms": 1524.4737267494202,
    "median_latency_ms": 1553.6243915557861,
    "upper_quartile_ms": 1574.8480558395386,
    "avg_latency_ms": 1516.4662039327686,
    "min_memory_gb": 0.09034490585327148,
    "max_memory_gb": 0.09083318710327148,
    "lower_quartile_gb": 0.09034490585327148,
    "median_memory_gb": 0.09083318710327148,
    "upper_quartile_gb": 0.09083318710327148,
    "avg_memory_gb": 0.09066173454152249,
    "model_load_memory_gb": 1.6676692962646484,
    "avg_inference_memory_gb": 0.09066173454152249
  }
}