HabibAhmed committed (verified)
Commit 7e8e606 · 1 Parent(s): c66a73b

Upload folder using huggingface_hub

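The commit message says the folder was uploaded with huggingface_hub. For reference, a minimal sketch of that kind of upload; the `folder_path` and `repo_id` below are placeholders, not values taken from this commit:

```python
from huggingface_hub import HfApi

# Minimal sketch of an upload_folder call; uses the token from
# `huggingface-cli login` by default. repo_id/folder_path are hypothetical.
api = HfApi()
api.upload_folder(
    folder_path="./evaluation_results",          # local folder with the JSON/PNG files
    repo_id="HabibAhmed/Falcon3-1B-FP16-lora",   # placeholder target repo
    commit_message="Upload folder using huggingface_hub",
)
```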
.ipynb_checkpoints/Falcon3-1B-FP16-lora-BF16-Evaluation_Results-checkpoint.json ADDED
@@ -0,0 +1,32 @@
+ {
+     "eval_loss": 2.050020990189468,
+     "perplexity": 7.768064157733997,
+     "performance_metrics": {
+         "accuracy": 0.9666221628838452,
+         "precision": 1.0,
+         "recall": 1.0,
+         "f1": 1.0,
+         "bleu": 0.8543704377273225,
+         "rouge": {
+             "rouge1": 0.9106147836704825,
+             "rouge2": 0.9066116857854712,
+             "rougeL": 0.9095088305670392
+         },
+         "semantic_similarity_avg": 0.9741736054420471
+     },
+     "mauve": 0.9183583018800073,
+     "inference_performance": {
+         "min_latency_ms": 23.571014404296875,
+         "max_latency_ms": 1258.0935955047607,
+         "lower_quartile_ms": 813.8142228126526,
+         "median_latency_ms": 1087.252140045166,
+         "upper_quartile_ms": 1121.3715076446533,
+         "avg_latency_ms": 894.5596692399762,
+         "min_memory_mb": 0.0,
+         "max_memory_mb": 0.00439453125,
+         "lower_quartile_mb": 0.0,
+         "median_memory_mb": 0.0,
+         "upper_quartile_mb": 0.0,
+         "avg_memory_mb": 2.9335989652870494e-06
+     }
+ }
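A quick consistency note on the checkpoint file above: the reported perplexity is exp(eval_loss), i.e. exp(2.050021) ≈ 7.768064. A one-line check, using only the values shown:

```python
import math

eval_loss = 2.050020990189468   # from the checkpoint JSON above
print(math.exp(eval_loss))      # ≈ 7.768064157733997, matching the reported "perplexity"
```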
Falcon3-1B-FP16-lora-BF16-Evaluation_Results.json ADDED
@@ -0,0 +1,34 @@
+ {
+     "eval_loss": 1.9311989277442083,
+     "perplexity": 6.897775220119944,
+     "performance_metrics": {
+         "accuracy": 0.9826435246995995,
+         "precision": 1.0,
+         "recall": 1.0,
+         "f1": 1.0,
+         "bleu": 0.8674816754158438,
+         "rouge": {
+             "rouge1": 0.9233287188838122,
+             "rouge2": 0.9215922862963115,
+             "rougeL": 0.9230407771578816
+         },
+         "semantic_similarity_avg": 0.9837943315505981
+     },
+     "mauve": 0.6879898985610357,
+     "inference_performance": {
+         "min_latency_ms": 28.52606773376465,
+         "max_latency_ms": 1896.172046661377,
+         "lower_quartile_ms": 1063.4836554527283,
+         "median_latency_ms": 1146.3901996612549,
+         "upper_quartile_ms": 1169.2450046539307,
+         "avg_latency_ms": 1017.8319195084006,
+         "min_memory_gb": 0.06690740585327148,
+         "max_memory_gb": 0.06739568710327148,
+         "lower_quartile_gb": 0.06690740585327148,
+         "median_memory_gb": 0.06739568710327148,
+         "upper_quartile_gb": 0.06739568710327148,
+         "avg_memory_gb": 0.06722423454152249,
+         "model_load_memory_gb": 6.151503562927246,
+         "avg_inference_memory_gb": 0.06722423454152249
+     }
+ }
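The final results file uses the same schema as the checkpoint, except that memory is reported in GB rather than MB and two model-load fields are added. A small sketch for loading both files and printing the headline metrics side by side; paths assume the repository root:

```python
import json

# File names as committed above; paths assume the repository root.
paths = {
    "checkpoint": ".ipynb_checkpoints/Falcon3-1B-FP16-lora-BF16-Evaluation_Results-checkpoint.json",
    "final": "Falcon3-1B-FP16-lora-BF16-Evaluation_Results.json",
}
results = {name: json.load(open(p)) for name, p in paths.items()}

# Print headline metrics from both runs for a quick side-by-side.
for key in ("eval_loss", "perplexity", "mauve"):
    print(f"{key:>10}: " + "  ".join(f"{n}={r[key]:.4f}" for n, r in results.items()))
```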
Falcon3-1B-FP16-lora-BF16-Inference_Curve.png ADDED
Falcon3-1B-FP16-lora-BF16-Latency_Histogram.png ADDED
Falcon3-1B-FP16-lora-BF16-Memory_Histogram.png ADDED
Falcon3-1B-FP16-lora-BF16-Memory_Usage_Curve.png ADDED