lapp0 committed
Commit 6a4fa98 · verified · 1 Parent(s): b51142e

Training in progress, step 61875

README.md CHANGED
@@ -44,42 +44,42 @@ More information needed
  | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | tinystoriesppl | zhwikippl |
  | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
  | **teacher eval** | | 43.25 | 61.25 | | | | | 11.6875 | 19.125 |
- | 0 | 0 | 2473901162496.0 | 170424302305280.0 | 25.7744 | 30.2402 | 82.672 | 10.35 | 4060086272.0 | 71468255805440.0 |
- | 2500 | 0.0404 | 940.0 | 7712.0 | 6.1058 | 30.4416 | 82.125 | 10.282 | 640.0 | 6272.0 |
- | 5000 | 0.0808 | 378.0 | 1880.0 | 5.0293 | 30.4354 | 82.141 | 10.284 | 270.0 | 288.0 |
- | 7500 | 0.1212 | 230.0 | 820.0 | 4.5127 | 30.4162 | 82.193 | 10.291 | 201.0 | 174.0 |
- | 10000 | 0.1616 | 173.0 | 632.0 | 4.2293 | 30.2401 | 82.672 | 10.351 | 151.0 | 172.0 |
- | 12500 | 0.2020 | 127.5 | 482.0 | 3.8556 | 30.2143 | 82.742 | 10.359 | 106.5 | 156.0 |
- | 15000 | 0.2424 | 109.0 | 436.0 | 3.6684 | 30.2343 | 82.688 | 10.352 | 87.5 | 144.0 |
- | 17500 | 0.2828 | 93.5 | 348.0 | 3.5229 | 30.333 | 82.419 | 10.319 | 73.5 | 122.5 |
- | 20000 | 0.3232 | 73.5 | 276.0 | 3.3349 | 30.3063 | 82.491 | 10.328 | 63.25 | 99.5 |
- | 22500 | 0.3636 | 67.0 | 219.0 | 3.1509 | 30.3619 | 82.34 | 10.309 | 52.25 | 79.0 |
- | 25000 | 0.4040 | 64.5 | 189.0 | 3.0823 | 30.4079 | 82.215 | 10.293 | 45.75 | 97.0 |
- | 27500 | 0.4444 | 59.0 | 194.0 | 3.0271 | 30.4181 | 82.188 | 10.29 | 41.25 | 85.5 |
- | 30000 | 0.4848 | 59.25 | 194.0 | 3.0192 | 30.2505 | 82.643 | 10.347 | 42.75 | 57.75 |
- | 32500 | 0.5253 | 58.5 | 175.0 | 3.0025 | 30.2733 | 82.581 | 10.339 | 40.0 | 62.75 |
- | 35000 | 0.5657 | 57.0 | 170.0 | 2.9448 | 30.2658 | 82.601 | 10.342 | 37.0 | 54.25 |
- | 37500 | 0.6061 | 57.25 | 155.0 | 2.9182 | 30.2187 | 82.73 | 10.358 | 38.75 | 73.5 |
- | 40000 | 0.6465 | 54.75 | 164.0 | 2.8978 | 30.2683 | 82.595 | 10.341 | 35.25 | 70.0 |
- | 42500 | 0.6869 | 54.25 | 156.0 | 2.8775 | 30.4126 | 82.203 | 10.292 | 34.75 | 61.75 |
- | 45000 | 0.7273 | 50.25 | 137.0 | 2.7761 | 30.3396 | 82.401 | 10.317 | 30.5 | 60.75 |
- | 47500 | 0.7677 | 50.25 | 126.5 | 2.7499 | 30.3808 | 82.289 | 10.303 | 29.5 | 37.25 |
- | 50000 | 0.8081 | 49.25 | 126.5 | 2.7359 | 30.3056 | 82.493 | 10.328 | 28.625 | 37.75 |
- | 52500 | 0.8485 | 48.5 | 122.0 | 2.7258 | 30.3024 | 82.502 | 10.329 | 29.125 | 36.25 |
- | 55000 | 0.8889 | 48.0 | 119.0 | 2.7099 | 30.201 | 82.779 | 10.364 | 28.125 | 34.0 |
- | 57500 | 0.9293 | 47.5 | 119.0 | 2.7046 | 30.1798 | 82.837 | 10.371 | 27.875 | 33.5 |
- | 60000 | 0.9697 | 47.75 | 118.5 | 2.7011 | 30.355 | 82.359 | 10.311 | 27.75 | 33.0 |
- | 61875 | 1.0 | 47.75 | 119.0 | 2.7006 | 30.4772 | 82.028 | 10.27 | 27.875 | 33.0 |
+ | 0 | 0 | 2473901162496.0 | 170424302305280.0 | 25.7744 | 25.131 | 99.479 | 12.455 | 4060086272.0 | 71468255805440.0 |
+ | 2500 | 0.0404 | 960.0 | 8064.0 | 6.1231 | 25.2285 | 99.094 | 12.407 | 652.0 | 6816.0 |
+ | 5000 | 0.0808 | 380.0 | 1896.0 | 5.0307 | 25.2563 | 98.985 | 12.393 | 270.0 | 286.0 |
+ | 7500 | 0.1212 | 230.0 | 824.0 | 4.5129 | 25.128 | 99.491 | 12.456 | 202.0 | 174.0 |
+ | 10000 | 0.1616 | 171.0 | 628.0 | 4.2261 | 25.2755 | 98.91 | 12.384 | 151.0 | 173.0 |
+ | 12500 | 0.2020 | 126.5 | 482.0 | 3.8533 | 25.2021 | 99.198 | 12.42 | 106.0 | 156.0 |
+ | 15000 | 0.2424 | 109.5 | 430.0 | 3.6650 | 25.2487 | 99.015 | 12.397 | 88.0 | 155.0 |
+ | 17500 | 0.2828 | 93.0 | 350.0 | 3.5198 | 25.1751 | 99.305 | 12.433 | 73.5 | 119.0 |
+ | 20000 | 0.3232 | 77.5 | 282.0 | 3.3352 | 25.2573 | 98.981 | 12.392 | 63.25 | 135.0 |
+ | 22500 | 0.3636 | 66.5 | 213.0 | 3.1511 | 25.1782 | 99.292 | 12.431 | 50.75 | 80.0 |
+ | 25000 | 0.4040 | 63.25 | 197.0 | 3.0803 | 25.2258 | 99.105 | 12.408 | 44.5 | 80.5 |
+ | 27500 | 0.4444 | 58.5 | 212.0 | 3.0299 | 25.2357 | 99.066 | 12.403 | 41.75 | 68.5 |
+ | 30000 | 0.4848 | 58.5 | 202.0 | 3.0169 | 25.2481 | 99.017 | 12.397 | 43.25 | 91.5 |
+ | 32500 | 0.5253 | 58.75 | 173.0 | 3.0014 | 25.2575 | 98.981 | 12.392 | 41.5 | 62.75 |
+ | 35000 | 0.5657 | 57.25 | 164.0 | 2.9385 | 25.2523 | 99.001 | 12.395 | 38.0 | 49.0 |
+ | 37500 | 0.6061 | 57.0 | 157.0 | 2.9163 | 25.1539 | 99.388 | 12.443 | 39.25 | 61.75 |
+ | 40000 | 0.6465 | 54.75 | 172.0 | 2.8984 | 25.2388 | 99.054 | 12.402 | 35.0 | 67.5 |
+ | 42500 | 0.6869 | 53.0 | 151.0 | 2.8789 | 25.2418 | 99.042 | 12.4 | 35.25 | 49.75 |
+ | 45000 | 0.7273 | 49.5 | 134.0 | 2.7753 | 25.2511 | 99.005 | 12.395 | 30.25 | 42.25 |
+ | 47500 | 0.7677 | 50.0 | 124.0 | 2.7506 | 25.2475 | 99.02 | 12.397 | 29.5 | 38.75 |
+ | 50000 | 0.8081 | 49.0 | 124.5 | 2.7361 | 25.2146 | 99.149 | 12.413 | 28.75 | 38.25 |
+ | 52500 | 0.8485 | 48.25 | 120.0 | 2.7262 | 25.1855 | 99.264 | 12.428 | 29.125 | 35.0 |
+ | 55000 | 0.8889 | 47.75 | 117.0 | 2.7099 | 25.2332 | 99.076 | 12.404 | 28.25 | 33.0 |
+ | 57500 | 0.9293 | 47.25 | 117.5 | 2.7045 | 25.2693 | 98.934 | 12.387 | 28.0 | 32.5 |
+ | 60000 | 0.9697 | 47.25 | 116.5 | 2.7013 | 25.2549 | 98.991 | 12.394 | 27.875 | 32.25 |
+ | 61875 | 1.0 | 47.25 | 116.5 | 2.7009 | 25.2212 | 99.123 | 12.41 | 28.0 | 32.25 |

  # Resource Usage Comparison

- - VRAM Use: 7.7831 GB
+ - VRAM Use: 7.7830 GB

- `# Distillation (Teacher -> Student) Architecture Difference:
+ # Distillation (Teacher -> Student) Architecture Difference:

  - **Architecture**: `GPT2LMHeadModel` -> `GPT2LMHeadModel`
  - **Total Parameters**: 124,439,808 -> 124,439,808
- - **Data Type (dtype)**: 124439808 -> torch.bfloat16
+ - **Data Type (dtype)**: torch.bfloat16 -> torch.bfloat16
  - **Model Size**: 0.24 GB -> 0.24 GB

  <details>
@@ -122,7 +122,7 @@ The following hyperparameters were used during training:
  - num_epochs: `1.0`
  - distillation_objective: `DistillationObjective(logits_loss_component=LossComponent(label=logits, weight=1, loss_fn=kl), attn_loss_component=LossComponent(label=attn, weight=5, loss_fn=cos, layer_mapper=layer-2))`
  - train_embeddings: `True`
- - lr_scheduler: `<torch.optim.lr_scheduler.LambdaLR object at 0x7fbd2875e9e0>`
+ - lr_scheduler: `<torch.optim.lr_scheduler.LambdaLR object at 0x7f14d416e830>`
  - student_model_name_or_path: `None`
  - student_config_name_or_path: `None`
  - student_model_config: `None`
@@ -154,6 +154,6 @@ The following hyperparameters were used during training:

  # Framework Versions
  - Distily 0.2.0
- - Transformers 4.44.0
- - Pytorch 2.3.0
+ - Transformers 4.44.1
+ - Pytorch 2.5.0.dev20240821+cu121
  - Datasets 2.21.0
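The `distillation_objective` shown in the hyperparameters above combines a KL loss on the student/teacher logits (weight 1) with a cosine loss on attention maps (weight 5) routed through a `layer-2` layer mapper. The sketch below is an illustration of a loss with that shape, not Distily's actual code: the `distillation_loss` helper is ours, and the layer pairing is simplified to index-by-index rather than the library's `layer-2` mapper.

```python
import torch
import torch.nn.functional as F

def distillation_loss(student_out, teacher_out, logits_weight=1.0, attn_weight=5.0):
    """Illustrative only: KL on logits plus (1 - cosine) on attention maps.

    Expects transformers model outputs produced with output_attentions=True.
    Layers are paired index-by-index here; Distily's `layer-2` mapper may
    pair student and teacher layers differently.
    """
    # KL divergence between teacher and student next-token distributions.
    s_logp = F.log_softmax(student_out.logits, dim=-1)
    t_prob = F.softmax(teacher_out.logits, dim=-1)
    logits_loss = F.kl_div(s_logp, t_prob, reduction="batchmean")

    # Cosine distance between corresponding attention maps, averaged over layers.
    attn_loss = torch.zeros((), device=student_out.logits.device)
    for s_attn, t_attn in zip(student_out.attentions, teacher_out.attentions):
        cos = F.cosine_similarity(s_attn.flatten(1), t_attn.flatten(1), dim=-1)
        attn_loss = attn_loss + (1.0 - cos).mean()
    attn_loss = attn_loss / len(student_out.attentions)

    return logits_weight * logits_loss + attn_weight * attn_loss
```

Since teacher and student are both `GPT2LMHeadModel` here, calling each with `output_attentions=True` yields attention tensors of identical shape to compare.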
config.json CHANGED
@@ -33,7 +33,7 @@
     }
   },
   "torch_dtype": "bfloat16",
- "transformers_version": "4.44.0",
+ "transformers_version": "4.44.1",
   "use_cache": true,
   "vocab_size": 50257
 }
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 50256,
   "eos_token_id": 50256,
- "transformers_version": "4.44.0"
+ "transformers_version": "4.44.1"
 }
logs/attn_loss_fn=cos, attn_weight=25.0, layer_mapper=layer-2, projector=linear/events.out.tfevents.1724385184.e3f806ea38c9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64c09730f43cf90e51e24df23317667b7b046086ec1ad64c14ec5f752a077714
+ size 8084083
logs/attn_loss_fn=cos, attn_weight=25.0, layer_mapper=layer-2, projector=linear/events.out.tfevents.1724387479.e3f806ea38c9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7997098c27617e2a27c143aa98479f22ddcce8da86c3ad2c1e8148a6dd6d6a1d
+ size 13256345
logs/attn_loss_fn=cos, attn_weight=5, layer_mapper=layer-2, projector=linear/completed.flag ADDED
File without changes
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9de163c64545e506410b51e708d3647559bf19be3b3dd504e77fb8c3b3d3051a
+ oid sha256:b1e3f1d9530f5ab44df7ced276f9aad18a5896067fa83aebc347729d0fbd1d5b
  size 248894656
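The `model.safetensors` size above lines up with the README's figures: 124,439,808 parameters stored as `torch.bfloat16` take two bytes each, which a quick back-of-envelope check (ours, not part of the repo) puts close to the reported 0.24 GB model size.

```python
# Rough size check: bfloat16 stores each parameter in 2 bytes.
params = 124_439_808
nbytes = params * 2
print(f"{nbytes:,} bytes = {nbytes / 1e9:.3f} GB = {nbytes / 2**30:.3f} GiB")
# 248,879,616 bytes = 0.249 GB = 0.232 GiB
# The 248,894,656-byte safetensors file adds only a small metadata header on top.
```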
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3e24bd773ab39cd8217ed631d6db55bc6c7a82ad2f423d1b2d0445e7de80f459
- size 1017899144
+ oid sha256:154c9ae48a6d8c0a8eabb43967035cb981a54c02d7049008d68d63ac6b4d652b
+ size 5368
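The binary entries in this commit (the two event logs, `model.safetensors`, and `training_args.bin`) are stored as Git LFS pointer files: a `version` line, a `sha256` object id, and the byte size. A minimal sketch, assuming the blob has already been downloaded locally, of checking a file against its pointer:

```python
import hashlib
from pathlib import Path

def matches_lfs_pointer(path: str, oid_sha256: str, size: int) -> bool:
    """Return True if the local file's length and SHA-256 match the LFS pointer."""
    data = Path(path).read_bytes()
    return len(data) == size and hashlib.sha256(data).hexdigest() == oid_sha256

# e.g., for the new training_args.bin pointer above:
# matches_lfs_pointer("training_args.bin",
#                     "154c9ae48a6d8c0a8eabb43967035cb981a54c02d7049008d68d63ac6b4d652b",
#                     5368)
```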