lapp0 committed
Commit 5d59b20 · verified · 1 Parent(s): 1f33bc9

Training in progress, step 61875

README.md CHANGED
@@ -44,42 +44,42 @@ More information needed
 | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | tinystoriesppl | zhwikippl |
 | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
 | **teacher eval** | | 43.25 | 61.25 | | | | | 11.6875 | 19.125 |
-| 0 | 0 | 2473901162496.0 | 170424302305280.0 | 45.7764 | 30.2746 | 82.577 | 10.339 | 4060086272.0 | 71468255805440.0 |
-| 2500 | 0.0404 | 2040.0 | 20608.0 | 20.8472 | 30.2064 | 82.764 | 10.362 | 1472.0 | 60160.0 |
-| 5000 | 0.0808 | 488.0 | 3120.0 | 18.4130 | 30.6595 | 81.541 | 10.209 | 338.0 | 1112.0 |
-| 7500 | 0.1212 | 276.0 | 1296.0 | 17.0012 | 30.289 | 82.538 | 10.334 | 255.0 | 249.0 |
-| 10000 | 0.1616 | 202.0 | 756.0 | 16.2078 | 30.2709 | 82.588 | 10.34 | 188.0 | 304.0 |
-| 12500 | 0.2020 | 145.0 | 540.0 | 15.0996 | 30.2185 | 82.731 | 10.358 | 131.0 | 176.0 |
-| 15000 | 0.2424 | 123.5 | 490.0 | 14.5283 | 30.2533 | 82.636 | 10.346 | 93.0 | 146.0 |
-| 17500 | 0.2828 | 95.0 | 376.0 | 14.1520 | 30.2403 | 82.671 | 10.35 | 75.5 | 137.0 |
-| 20000 | 0.3232 | 79.0 | 306.0 | 13.6446 | 30.1204 | 83.0 | 10.392 | 63.25 | 130.0 |
-| 22500 | 0.3636 | 66.0 | 219.0 | 13.1452 | 30.158 | 82.897 | 10.379 | 50.0 | 80.5 |
-| 25000 | 0.4040 | 63.0 | 200.0 | 12.9619 | 30.1269 | 82.982 | 10.389 | 43.75 | 77.5 |
-| 27500 | 0.4444 | 59.0 | 197.0 | 12.8388 | 30.3214 | 82.45 | 10.323 | 40.5 | 73.5 |
-| 30000 | 0.4848 | 59.5 | 204.0 | 12.8191 | 30.3164 | 82.464 | 10.324 | 40.5 | 70.5 |
-| 32500 | 0.5253 | 58.25 | 176.0 | 12.7778 | 30.2231 | 82.718 | 10.356 | 38.75 | 61.75 |
-| 35000 | 0.5657 | 58.25 | 169.0 | 12.6562 | 30.35 | 82.372 | 10.313 | 36.5 | 45.5 |
-| 37500 | 0.6061 | 56.75 | 158.0 | 12.6014 | 30.3685 | 82.322 | 10.307 | 37.0 | 50.5 |
-| 40000 | 0.6465 | 55.0 | 156.0 | 12.5674 | 30.3598 | 82.346 | 10.31 | 33.75 | 59.5 |
-| 42500 | 0.6869 | 54.5 | 147.0 | 12.5141 | 30.3209 | 82.451 | 10.323 | 34.25 | 52.5 |
-| 45000 | 0.7273 | 50.75 | 135.0 | 12.2860 | 30.244 | 82.661 | 10.349 | 29.5 | 41.75 |
-| 47500 | 0.7677 | 50.5 | 127.0 | 12.2408 | 30.3366 | 82.409 | 10.318 | 28.875 | 35.0 |
-| 50000 | 0.8081 | 50.25 | 125.5 | 12.2160 | 30.2563 | 82.627 | 10.345 | 28.625 | 39.0 |
-| 52500 | 0.8485 | 49.25 | 123.0 | 12.1936 | 30.2253 | 82.712 | 10.356 | 28.5 | 35.5 |
-| 55000 | 0.8889 | 49.25 | 121.0 | 12.1620 | 30.1898 | 82.81 | 10.368 | 27.875 | 35.0 |
-| 57500 | 0.9293 | 48.75 | 120.0 | 12.1488 | 30.2559 | 82.628 | 10.345 | 27.75 | 33.5 |
-| 60000 | 0.9697 | 48.75 | 119.5 | 12.1404 | 30.1517 | 82.914 | 10.381 | 27.625 | 33.25 |
-| 61875 | 1.0 | 48.75 | 120.0 | 12.1402 | 30.2129 | 82.746 | 10.36 | 27.625 | 33.5 |
+| 0 | 0 | 2473901162496.0 | 170424302305280.0 | 45.7764 | 25.1947 | 99.227 | 12.423 | 4060086272.0 | 71468255805440.0 |
+| 2500 | 0.0404 | 2496.0 | 25856.0 | 21.0926 | 25.2323 | 99.079 | 12.405 | 2576.0 | 47616.0 |
+| 5000 | 0.0808 | 486.0 | 3120.0 | 18.4104 | 25.2286 | 99.094 | 12.407 | 338.0 | 1104.0 |
+| 7500 | 0.1212 | 276.0 | 1296.0 | 17.0012 | 25.2472 | 99.021 | 12.397 | 254.0 | 247.0 |
+| 10000 | 0.1616 | 202.0 | 752.0 | 16.2103 | 25.2451 | 99.029 | 12.398 | 187.0 | 296.0 |
+| 12500 | 0.2020 | 145.0 | 536.0 | 15.0990 | 25.1956 | 99.224 | 12.423 | 131.0 | 173.0 |
+| 15000 | 0.2424 | 123.5 | 488.0 | 14.5275 | 25.2611 | 98.966 | 12.391 | 93.5 | 147.0 |
+| 17500 | 0.2828 | 95.5 | 376.0 | 14.1507 | 25.2295 | 99.09 | 12.406 | 76.0 | 134.0 |
+| 20000 | 0.3232 | 78.0 | 308.0 | 13.6430 | 25.2434 | 99.036 | 12.399 | 63.5 | 136.0 |
+| 22500 | 0.3636 | 66.5 | 218.0 | 13.1470 | 25.2484 | 99.016 | 12.397 | 49.75 | 83.0 |
+| 25000 | 0.4040 | 63.0 | 204.0 | 12.9643 | 25.2039 | 99.191 | 12.419 | 43.25 | 82.5 |
+| 27500 | 0.4444 | 59.75 | 196.0 | 12.8397 | 25.1629 | 99.353 | 12.439 | 40.0 | 76.5 |
+| 30000 | 0.4848 | 58.5 | 192.0 | 12.8201 | 25.1971 | 99.218 | 12.422 | 41.0 | 61.5 |
+| 32500 | 0.5253 | 58.25 | 170.0 | 12.7767 | 25.2324 | 99.079 | 12.405 | 39.5 | 58.75 |
+| 35000 | 0.5657 | 57.75 | 170.0 | 12.6563 | 25.193 | 99.234 | 12.424 | 36.25 | 44.75 |
+| 37500 | 0.6061 | 56.25 | 155.0 | 12.5982 | 25.2106 | 99.165 | 12.415 | 36.75 | 50.5 |
+| 40000 | 0.6465 | 55.5 | 163.0 | 12.5850 | 25.222 | 99.12 | 12.41 | 33.5 | 62.25 |
+| 42500 | 0.6869 | 54.75 | 151.0 | 12.5192 | 25.2047 | 99.188 | 12.418 | 34.25 | 50.75 |
+| 45000 | 0.7273 | 51.25 | 135.0 | 12.2871 | 25.2624 | 98.961 | 12.39 | 29.5 | 42.0 |
+| 47500 | 0.7677 | 51.0 | 125.5 | 12.2422 | 25.232 | 99.081 | 12.405 | 28.375 | 35.75 |
+| 50000 | 0.8081 | 50.5 | 124.5 | 12.2162 | 25.177 | 99.297 | 12.432 | 28.375 | 37.75 |
+| 52500 | 0.8485 | 49.25 | 121.0 | 12.1929 | 25.1839 | 99.27 | 12.429 | 28.5 | 34.5 |
+| 55000 | 0.8889 | 49.25 | 120.5 | 12.1608 | 25.2063 | 99.182 | 12.418 | 27.625 | 34.75 |
+| 57500 | 0.9293 | 48.75 | 119.5 | 12.1482 | 25.2548 | 98.991 | 12.394 | 27.5 | 32.75 |
+| 60000 | 0.9697 | 48.75 | 119.0 | 12.1412 | 25.2068 | 99.18 | 12.417 | 27.5 | 32.5 |
+| 61875 | 1.0 | 48.75 | 119.5 | 12.1400 | 25.2169 | 99.14 | 12.412 | 27.5 | 32.75 |
 
 # Resource Usage Comparison
 
-- VRAM Use: 7.7831 GB
+- VRAM Use: 7.7830 GB
 
-`# Distillation (Teacher -> Student) Architecture Difference:
+# Distillation (Teacher -> Student) Architecture Difference:
 
 - **Architecture**: `GPT2LMHeadModel` -> `GPT2LMHeadModel`
 - **Total Parameters**: 124,439,808 -> 124,439,808
-- **Data Type (dtype)**: 124439808 -> torch.bfloat16
+- **Data Type (dtype)**: torch.bfloat16 -> torch.bfloat16
 - **Model Size**: 0.24 GB -> 0.24 GB
 
 <details>
@@ -122,7 +122,7 @@ The following hyperparameters were used during training:
 - num_epochs: `1.0`
 - distillation_objective: `DistillationObjective(logits_loss_component=LossComponent(label=logits, weight=1, loss_fn=kl), attn_loss_component=LossComponent(label=attn, weight=25.0, loss_fn=cos, layer_mapper=layer-2))`
 - train_embeddings: `True`
-- lr_scheduler: `<torch.optim.lr_scheduler.LambdaLR object at 0x7fbd2823f2b0>`
+- lr_scheduler: `<torch.optim.lr_scheduler.LambdaLR object at 0x7f146e3bfc10>`
 - student_model_name_or_path: `None`
 - student_config_name_or_path: `None`
 - student_model_config: `None`
@@ -154,6 +154,6 @@ The following hyperparameters were used during training:
 
 # Framework Versions
 - Distily 0.2.0
-- Transformers 4.44.0
-- Pytorch 2.3.0
+- Transformers 4.44.1
+- Pytorch 2.5.0.dev20240821+cu121
 - Datasets 2.21.0
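
The `distillation_objective` in the hyperparameter hunk above names a two-part loss: KL divergence on the logits (weight 1) plus a cosine loss on attention maps (weight 25.0) under a `layer-2` layer mapper. Below is a minimal PyTorch sketch of that kind of combined objective, not Distily's actual implementation; the attention-layer pairing assumed for `layer-2` is a guess, and both models must be run with `output_attentions=True`.

```python
import torch.nn.functional as F

def distillation_loss(student_out, teacher_out, attn_weight=25.0):
    # KL divergence between student and teacher next-token distributions.
    s_logp = F.log_softmax(student_out.logits, dim=-1)
    t_prob = F.softmax(teacher_out.logits, dim=-1)
    logits_loss = F.kl_div(s_logp, t_prob, reduction="batchmean")

    # Cosine distance between flattened attention maps; pairing student
    # layer i with teacher layer i + 2 is an assumed reading of `layer-2`.
    pairs = list(zip(student_out.attentions, teacher_out.attentions[2:]))
    attn_loss = sum(
        (1.0 - F.cosine_similarity(s.flatten(1), t.flatten(1), dim=-1)).mean()
        for s, t in pairs
    ) / len(pairs)

    return logits_loss + attn_weight * attn_loss
```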
config.json CHANGED
@@ -33,7 +33,7 @@
    }
  },
  "torch_dtype": "bfloat16",
- "transformers_version": "4.44.0",
+ "transformers_version": "4.44.1",
  "use_cache": true,
  "vocab_size": 50257
 }
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 50256,
   "eos_token_id": 50256,
-  "transformers_version": "4.44.0"
+  "transformers_version": "4.44.1"
 }
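
Both JSON configs record the library version that serialized them, which is why the 4.44.0 -> 4.44.1 bump appears in each file. A quick way to confirm the recorded versions in a local checkout (paths assumed relative to the repo root):

```python
import json

for path in ["config.json", "generation_config.json"]:
    with open(path) as f:
        print(path, "->", json.load(f).get("transformers_version"))
```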
logs/attn_loss_fn=cos, attn_weight=25.0, layer_mapper=last, projector=linear/completed.flag ADDED
File without changes
logs/attn_loss_fn=cos, attn_weight=5, layer_mapper=layer-2, projector=linear/events.out.tfevents.1724377064.e3f806ea38c9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:767b5680d7fc5940e024c7b0057f763c8c41f28fc0daaea52f4829f95f069bea
+size 29632522
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c4a972c54e417c7197bd9968ddd13c8ce2ee329876ee772f1bce5dc12273cdf8
+oid sha256:867d38910c544ebe94a0a1365511001a27c3b8b68ef244f5851770b3ea52b01f
 size 248894656
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fde8414a41236e4d37875a5c20c33c02761432d024649e3d004bf722974cfae4
+oid sha256:cfc82283cd56cebafe8e7032bcd35f00d46c1a33e3d44b89d5c422804675d400
 size 1017899144
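
The `model.safetensors`, `training_args.bin`, and event-log entries above are Git LFS pointer files rather than the binaries themselves: three `key value` lines giving the pointer spec version, the SHA-256 of the stored object, and its size in bytes. A minimal parser sketch for that three-line format (the path is hypothetical and assumes a checkout made without `git lfs pull`):

```python
def parse_lfs_pointer(path):
    """Return the pointer's fields, e.g. {'version': ..., 'oid': ..., 'size': ...}."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("model.safetensors")
print(ptr["oid"], int(ptr["size"]))  # e.g. sha256:867d... 248894656
```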