adobe-codemay2025 commited on
Commit
bd2093b
·
verified ·
1 Parent(s): 6ffd255

adobe-codemay2025/injection-detector

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. README.md +12 -11
  2. config.json +27 -39
  3. model.safetensors +2 -2
  4. special_tokens_map.json +5 -35
  5. tokenizer.json +0 -0
  6. tokenizer_config.json +13 -900
  7. training_args.bin +1 -1
  8. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/config.json +35 -0
  9. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/model.safetensors +3 -0
  10. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/optimizer.pt +3 -0
  11. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/rng_state.pth +3 -0
  12. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/scheduler.pt +3 -0
  13. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/special_tokens_map.json +7 -0
  14. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/tokenizer.json +0 -0
  15. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/tokenizer_config.json +58 -0
  16. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/trainer_state.json +50 -0
  17. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/training_args.bin +3 -0
  18. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/vocab.txt +0 -0
  19. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/config.json +35 -0
  20. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/model.safetensors +3 -0
  21. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/optimizer.pt +3 -0
  22. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/rng_state.pth +3 -0
  23. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/scheduler.pt +3 -0
  24. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/special_tokens_map.json +7 -0
  25. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/tokenizer.json +0 -0
  26. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/tokenizer_config.json +58 -0
  27. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/trainer_state.json +62 -0
  28. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/training_args.bin +3 -0
  29. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/vocab.txt +0 -0
  30. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/config.json +35 -0
  31. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/model.safetensors +3 -0
  32. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/optimizer.pt +3 -0
  33. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/rng_state.pth +3 -0
  34. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/scheduler.pt +3 -0
  35. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/special_tokens_map.json +7 -0
  36. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/tokenizer.json +0 -0
  37. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/tokenizer_config.json +58 -0
  38. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/trainer_state.json +74 -0
  39. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/training_args.bin +3 -0
  40. trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/vocab.txt +0 -0
  41. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/config.json +35 -0
  42. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/model.safetensors +3 -0
  43. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/optimizer.pt +3 -0
  44. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/rng_state.pth +3 -0
  45. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/scheduler.pt +3 -0
  46. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/special_tokens_map.json +7 -0
  47. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/tokenizer.json +0 -0
  48. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/tokenizer_config.json +58 -0
  49. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/trainer_state.json +62 -0
  50. trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/training_args.bin +3 -0
README.md CHANGED
@@ -1,7 +1,7 @@
1
  ---
2
  library_name: transformers
3
  license: apache-2.0
4
- base_model: answerdotai/ModernBERT-base
5
  tags:
6
  - generated_from_trainer
7
  metrics:
@@ -19,13 +19,13 @@ should probably proofread and complete it, then remove this comment. -->
19
 
20
  # results
21
 
22
- This model is a fine-tuned version of [answerdotai/ModernBERT-base](https://huggingface.co/answerdotai/ModernBERT-base) on the None dataset.
23
  It achieves the following results on the evaluation set:
24
- - Loss: 0.6145
25
- - Accuracy: 0.625
26
- - Precision: 0.6389
27
- - Recall: 0.625
28
- - F1: 0.5615
29
 
30
  ## Model description
31
 
@@ -44,20 +44,21 @@ More information needed
44
  ### Training hyperparameters
45
 
46
  The following hyperparameters were used during training:
47
- - learning_rate: 3.1008099293113248e-06
48
  - train_batch_size: 32
49
  - eval_batch_size: 16
50
  - seed: 42
51
  - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
52
  - lr_scheduler_type: linear
53
- - num_epochs: 2
54
 
55
  ### Training results
56
 
57
  | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 |
58
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:---------:|:------:|:------:|
59
- | No log | 1.0 | 7 | 0.6498 | 0.5417 | 0.5044 | 0.5417 | 0.5000 |
60
- | No log | 2.0 | 14 | 0.6145 | 0.625 | 0.6389 | 0.625 | 0.5615 |
 
61
 
62
 
63
  ### Framework versions
 
1
  ---
2
  library_name: transformers
3
  license: apache-2.0
4
+ base_model: distilbert/distilbert-base-uncased-finetuned-sst-2-english
5
  tags:
6
  - generated_from_trainer
7
  metrics:
 
19
 
20
  # results
21
 
22
+ This model is a fine-tuned version of [distilbert/distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert/distilbert-base-uncased-finetuned-sst-2-english) on the None dataset.
23
  It achieves the following results on the evaluation set:
24
+ - Loss: 0.1401
25
+ - Accuracy: 0.9583
26
+ - Precision: 0.9621
27
+ - Recall: 0.9583
28
+ - F1: 0.9586
29
 
30
  ## Model description
31
 
 
44
  ### Training hyperparameters
45
 
46
  The following hyperparameters were used during training:
47
+ - learning_rate: 9.755035812704661e-05
48
  - train_batch_size: 32
49
  - eval_batch_size: 16
50
  - seed: 42
51
  - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
52
  - lr_scheduler_type: linear
53
+ - num_epochs: 3
54
 
55
  ### Training results
56
 
57
  | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 |
58
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:---------:|:------:|:------:|
59
+ | No log | 1.0 | 7 | 0.2914 | 0.875 | 0.9038 | 0.875 | 0.8757 |
60
+ | No log | 2.0 | 14 | 0.2127 | 0.9583 | 0.9621 | 0.9583 | 0.9586 |
61
+ | No log | 3.0 | 21 | 0.1401 | 0.9583 | 0.9621 | 0.9583 | 0.9586 |
62
 
63
 
64
  ### Framework versions
config.json CHANGED
@@ -1,47 +1,35 @@
1
  {
2
- "_name_or_path": "answerdotai/ModernBERT-base",
 
3
  "architectures": [
4
- "ModernBertForSequenceClassification"
5
  ],
6
- "attention_bias": false,
7
- "attention_dropout": 0.0,
8
- "bos_token_id": 50281,
9
- "classifier_activation": "gelu",
10
- "classifier_bias": false,
11
- "classifier_dropout": 0.0,
12
- "classifier_pooling": "mean",
13
- "cls_token_id": 50281,
14
- "decoder_bias": true,
15
- "deterministic_flash_attn": false,
16
- "embedding_dropout": 0.0,
17
- "eos_token_id": 50282,
18
- "global_attn_every_n_layers": 3,
19
- "global_rope_theta": 160000.0,
20
- "gradient_checkpointing": false,
21
- "hidden_activation": "gelu",
22
- "hidden_size": 768,
23
- "initializer_cutoff_factor": 2.0,
24
  "initializer_range": 0.02,
25
- "intermediate_size": 1152,
26
- "layer_norm_eps": 1e-05,
27
- "local_attention": 128,
28
- "local_rope_theta": 10000.0,
29
- "max_position_embeddings": 8192,
30
- "mlp_bias": false,
31
- "mlp_dropout": 0.0,
32
- "model_type": "modernbert",
33
- "norm_bias": false,
34
- "norm_eps": 1e-05,
35
- "num_attention_heads": 12,
36
- "num_hidden_layers": 22,
37
- "pad_token_id": 50283,
38
- "position_embedding_type": "absolute",
39
  "problem_type": "single_label_classification",
40
- "reference_compile": true,
41
- "sep_token_id": 50282,
42
- "sparse_pred_ignore_index": -100,
43
- "sparse_prediction": false,
44
  "torch_dtype": "float32",
45
  "transformers_version": "4.48.0.dev0",
46
- "vocab_size": 50368
47
  }
 
1
  {
2
+ "_name_or_path": "distilbert/distilbert-base-uncased-finetuned-sst-2-english",
3
+ "activation": "gelu",
4
  "architectures": [
5
+ "DistilBertForSequenceClassification"
6
  ],
7
+ "attention_dropout": 0.1,
8
+ "dim": 768,
9
+ "dropout": 0.1,
10
+ "finetuning_task": "sst-2",
11
+ "hidden_dim": 3072,
12
+ "id2label": {
13
+ "0": "NEGATIVE",
14
+ "1": "POSITIVE"
15
+ },
 
 
 
 
 
 
 
 
 
16
  "initializer_range": 0.02,
17
+ "label2id": {
18
+ "NEGATIVE": 0,
19
+ "POSITIVE": 1
20
+ },
21
+ "max_position_embeddings": 512,
22
+ "model_type": "distilbert",
23
+ "n_heads": 12,
24
+ "n_layers": 6,
25
+ "output_past": true,
26
+ "pad_token_id": 0,
 
 
 
 
27
  "problem_type": "single_label_classification",
28
+ "qa_dropout": 0.1,
29
+ "seq_classif_dropout": 0.2,
30
+ "sinusoidal_pos_embds": false,
31
+ "tie_weights_": true,
32
  "torch_dtype": "float32",
33
  "transformers_version": "4.48.0.dev0",
34
+ "vocab_size": 30522
35
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:36fea5347aaf8d126227bcbbe158c1ea586bbaaa39f805037d42c61debe3a8ed
3
- size 598439784
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d45d3c6340d2d2aeb88f11a27ee3db0df9baae52ab03fd12581c020d1f0fc7ed
3
+ size 267832560
special_tokens_map.json CHANGED
@@ -1,37 +1,7 @@
1
  {
2
- "cls_token": {
3
- "content": "[CLS]",
4
- "lstrip": false,
5
- "normalized": false,
6
- "rstrip": false,
7
- "single_word": false
8
- },
9
- "mask_token": {
10
- "content": "[MASK]",
11
- "lstrip": true,
12
- "normalized": false,
13
- "rstrip": false,
14
- "single_word": false
15
- },
16
- "pad_token": {
17
- "content": "[PAD]",
18
- "lstrip": false,
19
- "normalized": false,
20
- "rstrip": false,
21
- "single_word": false
22
- },
23
- "sep_token": {
24
- "content": "[SEP]",
25
- "lstrip": false,
26
- "normalized": false,
27
- "rstrip": false,
28
- "single_word": false
29
- },
30
- "unk_token": {
31
- "content": "[UNK]",
32
- "lstrip": false,
33
- "normalized": false,
34
- "rstrip": false,
35
- "single_word": false
36
- }
37
  }
 
1
  {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,230 +1,14 @@
1
  {
2
  "added_tokens_decoder": {
3
  "0": {
4
- "content": "|||IP_ADDRESS|||",
5
- "lstrip": false,
6
- "normalized": true,
7
- "rstrip": false,
8
- "single_word": false,
9
- "special": false
10
- },
11
- "1": {
12
- "content": "<|padding|>",
13
- "lstrip": false,
14
- "normalized": false,
15
- "rstrip": false,
16
- "single_word": false,
17
- "special": true
18
- },
19
- "50254": {
20
- "content": " ",
21
- "lstrip": false,
22
- "normalized": true,
23
- "rstrip": false,
24
- "single_word": false,
25
- "special": false
26
- },
27
- "50255": {
28
- "content": " ",
29
- "lstrip": false,
30
- "normalized": true,
31
- "rstrip": false,
32
- "single_word": false,
33
- "special": false
34
- },
35
- "50256": {
36
- "content": " ",
37
- "lstrip": false,
38
- "normalized": true,
39
- "rstrip": false,
40
- "single_word": false,
41
- "special": false
42
- },
43
- "50257": {
44
- "content": " ",
45
- "lstrip": false,
46
- "normalized": true,
47
- "rstrip": false,
48
- "single_word": false,
49
- "special": false
50
- },
51
- "50258": {
52
- "content": " ",
53
- "lstrip": false,
54
- "normalized": true,
55
- "rstrip": false,
56
- "single_word": false,
57
- "special": false
58
- },
59
- "50259": {
60
- "content": " ",
61
- "lstrip": false,
62
- "normalized": true,
63
- "rstrip": false,
64
- "single_word": false,
65
- "special": false
66
- },
67
- "50260": {
68
- "content": " ",
69
- "lstrip": false,
70
- "normalized": true,
71
- "rstrip": false,
72
- "single_word": false,
73
- "special": false
74
- },
75
- "50261": {
76
- "content": " ",
77
- "lstrip": false,
78
- "normalized": true,
79
- "rstrip": false,
80
- "single_word": false,
81
- "special": false
82
- },
83
- "50262": {
84
- "content": " ",
85
- "lstrip": false,
86
- "normalized": true,
87
- "rstrip": false,
88
- "single_word": false,
89
- "special": false
90
- },
91
- "50263": {
92
- "content": " ",
93
- "lstrip": false,
94
- "normalized": true,
95
- "rstrip": false,
96
- "single_word": false,
97
- "special": false
98
- },
99
- "50264": {
100
- "content": " ",
101
- "lstrip": false,
102
- "normalized": true,
103
- "rstrip": false,
104
- "single_word": false,
105
- "special": false
106
- },
107
- "50265": {
108
- "content": " ",
109
- "lstrip": false,
110
- "normalized": true,
111
- "rstrip": false,
112
- "single_word": false,
113
- "special": false
114
- },
115
- "50266": {
116
- "content": " ",
117
- "lstrip": false,
118
- "normalized": true,
119
- "rstrip": false,
120
- "single_word": false,
121
- "special": false
122
- },
123
- "50267": {
124
- "content": " ",
125
- "lstrip": false,
126
- "normalized": true,
127
- "rstrip": false,
128
- "single_word": false,
129
- "special": false
130
- },
131
- "50268": {
132
- "content": " ",
133
- "lstrip": false,
134
- "normalized": true,
135
- "rstrip": false,
136
- "single_word": false,
137
- "special": false
138
- },
139
- "50269": {
140
- "content": " ",
141
- "lstrip": false,
142
- "normalized": true,
143
- "rstrip": false,
144
- "single_word": false,
145
- "special": false
146
- },
147
- "50270": {
148
- "content": " ",
149
- "lstrip": false,
150
- "normalized": true,
151
- "rstrip": false,
152
- "single_word": false,
153
- "special": false
154
- },
155
- "50271": {
156
- "content": " ",
157
- "lstrip": false,
158
- "normalized": true,
159
- "rstrip": false,
160
- "single_word": false,
161
- "special": false
162
- },
163
- "50272": {
164
- "content": " ",
165
- "lstrip": false,
166
- "normalized": true,
167
- "rstrip": false,
168
- "single_word": false,
169
- "special": false
170
- },
171
- "50273": {
172
- "content": " ",
173
- "lstrip": false,
174
- "normalized": true,
175
- "rstrip": false,
176
- "single_word": false,
177
- "special": false
178
- },
179
- "50274": {
180
- "content": " ",
181
- "lstrip": false,
182
- "normalized": true,
183
- "rstrip": false,
184
- "single_word": false,
185
- "special": false
186
- },
187
- "50275": {
188
- "content": " ",
189
- "lstrip": false,
190
- "normalized": true,
191
- "rstrip": false,
192
- "single_word": false,
193
- "special": false
194
- },
195
- "50276": {
196
- "content": " ",
197
- "lstrip": false,
198
- "normalized": true,
199
- "rstrip": false,
200
- "single_word": false,
201
- "special": false
202
- },
203
- "50277": {
204
- "content": "|||EMAIL_ADDRESS|||",
205
- "lstrip": false,
206
- "normalized": true,
207
- "rstrip": false,
208
- "single_word": false,
209
- "special": false
210
- },
211
- "50278": {
212
- "content": "|||PHONE_NUMBER|||",
213
- "lstrip": false,
214
- "normalized": true,
215
- "rstrip": false,
216
- "single_word": false,
217
- "special": false
218
- },
219
- "50279": {
220
- "content": "<|endoftext|>",
221
  "lstrip": false,
222
  "normalized": false,
223
  "rstrip": false,
224
  "single_word": false,
225
  "special": true
226
  },
227
- "50280": {
228
  "content": "[UNK]",
229
  "lstrip": false,
230
  "normalized": false,
@@ -232,7 +16,7 @@
232
  "single_word": false,
233
  "special": true
234
  },
235
- "50281": {
236
  "content": "[CLS]",
237
  "lstrip": false,
238
  "normalized": false,
@@ -240,7 +24,7 @@
240
  "single_word": false,
241
  "special": true
242
  },
243
- "50282": {
244
  "content": "[SEP]",
245
  "lstrip": false,
246
  "normalized": false,
@@ -248,698 +32,27 @@
248
  "single_word": false,
249
  "special": true
250
  },
251
- "50283": {
252
- "content": "[PAD]",
253
- "lstrip": false,
254
- "normalized": false,
255
- "rstrip": false,
256
- "single_word": false,
257
- "special": true
258
- },
259
- "50284": {
260
  "content": "[MASK]",
261
- "lstrip": true,
262
  "normalized": false,
263
  "rstrip": false,
264
  "single_word": false,
265
  "special": true
266
- },
267
- "50285": {
268
- "content": "[unused0]",
269
- "lstrip": false,
270
- "normalized": true,
271
- "rstrip": false,
272
- "single_word": false,
273
- "special": false
274
- },
275
- "50286": {
276
- "content": "[unused1]",
277
- "lstrip": false,
278
- "normalized": true,
279
- "rstrip": false,
280
- "single_word": false,
281
- "special": false
282
- },
283
- "50287": {
284
- "content": "[unused2]",
285
- "lstrip": false,
286
- "normalized": true,
287
- "rstrip": false,
288
- "single_word": false,
289
- "special": false
290
- },
291
- "50288": {
292
- "content": "[unused3]",
293
- "lstrip": false,
294
- "normalized": true,
295
- "rstrip": false,
296
- "single_word": false,
297
- "special": false
298
- },
299
- "50289": {
300
- "content": "[unused4]",
301
- "lstrip": false,
302
- "normalized": true,
303
- "rstrip": false,
304
- "single_word": false,
305
- "special": false
306
- },
307
- "50290": {
308
- "content": "[unused5]",
309
- "lstrip": false,
310
- "normalized": true,
311
- "rstrip": false,
312
- "single_word": false,
313
- "special": false
314
- },
315
- "50291": {
316
- "content": "[unused6]",
317
- "lstrip": false,
318
- "normalized": true,
319
- "rstrip": false,
320
- "single_word": false,
321
- "special": false
322
- },
323
- "50292": {
324
- "content": "[unused7]",
325
- "lstrip": false,
326
- "normalized": true,
327
- "rstrip": false,
328
- "single_word": false,
329
- "special": false
330
- },
331
- "50293": {
332
- "content": "[unused8]",
333
- "lstrip": false,
334
- "normalized": true,
335
- "rstrip": false,
336
- "single_word": false,
337
- "special": false
338
- },
339
- "50294": {
340
- "content": "[unused9]",
341
- "lstrip": false,
342
- "normalized": true,
343
- "rstrip": false,
344
- "single_word": false,
345
- "special": false
346
- },
347
- "50295": {
348
- "content": "[unused10]",
349
- "lstrip": false,
350
- "normalized": true,
351
- "rstrip": false,
352
- "single_word": false,
353
- "special": false
354
- },
355
- "50296": {
356
- "content": "[unused11]",
357
- "lstrip": false,
358
- "normalized": true,
359
- "rstrip": false,
360
- "single_word": false,
361
- "special": false
362
- },
363
- "50297": {
364
- "content": "[unused12]",
365
- "lstrip": false,
366
- "normalized": true,
367
- "rstrip": false,
368
- "single_word": false,
369
- "special": false
370
- },
371
- "50298": {
372
- "content": "[unused13]",
373
- "lstrip": false,
374
- "normalized": true,
375
- "rstrip": false,
376
- "single_word": false,
377
- "special": false
378
- },
379
- "50299": {
380
- "content": "[unused14]",
381
- "lstrip": false,
382
- "normalized": true,
383
- "rstrip": false,
384
- "single_word": false,
385
- "special": false
386
- },
387
- "50300": {
388
- "content": "[unused15]",
389
- "lstrip": false,
390
- "normalized": true,
391
- "rstrip": false,
392
- "single_word": false,
393
- "special": false
394
- },
395
- "50301": {
396
- "content": "[unused16]",
397
- "lstrip": false,
398
- "normalized": true,
399
- "rstrip": false,
400
- "single_word": false,
401
- "special": false
402
- },
403
- "50302": {
404
- "content": "[unused17]",
405
- "lstrip": false,
406
- "normalized": true,
407
- "rstrip": false,
408
- "single_word": false,
409
- "special": false
410
- },
411
- "50303": {
412
- "content": "[unused18]",
413
- "lstrip": false,
414
- "normalized": true,
415
- "rstrip": false,
416
- "single_word": false,
417
- "special": false
418
- },
419
- "50304": {
420
- "content": "[unused19]",
421
- "lstrip": false,
422
- "normalized": true,
423
- "rstrip": false,
424
- "single_word": false,
425
- "special": false
426
- },
427
- "50305": {
428
- "content": "[unused20]",
429
- "lstrip": false,
430
- "normalized": true,
431
- "rstrip": false,
432
- "single_word": false,
433
- "special": false
434
- },
435
- "50306": {
436
- "content": "[unused21]",
437
- "lstrip": false,
438
- "normalized": true,
439
- "rstrip": false,
440
- "single_word": false,
441
- "special": false
442
- },
443
- "50307": {
444
- "content": "[unused22]",
445
- "lstrip": false,
446
- "normalized": true,
447
- "rstrip": false,
448
- "single_word": false,
449
- "special": false
450
- },
451
- "50308": {
452
- "content": "[unused23]",
453
- "lstrip": false,
454
- "normalized": true,
455
- "rstrip": false,
456
- "single_word": false,
457
- "special": false
458
- },
459
- "50309": {
460
- "content": "[unused24]",
461
- "lstrip": false,
462
- "normalized": true,
463
- "rstrip": false,
464
- "single_word": false,
465
- "special": false
466
- },
467
- "50310": {
468
- "content": "[unused25]",
469
- "lstrip": false,
470
- "normalized": true,
471
- "rstrip": false,
472
- "single_word": false,
473
- "special": false
474
- },
475
- "50311": {
476
- "content": "[unused26]",
477
- "lstrip": false,
478
- "normalized": true,
479
- "rstrip": false,
480
- "single_word": false,
481
- "special": false
482
- },
483
- "50312": {
484
- "content": "[unused27]",
485
- "lstrip": false,
486
- "normalized": true,
487
- "rstrip": false,
488
- "single_word": false,
489
- "special": false
490
- },
491
- "50313": {
492
- "content": "[unused28]",
493
- "lstrip": false,
494
- "normalized": true,
495
- "rstrip": false,
496
- "single_word": false,
497
- "special": false
498
- },
499
- "50314": {
500
- "content": "[unused29]",
501
- "lstrip": false,
502
- "normalized": true,
503
- "rstrip": false,
504
- "single_word": false,
505
- "special": false
506
- },
507
- "50315": {
508
- "content": "[unused30]",
509
- "lstrip": false,
510
- "normalized": true,
511
- "rstrip": false,
512
- "single_word": false,
513
- "special": false
514
- },
515
- "50316": {
516
- "content": "[unused31]",
517
- "lstrip": false,
518
- "normalized": true,
519
- "rstrip": false,
520
- "single_word": false,
521
- "special": false
522
- },
523
- "50317": {
524
- "content": "[unused32]",
525
- "lstrip": false,
526
- "normalized": true,
527
- "rstrip": false,
528
- "single_word": false,
529
- "special": false
530
- },
531
- "50318": {
532
- "content": "[unused33]",
533
- "lstrip": false,
534
- "normalized": true,
535
- "rstrip": false,
536
- "single_word": false,
537
- "special": false
538
- },
539
- "50319": {
540
- "content": "[unused34]",
541
- "lstrip": false,
542
- "normalized": true,
543
- "rstrip": false,
544
- "single_word": false,
545
- "special": false
546
- },
547
- "50320": {
548
- "content": "[unused35]",
549
- "lstrip": false,
550
- "normalized": true,
551
- "rstrip": false,
552
- "single_word": false,
553
- "special": false
554
- },
555
- "50321": {
556
- "content": "[unused36]",
557
- "lstrip": false,
558
- "normalized": true,
559
- "rstrip": false,
560
- "single_word": false,
561
- "special": false
562
- },
563
- "50322": {
564
- "content": "[unused37]",
565
- "lstrip": false,
566
- "normalized": true,
567
- "rstrip": false,
568
- "single_word": false,
569
- "special": false
570
- },
571
- "50323": {
572
- "content": "[unused38]",
573
- "lstrip": false,
574
- "normalized": true,
575
- "rstrip": false,
576
- "single_word": false,
577
- "special": false
578
- },
579
- "50324": {
580
- "content": "[unused39]",
581
- "lstrip": false,
582
- "normalized": true,
583
- "rstrip": false,
584
- "single_word": false,
585
- "special": false
586
- },
587
- "50325": {
588
- "content": "[unused40]",
589
- "lstrip": false,
590
- "normalized": true,
591
- "rstrip": false,
592
- "single_word": false,
593
- "special": false
594
- },
595
- "50326": {
596
- "content": "[unused41]",
597
- "lstrip": false,
598
- "normalized": true,
599
- "rstrip": false,
600
- "single_word": false,
601
- "special": false
602
- },
603
- "50327": {
604
- "content": "[unused42]",
605
- "lstrip": false,
606
- "normalized": true,
607
- "rstrip": false,
608
- "single_word": false,
609
- "special": false
610
- },
611
- "50328": {
612
- "content": "[unused43]",
613
- "lstrip": false,
614
- "normalized": true,
615
- "rstrip": false,
616
- "single_word": false,
617
- "special": false
618
- },
619
- "50329": {
620
- "content": "[unused44]",
621
- "lstrip": false,
622
- "normalized": true,
623
- "rstrip": false,
624
- "single_word": false,
625
- "special": false
626
- },
627
- "50330": {
628
- "content": "[unused45]",
629
- "lstrip": false,
630
- "normalized": true,
631
- "rstrip": false,
632
- "single_word": false,
633
- "special": false
634
- },
635
- "50331": {
636
- "content": "[unused46]",
637
- "lstrip": false,
638
- "normalized": true,
639
- "rstrip": false,
640
- "single_word": false,
641
- "special": false
642
- },
643
- "50332": {
644
- "content": "[unused47]",
645
- "lstrip": false,
646
- "normalized": true,
647
- "rstrip": false,
648
- "single_word": false,
649
- "special": false
650
- },
651
- "50333": {
652
- "content": "[unused48]",
653
- "lstrip": false,
654
- "normalized": true,
655
- "rstrip": false,
656
- "single_word": false,
657
- "special": false
658
- },
659
- "50334": {
660
- "content": "[unused49]",
661
- "lstrip": false,
662
- "normalized": true,
663
- "rstrip": false,
664
- "single_word": false,
665
- "special": false
666
- },
667
- "50335": {
668
- "content": "[unused50]",
669
- "lstrip": false,
670
- "normalized": true,
671
- "rstrip": false,
672
- "single_word": false,
673
- "special": false
674
- },
675
- "50336": {
676
- "content": "[unused51]",
677
- "lstrip": false,
678
- "normalized": true,
679
- "rstrip": false,
680
- "single_word": false,
681
- "special": false
682
- },
683
- "50337": {
684
- "content": "[unused52]",
685
- "lstrip": false,
686
- "normalized": true,
687
- "rstrip": false,
688
- "single_word": false,
689
- "special": false
690
- },
691
- "50338": {
692
- "content": "[unused53]",
693
- "lstrip": false,
694
- "normalized": true,
695
- "rstrip": false,
696
- "single_word": false,
697
- "special": false
698
- },
699
- "50339": {
700
- "content": "[unused54]",
701
- "lstrip": false,
702
- "normalized": true,
703
- "rstrip": false,
704
- "single_word": false,
705
- "special": false
706
- },
707
- "50340": {
708
- "content": "[unused55]",
709
- "lstrip": false,
710
- "normalized": true,
711
- "rstrip": false,
712
- "single_word": false,
713
- "special": false
714
- },
715
- "50341": {
716
- "content": "[unused56]",
717
- "lstrip": false,
718
- "normalized": true,
719
- "rstrip": false,
720
- "single_word": false,
721
- "special": false
722
- },
723
- "50342": {
724
- "content": "[unused57]",
725
- "lstrip": false,
726
- "normalized": true,
727
- "rstrip": false,
728
- "single_word": false,
729
- "special": false
730
- },
731
- "50343": {
732
- "content": "[unused58]",
733
- "lstrip": false,
734
- "normalized": true,
735
- "rstrip": false,
736
- "single_word": false,
737
- "special": false
738
- },
739
- "50344": {
740
- "content": "[unused59]",
741
- "lstrip": false,
742
- "normalized": true,
743
- "rstrip": false,
744
- "single_word": false,
745
- "special": false
746
- },
747
- "50345": {
748
- "content": "[unused60]",
749
- "lstrip": false,
750
- "normalized": true,
751
- "rstrip": false,
752
- "single_word": false,
753
- "special": false
754
- },
755
- "50346": {
756
- "content": "[unused61]",
757
- "lstrip": false,
758
- "normalized": true,
759
- "rstrip": false,
760
- "single_word": false,
761
- "special": false
762
- },
763
- "50347": {
764
- "content": "[unused62]",
765
- "lstrip": false,
766
- "normalized": true,
767
- "rstrip": false,
768
- "single_word": false,
769
- "special": false
770
- },
771
- "50348": {
772
- "content": "[unused63]",
773
- "lstrip": false,
774
- "normalized": true,
775
- "rstrip": false,
776
- "single_word": false,
777
- "special": false
778
- },
779
- "50349": {
780
- "content": "[unused64]",
781
- "lstrip": false,
782
- "normalized": true,
783
- "rstrip": false,
784
- "single_word": false,
785
- "special": false
786
- },
787
- "50350": {
788
- "content": "[unused65]",
789
- "lstrip": false,
790
- "normalized": true,
791
- "rstrip": false,
792
- "single_word": false,
793
- "special": false
794
- },
795
- "50351": {
796
- "content": "[unused66]",
797
- "lstrip": false,
798
- "normalized": true,
799
- "rstrip": false,
800
- "single_word": false,
801
- "special": false
802
- },
803
- "50352": {
804
- "content": "[unused67]",
805
- "lstrip": false,
806
- "normalized": true,
807
- "rstrip": false,
808
- "single_word": false,
809
- "special": false
810
- },
811
- "50353": {
812
- "content": "[unused68]",
813
- "lstrip": false,
814
- "normalized": true,
815
- "rstrip": false,
816
- "single_word": false,
817
- "special": false
818
- },
819
- "50354": {
820
- "content": "[unused69]",
821
- "lstrip": false,
822
- "normalized": true,
823
- "rstrip": false,
824
- "single_word": false,
825
- "special": false
826
- },
827
- "50355": {
828
- "content": "[unused70]",
829
- "lstrip": false,
830
- "normalized": true,
831
- "rstrip": false,
832
- "single_word": false,
833
- "special": false
834
- },
835
- "50356": {
836
- "content": "[unused71]",
837
- "lstrip": false,
838
- "normalized": true,
839
- "rstrip": false,
840
- "single_word": false,
841
- "special": false
842
- },
843
- "50357": {
844
- "content": "[unused72]",
845
- "lstrip": false,
846
- "normalized": true,
847
- "rstrip": false,
848
- "single_word": false,
849
- "special": false
850
- },
851
- "50358": {
852
- "content": "[unused73]",
853
- "lstrip": false,
854
- "normalized": true,
855
- "rstrip": false,
856
- "single_word": false,
857
- "special": false
858
- },
859
- "50359": {
860
- "content": "[unused74]",
861
- "lstrip": false,
862
- "normalized": true,
863
- "rstrip": false,
864
- "single_word": false,
865
- "special": false
866
- },
867
- "50360": {
868
- "content": "[unused75]",
869
- "lstrip": false,
870
- "normalized": true,
871
- "rstrip": false,
872
- "single_word": false,
873
- "special": false
874
- },
875
- "50361": {
876
- "content": "[unused76]",
877
- "lstrip": false,
878
- "normalized": true,
879
- "rstrip": false,
880
- "single_word": false,
881
- "special": false
882
- },
883
- "50362": {
884
- "content": "[unused77]",
885
- "lstrip": false,
886
- "normalized": true,
887
- "rstrip": false,
888
- "single_word": false,
889
- "special": false
890
- },
891
- "50363": {
892
- "content": "[unused78]",
893
- "lstrip": false,
894
- "normalized": true,
895
- "rstrip": false,
896
- "single_word": false,
897
- "special": false
898
- },
899
- "50364": {
900
- "content": "[unused79]",
901
- "lstrip": false,
902
- "normalized": true,
903
- "rstrip": false,
904
- "single_word": false,
905
- "special": false
906
- },
907
- "50365": {
908
- "content": "[unused80]",
909
- "lstrip": false,
910
- "normalized": true,
911
- "rstrip": false,
912
- "single_word": false,
913
- "special": false
914
- },
915
- "50366": {
916
- "content": "[unused81]",
917
- "lstrip": false,
918
- "normalized": true,
919
- "rstrip": false,
920
- "single_word": false,
921
- "special": false
922
- },
923
- "50367": {
924
- "content": "[unused82]",
925
- "lstrip": false,
926
- "normalized": true,
927
- "rstrip": false,
928
- "single_word": false,
929
- "special": false
930
  }
931
  },
932
  "clean_up_tokenization_spaces": true,
933
  "cls_token": "[CLS]",
 
 
934
  "extra_special_tokens": {},
935
  "mask_token": "[MASK]",
936
- "model_input_names": [
937
- "input_ids",
938
- "attention_mask"
939
- ],
940
- "model_max_length": 8192,
941
  "pad_token": "[PAD]",
942
  "sep_token": "[SEP]",
943
- "tokenizer_class": "PreTrainedTokenizerFast",
 
 
944
  "unk_token": "[UNK]"
945
  }
 
1
  {
2
  "added_tokens_decoder": {
3
  "0": {
4
+ "content": "[PAD]",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "lstrip": false,
6
  "normalized": false,
7
  "rstrip": false,
8
  "single_word": false,
9
  "special": true
10
  },
11
+ "100": {
12
  "content": "[UNK]",
13
  "lstrip": false,
14
  "normalized": false,
 
16
  "single_word": false,
17
  "special": true
18
  },
19
+ "101": {
20
  "content": "[CLS]",
21
  "lstrip": false,
22
  "normalized": false,
 
24
  "single_word": false,
25
  "special": true
26
  },
27
+ "102": {
28
  "content": "[SEP]",
29
  "lstrip": false,
30
  "normalized": false,
 
32
  "single_word": false,
33
  "special": true
34
  },
35
+ "103": {
 
 
 
 
 
 
 
 
36
  "content": "[MASK]",
37
+ "lstrip": false,
38
  "normalized": false,
39
  "rstrip": false,
40
  "single_word": false,
41
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  }
43
  },
44
  "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
  "extra_special_tokens": {},
49
  "mask_token": "[MASK]",
50
+ "model_max_length": 512,
51
+ "never_split": null,
 
 
 
52
  "pad_token": "[PAD]",
53
  "sep_token": "[SEP]",
54
+ "strip_accents": null,
55
+ "tokenize_chinese_chars": true,
56
+ "tokenizer_class": "DistilBertTokenizer",
57
  "unk_token": "[UNK]"
58
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8893bc65b7c023594e1f5e168212876ac4146bba0d157ebc60a10b6711957757
3
  size 5368
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e8a6b0729a29c2098575f3533b396747556def8d2ffc03b2630747b8ab51429e
3
  size 5368
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/config.json ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "distilbert/distilbert-base-uncased-finetuned-sst-2-english",
3
+ "activation": "gelu",
4
+ "architectures": [
5
+ "DistilBertForSequenceClassification"
6
+ ],
7
+ "attention_dropout": 0.1,
8
+ "dim": 768,
9
+ "dropout": 0.1,
10
+ "finetuning_task": "sst-2",
11
+ "hidden_dim": 3072,
12
+ "id2label": {
13
+ "0": "NEGATIVE",
14
+ "1": "POSITIVE"
15
+ },
16
+ "initializer_range": 0.02,
17
+ "label2id": {
18
+ "NEGATIVE": 0,
19
+ "POSITIVE": 1
20
+ },
21
+ "max_position_embeddings": 512,
22
+ "model_type": "distilbert",
23
+ "n_heads": 12,
24
+ "n_layers": 6,
25
+ "output_past": true,
26
+ "pad_token_id": 0,
27
+ "problem_type": "single_label_classification",
28
+ "qa_dropout": 0.1,
29
+ "seq_classif_dropout": 0.2,
30
+ "sinusoidal_pos_embds": false,
31
+ "tie_weights_": true,
32
+ "torch_dtype": "float32",
33
+ "transformers_version": "4.48.0.dev0",
34
+ "vocab_size": 30522
35
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a563fb7e73103a7b604d083e043eb785045fd605e856f317de8fac665dd6a86f
3
+ size 267832560
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eff2c710630c9a43acd1cb095c8fdaf73b9c38c277a4df4f5522defca80a0b96
3
+ size 535727290
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:80ef0c229eb7e41a472af9577be0ab8f9a527c90c97f4cb8ba6946afe6a769ba
3
+ size 14308
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6082019d8c92290a8b1a20e35867d31ba202a10378b8f414f67d035bf114c590
3
+ size 1064
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "100": {
12
+ "content": "[UNK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "101": {
20
+ "content": "[CLS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "102": {
28
+ "content": "[SEP]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "103": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "clean_up_tokenization_spaces": true,
45
+ "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
+ "extra_special_tokens": {},
49
+ "mask_token": "[MASK]",
50
+ "model_max_length": 512,
51
+ "never_split": null,
52
+ "pad_token": "[PAD]",
53
+ "sep_token": "[SEP]",
54
+ "strip_accents": null,
55
+ "tokenize_chinese_chars": true,
56
+ "tokenizer_class": "DistilBertTokenizer",
57
+ "unk_token": "[UNK]"
58
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/trainer_state.json ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.705686630369026,
3
+ "best_model_checkpoint": "./results/trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14",
4
+ "epoch": 1.0,
5
+ "eval_steps": 500,
6
+ "global_step": 14,
7
+ "is_hyper_param_search": true,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "eval_accuracy": 0.7083333333333334,
14
+ "eval_f1": 0.705686630369026,
15
+ "eval_loss": 1.1053823232650757,
16
+ "eval_precision": 0.7055555555555554,
17
+ "eval_recall": 0.7083333333333334,
18
+ "eval_runtime": 0.0552,
19
+ "eval_samples_per_second": 434.724,
20
+ "eval_steps_per_second": 36.227,
21
+ "step": 14
22
+ }
23
+ ],
24
+ "logging_steps": 500,
25
+ "max_steps": 42,
26
+ "num_input_tokens_seen": 0,
27
+ "num_train_epochs": 3,
28
+ "save_steps": 500,
29
+ "stateful_callbacks": {
30
+ "TrainerControl": {
31
+ "args": {
32
+ "should_epoch_stop": false,
33
+ "should_evaluate": false,
34
+ "should_log": false,
35
+ "should_save": true,
36
+ "should_training_stop": false
37
+ },
38
+ "attributes": {}
39
+ }
40
+ },
41
+ "total_flos": 0,
42
+ "train_batch_size": 16,
43
+ "trial_name": "trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>",
44
+ "trial_params": {
45
+ "learning_rate": 1.6736924699428189e-06,
46
+ "num_train_epochs": 3,
47
+ "per_device_train_batch_size": 16,
48
+ "seed": 29
49
+ }
50
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:002043f78001120c873ae3422dfacd766cf7dd3ecc538f26b6be14cf76b90ecb
3
+ size 5304
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-14/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/config.json ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "distilbert/distilbert-base-uncased-finetuned-sst-2-english",
3
+ "activation": "gelu",
4
+ "architectures": [
5
+ "DistilBertForSequenceClassification"
6
+ ],
7
+ "attention_dropout": 0.1,
8
+ "dim": 768,
9
+ "dropout": 0.1,
10
+ "finetuning_task": "sst-2",
11
+ "hidden_dim": 3072,
12
+ "id2label": {
13
+ "0": "NEGATIVE",
14
+ "1": "POSITIVE"
15
+ },
16
+ "initializer_range": 0.02,
17
+ "label2id": {
18
+ "NEGATIVE": 0,
19
+ "POSITIVE": 1
20
+ },
21
+ "max_position_embeddings": 512,
22
+ "model_type": "distilbert",
23
+ "n_heads": 12,
24
+ "n_layers": 6,
25
+ "output_past": true,
26
+ "pad_token_id": 0,
27
+ "problem_type": "single_label_classification",
28
+ "qa_dropout": 0.1,
29
+ "seq_classif_dropout": 0.2,
30
+ "sinusoidal_pos_embds": false,
31
+ "tie_weights_": true,
32
+ "torch_dtype": "float32",
33
+ "transformers_version": "4.48.0.dev0",
34
+ "vocab_size": 30522
35
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:23e8f6097bd785dc4a80efde0071814d2a8a24d197b0446264aa57b13e973986
3
+ size 267832560
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:455593754683f63e2066f814da10587b0320f32aa6429277fcaf1910b88ec125
3
+ size 535727290
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9778e33f0e2c5d7c90bc2020333ba9a60259dce674098743e0923ebc46cd3527
3
+ size 14308
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:45bdc9092a0b7caca774a6f53e11ba41f5ba6bcc6fb603e0312cbe92daeca125
3
+ size 1064
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "100": {
12
+ "content": "[UNK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "101": {
20
+ "content": "[CLS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "102": {
28
+ "content": "[SEP]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "103": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "clean_up_tokenization_spaces": true,
45
+ "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
+ "extra_special_tokens": {},
49
+ "mask_token": "[MASK]",
50
+ "model_max_length": 512,
51
+ "never_split": null,
52
+ "pad_token": "[PAD]",
53
+ "sep_token": "[SEP]",
54
+ "strip_accents": null,
55
+ "tokenize_chinese_chars": true,
56
+ "tokenizer_class": "DistilBertTokenizer",
57
+ "unk_token": "[UNK]"
58
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/trainer_state.json ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.75,
3
+ "best_model_checkpoint": "./results/trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28",
4
+ "epoch": 2.0,
5
+ "eval_steps": 500,
6
+ "global_step": 28,
7
+ "is_hyper_param_search": true,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "eval_accuracy": 0.7083333333333334,
14
+ "eval_f1": 0.705686630369026,
15
+ "eval_loss": 1.1053823232650757,
16
+ "eval_precision": 0.7055555555555554,
17
+ "eval_recall": 0.7083333333333334,
18
+ "eval_runtime": 0.0552,
19
+ "eval_samples_per_second": 434.724,
20
+ "eval_steps_per_second": 36.227,
21
+ "step": 14
22
+ },
23
+ {
24
+ "epoch": 2.0,
25
+ "eval_accuracy": 0.75,
26
+ "eval_f1": 0.75,
27
+ "eval_loss": 0.9916526675224304,
28
+ "eval_precision": 0.75,
29
+ "eval_recall": 0.75,
30
+ "eval_runtime": 0.0571,
31
+ "eval_samples_per_second": 420.605,
32
+ "eval_steps_per_second": 35.05,
33
+ "step": 28
34
+ }
35
+ ],
36
+ "logging_steps": 500,
37
+ "max_steps": 42,
38
+ "num_input_tokens_seen": 0,
39
+ "num_train_epochs": 3,
40
+ "save_steps": 500,
41
+ "stateful_callbacks": {
42
+ "TrainerControl": {
43
+ "args": {
44
+ "should_epoch_stop": false,
45
+ "should_evaluate": false,
46
+ "should_log": false,
47
+ "should_save": true,
48
+ "should_training_stop": false
49
+ },
50
+ "attributes": {}
51
+ }
52
+ },
53
+ "total_flos": 0,
54
+ "train_batch_size": 16,
55
+ "trial_name": "trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>",
56
+ "trial_params": {
57
+ "learning_rate": 1.6736924699428189e-06,
58
+ "num_train_epochs": 3,
59
+ "per_device_train_batch_size": 16,
60
+ "seed": 29
61
+ }
62
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:002043f78001120c873ae3422dfacd766cf7dd3ecc538f26b6be14cf76b90ecb
3
+ size 5304
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-28/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/config.json ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "distilbert/distilbert-base-uncased-finetuned-sst-2-english",
3
+ "activation": "gelu",
4
+ "architectures": [
5
+ "DistilBertForSequenceClassification"
6
+ ],
7
+ "attention_dropout": 0.1,
8
+ "dim": 768,
9
+ "dropout": 0.1,
10
+ "finetuning_task": "sst-2",
11
+ "hidden_dim": 3072,
12
+ "id2label": {
13
+ "0": "NEGATIVE",
14
+ "1": "POSITIVE"
15
+ },
16
+ "initializer_range": 0.02,
17
+ "label2id": {
18
+ "NEGATIVE": 0,
19
+ "POSITIVE": 1
20
+ },
21
+ "max_position_embeddings": 512,
22
+ "model_type": "distilbert",
23
+ "n_heads": 12,
24
+ "n_layers": 6,
25
+ "output_past": true,
26
+ "pad_token_id": 0,
27
+ "problem_type": "single_label_classification",
28
+ "qa_dropout": 0.1,
29
+ "seq_classif_dropout": 0.2,
30
+ "sinusoidal_pos_embds": false,
31
+ "tie_weights_": true,
32
+ "torch_dtype": "float32",
33
+ "transformers_version": "4.48.0.dev0",
34
+ "vocab_size": 30522
35
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:581e7934f2e3c380c9f98e7052b140030db513c25463ed1297b28c5f37c0a15f
3
+ size 267832560
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6cae7abc84af99dfd05cda7735ddd269b879b061c664d5d9cf7680197a7505f1
3
+ size 535727290
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b165aacf5bb384cb3eac3a98838abb3b01e9ed93c2d1f0f1ffc804d8ad536af2
3
+ size 14308
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d5cd22dd91f3b3918b1b30ba6ec3fa2fa8023dddc9ce47c221fda726c77dbba4
3
+ size 1064
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "100": {
12
+ "content": "[UNK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "101": {
20
+ "content": "[CLS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "102": {
28
+ "content": "[SEP]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "103": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "clean_up_tokenization_spaces": true,
45
+ "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
+ "extra_special_tokens": {},
49
+ "mask_token": "[MASK]",
50
+ "model_max_length": 512,
51
+ "never_split": null,
52
+ "pad_token": "[PAD]",
53
+ "sep_token": "[SEP]",
54
+ "strip_accents": null,
55
+ "tokenize_chinese_chars": true,
56
+ "tokenizer_class": "DistilBertTokenizer",
57
+ "unk_token": "[UNK]"
58
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/trainer_state.json ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.7897761645493043,
3
+ "best_model_checkpoint": "./results/trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42",
4
+ "epoch": 3.0,
5
+ "eval_steps": 500,
6
+ "global_step": 42,
7
+ "is_hyper_param_search": true,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "eval_accuracy": 0.7083333333333334,
14
+ "eval_f1": 0.705686630369026,
15
+ "eval_loss": 1.1053823232650757,
16
+ "eval_precision": 0.7055555555555554,
17
+ "eval_recall": 0.7083333333333334,
18
+ "eval_runtime": 0.0552,
19
+ "eval_samples_per_second": 434.724,
20
+ "eval_steps_per_second": 36.227,
21
+ "step": 14
22
+ },
23
+ {
24
+ "epoch": 2.0,
25
+ "eval_accuracy": 0.75,
26
+ "eval_f1": 0.75,
27
+ "eval_loss": 0.9916526675224304,
28
+ "eval_precision": 0.75,
29
+ "eval_recall": 0.75,
30
+ "eval_runtime": 0.0571,
31
+ "eval_samples_per_second": 420.605,
32
+ "eval_steps_per_second": 35.05,
33
+ "step": 28
34
+ },
35
+ {
36
+ "epoch": 3.0,
37
+ "eval_accuracy": 0.7916666666666666,
38
+ "eval_f1": 0.7897761645493043,
39
+ "eval_loss": 0.9532522559165955,
40
+ "eval_precision": 0.7907407407407407,
41
+ "eval_recall": 0.7916666666666666,
42
+ "eval_runtime": 0.0717,
43
+ "eval_samples_per_second": 334.856,
44
+ "eval_steps_per_second": 27.905,
45
+ "step": 42
46
+ }
47
+ ],
48
+ "logging_steps": 500,
49
+ "max_steps": 42,
50
+ "num_input_tokens_seen": 0,
51
+ "num_train_epochs": 3,
52
+ "save_steps": 500,
53
+ "stateful_callbacks": {
54
+ "TrainerControl": {
55
+ "args": {
56
+ "should_epoch_stop": false,
57
+ "should_evaluate": false,
58
+ "should_log": false,
59
+ "should_save": true,
60
+ "should_training_stop": true
61
+ },
62
+ "attributes": {}
63
+ }
64
+ },
65
+ "total_flos": 0,
66
+ "train_batch_size": 16,
67
+ "trial_name": "trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>",
68
+ "trial_params": {
69
+ "learning_rate": 1.6736924699428189e-06,
70
+ "num_train_epochs": 3,
71
+ "per_device_train_batch_size": 16,
72
+ "seed": 29
73
+ }
74
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:002043f78001120c873ae3422dfacd766cf7dd3ecc538f26b6be14cf76b90ecb
3
+ size 5304
trial_<optuna.trial._trial.Trial object at 0x7d9ff2550150>/checkpoint-42/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/config.json ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "distilbert/distilbert-base-uncased-finetuned-sst-2-english",
3
+ "activation": "gelu",
4
+ "architectures": [
5
+ "DistilBertForSequenceClassification"
6
+ ],
7
+ "attention_dropout": 0.1,
8
+ "dim": 768,
9
+ "dropout": 0.1,
10
+ "finetuning_task": "sst-2",
11
+ "hidden_dim": 3072,
12
+ "id2label": {
13
+ "0": "NEGATIVE",
14
+ "1": "POSITIVE"
15
+ },
16
+ "initializer_range": 0.02,
17
+ "label2id": {
18
+ "NEGATIVE": 0,
19
+ "POSITIVE": 1
20
+ },
21
+ "max_position_embeddings": 512,
22
+ "model_type": "distilbert",
23
+ "n_heads": 12,
24
+ "n_layers": 6,
25
+ "output_past": true,
26
+ "pad_token_id": 0,
27
+ "problem_type": "single_label_classification",
28
+ "qa_dropout": 0.1,
29
+ "seq_classif_dropout": 0.2,
30
+ "sinusoidal_pos_embds": false,
31
+ "tie_weights_": true,
32
+ "torch_dtype": "float32",
33
+ "transformers_version": "4.48.0.dev0",
34
+ "vocab_size": 30522
35
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:697fed9d1e7489d81117749aae0dc51f0780a65282e9ffbe28552f0c9ca840f2
3
+ size 267832560
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1aef5a4357e0b16fe5bf3cdf9c0a0c261343b6b815348db058da89d4cf729dd1
3
+ size 535727290
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d5f3ae4cbee033473d464e45f9bd12dce3f9e537ccc9d9eac3ad2d28f7d3f81b
3
+ size 14308
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b499a6062aa4c5ff07cdad92bf04236dd5068c81c294bb0b38da2a045a00443f
3
+ size 1064
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "100": {
12
+ "content": "[UNK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "101": {
20
+ "content": "[CLS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "102": {
28
+ "content": "[SEP]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "103": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "clean_up_tokenization_spaces": true,
45
+ "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
+ "extra_special_tokens": {},
49
+ "mask_token": "[MASK]",
50
+ "model_max_length": 512,
51
+ "never_split": null,
52
+ "pad_token": "[PAD]",
53
+ "sep_token": "[SEP]",
54
+ "strip_accents": null,
55
+ "tokenize_chinese_chars": true,
56
+ "tokenizer_class": "DistilBertTokenizer",
57
+ "unk_token": "[UNK]"
58
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/trainer_state.json ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.9172494172494172,
3
+ "best_model_checkpoint": "./results/trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-7",
4
+ "epoch": 2.0,
5
+ "eval_steps": 500,
6
+ "global_step": 14,
7
+ "is_hyper_param_search": true,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "eval_accuracy": 0.9166666666666666,
14
+ "eval_f1": 0.9172494172494172,
15
+ "eval_loss": 0.354879230260849,
16
+ "eval_precision": 0.9305555555555557,
17
+ "eval_recall": 0.9166666666666666,
18
+ "eval_runtime": 0.0704,
19
+ "eval_samples_per_second": 340.712,
20
+ "eval_steps_per_second": 28.393,
21
+ "step": 7
22
+ },
23
+ {
24
+ "epoch": 2.0,
25
+ "eval_accuracy": 0.9166666666666666,
26
+ "eval_f1": 0.9172494172494172,
27
+ "eval_loss": 0.23112539947032928,
28
+ "eval_precision": 0.9305555555555557,
29
+ "eval_recall": 0.9166666666666666,
30
+ "eval_runtime": 0.0594,
31
+ "eval_samples_per_second": 403.879,
32
+ "eval_steps_per_second": 33.657,
33
+ "step": 14
34
+ }
35
+ ],
36
+ "logging_steps": 500,
37
+ "max_steps": 21,
38
+ "num_input_tokens_seen": 0,
39
+ "num_train_epochs": 3,
40
+ "save_steps": 500,
41
+ "stateful_callbacks": {
42
+ "TrainerControl": {
43
+ "args": {
44
+ "should_epoch_stop": false,
45
+ "should_evaluate": false,
46
+ "should_log": false,
47
+ "should_save": true,
48
+ "should_training_stop": false
49
+ },
50
+ "attributes": {}
51
+ }
52
+ },
53
+ "total_flos": 0,
54
+ "train_batch_size": 32,
55
+ "trial_name": "trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>",
56
+ "trial_params": {
57
+ "learning_rate": 9.755035812704661e-05,
58
+ "num_train_epochs": 3,
59
+ "per_device_train_batch_size": 32,
60
+ "seed": 8
61
+ }
62
+ }
trial_<optuna.trial._trial.Trial object at 0x7d9ff26e0850>/checkpoint-14/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4fd53fe6c7e1a01e4ffb6cdd3f0842509c111b0ada3d4f491be073c0d4c862f5
3
+ size 5304