arcwarden46 committed (verified)
Commit 6b9094e · 1 Parent(s): c4658c5

Training in progress, step 450, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ab1d4235f4cd4ffbf9da7dd14dd4eed563e3cc9a8305774d50524a4bdfe4c321
+ oid sha256:e75e76ad495c6f04db1db938b83a8c0a347ed4aae0aa2da55a2e4abed15fc6bd
  size 1101095848
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2ad9283a7e0b4973552b057e0266f4c82fc7940b8b14717f7f155a2d90913b85
+ oid sha256:4a6754dc365b79a92d5e2347eb928b390eae344331ae7e043e300718c75c9c02
  size 559894868
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7fe71d56aa1da2ecb9c7f19f08dd9b691384b7a27b82073a828a1fee61ea9417
+ oid sha256:1ada0c212eb58bca56bed03d6bd6d8c0e2a39e3c77d936483f633c2e099bd000
  size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2681c20f6b04cb297ca42ba79b92543a57c49e07fb40458eca8cb625497628aa
+ oid sha256:c9636ae38b683f4b5b714bdf172e563b0c593e0efe94f07eea78547963bfbfae
  size 1064
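
The four files above are Git LFS pointers rather than the binaries themselves: each records the pointer spec version, the sha256 oid of the real blob, and its size in bytes. As a minimal sketch (not taken from this repository, and with hypothetical file paths), a downloaded blob could be checked against its pointer like this:

```python
import hashlib
from pathlib import Path


def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file (version / oid sha256:<hex> / size <bytes>)."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if line.strip()
    )
    return {"oid": fields["oid"].split(":", 1)[1], "size": int(fields["size"])}


def matches_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob's size and sha256 digest against its LFS pointer."""
    meta = parse_lfs_pointer(pointer_path)
    blob = Path(blob_path)
    if blob.stat().st_size != meta["size"]:
        return False
    digest = hashlib.sha256()
    with blob.open("rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == meta["oid"]


# Hypothetical paths: a saved copy of the pointer text and the binary pulled via `git lfs pull`.
print(matches_pointer("adapter_model.safetensors.pointer",
                      "last-checkpoint/adapter_model.safetensors"))
```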
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
  {
- "best_metric": 0.9937074780464172,
- "best_model_checkpoint": "miner_id_24/checkpoint-300",
- "epoch": 0.039379122501886916,
+ "best_metric": 0.9613198041915894,
+ "best_model_checkpoint": "miner_id_24/checkpoint-450",
+ "epoch": 0.05906868375283037,
  "eval_steps": 150,
- "global_step": 300,
+ "global_step": 450,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -2131,6 +2131,1064 @@
  "eval_samples_per_second": 13.344,
  "eval_steps_per_second": 3.336,
  "step": 300
2134
+ },
2135
+ {
2136
+ "epoch": 0.039510386243559875,
2137
+ "grad_norm": 0.20209898054599762,
2138
+ "learning_rate": 2.6813618894527138e-05,
2139
+ "loss": 1.0962,
2140
+ "step": 301
2141
+ },
2142
+ {
2143
+ "epoch": 0.03964164998523283,
2144
+ "grad_norm": 0.2070281207561493,
2145
+ "learning_rate": 2.6490591592961578e-05,
2146
+ "loss": 1.0854,
2147
+ "step": 302
2148
+ },
2149
+ {
2150
+ "epoch": 0.039772913726905786,
2151
+ "grad_norm": 0.21131707727909088,
2152
+ "learning_rate": 2.6168819172567392e-05,
2153
+ "loss": 1.1135,
2154
+ "step": 303
2155
+ },
2156
+ {
2157
+ "epoch": 0.039904177468578744,
2158
+ "grad_norm": 0.2207150012254715,
2159
+ "learning_rate": 2.5848318808857606e-05,
2160
+ "loss": 1.1269,
2161
+ "step": 304
2162
+ },
2163
+ {
2164
+ "epoch": 0.040035441210251696,
2165
+ "grad_norm": 0.24536538124084473,
2166
+ "learning_rate": 2.5529107609445733e-05,
2167
+ "loss": 1.1705,
2168
+ "step": 305
2169
+ },
2170
+ {
2171
+ "epoch": 0.040166704951924655,
2172
+ "grad_norm": 0.27333205938339233,
2173
+ "learning_rate": 2.521120261313241e-05,
2174
+ "loss": 1.2272,
2175
+ "step": 306
2176
+ },
2177
+ {
2178
+ "epoch": 0.04029796869359761,
2179
+ "grad_norm": 0.42892393469810486,
2180
+ "learning_rate": 2.4894620788996037e-05,
2181
+ "loss": 1.2927,
2182
+ "step": 307
2183
+ },
2184
+ {
2185
+ "epoch": 0.040429232435270565,
2186
+ "grad_norm": 0.4479203224182129,
2187
+ "learning_rate": 2.457937903548695e-05,
2188
+ "loss": 1.1936,
2189
+ "step": 308
2190
+ },
2191
+ {
2192
+ "epoch": 0.040560496176943524,
2193
+ "grad_norm": 0.5271134376525879,
2194
+ "learning_rate": 2.426549417952542e-05,
2195
+ "loss": 1.2875,
2196
+ "step": 309
2197
+ },
2198
+ {
2199
+ "epoch": 0.04069175991861648,
2200
+ "grad_norm": 0.6161456108093262,
2201
+ "learning_rate": 2.3952982975603496e-05,
2202
+ "loss": 1.3302,
2203
+ "step": 310
2204
+ },
2205
+ {
2206
+ "epoch": 0.040823023660289434,
2207
+ "grad_norm": 0.6787443161010742,
2208
+ "learning_rate": 2.3641862104890595e-05,
2209
+ "loss": 1.2439,
2210
+ "step": 311
2211
+ },
2212
+ {
2213
+ "epoch": 0.04095428740196239,
2214
+ "grad_norm": 0.8013172149658203,
2215
+ "learning_rate": 2.3332148174343254e-05,
2216
+ "loss": 1.3476,
2217
+ "step": 312
2218
+ },
2219
+ {
2220
+ "epoch": 0.04108555114363535,
2221
+ "grad_norm": 0.8128992319107056,
2222
+ "learning_rate": 2.3023857715818532e-05,
2223
+ "loss": 1.2517,
2224
+ "step": 313
2225
+ },
2226
+ {
2227
+ "epoch": 0.0412168148853083,
2228
+ "grad_norm": 0.7722633481025696,
2229
+ "learning_rate": 2.2717007185191674e-05,
2230
+ "loss": 1.2515,
2231
+ "step": 314
2232
+ },
2233
+ {
2234
+ "epoch": 0.04134807862698126,
2235
+ "grad_norm": 0.7521525621414185,
2236
+ "learning_rate": 2.24116129614777e-05,
2237
+ "loss": 1.1528,
2238
+ "step": 315
2239
+ },
2240
+ {
2241
+ "epoch": 0.04147934236865422,
2242
+ "grad_norm": 0.7824714183807373,
2243
+ "learning_rate": 2.2107691345957133e-05,
2244
+ "loss": 1.2161,
2245
+ "step": 316
2246
+ },
2247
+ {
2248
+ "epoch": 0.04161060611032717,
2249
+ "grad_norm": 0.7399042248725891,
2250
+ "learning_rate": 2.1805258561305862e-05,
2251
+ "loss": 1.2162,
2252
+ "step": 317
2253
+ },
2254
+ {
2255
+ "epoch": 0.04174186985200013,
2256
+ "grad_norm": 0.7101216912269592,
2257
+ "learning_rate": 2.1504330750729186e-05,
2258
+ "loss": 1.1717,
2259
+ "step": 318
2260
+ },
2261
+ {
2262
+ "epoch": 0.04187313359367309,
2263
+ "grad_norm": 0.6807271838188171,
2264
+ "learning_rate": 2.120492397710022e-05,
2265
+ "loss": 1.0815,
2266
+ "step": 319
2267
+ },
2268
+ {
2269
+ "epoch": 0.04200439733534604,
2270
+ "grad_norm": 0.7126163244247437,
2271
+ "learning_rate": 2.090705422210237e-05,
2272
+ "loss": 1.1976,
2273
+ "step": 320
2274
+ },
2275
+ {
2276
+ "epoch": 0.042135661077019,
2277
+ "grad_norm": 0.6584108471870422,
2278
+ "learning_rate": 2.061073738537635e-05,
2279
+ "loss": 1.0962,
2280
+ "step": 321
2281
+ },
2282
+ {
2283
+ "epoch": 0.04226692481869196,
2284
+ "grad_norm": 0.6849985718727112,
2285
+ "learning_rate": 2.0315989283671473e-05,
2286
+ "loss": 1.169,
2287
+ "step": 322
2288
+ },
2289
+ {
2290
+ "epoch": 0.04239818856036491,
2291
+ "grad_norm": 0.673360288143158,
2292
+ "learning_rate": 2.0022825650001387e-05,
2293
+ "loss": 1.0906,
2294
+ "step": 323
2295
+ },
2296
+ {
2297
+ "epoch": 0.04252945230203787,
2298
+ "grad_norm": 0.6716238856315613,
2299
+ "learning_rate": 1.9731262132804274e-05,
2300
+ "loss": 1.0272,
2301
+ "step": 324
2302
+ },
2303
+ {
2304
+ "epoch": 0.04266071604371083,
2305
+ "grad_norm": 0.6913533806800842,
2306
+ "learning_rate": 1.9441314295107537e-05,
2307
+ "loss": 1.0831,
2308
+ "step": 325
2309
+ },
2310
+ {
2311
+ "epoch": 0.04279197978538378,
2312
+ "grad_norm": 0.7192745208740234,
2313
+ "learning_rate": 1.9152997613697183e-05,
2314
+ "loss": 0.9957,
2315
+ "step": 326
2316
+ },
2317
+ {
2318
+ "epoch": 0.04292324352705674,
2319
+ "grad_norm": 0.6721824407577515,
2320
+ "learning_rate": 1.8866327478291546e-05,
2321
+ "loss": 1.1222,
2322
+ "step": 327
2323
+ },
2324
+ {
2325
+ "epoch": 0.0430545072687297,
2326
+ "grad_norm": 0.6699678301811218,
2327
+ "learning_rate": 1.8581319190720035e-05,
2328
+ "loss": 0.9989,
2329
+ "step": 328
2330
+ },
2331
+ {
2332
+ "epoch": 0.04318577101040265,
2333
+ "grad_norm": 0.6625519394874573,
2334
+ "learning_rate": 1.8297987964106115e-05,
2335
+ "loss": 0.9872,
2336
+ "step": 329
2337
+ },
2338
+ {
2339
+ "epoch": 0.04331703475207561,
2340
+ "grad_norm": 0.7252044081687927,
2341
+ "learning_rate": 1.801634892205545e-05,
2342
+ "loss": 1.0245,
2343
+ "step": 330
2344
+ },
2345
+ {
2346
+ "epoch": 0.043448298493748566,
2347
+ "grad_norm": 0.7490676641464233,
2348
+ "learning_rate": 1.7736417097848506e-05,
2349
+ "loss": 1.0401,
2350
+ "step": 331
2351
+ },
2352
+ {
2353
+ "epoch": 0.04357956223542152,
2354
+ "grad_norm": 0.6983168721199036,
2355
+ "learning_rate": 1.7458207433638223e-05,
2356
+ "loss": 0.9628,
2357
+ "step": 332
2358
+ },
2359
+ {
2360
+ "epoch": 0.04371082597709448,
2361
+ "grad_norm": 0.7303330898284912,
2362
+ "learning_rate": 1.718173477965236e-05,
2363
+ "loss": 0.9528,
2364
+ "step": 333
2365
+ },
2366
+ {
2367
+ "epoch": 0.043842089718767435,
2368
+ "grad_norm": 0.7304815053939819,
2369
+ "learning_rate": 1.6907013893400837e-05,
2370
+ "loss": 0.9777,
2371
+ "step": 334
2372
+ },
2373
+ {
2374
+ "epoch": 0.04397335346044039,
2375
+ "grad_norm": 0.6960116028785706,
2376
+ "learning_rate": 1.6634059438888033e-05,
2377
+ "loss": 0.9643,
2378
+ "step": 335
2379
+ },
2380
+ {
2381
+ "epoch": 0.044104617202113346,
2382
+ "grad_norm": 0.6498481631278992,
2383
+ "learning_rate": 1.636288598583e-05,
2384
+ "loss": 0.7516,
2385
+ "step": 336
2386
+ },
2387
+ {
2388
+ "epoch": 0.044235880943786304,
2389
+ "grad_norm": 0.7724774479866028,
2390
+ "learning_rate": 1.6093508008876857e-05,
2391
+ "loss": 0.9599,
2392
+ "step": 337
2393
+ },
2394
+ {
2395
+ "epoch": 0.044367144685459256,
2396
+ "grad_norm": 0.7501699924468994,
2397
+ "learning_rate": 1.5825939886840037e-05,
2398
+ "loss": 0.8397,
2399
+ "step": 338
2400
+ },
2401
+ {
2402
+ "epoch": 0.044498408427132215,
2403
+ "grad_norm": 0.7359670400619507,
2404
+ "learning_rate": 1.5560195901924894e-05,
2405
+ "loss": 0.8147,
2406
+ "step": 339
2407
+ },
2408
+ {
2409
+ "epoch": 0.04462967216880517,
2410
+ "grad_norm": 0.7074993252754211,
2411
+ "learning_rate": 1.5296290238968303e-05,
2412
+ "loss": 0.7271,
2413
+ "step": 340
2414
+ },
2415
+ {
2416
+ "epoch": 0.044760935910478125,
2417
+ "grad_norm": 0.7447213530540466,
2418
+ "learning_rate": 1.50342369846815e-05,
2419
+ "loss": 0.7732,
2420
+ "step": 341
2421
+ },
2422
+ {
2423
+ "epoch": 0.044892199652151084,
2424
+ "grad_norm": 0.7885531187057495,
2425
+ "learning_rate": 1.4774050126898164e-05,
2426
+ "loss": 0.6494,
2427
+ "step": 342
2428
+ },
2429
+ {
2430
+ "epoch": 0.04502346339382404,
2431
+ "grad_norm": 0.7818116545677185,
2432
+ "learning_rate": 1.451574355382776e-05,
2433
+ "loss": 0.7047,
2434
+ "step": 343
2435
+ },
2436
+ {
2437
+ "epoch": 0.045154727135496994,
2438
+ "grad_norm": 0.6587514281272888,
2439
+ "learning_rate": 1.425933105331429e-05,
2440
+ "loss": 0.4821,
2441
+ "step": 344
2442
+ },
2443
+ {
2444
+ "epoch": 0.04528599087716995,
2445
+ "grad_norm": 0.7465260624885559,
2446
+ "learning_rate": 1.4004826312100216e-05,
2447
+ "loss": 0.496,
2448
+ "step": 345
2449
+ },
2450
+ {
2451
+ "epoch": 0.04541725461884291,
2452
+ "grad_norm": 0.7443161010742188,
2453
+ "learning_rate": 1.3752242915095992e-05,
2454
+ "loss": 0.5114,
2455
+ "step": 346
2456
+ },
2457
+ {
2458
+ "epoch": 0.04554851836051586,
2459
+ "grad_norm": 0.7163719534873962,
2460
+ "learning_rate": 1.3501594344654884e-05,
2461
+ "loss": 0.5043,
2462
+ "step": 347
2463
+ },
2464
+ {
2465
+ "epoch": 0.04567978210218882,
2466
+ "grad_norm": 0.7420056462287903,
2467
+ "learning_rate": 1.3252893979853304e-05,
2468
+ "loss": 0.5634,
2469
+ "step": 348
2470
+ },
2471
+ {
2472
+ "epoch": 0.04581104584386178,
2473
+ "grad_norm": 0.6838533282279968,
2474
+ "learning_rate": 1.3006155095776707e-05,
2475
+ "loss": 0.3495,
2476
+ "step": 349
2477
+ },
2478
+ {
2479
+ "epoch": 0.04594230958553473,
2480
+ "grad_norm": 0.8066396117210388,
2481
+ "learning_rate": 1.2761390862810907e-05,
2482
+ "loss": 0.424,
2483
+ "step": 350
2484
+ },
2485
+ {
2486
+ "epoch": 0.04607357332720769,
2487
+ "grad_norm": 0.20562657713890076,
2488
+ "learning_rate": 1.2518614345939212e-05,
2489
+ "loss": 1.0868,
2490
+ "step": 351
2491
+ },
2492
+ {
2493
+ "epoch": 0.04620483706888065,
2494
+ "grad_norm": 0.19780656695365906,
2495
+ "learning_rate": 1.227783850404487e-05,
2496
+ "loss": 1.1027,
2497
+ "step": 352
2498
+ },
2499
+ {
2500
+ "epoch": 0.0463361008105536,
2501
+ "grad_norm": 0.21005481481552124,
2502
+ "learning_rate": 1.2039076189219517e-05,
2503
+ "loss": 1.1234,
2504
+ "step": 353
2505
+ },
2506
+ {
2507
+ "epoch": 0.04646736455222656,
2508
+ "grad_norm": 0.2190805971622467,
2509
+ "learning_rate": 1.1802340146077045e-05,
2510
+ "loss": 1.1,
2511
+ "step": 354
2512
+ },
2513
+ {
2514
+ "epoch": 0.04659862829389952,
2515
+ "grad_norm": 0.24044179916381836,
2516
+ "learning_rate": 1.1567643011073392e-05,
2517
+ "loss": 1.1298,
2518
+ "step": 355
2519
+ },
2520
+ {
2521
+ "epoch": 0.04672989203557247,
2522
+ "grad_norm": 0.2597028613090515,
2523
+ "learning_rate": 1.1334997311832002e-05,
2524
+ "loss": 1.134,
2525
+ "step": 356
2526
+ },
2527
+ {
2528
+ "epoch": 0.04686115577724543,
2529
+ "grad_norm": 0.3216112554073334,
2530
+ "learning_rate": 1.1104415466475087e-05,
2531
+ "loss": 1.1735,
2532
+ "step": 357
2533
+ },
2534
+ {
2535
+ "epoch": 0.04699241951891839,
2536
+ "grad_norm": 0.4165898561477661,
2537
+ "learning_rate": 1.0875909782960886e-05,
2538
+ "loss": 1.2389,
2539
+ "step": 358
2540
+ },
2541
+ {
2542
+ "epoch": 0.04712368326059134,
2543
+ "grad_norm": 0.4193871319293976,
2544
+ "learning_rate": 1.0649492458426564e-05,
2545
+ "loss": 1.1649,
2546
+ "step": 359
2547
+ },
2548
+ {
2549
+ "epoch": 0.0472549470022643,
2550
+ "grad_norm": 0.4490433931350708,
2551
+ "learning_rate": 1.0425175578537299e-05,
2552
+ "loss": 1.2062,
2553
+ "step": 360
2554
+ },
2555
+ {
2556
+ "epoch": 0.04738621074393726,
2557
+ "grad_norm": 0.4922597110271454,
2558
+ "learning_rate": 1.020297111684101e-05,
2559
+ "loss": 1.2046,
2560
+ "step": 361
2561
+ },
2562
+ {
2563
+ "epoch": 0.047517474485610216,
2564
+ "grad_norm": 0.5809623003005981,
2565
+ "learning_rate": 9.98289093412938e-06,
2566
+ "loss": 1.2574,
2567
+ "step": 362
2568
+ },
2569
+ {
2570
+ "epoch": 0.04764873822728317,
2571
+ "grad_norm": 0.5852945446968079,
2572
+ "learning_rate": 9.764946777804646e-06,
2573
+ "loss": 1.2615,
2574
+ "step": 363
2575
+ },
2576
+ {
2577
+ "epoch": 0.047780001968956126,
2578
+ "grad_norm": 0.6204507946968079,
2579
+ "learning_rate": 9.549150281252633e-06,
2580
+ "loss": 1.2761,
2581
+ "step": 364
2582
+ },
2583
+ {
2584
+ "epoch": 0.047911265710629085,
2585
+ "grad_norm": 0.6129657626152039,
2586
+ "learning_rate": 9.335512963221732e-06,
2587
+ "loss": 1.2447,
2588
+ "step": 365
2589
+ },
2590
+ {
2591
+ "epoch": 0.04804252945230204,
2592
+ "grad_norm": 0.613051176071167,
2593
+ "learning_rate": 9.124046227208082e-06,
2594
+ "loss": 1.2447,
2595
+ "step": 366
2596
+ },
2597
+ {
2598
+ "epoch": 0.048173793193974995,
2599
+ "grad_norm": 0.6088857650756836,
2600
+ "learning_rate": 8.914761360846869e-06,
2601
+ "loss": 1.2113,
2602
+ "step": 367
2603
+ },
2604
+ {
2605
+ "epoch": 0.048305056935647954,
2606
+ "grad_norm": 0.6392094492912292,
2607
+ "learning_rate": 8.707669535309793e-06,
2608
+ "loss": 1.1427,
2609
+ "step": 368
2610
+ },
2611
+ {
2612
+ "epoch": 0.048436320677320906,
2613
+ "grad_norm": 0.6354860663414001,
2614
+ "learning_rate": 8.502781804708826e-06,
2615
+ "loss": 1.1504,
2616
+ "step": 369
2617
+ },
2618
+ {
2619
+ "epoch": 0.048567584418993864,
2620
+ "grad_norm": 0.6489117741584778,
2621
+ "learning_rate": 8.30010910550611e-06,
2622
+ "loss": 1.1651,
2623
+ "step": 370
2624
+ },
2625
+ {
2626
+ "epoch": 0.04869884816066682,
2627
+ "grad_norm": 0.6395854949951172,
2628
+ "learning_rate": 8.09966225593024e-06,
2629
+ "loss": 1.1448,
2630
+ "step": 371
2631
+ },
2632
+ {
2633
+ "epoch": 0.048830111902339775,
2634
+ "grad_norm": 0.6275275349617004,
2635
+ "learning_rate": 7.901451955398792e-06,
2636
+ "loss": 1.1262,
2637
+ "step": 372
2638
+ },
2639
+ {
2640
+ "epoch": 0.04896137564401273,
2641
+ "grad_norm": 0.6410810351371765,
2642
+ "learning_rate": 7.705488783947202e-06,
2643
+ "loss": 1.1578,
2644
+ "step": 373
2645
+ },
2646
+ {
2647
+ "epoch": 0.04909263938568569,
2648
+ "grad_norm": 0.623022735118866,
2649
+ "learning_rate": 7.511783201664052e-06,
2650
+ "loss": 1.0632,
2651
+ "step": 374
2652
+ },
2653
+ {
2654
+ "epoch": 0.049223903127358644,
2655
+ "grad_norm": 0.6711010336875916,
2656
+ "learning_rate": 7.320345548132679e-06,
2657
+ "loss": 1.0221,
2658
+ "step": 375
2659
+ },
2660
+ {
2661
+ "epoch": 0.0493551668690316,
2662
+ "grad_norm": 0.7072113156318665,
2663
+ "learning_rate": 7.131186041879357e-06,
2664
+ "loss": 1.1742,
2665
+ "step": 376
2666
+ },
2667
+ {
2668
+ "epoch": 0.04948643061070456,
2669
+ "grad_norm": 0.7174622416496277,
2670
+ "learning_rate": 6.944314779827749e-06,
2671
+ "loss": 1.0623,
2672
+ "step": 377
2673
+ },
2674
+ {
2675
+ "epoch": 0.04961769435237751,
2676
+ "grad_norm": 0.6921060085296631,
2677
+ "learning_rate": 6.759741736760061e-06,
2678
+ "loss": 0.9611,
2679
+ "step": 378
2680
+ },
2681
+ {
2682
+ "epoch": 0.04974895809405047,
2683
+ "grad_norm": 0.6697357296943665,
2684
+ "learning_rate": 6.577476764784546e-06,
2685
+ "loss": 0.9576,
2686
+ "step": 379
2687
+ },
2688
+ {
2689
+ "epoch": 0.04988022183572343,
2690
+ "grad_norm": 0.7066754698753357,
2691
+ "learning_rate": 6.397529592809614e-06,
2692
+ "loss": 1.0607,
2693
+ "step": 380
2694
+ },
2695
+ {
2696
+ "epoch": 0.05001148557739638,
2697
+ "grad_norm": 0.6927306652069092,
2698
+ "learning_rate": 6.219909826024589e-06,
2699
+ "loss": 0.9993,
2700
+ "step": 381
2701
+ },
2702
+ {
2703
+ "epoch": 0.05014274931906934,
2704
+ "grad_norm": 0.7213545441627502,
2705
+ "learning_rate": 6.0446269453868945e-06,
2706
+ "loss": 1.0223,
2707
+ "step": 382
2708
+ },
2709
+ {
2710
+ "epoch": 0.0502740130607423,
2711
+ "grad_norm": 0.7020846009254456,
2712
+ "learning_rate": 5.871690307116107e-06,
2713
+ "loss": 0.954,
2714
+ "step": 383
2715
+ },
2716
+ {
2717
+ "epoch": 0.05040527680241525,
2718
+ "grad_norm": 0.7054339051246643,
2719
+ "learning_rate": 5.701109142194422e-06,
2720
+ "loss": 0.8829,
2721
+ "step": 384
2722
+ },
2723
+ {
2724
+ "epoch": 0.05053654054408821,
2725
+ "grad_norm": 0.7075545191764832,
2726
+ "learning_rate": 5.532892555874059e-06,
2727
+ "loss": 0.8735,
2728
+ "step": 385
2729
+ },
2730
+ {
2731
+ "epoch": 0.05066780428576117,
2732
+ "grad_norm": 0.6708042621612549,
2733
+ "learning_rate": 5.3670495271910925e-06,
2734
+ "loss": 0.7844,
2735
+ "step": 386
2736
+ },
2737
+ {
2738
+ "epoch": 0.05079906802743412,
2739
+ "grad_norm": 0.6817059516906738,
2740
+ "learning_rate": 5.203588908486279e-06,
2741
+ "loss": 0.8793,
2742
+ "step": 387
2743
+ },
2744
+ {
2745
+ "epoch": 0.05093033176910708,
2746
+ "grad_norm": 0.7711595296859741,
2747
+ "learning_rate": 5.042519424932513e-06,
2748
+ "loss": 0.9413,
2749
+ "step": 388
2750
+ },
2751
+ {
2752
+ "epoch": 0.05106159551078004,
2753
+ "grad_norm": 0.8680550456047058,
2754
+ "learning_rate": 4.883849674069058e-06,
2755
+ "loss": 0.8458,
2756
+ "step": 389
2757
+ },
2758
+ {
2759
+ "epoch": 0.05119285925245299,
2760
+ "grad_norm": 0.6733363270759583,
2761
+ "learning_rate": 4.727588125342669e-06,
2762
+ "loss": 0.6715,
2763
+ "step": 390
2764
+ },
2765
+ {
2766
+ "epoch": 0.05132412299412595,
2767
+ "grad_norm": 0.6882769465446472,
2768
+ "learning_rate": 4.573743119655516e-06,
2769
+ "loss": 0.6921,
2770
+ "step": 391
2771
+ },
2772
+ {
2773
+ "epoch": 0.05145538673579891,
2774
+ "grad_norm": 0.7219226956367493,
2775
+ "learning_rate": 4.422322868919937e-06,
2776
+ "loss": 0.6872,
2777
+ "step": 392
2778
+ },
2779
+ {
2780
+ "epoch": 0.05158665047747186,
2781
+ "grad_norm": 0.6878823637962341,
2782
+ "learning_rate": 4.273335455620097e-06,
2783
+ "loss": 0.6209,
2784
+ "step": 393
2785
+ },
2786
+ {
2787
+ "epoch": 0.05171791421914482,
2788
+ "grad_norm": 0.7288166284561157,
2789
+ "learning_rate": 4.126788832380629e-06,
2790
+ "loss": 0.6846,
2791
+ "step": 394
2792
+ },
2793
+ {
2794
+ "epoch": 0.051849177960817776,
2795
+ "grad_norm": 0.6843792796134949,
2796
+ "learning_rate": 3.982690821542035e-06,
2797
+ "loss": 0.5668,
2798
+ "step": 395
2799
+ },
2800
+ {
2801
+ "epoch": 0.05198044170249073,
2802
+ "grad_norm": 0.6692485809326172,
2803
+ "learning_rate": 3.8410491147432395e-06,
2804
+ "loss": 0.4893,
2805
+ "step": 396
2806
+ },
2807
+ {
2808
+ "epoch": 0.052111705444163686,
2809
+ "grad_norm": 0.5910897850990295,
2810
+ "learning_rate": 3.7018712725109926e-06,
2811
+ "loss": 0.4123,
2812
+ "step": 397
2813
+ },
2814
+ {
2815
+ "epoch": 0.052242969185836645,
2816
+ "grad_norm": 0.7011106014251709,
2817
+ "learning_rate": 3.5651647238562904e-06,
2818
+ "loss": 0.4728,
2819
+ "step": 398
2820
+ },
2821
+ {
2822
+ "epoch": 0.0523742329275096,
2823
+ "grad_norm": 0.6013720035552979,
2824
+ "learning_rate": 3.430936765877857e-06,
2825
+ "loss": 0.4168,
2826
+ "step": 399
2827
+ },
2828
+ {
2829
+ "epoch": 0.052505496669182555,
2830
+ "grad_norm": 0.6685484051704407,
2831
+ "learning_rate": 3.299194563372604e-06,
2832
+ "loss": 0.4504,
2833
+ "step": 400
2834
+ },
2835
+ {
2836
+ "epoch": 0.052636760410855514,
2837
+ "grad_norm": 0.1889829933643341,
2838
+ "learning_rate": 3.1699451484532463e-06,
2839
+ "loss": 1.0635,
2840
+ "step": 401
2841
+ },
2842
+ {
2843
+ "epoch": 0.052768024152528466,
2844
+ "grad_norm": 0.2015289068222046,
2845
+ "learning_rate": 3.0431954201728784e-06,
2846
+ "loss": 1.0821,
2847
+ "step": 402
2848
+ },
2849
+ {
2850
+ "epoch": 0.052899287894201424,
2851
+ "grad_norm": 0.20403572916984558,
2852
+ "learning_rate": 2.9189521441567726e-06,
2853
+ "loss": 1.1017,
2854
+ "step": 403
2855
+ },
2856
+ {
2857
+ "epoch": 0.05303055163587438,
2858
+ "grad_norm": 0.2169038951396942,
2859
+ "learning_rate": 2.797221952241219e-06,
2860
+ "loss": 1.0893,
2861
+ "step": 404
2862
+ },
2863
+ {
2864
+ "epoch": 0.053161815377547335,
2865
+ "grad_norm": 0.22941887378692627,
2866
+ "learning_rate": 2.6780113421195298e-06,
2867
+ "loss": 1.1287,
2868
+ "step": 405
2869
+ },
2870
+ {
2871
+ "epoch": 0.053293079119220294,
2872
+ "grad_norm": 0.27469444274902344,
2873
+ "learning_rate": 2.561326676995218e-06,
2874
+ "loss": 1.2098,
2875
+ "step": 406
2876
+ },
2877
+ {
2878
+ "epoch": 0.05342434286089325,
2879
+ "grad_norm": 0.3559771180152893,
2880
+ "learning_rate": 2.4471741852423237e-06,
2881
+ "loss": 1.1814,
2882
+ "step": 407
2883
+ },
2884
+ {
2885
+ "epoch": 0.053555606602566204,
2886
+ "grad_norm": 0.4387180507183075,
2887
+ "learning_rate": 2.3355599600729915e-06,
2888
+ "loss": 1.2586,
2889
+ "step": 408
2890
+ },
2891
+ {
2892
+ "epoch": 0.05368687034423916,
2893
+ "grad_norm": 0.43076077103614807,
2894
+ "learning_rate": 2.2264899592121744e-06,
2895
+ "loss": 1.1326,
2896
+ "step": 409
2897
+ },
2898
+ {
2899
+ "epoch": 0.05381813408591212,
2900
+ "grad_norm": 0.4769658148288727,
2901
+ "learning_rate": 2.1199700045797077e-06,
2902
+ "loss": 1.1753,
2903
+ "step": 410
2904
+ },
2905
+ {
2906
+ "epoch": 0.05394939782758507,
2907
+ "grad_norm": 0.49549686908721924,
2908
+ "learning_rate": 2.0160057819794466e-06,
2909
+ "loss": 1.1529,
2910
+ "step": 411
2911
+ },
2912
+ {
2913
+ "epoch": 0.05408066156925803,
2914
+ "grad_norm": 0.5492319464683533,
2915
+ "learning_rate": 1.9146028407958484e-06,
2916
+ "loss": 1.2282,
2917
+ "step": 412
2918
+ },
2919
+ {
2920
+ "epoch": 0.05421192531093099,
2921
+ "grad_norm": 0.5631099939346313,
2922
+ "learning_rate": 1.8157665936977263e-06,
2923
+ "loss": 1.2408,
2924
+ "step": 413
2925
+ },
2926
+ {
2927
+ "epoch": 0.05434318905260394,
2928
+ "grad_norm": 0.5960178375244141,
2929
+ "learning_rate": 1.7195023163493252e-06,
2930
+ "loss": 1.2945,
2931
+ "step": 414
2932
+ },
2933
+ {
2934
+ "epoch": 0.0544744527942769,
2935
+ "grad_norm": 0.5799109935760498,
2936
+ "learning_rate": 1.6258151471287396e-06,
2937
+ "loss": 1.1414,
2938
+ "step": 415
2939
+ },
2940
+ {
2941
+ "epoch": 0.05460571653594986,
2942
+ "grad_norm": 0.6928263306617737,
2943
+ "learning_rate": 1.5347100868536246e-06,
2944
+ "loss": 1.2234,
2945
+ "step": 416
2946
+ },
2947
+ {
2948
+ "epoch": 0.05473698027762281,
2949
+ "grad_norm": 0.6215956807136536,
2950
+ "learning_rate": 1.4461919985142735e-06,
2951
+ "loss": 1.1876,
2952
+ "step": 417
2953
+ },
2954
+ {
2955
+ "epoch": 0.05486824401929577,
2956
+ "grad_norm": 0.6092692613601685,
2957
+ "learning_rate": 1.3602656070140275e-06,
2958
+ "loss": 1.1264,
2959
+ "step": 418
2960
+ },
2961
+ {
2962
+ "epoch": 0.05499950776096873,
2963
+ "grad_norm": 0.6288320422172546,
2964
+ "learning_rate": 1.27693549891707e-06,
2965
+ "loss": 1.1799,
2966
+ "step": 419
2967
+ },
2968
+ {
2969
+ "epoch": 0.05513077150264168,
2970
+ "grad_norm": 0.6499227285385132,
2971
+ "learning_rate": 1.196206122203647e-06,
2972
+ "loss": 1.14,
2973
+ "step": 420
2974
+ },
2975
+ {
2976
+ "epoch": 0.05526203524431464,
2977
+ "grad_norm": 0.6873496174812317,
2978
+ "learning_rate": 1.1180817860325599e-06,
2979
+ "loss": 1.2302,
2980
+ "step": 421
2981
+ },
2982
+ {
2983
+ "epoch": 0.0553932989859876,
2984
+ "grad_norm": 0.6340437531471252,
2985
+ "learning_rate": 1.0425666605112517e-06,
2986
+ "loss": 1.1138,
2987
+ "step": 422
2988
+ },
2989
+ {
2990
+ "epoch": 0.05552456272766055,
2991
+ "grad_norm": 0.663411557674408,
2992
+ "learning_rate": 9.696647764731337e-07,
2993
+ "loss": 1.1017,
2994
+ "step": 423
2995
+ },
2996
+ {
2997
+ "epoch": 0.05565582646933351,
2998
+ "grad_norm": 0.6668544411659241,
2999
+ "learning_rate": 8.993800252624862e-07,
3000
+ "loss": 1.0881,
3001
+ "step": 424
3002
+ },
3003
+ {
3004
+ "epoch": 0.05578709021100647,
3005
+ "grad_norm": 0.6418658494949341,
3006
+ "learning_rate": 8.317161585266964e-07,
3007
+ "loss": 1.0638,
3008
+ "step": 425
3009
+ },
3010
+ {
3011
+ "epoch": 0.05591835395267942,
3012
+ "grad_norm": 0.703060507774353,
3013
+ "learning_rate": 7.666767880160464e-07,
3014
+ "loss": 1.1153,
3015
+ "step": 426
3016
+ },
3017
+ {
3018
+ "epoch": 0.05604961769435238,
3019
+ "grad_norm": 0.6755343079566956,
3020
+ "learning_rate": 7.042653853909064e-07,
3021
+ "loss": 0.9271,
3022
+ "step": 427
3023
+ },
3024
+ {
3025
+ "epoch": 0.056180881436025336,
3026
+ "grad_norm": 0.7167825698852539,
3027
+ "learning_rate": 6.444852820364222e-07,
3028
+ "loss": 1.1272,
3029
+ "step": 428
3030
+ },
3031
+ {
3032
+ "epoch": 0.05631214517769829,
3033
+ "grad_norm": 0.7692311406135559,
3034
+ "learning_rate": 5.87339668884701e-07,
3035
+ "loss": 1.1391,
3036
+ "step": 429
3037
+ },
3038
+ {
3039
+ "epoch": 0.056443408919371246,
3040
+ "grad_norm": 0.7314695119857788,
3041
+ "learning_rate": 5.328315962444874e-07,
3042
+ "loss": 1.0775,
3043
+ "step": 430
3044
+ },
3045
+ {
3046
+ "epoch": 0.056574672661044205,
3047
+ "grad_norm": 0.7138880491256714,
3048
+ "learning_rate": 4.809639736383431e-07,
3049
+ "loss": 0.9616,
3050
+ "step": 431
3051
+ },
3052
+ {
3053
+ "epoch": 0.05670593640271716,
3054
+ "grad_norm": 0.7646660208702087,
3055
+ "learning_rate": 4.317395696473214e-07,
3056
+ "loss": 1.0419,
3057
+ "step": 432
3058
+ },
3059
+ {
3060
+ "epoch": 0.056837200144390115,
3061
+ "grad_norm": 0.7127510905265808,
3062
+ "learning_rate": 3.851610117632354e-07,
3063
+ "loss": 0.9304,
3064
+ "step": 433
3065
+ },
3066
+ {
3067
+ "epoch": 0.056968463886063074,
3068
+ "grad_norm": 0.6503668427467346,
3069
+ "learning_rate": 3.4123078624834216e-07,
3070
+ "loss": 0.8488,
3071
+ "step": 434
3072
+ },
3073
+ {
3074
+ "epoch": 0.057099727627736026,
3075
+ "grad_norm": 0.7812151908874512,
3076
+ "learning_rate": 2.9995123800270476e-07,
3077
+ "loss": 0.971,
3078
+ "step": 435
3079
+ },
3080
+ {
3081
+ "epoch": 0.057230991369408984,
3082
+ "grad_norm": 0.7327147722244263,
3083
+ "learning_rate": 2.613245704389644e-07,
3084
+ "loss": 0.9165,
3085
+ "step": 436
3086
+ },
3087
+ {
3088
+ "epoch": 0.05736225511108194,
3089
+ "grad_norm": 0.7133559584617615,
3090
+ "learning_rate": 2.2535284536476242e-07,
3091
+ "loss": 0.8214,
3092
+ "step": 437
3093
+ },
3094
+ {
3095
+ "epoch": 0.057493518852754895,
3096
+ "grad_norm": 0.6528218984603882,
3097
+ "learning_rate": 1.920379828726726e-07,
3098
+ "loss": 0.6965,
3099
+ "step": 438
3100
+ },
3101
+ {
3102
+ "epoch": 0.057624782594427854,
3103
+ "grad_norm": 0.7258195281028748,
3104
+ "learning_rate": 1.6138176123770554e-07,
3105
+ "loss": 0.8052,
3106
+ "step": 439
3107
+ },
3108
+ {
3109
+ "epoch": 0.05775604633610081,
3110
+ "grad_norm": 0.7617157697677612,
3111
+ "learning_rate": 1.333858168224178e-07,
3112
+ "loss": 0.7333,
3113
+ "step": 440
3114
+ },
3115
+ {
3116
+ "epoch": 0.057887310077773764,
3117
+ "grad_norm": 0.7330428957939148,
3118
+ "learning_rate": 1.0805164398952072e-07,
3119
+ "loss": 0.8641,
3120
+ "step": 441
3121
+ },
3122
+ {
3123
+ "epoch": 0.05801857381944672,
3124
+ "grad_norm": 0.713188648223877,
3125
+ "learning_rate": 8.53805950221498e-08,
3126
+ "loss": 0.5969,
3127
+ "step": 442
3128
+ },
3129
+ {
3130
+ "epoch": 0.05814983756111968,
3131
+ "grad_norm": 0.6643161773681641,
3132
+ "learning_rate": 6.537388005167233e-08,
3133
+ "loss": 0.6351,
3134
+ "step": 443
3135
+ },
3136
+ {
3137
+ "epoch": 0.05828110130279263,
3138
+ "grad_norm": 0.7042210102081299,
3139
+ "learning_rate": 4.8032566993089225e-08,
3140
+ "loss": 0.6916,
3141
+ "step": 444
3142
+ },
3143
+ {
3144
+ "epoch": 0.05841236504446559,
3145
+ "grad_norm": 0.7722613215446472,
3146
+ "learning_rate": 3.3357581488030475e-08,
3147
+ "loss": 0.6977,
3148
+ "step": 445
3149
+ },
3150
+ {
3151
+ "epoch": 0.05854362878613855,
3152
+ "grad_norm": 0.6429854035377502,
3153
+ "learning_rate": 2.134970685536697e-08,
3154
+ "loss": 0.4461,
3155
+ "step": 446
3156
+ },
3157
+ {
3158
+ "epoch": 0.0586748925278115,
3159
+ "grad_norm": 0.628583550453186,
3160
+ "learning_rate": 1.200958404936059e-08,
3161
+ "loss": 0.4453,
3162
+ "step": 447
3163
+ },
3164
+ {
3165
+ "epoch": 0.05880615626948446,
3166
+ "grad_norm": 0.6538881063461304,
3167
+ "learning_rate": 5.337711625497121e-09,
3168
+ "loss": 0.462,
3169
+ "step": 448
3170
+ },
3171
+ {
3172
+ "epoch": 0.05893742001115742,
3173
+ "grad_norm": 0.6723350286483765,
3174
+ "learning_rate": 1.3344457138297906e-09,
3175
+ "loss": 0.4467,
3176
+ "step": 449
3177
+ },
3178
+ {
3179
+ "epoch": 0.05906868375283037,
3180
+ "grad_norm": 0.8126013875007629,
3181
+ "learning_rate": 0.0,
3182
+ "loss": 0.4989,
3183
+ "step": 450
3184
+ },
3185
+ {
3186
+ "epoch": 0.05906868375283037,
3187
+ "eval_loss": 0.9613198041915894,
3188
+ "eval_runtime": 960.9133,
3189
+ "eval_samples_per_second": 13.353,
3190
+ "eval_steps_per_second": 3.338,
3191
+ "step": 450
3192
  }
3193
  ],
3194
  "logging_steps": 1,
 
3212
  "should_evaluate": false,
3213
  "should_log": false,
3214
  "should_save": true,
3215
+ "should_training_stop": true
3216
  },
3217
  "attributes": {}
3218
  }
3219
  },
3220
+ "total_flos": 6.602152785708319e+17,
3221
  "train_batch_size": 8,
3222
  "trial_name": null,
3223
  "trial_params": null