Azamat1k committed
Commit 5062b6e · verified · 1 Parent(s): d824a09

Upload 7 files

adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:37f8b4988525f847a7f93271f6fe93e9ecae4f6d2ee9c62ce5f9b3d34eff8a8c
+ oid sha256:dcc58b43c486c55cd0df2d6042c7ef53ace3843c047209b9cb917a999346456b
  size 536991984
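The pointer above follows the Git LFS spec: it records only the object's sha256 and byte size, while the actual adapter weights live in LFS storage. As a minimal sketch (assuming the file has been resolved locally as adapter_model.safetensors after an LFS checkout; the expected hash and size are copied from the new pointer in this commit), the download can be verified like this:

```python
# Minimal sketch: verify a downloaded LFS object against its pointer.
# Assumption: "adapter_model.safetensors" is the locally resolved file.
import hashlib

EXPECTED_SHA256 = "dcc58b43c486c55cd0df2d6042c7ef53ace3843c047209b9cb917a999346456b"
EXPECTED_SIZE = 536991984

sha = hashlib.sha256()
size = 0
with open("adapter_model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("adapter_model.safetensors matches its LFS pointer")
```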
added_tokens.json ADDED
@@ -0,0 +1,127 @@
+ {
+ "</tool_call>": 147090,
+ "<tool_call>": 147089,
+ "<|box_end|>": 147081,
+ "<|box_start|>": 147080,
+ "<|endoftext|>": 147075,
+ "<|file_sep|>": 147096,
+ "<|fim_middle|>": 147092,
+ "<|fim_pad|>": 147094,
+ "<|fim_prefix|>": 147091,
+ "<|fim_suffix|>": 147093,
+ "<|free_token100|>": 147196,
+ "<|free_token101|>": 147197,
+ "<|free_token102|>": 147198,
+ "<|free_token103|>": 147199,
+ "<|free_token10|>": 147106,
+ "<|free_token11|>": 147107,
+ "<|free_token12|>": 147108,
+ "<|free_token13|>": 147109,
+ "<|free_token14|>": 147110,
+ "<|free_token15|>": 147111,
+ "<|free_token16|>": 147112,
+ "<|free_token17|>": 147113,
+ "<|free_token18|>": 147114,
+ "<|free_token19|>": 147115,
+ "<|free_token1|>": 147097,
+ "<|free_token20|>": 147116,
+ "<|free_token21|>": 147117,
+ "<|free_token22|>": 147118,
+ "<|free_token23|>": 147119,
+ "<|free_token24|>": 147120,
+ "<|free_token25|>": 147121,
+ "<|free_token26|>": 147122,
+ "<|free_token27|>": 147123,
+ "<|free_token28|>": 147124,
+ "<|free_token29|>": 147125,
+ "<|free_token2|>": 147098,
+ "<|free_token30|>": 147126,
+ "<|free_token31|>": 147127,
+ "<|free_token32|>": 147128,
+ "<|free_token33|>": 147129,
+ "<|free_token34|>": 147130,
+ "<|free_token35|>": 147131,
+ "<|free_token36|>": 147132,
+ "<|free_token37|>": 147133,
+ "<|free_token38|>": 147134,
+ "<|free_token39|>": 147135,
+ "<|free_token3|>": 147099,
+ "<|free_token40|>": 147136,
+ "<|free_token41|>": 147137,
+ "<|free_token42|>": 147138,
+ "<|free_token43|>": 147139,
+ "<|free_token44|>": 147140,
+ "<|free_token45|>": 147141,
+ "<|free_token46|>": 147142,
+ "<|free_token47|>": 147143,
+ "<|free_token48|>": 147144,
+ "<|free_token49|>": 147145,
+ "<|free_token4|>": 147100,
+ "<|free_token50|>": 147146,
+ "<|free_token51|>": 147147,
+ "<|free_token52|>": 147148,
+ "<|free_token53|>": 147149,
+ "<|free_token54|>": 147150,
+ "<|free_token55|>": 147151,
+ "<|free_token56|>": 147152,
+ "<|free_token57|>": 147153,
+ "<|free_token58|>": 147154,
+ "<|free_token59|>": 147155,
+ "<|free_token5|>": 147101,
+ "<|free_token60|>": 147156,
+ "<|free_token61|>": 147157,
+ "<|free_token62|>": 147158,
+ "<|free_token63|>": 147159,
+ "<|free_token64|>": 147160,
+ "<|free_token65|>": 147161,
+ "<|free_token66|>": 147162,
+ "<|free_token67|>": 147163,
+ "<|free_token68|>": 147164,
+ "<|free_token69|>": 147165,
+ "<|free_token6|>": 147102,
+ "<|free_token70|>": 147166,
+ "<|free_token71|>": 147167,
+ "<|free_token72|>": 147168,
+ "<|free_token73|>": 147169,
+ "<|free_token74|>": 147170,
+ "<|free_token75|>": 147171,
+ "<|free_token76|>": 147172,
+ "<|free_token77|>": 147173,
+ "<|free_token78|>": 147174,
+ "<|free_token79|>": 147175,
+ "<|free_token7|>": 147103,
+ "<|free_token80|>": 147176,
+ "<|free_token81|>": 147177,
+ "<|free_token82|>": 147178,
+ "<|free_token83|>": 147179,
+ "<|free_token84|>": 147180,
+ "<|free_token85|>": 147181,
+ "<|free_token86|>": 147182,
+ "<|free_token87|>": 147183,
+ "<|free_token88|>": 147184,
+ "<|free_token89|>": 147185,
+ "<|free_token8|>": 147104,
+ "<|free_token90|>": 147186,
+ "<|free_token91|>": 147187,
+ "<|free_token92|>": 147188,
+ "<|free_token93|>": 147189,
+ "<|free_token94|>": 147190,
+ "<|free_token95|>": 147191,
+ "<|free_token96|>": 147192,
+ "<|free_token97|>": 147193,
+ "<|free_token98|>": 147194,
+ "<|free_token99|>": 147195,
+ "<|free_token9|>": 147105,
+ "<|im_end|>": 147077,
+ "<|im_start|>": 147076,
+ "<|image_pad|>": 147087,
+ "<|object_ref_end|>": 147079,
+ "<|object_ref_start|>": 147078,
+ "<|quad_end|>": 147083,
+ "<|quad_start|>": 147082,
+ "<|repo_name|>": 147095,
+ "<|video_pad|>": 147088,
+ "<|vision_end|>": 147085,
+ "<|vision_pad|>": 147086,
+ "<|vision_start|>": 147084
+ }
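added_tokens.json maps each extra special token string to a fixed vocabulary id (here 147075-147199). A minimal sketch of cross-checking those ids against the tokenizer shipped in the same checkout, assuming the cloned repository directory also contains the usual tokenizer_config.json and vocabulary files (not part of this snippet):

```python
# Minimal sketch, not the author's tooling: verify added_tokens.json
# against the tokenizer loaded from the same checkout.
# Assumption: "." is the cloned repository directory with full tokenizer files.
import json
from transformers import AutoTokenizer

with open("added_tokens.json") as f:
    added_tokens = json.load(f)  # e.g. {"<|im_start|>": 147076, ...}

tokenizer = AutoTokenizer.from_pretrained(".")

mismatches = {
    tok: (tokenizer.convert_tokens_to_ids(tok), expected)
    for tok, expected in added_tokens.items()
    if tokenizer.convert_tokens_to_ids(tok) != expected
}
print(f"{len(added_tokens)} added tokens, {len(mismatches)} mismatches")
```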
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6ea6ededfa7ec287607469c97649380774f9146eaa56b6b19f700bd9f0c87b43
- size 273699350
+ oid sha256:b660ef0d620373c532b67fa6276c72202e837286196a5c035377015ef686e8a6
+ size 276017170
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0caaa0dbfafe218cca8c3a1c16b0444332d561b419a54e75c8c505df4dcbcb72
+ oid sha256:39b824e8acc71bb62addc638c32e739324e2c8ea78f23fdeff754ab9676443af
  size 14244
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bbcaba89c676c6a55b90e3d34fb47d84a02176e72e5b8a504d64b718ac0c48eb
+ oid sha256:939f8fd57ba6afce9827e7d63bf66494444ef8d81fbec5fe6834bad4c8d0088f
  size 1064
trainer_state.json CHANGED
@@ -1,9 +1,9 @@
  {
- "best_metric": null,
- "best_model_checkpoint": null,
- "epoch": 1.2,
+ "best_metric": 0.45234861969947815,
+ "best_model_checkpoint": "qlora-extractor-checkpoints/checkpoint-598",
+ "epoch": 2.392,
  "eval_steps": 299,
- "global_step": 299,
+ "global_step": 598,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -210,6 +210,224 @@
  "learning_rate": 0.00012299465240641713,
  "loss": 0.387,
  "step": 290
+ },
+ {
+ "epoch": 1.2,
+ "grad_norm": 0.18218065798282623,
+ "learning_rate": 0.00012005347593582887,
+ "loss": 0.319,
+ "step": 300
+ },
+ {
+ "epoch": 1.24,
+ "grad_norm": 0.14094972610473633,
+ "learning_rate": 0.00011737967914438503,
+ "loss": 0.6182,
+ "step": 310
+ },
+ {
+ "epoch": 1.28,
+ "grad_norm": 0.20308974385261536,
+ "learning_rate": 0.00011470588235294118,
+ "loss": 0.4492,
+ "step": 320
+ },
+ {
+ "epoch": 1.32,
+ "grad_norm": 0.18815161287784576,
+ "learning_rate": 0.00011203208556149734,
+ "loss": 0.4472,
+ "step": 330
+ },
+ {
+ "epoch": 1.3599999999999999,
+ "grad_norm": 0.1986108422279358,
+ "learning_rate": 0.00010935828877005347,
+ "loss": 0.3996,
+ "step": 340
+ },
+ {
+ "epoch": 1.4,
+ "grad_norm": 0.1628069281578064,
+ "learning_rate": 0.00010668449197860964,
+ "loss": 0.3645,
+ "step": 350
+ },
+ {
+ "epoch": 1.44,
+ "grad_norm": 0.14933669567108154,
+ "learning_rate": 0.00010401069518716577,
+ "loss": 0.5977,
+ "step": 360
+ },
+ {
+ "epoch": 1.48,
+ "grad_norm": 0.18734735250473022,
+ "learning_rate": 0.00010133689839572193,
+ "loss": 0.3784,
+ "step": 370
+ },
+ {
+ "epoch": 1.52,
+ "grad_norm": 0.2121153324842453,
+ "learning_rate": 9.866310160427808e-05,
+ "loss": 0.4581,
+ "step": 380
+ },
+ {
+ "epoch": 1.56,
+ "grad_norm": 0.19748178124427795,
+ "learning_rate": 9.598930481283423e-05,
+ "loss": 0.3858,
+ "step": 390
+ },
+ {
+ "epoch": 1.6,
+ "grad_norm": 0.18506589531898499,
+ "learning_rate": 9.331550802139037e-05,
+ "loss": 0.3706,
+ "step": 400
+ },
+ {
+ "epoch": 1.6400000000000001,
+ "grad_norm": 0.15329745411872864,
+ "learning_rate": 9.064171122994652e-05,
+ "loss": 0.5347,
+ "step": 410
+ },
+ {
+ "epoch": 1.6800000000000002,
+ "grad_norm": 0.19260501861572266,
+ "learning_rate": 8.796791443850267e-05,
+ "loss": 0.4812,
+ "step": 420
+ },
+ {
+ "epoch": 1.72,
+ "grad_norm": 0.1763940006494522,
+ "learning_rate": 8.529411764705883e-05,
+ "loss": 0.4317,
+ "step": 430
+ },
+ {
+ "epoch": 1.76,
+ "grad_norm": 0.20417028665542603,
+ "learning_rate": 8.262032085561498e-05,
+ "loss": 0.4145,
+ "step": 440
+ },
+ {
+ "epoch": 1.8,
+ "grad_norm": 0.2001723051071167,
+ "learning_rate": 7.994652406417112e-05,
+ "loss": 0.3884,
+ "step": 450
+ },
+ {
+ "epoch": 1.8399999999999999,
+ "grad_norm": 0.2101861983537674,
+ "learning_rate": 7.727272727272727e-05,
+ "loss": 0.4758,
+ "step": 460
+ },
+ {
+ "epoch": 1.88,
+ "grad_norm": 0.2131994664669037,
+ "learning_rate": 7.459893048128342e-05,
+ "loss": 0.466,
+ "step": 470
+ },
+ {
+ "epoch": 1.92,
+ "grad_norm": 0.21261604130268097,
+ "learning_rate": 7.192513368983958e-05,
+ "loss": 0.4088,
+ "step": 480
+ },
+ {
+ "epoch": 1.96,
+ "grad_norm": 0.21039697527885437,
+ "learning_rate": 6.925133689839573e-05,
+ "loss": 0.3858,
+ "step": 490
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.18674618005752563,
+ "learning_rate": 6.657754010695188e-05,
+ "loss": 0.3672,
+ "step": 500
+ },
+ {
+ "epoch": 2.04,
+ "grad_norm": 0.1681678146123886,
+ "learning_rate": 6.390374331550802e-05,
+ "loss": 0.5486,
+ "step": 510
+ },
+ {
+ "epoch": 2.08,
+ "grad_norm": 0.26280567049980164,
+ "learning_rate": 6.122994652406417e-05,
+ "loss": 0.3358,
+ "step": 520
+ },
+ {
+ "epoch": 2.12,
+ "grad_norm": 0.24697232246398926,
+ "learning_rate": 5.8556149732620325e-05,
+ "loss": 0.3871,
+ "step": 530
+ },
+ {
+ "epoch": 2.16,
+ "grad_norm": 0.22857435047626495,
+ "learning_rate": 5.588235294117647e-05,
+ "loss": 0.3822,
+ "step": 540
+ },
+ {
+ "epoch": 2.2,
+ "grad_norm": 0.2402905523777008,
+ "learning_rate": 5.320855614973263e-05,
+ "loss": 0.3009,
+ "step": 550
+ },
+ {
+ "epoch": 2.24,
+ "grad_norm": 0.19268670678138733,
+ "learning_rate": 5.0534759358288774e-05,
+ "loss": 0.3916,
+ "step": 560
+ },
+ {
+ "epoch": 2.2800000000000002,
+ "grad_norm": 0.2288196086883545,
+ "learning_rate": 4.786096256684492e-05,
+ "loss": 0.338,
+ "step": 570
+ },
+ {
+ "epoch": 2.32,
+ "grad_norm": 0.2444332093000412,
+ "learning_rate": 4.518716577540107e-05,
+ "loss": 0.3971,
+ "step": 580
+ },
+ {
+ "epoch": 2.36,
+ "grad_norm": 0.26502081751823425,
+ "learning_rate": 4.251336898395722e-05,
+ "loss": 0.2905,
+ "step": 590
+ },
+ {
+ "epoch": 2.392,
+ "eval_loss": 0.45234861969947815,
+ "eval_runtime": 967.7709,
+ "eval_samples_per_second": 0.207,
+ "eval_steps_per_second": 0.207,
+ "step": 598
  }
  ],
  "logging_steps": 10,
@@ -229,7 +447,7 @@
  "attributes": {}
  }
  },
- "total_flos": 1.8939591462152602e+18,
+ "total_flos": 3.783168801781924e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4d37e9a62aed39344048c48527742993a6401e878e4c27e49b2fedf8f6539267
+ oid sha256:57a8acdebd4b287f899231aaa627e1307e96ad0a6aac4f0ee5f02d9205331add
  size 5304
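training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside each checkpoint; only its hash changes here while the size stays 5304 bytes. A minimal sketch of reading it back, assuming a compatible transformers version is installed so the pickle can resolve its classes (weights_only=False is required on recent PyTorch because this is not a plain tensor file):

```python
# Minimal sketch: inspect the pickled TrainingArguments from a checkpoint.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)                 # typically TrainingArguments
print("learning_rate    :", args.learning_rate)
print("per-device batch :", args.per_device_train_batch_size)
print("num_train_epochs :", args.num_train_epochs)
```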