{
    "model_config": {
        "vocab_size": 151662,
        "max_position_embeddings": 32768,
        "hidden_size": 1536,
        "intermediate_size": 8960,
        "num_hidden_layers": 28,
        "num_attention_heads": 12,
        "use_sliding_window": false,
        "sliding_window": 32768,
        "max_window_layers": 28,
        "num_key_value_heads": 2,
        "hidden_act": "silu",
        "initializer_range": 0.02,
        "rms_norm_eps": 1e-06,
        "use_cache": false,
        "rope_theta": 1000000.0,
        "attention_dropout": 0.0,
        "rope_scaling": {
            "mrope_section": [
                16,
                24,
                24
            ],
            "rope_type": "default",
            "type": "default"
        },
        "return_dict": true,
        "output_hidden_states": false,
        "output_attentions": false,
        "torchscript": false,
        "use_bfloat16": false,
        "tf_legacy_loss": false,
        "pruned_heads": {},
        "tie_word_embeddings": true,
        "chunk_size_feed_forward": 0,
        "is_encoder_decoder": false,
        "is_decoder": false,
        "cross_attention_hidden_size": null,
        "add_cross_attention": false,
        "tie_encoder_decoder": false,
        "max_length": 20,
        "min_length": 0,
        "do_sample": false,
        "early_stopping": false,
        "num_beams": 1,
        "num_beam_groups": 1,
        "diversity_penalty": 0.0,
        "temperature": 1.0,
        "top_k": 50,
        "top_p": 1.0,
        "typical_p": 1.0,
        "repetition_penalty": 1.0,
        "length_penalty": 1.0,
        "no_repeat_ngram_size": 0,
        "encoder_no_repeat_ngram_size": 0,
        "bad_words_ids": null,
        "num_return_sequences": 1,
        "output_scores": false,
        "return_dict_in_generate": false,
        "forced_bos_token_id": null,
        "forced_eos_token_id": null,
        "remove_invalid_values": false,
        "exponential_decay_length_penalty": null,
        "suppress_tokens": null,
        "begin_suppress_tokens": null,
        "architectures": [
            "Qwen2VLForConditionalGenerationWithPointer"
        ],
        "finetuning_task": null,
        "id2label": {
            "0": "LABEL_0",
            "1": "LABEL_1"
        },
        "label2id": {
            "LABEL_0": 0,
            "LABEL_1": 1
        },
        "tokenizer_class": null,
        "prefix": null,
        "bos_token_id": 151643,
        "pad_token_id": null,
        "eos_token_id": [
            151658
        ],
        "sep_token_id": null,
        "decoder_start_token_id": null,
        "task_specific_params": null,
        "problem_type": null,
        "_name_or_path": "/mnt/checkpoints/aguvis_final/qwen2vl7binstruct_stage1_ep1_lr0.0001_bs1_mp5720064_mml24576_ufallFalse_ufpnTrue_uflmFalse_ufbmFalse_ufntTrue_ufvFalse_ploss1.0_lmloss-1.0_stage0_all_params_uground_a100_n8_final_fixed_2B/",
        "_commit_hash": null,
        "_attn_implementation_internal": "flash_attention_2",
        "_attn_implementation_autoset": true,
        "transformers_version": "4.47.1",
        "image_token_id": 151655,
        "model_type": "qwen2_vl",
        "pointer_end_token_id": 151660,
        "pointer_pad_token_id": 151661,
        "pointer_start_token_id": 151659,
        "video_token_id": 151656,
        "vision_end_token_id": 151653,
        "vision_start_token_id": 151652,
        "vision_token_id": 151654
    },
    "data_processor_config": {
        "image_token": "<|image_pad|>",
        "video_token": "<|video_pad|>",
        "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
    },
    "image_processor_config": {
        "_processor_class": "Qwen2VLProcessor",
        "image_processor_type": "Qwen2VLImageProcessor",
        "size": {
            "min_pixels": 3136,
            "max_pixels": 5720064
        },
        "do_resize": true,
        "resample": 3,
        "do_rescale": true,
        "rescale_factor": 0.00392156862745098,
        "do_normalize": true,
        "image_mean": [
            0.48145466,
            0.4578275,
            0.40821073
        ],
        "image_std": [
            0.26862954,
            0.26130258,
            0.27577711
        ],
        "min_pixels": 3136,
        "max_pixels": 5720064,
        "patch_size": 14,
        "temporal_patch_size": 2,
        "merge_size": 2,
        "do_convert_rgb": true
    },
    "model_args": {
        "model_name_or_path": "/mnt/checkpoints/aguvis_final/qwen2vl7binstruct_stage1_ep1_lr0.0001_bs1_mp5720064_mml24576_ufallFalse_ufpnTrue_uflmFalse_ufbmFalse_ufntTrue_ufvFalse_ploss1.0_lmloss-1.0_stage0_all_params_uground_a100_n8_final_fixed_2B/",
        "flash_attn_2_enabled": true
    },
    "data_args": {
        "data_path": "data/amlt_stage1.yaml",
        "early_mix_text": false,
        "image_folder": "/mnt/data/aguvis",
        "min_pixels": 3136,
        "max_pixels": 5720064,
        "max_conv_turns": 10
    },
    "training_args": {
        "output_dir": "/mnt/checkpoints/aguvis_final/qwen2vl7binstruct_stage1_ep1_lr5e-06_bs1_mp5720064_mml24576_ufallTrue_ufpnFalse_uflmFalse_ufbmFalse_ufntFalse_ufvFalse_ploss1.0_lmloss1.0_stage1_all_params_uground_a100_n8_final_fixed_2B",
        "overwrite_output_dir": false,
        "do_train": false,
        "do_eval": false,
        "do_predict": false,
        "eval_strategy": "no",
        "prediction_loss_only": false,
        "per_device_train_batch_size": 1,
        "per_device_eval_batch_size": 4,
        "per_gpu_train_batch_size": null,
        "per_gpu_eval_batch_size": null,
        "gradient_accumulation_steps": 1,
        "eval_accumulation_steps": null,
        "eval_delay": 0,
        "torch_empty_cache_steps": null,
        "learning_rate": 5e-06,
        "weight_decay": 0.0,
        "adam_beta1": 0.9,
        "adam_beta2": 0.999,
        "adam_epsilon": 1e-08,
        "max_grad_norm": 1.0,
        "num_train_epochs": 1.0,
        "max_steps": -1,
        "lr_scheduler_type": "cosine",
        "lr_scheduler_kwargs": {},
        "warmup_ratio": 0.03,
        "warmup_steps": 0,
        "log_level": "passive",
        "log_level_replica": "warning",
        "log_on_each_node": true,
        "logging_dir": "/mnt/checkpoints/aguvis_final/qwen2vl7binstruct_stage1_ep1_lr5e-06_bs1_mp5720064_mml24576_ufallTrue_ufpnFalse_uflmFalse_ufbmFalse_ufntFalse_ufvFalse_ploss1.0_lmloss1.0_stage1_all_params_uground_a100_n8_final_fixed_2B/runs/May22_17-23-22_node-7",
        "logging_strategy": "steps",
        "logging_first_step": false,
        "logging_steps": 10,
        "logging_nan_inf_filter": true,
        "save_strategy": "steps",
        "save_steps": 1000,
        "save_total_limit": null,
        "save_safetensors": true,
        "save_on_each_node": false,
        "save_only_model": false,
        "restore_callback_states_from_checkpoint": false,
        "no_cuda": false,
        "use_cpu": false,
        "use_mps_device": false,
        "seed": 42,
        "data_seed": null,
        "jit_mode_eval": false,
        "use_ipex": false,
        "bf16": true,
        "fp16": false,
        "fp16_opt_level": "O1",
        "half_precision_backend": "auto",
        "bf16_full_eval": false,
        "fp16_full_eval": false,
        "tf32": true,
        "local_rank": 0,
        "ddp_backend": null,
        "tpu_num_cores": null,
        "tpu_metrics_debug": false,
        "debug": [],
        "dataloader_drop_last": false,
        "eval_steps": null,
        "dataloader_num_workers": 8,
        "dataloader_prefetch_factor": null,
        "past_index": -1,
        "run_name": "qwen2vl7binstruct_stage1_ep1_lr5e-06_bs1_mp5720064_mml24576_ufallTrue_ufpnFalse_uflmFalse_ufbmFalse_ufntFalse_ufvFalse_ploss1.0_lmloss1.0_stage1_all_params_uground_a100_n8_final_fixed_2B",
        "disable_tqdm": false,
        "remove_unused_columns": true,
        "label_names": null,
        "load_best_model_at_end": false,
        "metric_for_best_model": null,
        "greater_is_better": null,
        "ignore_data_skip": false,
        "fsdp": [],
        "fsdp_min_num_params": 0,
        "fsdp_config": {
            "min_num_params": 0,
            "xla": false,
            "xla_fsdp_v2": false,
            "xla_fsdp_grad_ckpt": false
        },
        "fsdp_transformer_layer_cls_to_wrap": null,
        "deepspeed": "./scripts/zero3.json",
        "label_smoothing_factor": 0.0,
        "optim": "adamw_torch",
        "optim_args": null,
        "adafactor": false,
        "group_by_length": false,
        "length_column_name": "length",
        "report_to": [
            "wandb"
        ],
        "ddp_find_unused_parameters": null,
        "ddp_bucket_cap_mb": null,
        "ddp_broadcast_buffers": null,
        "dataloader_pin_memory": true,
        "dataloader_persistent_workers": false,
        "skip_memory_metrics": true,
        "use_legacy_prediction_loop": false,
        "push_to_hub": false,
        "resume_from_checkpoint": null,
        "hub_model_id": null,
        "hub_strategy": "every_save",
        "hub_token": null,
        "hub_private_repo": null,
        "hub_always_push": false,
        "gradient_checkpointing": true,
        "gradient_checkpointing_kwargs": null,
        "include_inputs_for_metrics": false,
        "include_for_metrics": [],
        "eval_do_concat_batches": true,
        "fp16_backend": "auto",
        "evaluation_strategy": null,
        "push_to_hub_model_id": null,
        "push_to_hub_organization": null,
        "push_to_hub_token": null,
        "mp_parameters": "",
        "auto_find_batch_size": false,
        "full_determinism": false,
        "torchdynamo": null,
        "ray_scope": "last",
        "ddp_timeout": 1800,
        "torch_compile": false,
        "torch_compile_backend": null,
        "torch_compile_mode": null,
        "dispatch_batches": null,
        "split_batches": null,
        "include_tokens_per_second": false,
        "include_num_input_tokens_seen": false,
        "neftune_noise_alpha": null,
        "optim_target_modules": null,
        "batch_eval_metrics": false,
        "eval_on_start": false,
        "use_liger_kernel": false,
        "eval_use_gather_object": false,
        "average_tokens_across_devices": false,
        "cache_dir": null,
        "model_max_length": 24576,
        "group_by_modality_length": true,
        "verbose_logging": false,
        "unfreeze_all_parameters": true,
        "unfreeze_pointer_head": false,
        "unfreeze_lm_head": false,
        "unfreeze_base_model": false,
        "unfreeze_last_n_layers": -1,
        "unfreeze_new_tokens": false,
        "unfreeze_visual": false,
        "pointer_loss_weight": 1.0,
        "lm_loss_weight": 1.0,
        "_n_gpu": 1
    }
}