{"hparams": {"action_dim": 7, "action_model_type": "DiT-B", "data_root_dir": "/mnt/inspurfs/efm_t/robot_data/cache/LIBERO/dataset", "debug": false, "disable_instruction": false, "fix_system1": false, "future_action_window_size": 7, "hf_token": "hf_token", "image_aug": true, "is_resume": false, "load_all_data_for_training": true, "num_of_meta_query": 64, "past_action_window_size": 0, "pretrained_checkpoint": null, "repeated_diffusion_steps": 4, "resume_epoch": null, "resume_step": null, "run_id": "sys12_meta_query_action_only_sync_pretraining_v2_query_64_mlp_lora_libero_10_wrist--image_augstage1", "run_id_note": null, "run_root_dir": "outputs/libero_wrist", "save_interval": 1500, "seed": 42, "stage": "stage1", "trackers": ["jsonl", "wandb"], "use_ema": false, "use_mm": false, "vla": {"action_tokenizer": "extra_action_tokenizer", "base_vlm": "/mnt/petrelfs/yangshuai1/yangshuai1/share_mllm/Eagle2-2B", "data_mix": "libero_10_no_noops", "enable_gradient_checkpointing": true, "enable_mixed_precision_training": true, "epochs": 100, "expected_world_size": 8, "freeze_llm_backbone": false, "freeze_vision_backbone": false, "global_batch_size": 256, "learning_rate": 5e-05, "lr_scheduler_type": "constant", "max_grad_norm": 1.0, "max_steps": null, "per_device_batch_size": 32, "reduce_in_full_precision": true, "shuffle_buffer_size": 250000, "train_strategy": "fsdp-full-shard", "type": "prism-qwen25-dinosiglip-224px+0_5b", "unfreeze_last_llm_layer": false, "vla_id": "prism-qwen25-dinosiglip-224px+0_5b", "warmup_ratio": 0.0, "weight_decay": 0.0}, "wandb_entity": "shuaiyang2003", "wandb_project": "dual_sys_libero", "with_pointing": true}, "run_id": "sys12_meta_query_action_only_sync_pretraining_v2_query_64_mlp_lora_libero_10_wrist--image_augstage1"}