{
    "dataset": {
        "repo_id": "local/robot_sim.PickNPlace",
        "root": "/scratch/cbjp404/Isaac-GR00T/demo_data/robot_sim.PickNPlace",
        "episodes": [0, 1, 2, 3, 4],
        "image_transforms": {
            "enable": false,
            "max_num_transforms": 3,
            "random_order": false,
            "tfs": {
                "brightness": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "brightness": [0.8, 1.2]
                    }
                },
                "contrast": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "contrast": [0.8, 1.2]
                    }
                },
                "saturation": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "saturation": [0.5, 1.5]
                    }
                },
                "hue": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "hue": [-0.05, 0.05]
                    }
                },
                "sharpness": {
                    "weight": 1.0,
                    "type": "SharpnessJitter",
                    "kwargs": {
                        "sharpness": [0.5, 1.5]
                    }
                }
            }
        },
        "revision": null,
        "use_imagenet_stats": false,
        "video_backend": "pyav"
    },
    "env": null,
    "policy": {
        "type": "gemma_le",
        "n_obs_steps": 1,
        "normalization_mapping": {
            "VISUAL": "MEAN_STD",
            "STATE": "MIN_MAX",
            "ACTION": "MIN_MAX"
        },
        "input_features": {
            "observation.images.egoview": {
                "type": "VISUAL",
                "shape": [3, 800, 1280]
            },
            "observation.images.ego_view": {
                "type": "VISUAL",
                "shape": [3, 256, 256]
            },
            "observation.state": {
                "type": "STATE",
                "shape": [44]
            }
        },
        "output_features": {
            "action": {
                "type": "ACTION",
                "shape": [44]
            }
        },
        "device": "cuda",
        "use_amp": true,
        "chunk_size": 16,
        "n_action_steps": 8,
        "vision_model_id": "/scratch/cbjp404/.cache/hf/models/siglip-so400m-patch14-384",
        "text_model_id": "/scratch/cbjp404/.cache/hf/models/gemma-3-4b-it",
        "use_2d_rope": false,
        "lora_rank": 16,
        "lora_alpha": 16,
        "lora_dropout": 0.1,
        "lora_target_modules": ["q_proj", "k_proj", "v_proj", "o_proj"],
        "scaledp_num_layers": 4,
        "scaledp_num_heads": 8,
        "scaledp_dim_model": 512,
        "scaledp_dim_feedforward": 2048,
        "num_diffusion_steps": 100,
        "conditioning_dim": 768,
        "plan_update_interval": 10,
        "optimizer_lr": 0.0001,
        "optimizer_weight_decay": 1e-06
    },
    "output_dir": "outputs/train/2025-08-12/13-06-07_gemma_le",
    "job_name": "gemma_le",
    "resume": false,
    "seed": 1000,
    "num_workers": 0,
    "batch_size": 1,
    "steps": 20000,
    "eval_freq": 20000,
    "log_freq": 10,
    "save_checkpoint": true,
    "save_freq": 5000,
    "use_policy_training_preset": true,
    "optimizer": {
        "type": "adamw",
        "lr": 0.0001,
        "weight_decay": 1e-06,
        "grad_clip_norm": 10.0,
        "betas": [0.9, 0.999],
        "eps": 1e-08
    },
    "scheduler": null,
    "eval": {
        "n_episodes": 50,
        "batch_size": 50,
        "use_async_envs": false
    },
    "wandb": {
        "enable": false,
        "disable_artifact": false,
        "project": "lerobot",
        "entity": null,
        "notes": null,
        "run_id": null
    },
    "progress_bar": true,
    "push_to_hub": false,
    "push_repo_id": null,
    "push_branch": null,
    "push_private": false,
    "push_exist_ok": false
}