{
    "type": "pi0fast",
    "n_obs_steps": 1,
    "normalization_mapping": {
        "VISUAL": "IDENTITY",
        "STATE": "MEAN_STD",
        "ACTION": "MEAN_STD"
    },
    "input_features": {
        "observation.image": {
            "type": "VISUAL",
            "shape": [
                3,
                256,
                256
            ]
        },
        "observation.image2": {
            "type": "VISUAL",
            "shape": [
                3,
                256,
                256
            ]
        },
        "observation.image3": {
            "type": "VISUAL",
            "shape": [
                3,
                256,
                256
            ]
        },
        "observation.state": {
            "type": "STATE",
            "shape": [
                8
            ]
        }
    },
    "output_features": {
        "action": {
            "type": "ACTION",
            "shape": [
                7
            ]
        }
    },
    "chunk_size": 10,
    "n_action_steps": 5,
    "max_state_dim": 32,
    "max_action_dim": 32,
    "resize_imgs_with_padding": [
        224,
        224
    ],
    "interpolate_like_pi": false,
    "empty_cameras": 0,
    "adapt_to_pi_aloha": false,
    "use_delta_joint_actions_aloha": false,
    "tokenizer_max_length": 48,
    "proj_width": 1024,
    "max_decoding_steps": 256,
    "fast_skip_tokens": 128,
    "max_input_seq_len": 256,
    "use_cache": true,
    "freeze_vision_encoder": true,
    "freeze_lm_head": true,
    "optimizer_lr": 0.0001,
    "optimizer_betas": [
        0.9,
        0.95
    ],
    "optimizer_eps": 1e-08,
    "optimizer_weight_decay": 1e-05,
    "scheduler_warmup_steps": 1000,
    "scheduler_decay_steps": 30000,
    "scheduler_decay_lr": 2.5e-06
}
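
For reference, below is a minimal sketch of reading this file with Python's standard library and checking how each declared feature is normalized. The filename "config.json", the loop, and the printed summary are illustrative assumptions, not part of the policy's own loading code (in LeRobot the file is normally consumed by the policy's from_pretrained loader rather than parsed by hand):

    # A minimal sketch, assuming the JSON above is saved as "config.json".
    import json

    with open("config.json") as f:
        cfg = json.load(f)

    assert cfg["type"] == "pi0fast"

    # Each feature's normalization mode is looked up by its declared type:
    # the three camera images pass through unchanged (IDENTITY), while the
    # 8-dim state and 7-dim action are standardized (MEAN_STD).
    for name, feat in {**cfg["input_features"], **cfg["output_features"]}.items():
        mode = cfg["normalization_mapping"][feat["type"]]
        print(f"{name}: shape={feat['shape']}, normalization={mode}")

    # The policy predicts chunks of 10 actions but executes only the first 5
    # before re-planning (chunk_size=10, n_action_steps=5).
    print("execute", cfg["n_action_steps"], "of", cfg["chunk_size"], "predicted steps")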