Ryukijano committed · Commit 2d23564 · verified · 1 Parent(s): ca01f36

Upload config.json with huggingface_hub

Files changed (1):
  1. config.json +63 -62
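
The commit message says the file was pushed with the huggingface_hub client. A minimal sketch of such an upload call follows; the target repository id is a placeholder, since this page does not name the repo:

```python
from huggingface_hub import HfApi

api = HfApi()

# Push a local config.json to the Hub. "Ryukijano/<repo>" is a placeholder;
# the actual repository id is not shown on this page.
api.upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="Ryukijano/<repo>",
    commit_message="Upload config.json with huggingface_hub",
)
```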
config.json CHANGED
@@ -1,64 +1,65 @@
  {
-   "action_dim": 32,
-   "action_head_cfg": {
-     "action_dim": 32,
-     "action_horizon": 16,
-     "add_pos_embed": true,
-     "backbone_embedding_dim": 2048,
-     "diffusion_model_cfg": {
-       "attention_head_dim": 48,
-       "cross_attention_dim": 2048,
-       "dropout": 0.2,
-       "final_dropout": true,
-       "interleave_self_attention": true,
-       "norm_type": "ada_norm",
-       "num_attention_heads": 32,
-       "num_layers": 16,
-       "output_dim": 1024,
-       "positional_embeddings": null
      },
-     "hidden_size": 1024,
-     "input_embedding_dim": 1536,
-     "max_action_dim": 32,
-     "max_state_dim": 64,
-     "model_dtype": "float32",
-     "noise_beta_alpha": 1.5,
-     "noise_beta_beta": 1.0,
-     "noise_s": 0.999,
-     "num_inference_timesteps": 4,
-     "num_target_vision_tokens": 32,
-     "num_timestep_buckets": 1000,
-     "tune_diffusion_model": true,
-     "tune_projector": true,
-     "use_vlln": true,
-     "vl_self_attention_cfg": {
-       "attention_head_dim": 64,
-       "dropout": 0.2,
-       "final_dropout": true,
-       "num_attention_heads": 32,
-       "num_layers": 4,
-       "positional_embeddings": null
-     }
-   },
-   "action_horizon": 16,
-   "architectures": [
-     "GR00T_N1_5"
-   ],
-   "attn_implementation": null,
-   "backbone_cfg": {
-     "eagle_path": "NVEagle/eagle_er-qwen3_1_7B-Siglip2_400M_stage1_5_128gpu_er_v7_1mlp_nops",
-     "load_bf16": false,
-     "project_to_dim": null,
-     "reproject_vision": false,
-     "select_layer": 12,
-     "tune_llm": false,
-     "tune_visual": true,
-     "use_flash_attention": true
-   },
-   "compute_dtype": "bfloat16",
-   "hidden_size": 2048,
-   "model_dtype": "float32",
-   "model_type": "gr00t_n1_5",
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.51.3"
- }
  {
+   "n_obs_steps": 1,
+   "normalization_mapping": {
+     "VISUAL": "MEAN_STD",
+     "STATE": "MIN_MAX",
+     "ACTION": "MIN_MAX"
    },
+   "input_features": {
+     "observation.images.egoview": {
+       "type": "VISUAL",
+       "shape": [
+         3,
+         800,
+         1280
+       ]
+     },
+     "observation.images.ego_view": {
+       "type": "VISUAL",
+       "shape": [
+         3,
+         256,
+         256
+       ]
+     },
+     "observation.state": {
+       "type": "STATE",
+       "shape": [
+         44
+       ]
+     }
+   },
+   "output_features": {
+     "action": {
+       "type": "ACTION",
+       "shape": [
+         44
+       ]
+     }
+   },
+   "device": "cuda",
+   "use_amp": true,
+   "chunk_size": 16,
+   "n_action_steps": 8,
+   "vision_model_id": "/scratch/cbjp404/.cache/hf/models/siglip-so400m-patch14-384",
+   "text_model_id": "/scratch/cbjp404/.cache/hf/models/gemma-3-4b-it",
+   "use_2d_rope": false,
+   "lora_rank": 16,
+   "lora_alpha": 16,
+   "lora_dropout": 0.1,
+   "lora_target_modules": [
+     "q_proj",
+     "k_proj",
+     "v_proj",
+     "o_proj"
+   ],
+   "scaledp_num_layers": 4,
+   "scaledp_num_heads": 8,
+   "scaledp_dim_model": 512,
+   "scaledp_dim_feedforward": 2048,
+   "num_diffusion_steps": 100,
+   "conditioning_dim": 768,
+   "plan_update_interval": 10,
+   "optimizer_lr": 0.0001,
+   "optimizer_weight_decay": 1e-06
+ }
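
The new config.json reads as a LeRobot-style policy configuration: per-feature-type normalization schemes, camera and state input features, a 44-dimensional action output, LoRA adapter settings over the attention projections, and a small ScaleDP diffusion head. A minimal sketch of how the lora_* and normalization_mapping fields might be consumed downstream, assuming peft for the adapters; the normalize helper and the stats dict are illustrative, not taken from this repository:

```python
import json

import torch
from peft import LoraConfig  # assumes the lora_* fields map onto peft

with open("config.json") as f:
    cfg = json.load(f)

# Build a peft LoraConfig from the config's LoRA fields: rank 16, alpha 16,
# dropout 0.1, targeting the q/k/v/o attention projections listed above.
lora_cfg = LoraConfig(
    r=cfg["lora_rank"],
    lora_alpha=cfg["lora_alpha"],
    lora_dropout=cfg["lora_dropout"],
    target_modules=cfg["lora_target_modules"],
)

# normalization_mapping assigns a scheme per feature type: MEAN_STD for
# VISUAL, MIN_MAX for STATE and ACTION. The per-feature `stats` (min/max or
# mean/std tensors) would come from the dataset and are assumed here.
def normalize(value: torch.Tensor, feature_type: str, stats: dict) -> torch.Tensor:
    scheme = cfg["normalization_mapping"][feature_type]
    if scheme == "MEAN_STD":
        return (value - stats["mean"]) / stats["std"]
    if scheme == "MIN_MAX":
        # One common convention: rescale to [-1, 1].
        return (value - stats["min"]) / (stats["max"] - stats["min"]) * 2 - 1
    return value
```

Note that chunk_size is 16 while n_action_steps is 8, which suggests the policy predicts 16-step action chunks but executes only the first 8 before re-querying.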