# config.yaml
# Reconstructed from a "Delete config.yaml" diff page (194 deleted lines).
# Diff markers, rendered line numbers, and column separators removed.
# Panopticon / DINOv2-style self-supervised training configuration.
# NOTE(review): leading indentation was destroyed by the diff scrape; nesting
# below is reconstructed from the standard DINOv2 config schema — confirm
# against the original repository before relying on exact key placement.
MODEL:
  # Stage-1 checkpoint to initialize from.
  WEIGHTS: path/to/stage1/run/model_final.pth
pretrained_weights: []
# FSDP sharding + mixed-precision policy per sub-module.
compute_precision:
  grad_scaler: true
  teacher:
    backbone:
      sharding_strategy: NO_SHARD
      mixed_precision:
        param_dtype: fp16
        reduce_dtype: fp16
        buffer_dtype: fp32
    dino_head:
      sharding_strategy: NO_SHARD
      mixed_precision:
        param_dtype: fp16
        reduce_dtype: fp16
        buffer_dtype: fp32
    ibot_head:
      sharding_strategy: NO_SHARD
      mixed_precision:
        param_dtype: fp16
        reduce_dtype: fp16
        buffer_dtype: fp32
  student:
    backbone:
      sharding_strategy: NO_SHARD
      mixed_precision:
        param_dtype: fp16
        reduce_dtype: fp16
        buffer_dtype: fp32
    dino_head:
      sharding_strategy: NO_SHARD
      mixed_precision:
        param_dtype: fp16
        # Student heads reduce gradients in fp32 (teacher heads use fp16).
        reduce_dtype: fp32
        buffer_dtype: fp32
    ibot_head:
      sharding_strategy: NO_SHARD
      mixed_precision:
        param_dtype: fp16
        reduce_dtype: fp32
        buffer_dtype: fp32
# DINO image-level objective.
dino:
  loss_weight: 1.0
  head_n_prototypes: 65536
  head_bottleneck_dim: 256
  head_nlayers: 3
  head_hidden_dim: 2048
  koleo_loss_weight: 0.0
# iBOT masked-patch objective.
ibot:
  loss_weight: 0.1
  mask_sample_probability: 0.5
  mask_ratio_min_max:
    - 0.1
    - 0.5
  separate_head: false
  head_n_prototypes: 65536
  head_bottleneck_dim: 256
  head_nlayers: 3
  head_hidden_dim: 2048
aux_loss:
  enable: false
  gating_loss_weight: 0.0
  diversity_loss_weight: 0.0
  sparsity_loss_weight: 0.0
gradient_accumulation_steps: 1
train:
  dataset:
    id: ConcatDataset
    datasets:
      - id: FmowDataset
        root: ${oc.env:RDIR}/datasets/
        split: ${oc.env:RDIR}/datasets/fmow/metadata_v2/fmow_iwm_onid_train_val_savioclean_min42max1024.parquet
        num_sens: 2
        full_spectra: false
        subset: 800000  # super-sample to adjust for the indexing by location (several 100 views per location)
      - id: SatlasDataset
        root: ${oc.env:RDIR}/datasets/satlas
        num_sens: 2
        full_spectra: false
        metadata_path: ${oc.env:RDIR}/datasets/satlas/metadata_v2/fmow_iwm_onid_3sensors_all_clean.parquet
      - id: MMEarth
        base_path: ${oc.env:RDIR}/datasets/mmearth/data_1M_v001
        split: train
        modalities: MODALITY_MINIMAL_SET1
        full_spectra: false
      - id: SpectralEarth
        split: train
        root: ${oc.env:RDIR}/datasets/
        full_spectra: false
  # Multi-crop augmentation: 2 global crops (224px) + 4 local crops (98px),
  # with spectral-channel subsetting per crop.
  dino_augm:
    id: PanopticonAugmentation
    global_crops_number: 2
    global_crops_size: 224
    global_crops_scale:
      - 0.32
      - 1.0
    global_crops_spectral_size:
      - 4
      - 13
    global_multi_select_view_in_single_sensor_modes: false
    global_hs_modes_probs:
      - 1
      - 0
    local_crops_number: 4
    local_crops_size: 98
    local_crops_scale:
      - 0.05
      - 0.32
    local_crops_spectral_size:
      - 1
      - 4
    local_multi_select_view_in_single_sensor_modes: true
    local_hs_modes_probs:
      - 1
      - 0
  batch_size_per_gpu: 75
  num_workers: 16
  drop_last: true
  pin_memory: true
  persistent_workers: true
  use_wandb: true
  saveckp_freq: 5
  seed: 21
  OFFICIAL_EPOCH_LENGTH: 1250
  centering: centering
  log_every_n_steps: 10
# Student backbone: ViT-B/14 with channel-fusion patch embedding.
student:
  arch: vit_base
  embed_layer: ChnFusionPE
  patch_size: 14
  pos_emb_img_size: 518
  drop_path_rate: 0.0
  layerscale: 1.0e-05
  drop_path_uniform: true
  pretrained_weights: []
  ffn_layer: mlp
  block_chunks: 0
  qkv_bias: true
  proj_bias: true
  ffn_bias: true
  num_register_tokens: 0
  interpolate_antialias: false
  interpolate_offset: 0.1
  pe_args:
    attn_dim: 2304
    chnfus_cfg:
      layer_norm: false
      attn_cfg:
        num_heads: 16
# EMA teacher schedule.
teacher:
  pretrained_weights: []
  momentum_teacher: 0.994
  final_momentum_teacher: 1.0
  warmup_teacher_temp: 0.04
  teacher_temp: 0.07
  warmup_teacher_temp_epochs: 5
optim:
  epochs: 70
  weight_decay: 0.04
  weight_decay_end: 0.2
  base_lr: 0.0005
  lr: 0.0005412658773652743
  warmup_epochs: 5
  min_lr: 1.0e-06
  lr_multiplier: blocks=0.2
  freeze_weights: last_layer=1
  scaling_rule: sqrt_wrt_1024
  clip_grad: 3.0
  layerwise_decay: 0.9
  adamw_beta1: 0.9
  adamw_beta2: 0.999
  break_at_epochs: -1
  online_lr_batch_scaling: false
  hold_epochs: 0
# Online evaluation during training.
eval:
  only_eval: false
  skip: false
  config_obj: ${oc.env:CDIR}/eval/oe
  overwrite: false
  overwrites:
    optim:
      dl:
        batch_size: 200
        num_workers: 8
  # NOTE(review): placement of the four keys below under `eval` (vs. inside
  # `overwrites`) is inferred — confirm against the original file.
  eval_period_epoch: 2
  eval_period_iterations: -1
  remove_ckpts: false
  include_final_ckpt: true
# Full evaluation suite for the final checkpoint.
final_model:
  config_obj: ${oc.env:CDIR}/eval/oe;${oc.env:CDIR}/eval/fm/
  overwrite: false
  overwrites: null