{
    "trainer": {
        "trainer_class": "BatchTopKCrossCoderTrainer",
        "dict_class": "BatchTopKCrossCoder",
        "lr": 0.0001,
        "steps": 97656,
        "auxk_alpha": 0.03125,
        "warmup_steps": 1000,
        "decay_start": null,
        "threshold_beta": 0.999,
        "threshold_start_step": 1000,
        "top_k_aux": 1024,
        "seed": null,
        "activation_dim": 2048,
        "dict_size": 65536,
        "k": 100,
        "code_normalization": "CROSSCODER",
        "code_normalization_alpha_sae": 1.0,
        "code_normalization_alpha_cc": 0.1,
        "device": "cuda",
        "layer": 8,
        "lm_name": "meta-llama/Llama-3.2-1B-Instruct-meta-llama/Llama-3.2-1B",
        "wandb_name": "Llama-3.2-1B-L8-k100-lr1e-04-local-shuffling-Crosscoder",
        "submodule_name": null,
        "dict_class_kwargs": {
            "same_init_for_all_layers": "True",
            "norm_init_scale": "1.0",
            "init_with_transpose": "True",
            "encoder_layers": "None",
            "code_normalization": "crosscoder",
            "code_normalization_alpha_sae": "1.0",
            "code_normalization_alpha_cc": "0.1"
        }
    }
}