{
    "global_step": 10000,
    "epoch": 0,
    "args": {
        "sdxl_model": "/home/envy/sd_xl_base_1.0.safetensors",
        "llm_model": "Qwen/Qwen1.5-7B-Chat",
        "output_path": "/home/envy/qwip-10layers-pooled",
        "resume_from_checkpoint": null,
        "prompts_dir": "/home/envy/promptgen",
        "train_layers": 10,
        "epochs": 1,
        "batch_size": 4,
        "lr": 1e-05,
        "scheduler_type": "cosine",
        "cosine_num_cycles": 1,
        "warmup_ratio": 0.1,
        "loss_type": "mse",
        "precision": "bf16",
        "max_llm_length": 128,
        "save_every_n_steps": 5000,
        "seed": 42,
        "gradient_accumulation_steps": 1,
        "use_flash_attention_2": true,
        "gradient_checkpointing": false,
        "dataloader_num_workers": 4
    }
}