Add files using upload-large-folder tool
- .gitattributes +14 -0
- checkpoints/0000052500/.metadata +0 -0
- checkpoints/0000052500/__0_0.distcp +3 -0
- checkpoints/0000052500/__1_0.distcp +3 -0
- checkpoints/0000052500/__2_0.distcp +3 -0
- checkpoints/0000052500/__3_0.distcp +3 -0
- checkpoints/0000052500/params.json +1 -0
- checkpoints/0000052500/train_state_00000.json +1 -0
- checkpoints/0000052500/train_state_00001.json +1 -0
- checkpoints/0000052500/train_state_00002.json +1 -0
- checkpoints/0000052500/train_state_00003.json +1 -0
- checkpoints/0000055000/.metadata +0 -0
- checkpoints/0000055000/__0_0.distcp +3 -0
- checkpoints/0000055000/__1_0.distcp +3 -0
- checkpoints/0000055000/__2_0.distcp +3 -0
- checkpoints/0000055000/__3_0.distcp +3 -0
- checkpoints/0000055000/params.json +1 -0
- checkpoints/0000055000/train_state_00000.json +1 -0
- checkpoints/0000055000/train_state_00001.json +1 -0
- checkpoints/0000055000/train_state_00002.json +1 -0
- checkpoints/0000055000/train_state_00003.json +1 -0
- checkpoints/0000057500/.metadata +0 -0
- checkpoints/0000057500/__0_0.distcp +3 -0
- checkpoints/0000057500/__1_0.distcp +3 -0
- checkpoints/0000057500/__2_0.distcp +3 -0
- checkpoints/0000057500/__3_0.distcp +3 -0
- checkpoints/0000057500/params.json +1 -0
- checkpoints/0000057500/train_state_00000.json +1 -0
- checkpoints/0000057500/train_state_00001.json +1 -0
- checkpoints/0000057500/train_state_00002.json +1 -0
- checkpoints/0000057500/train_state_00003.json +1 -0
- config.yaml +127 -0
- metrics.jsonl +3 -0
- profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104882.html +0 -0
- profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104883.html +0 -0
- profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104884.html +0 -0
- profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104885.html +0 -0
- profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104882.1736675622108438789.pt.trace.json.gz +3 -0
- profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104883.1736675622100620970.pt.trace.json.gz +3 -0
- profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104884.1736675622099254891.pt.trace.json.gz +3 -0
- profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104885.1736675622113109902.pt.trace.json.gz +3 -0
- train.log +3 -0
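All of the large artifacts in this commit (checkpoint shards, logs, traces) are routed through Git LFS, so a plain clone only materializes small pointer stubs. A minimal sketch of fetching just one checkpoint directory via huggingface_hub; the repo id below is a hypothetical placeholder, since the hosting repository is not named in this commit:

```python
# Sketch: pull one checkpoint directory plus the config from the Hub.
# REPO_ID is a placeholder -- substitute the actual repository name.
from huggingface_hub import snapshot_download

REPO_ID = "some-org/some-training-run"  # hypothetical
local_dir = snapshot_download(
    repo_id=REPO_ID,
    allow_patterns=["checkpoints/0000057500/*", "config.yaml"],
)
print("files downloaded under:", local_dir)
```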
.gitattributes
CHANGED
@@ -33,3 +33,17 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+train.log filter=lfs diff=lfs merge=lfs -text
+metrics.jsonl filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000057500/__3_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000052500/__1_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000052500/__2_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000055000/__1_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000055000/__3_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000057500/__2_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000057500/__1_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000057500/__0_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000055000/__2_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000052500/__0_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000055000/__0_0.distcp filter=lfs diff=lfs merge=lfs -text
+checkpoints/0000052500/__3_0.distcp filter=lfs diff=lfs merge=lfs -text
checkpoints/0000052500/.metadata
ADDED
Binary file (667 kB)
checkpoints/0000052500/__0_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba1059c3c7933e00bef22dad2c9196b61e9ed51625d8f9c4c98d40d41247220b
size 5089728720
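The three lines above are not checkpoint data; they are a Git LFS pointer stub, and the real ~5.1 GB shard is addressed by the SHA-256 oid and only materialized by `git lfs pull`. A small sketch that reads such a stub before the object has been pulled:

```python
# Sketch: parse a Git LFS pointer stub (the three-line format shown above).
from pathlib import Path

def read_lfs_pointer(path: str) -> dict:
    """Return the version, oid and size recorded in a pointer file."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    fields["size"] = int(fields["size"])  # payload size in bytes
    return fields

info = read_lfs_pointer("checkpoints/0000052500/__0_0.distcp")
print(info["oid"], info["size"])  # sha256:..., 5_089_728_720 bytes (~5.1 GB)
```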
checkpoints/0000052500/__1_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a36dfca6c235e04d8fe21b84b9ba645984353a652d5bc883a55f660dc472eea5
size 5089821856
checkpoints/0000052500/__2_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:49014ebf3b1d6e4cfa12b7120216731bef8f392cea7eb04afe03368e32a3541c
size 5089821856
checkpoints/0000052500/__3_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c4afe60b60d096c63ce7c24944d1fe7dd33e5a227b503258ab80a78243a517a9
size 5089830112
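Each checkpoint directory holds a .metadata file plus four __N_0.distcp shards, the on-disk layout written by torch.distributed.checkpoint (DCP) with four saving ranks. A minimal consolidation sketch, assuming a recent PyTorch (>= 2.2) where dcp_to_torch_save is available and the LFS objects have been pulled; whether the resulting state dict plugs directly back into this project's model code is not shown:

```python
# Sketch: fold the sharded DCP checkpoint above into a single torch.save file.
# Assumes PyTorch >= 2.2 and that the LFS objects have already been pulled.
import torch
from torch.distributed.checkpoint.format_utils import dcp_to_torch_save

dcp_dir = "checkpoints/0000052500"                    # .metadata + __*_0.distcp
out_path = "checkpoints/0000052500/consolidated.pth"
dcp_to_torch_save(dcp_dir, out_path)                  # writes one flat state dict

state = torch.load(out_path, map_location="cpu", weights_only=False)
print(len(state), "top-level entries")
```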
checkpoints/0000052500/params.json
ADDED
@@ -0,0 +1 @@
{"name": "large_lm", "dump_dir": "./dump_dir_llama1b2", "seed": 777, "grad_acc_steps": 2, "gc_collect_freq": 1000, "probe_freq": null, "steps": 60000, "data": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "batch_size": 8, "seq_len": 4096, "n_views": 2, "seed": 42, "add_bos": true, "add_eos": true, "load_async": true, "prefetch_size": 1024, "tokenizer": {"name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}}, "optim": {"lr": 0.003, "weight_decay": 0.033, "epsilon": 1e-08, "beta1": 0.9, "beta2": 0.95, "clip": 1.0, "scheduler": "cosine", "warmup": 5000, "lr_min_ratio": 1e-06, "cycle_length": 1.0, "cosine_theta": 1.0, "annealing_step": 1000, "decay_fraction": 0.1, "exp_factor": 0.5}, "model": {"dim": 2048, "n_layers": 25, "head_dim": null, "n_heads": 16, "n_kv_heads": null, "ffn_dim_multiplier": null, "multiple_of": 256, "norm_eps": 1e-05, "rope_theta": 10000.0, "init_base_std": null, "init_std_factor": "disabled", "rope_type": "original", "rope_inv_freq_learnable": false, "max_seqlen": 4096, "use_mla": "", "q_lora_rank": 1536, "kv_lora_rank": 512, "seed": 42, "vocab_size": 100512, "weight_tying": false, "sliding_window": null}, "distributed": {"dp_shard": 1, "dp_replicate": 4, "tp_size": 1, "selective_activation_checkpointing": false, "compile": true, "fsdp_type": "full_shard", "model_dtype": "bf16", "float8_recipe": null, "float8_filter": "layers\\.[0-9]+\\.", "matmul_allow_tf32": true, "detect_anomaly": false, "compile_cache_size_limit": 8, "spawn_method": "forkserver"}, "env": {"MKL_SERVICE_FORCE_INTEL": "GNU", "OMP_NUM_THREADS": "1", "MKL_NUM_THREADS": "1", "ENABLE_INTRA_NODE_COMM": "1", "TORCH_NCCL_AVOID_RECORD_STREAMS": "1", "NCCL_IB_TIMEOUT": "22", "NCCL_DEBUG": "INFO", "TORCH_NCCL_ASYNC_ERROR_HANDLING": "1"}, "checkpoint": {"dump": {"every": 2500, "keep": 3}, "eval": {"every": 5000000000, "keep": -1}, "path": "dump_dir_llama1b2/checkpoints", "init_ckpt_path": null, "continue_training_from_init": false}, "profiling": {"run": true, "trace_folder": "profiling", "mem_warmup": 0, "mem_steps": 4, "profile_warmup": 100, "profile_steps": 4}, "logging": {"freq": 1, "acc_freq": null, "wandb": null}, "async_eval_gpus": 1, "eval": {"harness": {"tasks": ["hellaswag", {"task": "boolq", "dataset_kwargs": {"trust_remote_code": true}}, "piqa", {"task": "social_iqa", "dataset_kwargs": {"trust_remote_code": true}}, "winogrande", "openbookqa", "arc_easy", "arc_challenge", "race", "commonsense_qa", "copa"]}, "validation": {"max_steps": 1000}, "generator": {"max_tokens": 16384, "dtype": "bf16"}}}
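params.json is the fully resolved configuration snapshot stored alongside each checkpoint. One number worth deriving from it is the effective batch; the sketch below assumes the global batch is batch_size * seq_len * grad_acc_steps * dp_replicate:

```python
# Sketch: back-of-envelope token throughput derived from params.json.
# Assumption: tokens per optimizer step = batch_size * seq_len
#             * grad_acc_steps * dp_replicate (4 data-parallel replicas).
import json

with open("checkpoints/0000052500/params.json") as f:
    params = json.load(f)

tokens_per_step = (
    params["data"]["batch_size"]             # 8
    * params["data"]["seq_len"]              # 4096
    * params["grad_acc_steps"]               # 2
    * params["distributed"]["dp_replicate"]  # 4
)
print(tokens_per_step)                    # 262,144 tokens per optimizer step
print(tokens_per_step * params["steps"])  # ~15.7B tokens over 60,000 steps
```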
checkpoints/0000052500/train_state_00000.json
ADDED
@@ -0,0 +1 @@
{"step": 52500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 0, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 20632936879, "block_size": 4, "offset": 0, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 104764078563362074937563996671037632573, "inc": 11676600559890430755450356507027720041}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 552, "rng_state": {"bit_generator": "PCG64", "state": {"state": 2395845221803597463567811736478288695, "inc": 77357518920597472829800677777012462921}, "has_uint32": 1, "uinteger": 1272976797}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 52500, "verbose": false, "_step_count": 52501, "_get_lr_called_within_step": false, "_last_lr": [0.00013555487141621536], "lr_lambdas": [{}]}}
checkpoints/0000052500/train_state_00001.json
ADDED
@@ -0,0 +1 @@
{"step": 52500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 233, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 20814694346, "block_size": 4, "offset": 1, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 153609858426494565531725423363918940328, "inc": 239634081480473411747239400828488620799}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 552, "rng_state": {"bit_generator": "PCG64", "state": {"state": 129706445657155535128902585122205091844, "inc": 270234035871729269002159329014059236425}, "has_uint32": 1, "uinteger": 3215027928}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 52500, "verbose": false, "_step_count": 52501, "_get_lr_called_within_step": false, "_last_lr": [0.00013555487141621536], "lr_lambdas": [{}]}}
checkpoints/0000052500/train_state_00002.json
ADDED
@@ -0,0 +1 @@
{"step": 52500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 1876, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 20698029952, "block_size": 4, "offset": 2, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 331143196790987356343970786542895835071, "inc": 6027823433652931085739778990793808165}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 552, "rng_state": {"bit_generator": "PCG64", "state": {"state": 10307761932790020400304415503036386848, "inc": 188564971970541749319992297790591572713}, "has_uint32": 0, "uinteger": 2239832520}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 52500, "verbose": false, "_step_count": 52501, "_get_lr_called_within_step": false, "_last_lr": [0.00013555487141621536], "lr_lambdas": [{}]}}
checkpoints/0000052500/train_state_00003.json
ADDED
@@ -0,0 +1 @@
{"step": 52500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 1407, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 20664159395, "block_size": 4, "offset": 3, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 41541280331522767549157535813803619959, "inc": 92941856108932518968286621281627530405}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 552, "rng_state": {"bit_generator": "PCG64", "state": {"state": 294838761640198597003972074272309035329, "inc": 66050176413739185524746886687120723265}, "has_uint32": 0, "uinteger": 2517394453}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 52500, "verbose": false, "_step_count": 52501, "_get_lr_called_within_step": false, "_last_lr": [0.00013555487141621536], "lr_lambdas": [{}]}}
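Each rank writes its own train_state_NNNNN.json with its data-loader position, PCG64 RNG state and LR-scheduler state; note that all four ranks agree on _last_lr. The stored values are consistent with a linear-warmup-then-cosine schedule decaying toward lr * lr_min_ratio; the formula below is an assumption about the implementation, but it reproduces the logged learning rates at steps 52500, 55000 and 57500:

```python
# Sketch: recompute the learning rate recorded as _last_lr in train_state_*.json.
# Assumed schedule: linear warmup, then cosine decay with a floor of lr * min_ratio.
import math

def cosine_lr(step, lr=0.003, warmup=5000, total_steps=60000, min_ratio=1e-06):
    if step < warmup:
        return lr * step / warmup
    s = (step - warmup) / (total_steps - warmup)
    mult = min_ratio + 0.5 * (1 - min_ratio) * (1 + math.cos(math.pi * s))
    return lr * mult

for step in (52500, 55000, 57500):
    print(step, cosine_lr(step))  # ~1.3555e-04, ~6.0763e-05, ~1.5271e-05
```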
checkpoints/0000055000/.metadata
ADDED
Binary file (667 kB)
checkpoints/0000055000/__0_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b7d6edb3a8333652a8f0172196d8ac6945b699a77d26cf04434197182d283c6d
size 5089728720
checkpoints/0000055000/__1_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:61fedef57addb6b20eb0c1d28b7822a2b0a07da97c15f14f5e390d79ab31bb70
size 5089821856
checkpoints/0000055000/__2_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d40341bb007c8ed258605c36e918752570574def9780afcadb40087f543992db
size 5089821856
checkpoints/0000055000/__3_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3d606638e713a00934d28a6be17b11b337bc382b130e09f2683c03922319514d
size 5089830112
checkpoints/0000055000/params.json
ADDED
@@ -0,0 +1 @@
{"name": "large_lm", "dump_dir": "./dump_dir_llama1b2", "seed": 777, "grad_acc_steps": 2, "gc_collect_freq": 1000, "probe_freq": null, "steps": 60000, "data": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "batch_size": 8, "seq_len": 4096, "n_views": 2, "seed": 42, "add_bos": true, "add_eos": true, "load_async": true, "prefetch_size": 1024, "tokenizer": {"name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}}, "optim": {"lr": 0.003, "weight_decay": 0.033, "epsilon": 1e-08, "beta1": 0.9, "beta2": 0.95, "clip": 1.0, "scheduler": "cosine", "warmup": 5000, "lr_min_ratio": 1e-06, "cycle_length": 1.0, "cosine_theta": 1.0, "annealing_step": 1000, "decay_fraction": 0.1, "exp_factor": 0.5}, "model": {"dim": 2048, "n_layers": 25, "head_dim": null, "n_heads": 16, "n_kv_heads": null, "ffn_dim_multiplier": null, "multiple_of": 256, "norm_eps": 1e-05, "rope_theta": 10000.0, "init_base_std": null, "init_std_factor": "disabled", "rope_type": "original", "rope_inv_freq_learnable": false, "max_seqlen": 4096, "use_mla": "", "q_lora_rank": 1536, "kv_lora_rank": 512, "seed": 42, "vocab_size": 100512, "weight_tying": false, "sliding_window": null}, "distributed": {"dp_shard": 1, "dp_replicate": 4, "tp_size": 1, "selective_activation_checkpointing": false, "compile": true, "fsdp_type": "full_shard", "model_dtype": "bf16", "float8_recipe": null, "float8_filter": "layers\\.[0-9]+\\.", "matmul_allow_tf32": true, "detect_anomaly": false, "compile_cache_size_limit": 8, "spawn_method": "forkserver"}, "env": {"MKL_SERVICE_FORCE_INTEL": "GNU", "OMP_NUM_THREADS": "1", "MKL_NUM_THREADS": "1", "ENABLE_INTRA_NODE_COMM": "1", "TORCH_NCCL_AVOID_RECORD_STREAMS": "1", "NCCL_IB_TIMEOUT": "22", "NCCL_DEBUG": "INFO", "TORCH_NCCL_ASYNC_ERROR_HANDLING": "1"}, "checkpoint": {"dump": {"every": 2500, "keep": 3}, "eval": {"every": 5000000000, "keep": -1}, "path": "dump_dir_llama1b2/checkpoints", "init_ckpt_path": null, "continue_training_from_init": false}, "profiling": {"run": true, "trace_folder": "profiling", "mem_warmup": 0, "mem_steps": 4, "profile_warmup": 100, "profile_steps": 4}, "logging": {"freq": 1, "acc_freq": null, "wandb": null}, "async_eval_gpus": 1, "eval": {"harness": {"tasks": ["hellaswag", {"task": "boolq", "dataset_kwargs": {"trust_remote_code": true}}, "piqa", {"task": "social_iqa", "dataset_kwargs": {"trust_remote_code": true}}, "winogrande", "openbookqa", "arc_easy", "arc_challenge", "race", "commonsense_qa", "copa"]}, "validation": {"max_steps": 1000}, "generator": {"max_tokens": 16384, "dtype": "bf16"}}}
checkpoints/0000055000/train_state_00000.json
ADDED
@@ -0,0 +1 @@
{"step": 55000, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 171, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 24089526430, "block_size": 4, "offset": 0, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 113480112311283759058948928503237735128, "inc": 11676600559890430755450356507027720041}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 432, "rng_state": {"bit_generator": "PCG64", "state": {"state": 284361689960843992510826632036204406926, "inc": 77357518920597472829800677777012462921}, "has_uint32": 1, "uinteger": 1083475834}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 55000, "verbose": false, "_step_count": 55001, "_get_lr_called_within_step": false, "_last_lr": [6.0763478817714537e-05], "lr_lambdas": [{}]}}
checkpoints/0000055000/train_state_00001.json
ADDED
@@ -0,0 +1 @@
{"step": 55000, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 629, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 24319442893, "block_size": 4, "offset": 1, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 336838252608328762642367373257392679475, "inc": 239634081480473411747239400828488620799}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 432, "rng_state": {"bit_generator": "PCG64", "state": {"state": 42938996891315000473233379793990467732, "inc": 270234035871729269002159329014059236425}, "has_uint32": 1, "uinteger": 3585551809}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 55000, "verbose": false, "_step_count": 55001, "_get_lr_called_within_step": false, "_last_lr": [6.0763478817714537e-05], "lr_lambdas": [{}]}}
checkpoints/0000055000/train_state_00002.json
ADDED
@@ -0,0 +1 @@
{"step": 55000, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 96, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 24174101596, "block_size": 4, "offset": 2, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 117467736827509381561014469368633930793, "inc": 6027823433652931085739778990793808165}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 432, "rng_state": {"bit_generator": "PCG64", "state": {"state": 266218648628129980865360739940674595406, "inc": 188564971970541749319992297790591572713}, "has_uint32": 0, "uinteger": 3020109911}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 55000, "verbose": false, "_step_count": 55001, "_get_lr_called_within_step": false, "_last_lr": [6.0763478817714537e-05], "lr_lambdas": [{}]}}
checkpoints/0000055000/train_state_00003.json
ADDED
@@ -0,0 +1 @@
{"step": 55000, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 2506, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 24171240780, "block_size": 4, "offset": 3, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 144077674547792537725026078244309307985, "inc": 92941856108932518968286621281627530405}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 432, "rng_state": {"bit_generator": "PCG64", "state": {"state": 288361091733615707256981067197297720635, "inc": 66050176413739185524746886687120723265}, "has_uint32": 0, "uinteger": 1686196970}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 55000, "verbose": false, "_step_count": 55001, "_get_lr_called_within_step": false, "_last_lr": [6.0763478817714537e-05], "lr_lambdas": [{}]}}
checkpoints/0000057500/.metadata
ADDED
Binary file (667 kB)
checkpoints/0000057500/__0_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a4e863b6332f706742956f827ea6c07f0a394130df58522c8a462df09faf4713
size 5089728720
checkpoints/0000057500/__1_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a1e3db8a7502ca8272668aadaf6970032f278244021d43d822b2e8b77784e761
size 5089821856
checkpoints/0000057500/__2_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cfe11383349fd9531badeb4d5d095826e6145949d4a65509f688496ea0c874a0
size 5089821856
checkpoints/0000057500/__3_0.distcp
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:401094cbd65964036806c50646e229fe446fac62c76b361a6c777721cf4b6b9a
size 5089830112
checkpoints/0000057500/params.json
ADDED
@@ -0,0 +1 @@
{"name": "large_lm", "dump_dir": "./dump_dir_llama1b2", "seed": 777, "grad_acc_steps": 2, "gc_collect_freq": 1000, "probe_freq": null, "steps": 60000, "data": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "batch_size": 8, "seq_len": 4096, "n_views": 2, "seed": 42, "add_bos": true, "add_eos": true, "load_async": true, "prefetch_size": 1024, "tokenizer": {"name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}}, "optim": {"lr": 0.003, "weight_decay": 0.033, "epsilon": 1e-08, "beta1": 0.9, "beta2": 0.95, "clip": 1.0, "scheduler": "cosine", "warmup": 5000, "lr_min_ratio": 1e-06, "cycle_length": 1.0, "cosine_theta": 1.0, "annealing_step": 1000, "decay_fraction": 0.1, "exp_factor": 0.5}, "model": {"dim": 2048, "n_layers": 25, "head_dim": null, "n_heads": 16, "n_kv_heads": null, "ffn_dim_multiplier": null, "multiple_of": 256, "norm_eps": 1e-05, "rope_theta": 10000.0, "init_base_std": null, "init_std_factor": "disabled", "rope_type": "original", "rope_inv_freq_learnable": false, "max_seqlen": 4096, "use_mla": "", "q_lora_rank": 1536, "kv_lora_rank": 512, "seed": 42, "vocab_size": 100512, "weight_tying": false, "sliding_window": null}, "distributed": {"dp_shard": 1, "dp_replicate": 4, "tp_size": 1, "selective_activation_checkpointing": false, "compile": true, "fsdp_type": "full_shard", "model_dtype": "bf16", "float8_recipe": null, "float8_filter": "layers\\.[0-9]+\\.", "matmul_allow_tf32": true, "detect_anomaly": false, "compile_cache_size_limit": 8, "spawn_method": "forkserver"}, "env": {"MKL_SERVICE_FORCE_INTEL": "GNU", "OMP_NUM_THREADS": "1", "MKL_NUM_THREADS": "1", "ENABLE_INTRA_NODE_COMM": "1", "TORCH_NCCL_AVOID_RECORD_STREAMS": "1", "NCCL_IB_TIMEOUT": "22", "NCCL_DEBUG": "INFO", "TORCH_NCCL_ASYNC_ERROR_HANDLING": "1"}, "checkpoint": {"dump": {"every": 2500, "keep": 3}, "eval": {"every": 5000000000, "keep": -1}, "path": "dump_dir_llama1b2/checkpoints", "init_ckpt_path": null, "continue_training_from_init": false}, "profiling": {"run": true, "trace_folder": "profiling", "mem_warmup": 0, "mem_steps": 4, "profile_warmup": 100, "profile_steps": 4}, "logging": {"freq": 1, "acc_freq": null, "wandb": null}, "async_eval_gpus": 1, "eval": {"harness": {"tasks": ["hellaswag", {"task": "boolq", "dataset_kwargs": {"trust_remote_code": true}}, "piqa", {"task": "social_iqa", "dataset_kwargs": {"trust_remote_code": true}}, "winogrande", "openbookqa", "arc_easy", "arc_challenge", "race", "commonsense_qa", "copa"]}, "validation": {"max_steps": 1000}, "generator": {"max_tokens": 16384, "dtype": "bf16"}}}
checkpoints/0000057500/train_state_00000.json
ADDED
@@ -0,0 +1 @@
{"step": 57500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 320, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 27577514266, "block_size": 4, "offset": 0, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 288376215711660853319606319848059944895, "inc": 11676600559890430755450356507027720041}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 312, "rng_state": {"bit_generator": "PCG64", "state": {"state": 11493284078251286074307567083922618830, "inc": 77357518920597472829800677777012462921}, "has_uint32": 1, "uinteger": 1302367895}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 57500, "verbose": false, "_step_count": 57501, "_get_lr_called_within_step": false, "_last_lr": [1.5270821910763795e-05], "lr_lambdas": [{}]}}
checkpoints/0000057500/train_state_00001.json
ADDED
@@ -0,0 +1 @@
{"step": 57500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 983, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 27815738732, "block_size": 4, "offset": 1, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 12028358019300960955490458574390826077, "inc": 239634081480473411747239400828488620799}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 312, "rng_state": {"bit_generator": "PCG64", "state": {"state": 187150612758938462197782101941292227468, "inc": 270234035871729269002159329014059236425}, "has_uint32": 0, "uinteger": 741317957}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 57500, "verbose": false, "_step_count": 57501, "_get_lr_called_within_step": false, "_last_lr": [1.5270821910763795e-05], "lr_lambdas": [{}]}}
checkpoints/0000057500/train_state_00002.json
ADDED
@@ -0,0 +1 @@
{"step": 57500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 317, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 27648204381, "block_size": 4, "offset": 2, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 81244764968533379643081700530451726139, "inc": 6027823433652931085739778990793808165}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 312, "rng_state": {"bit_generator": "PCG64", "state": {"state": 13311883204234167281050828487226707073, "inc": 188564971970541749319992297790591572713}, "has_uint32": 0, "uinteger": 1212867564}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 57500, "verbose": false, "_step_count": 57501, "_get_lr_called_within_step": false, "_last_lr": [1.5270821910763795e-05], "lr_lambdas": [{}]}}
checkpoints/0000057500/train_state_00003.json
ADDED
@@ -0,0 +1 @@
{"step": 57500, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 752, "it_state": {"it_state": {"root_dir": "./data", "sources": {"fineweb_edu_10bt_shuffled": 100.0}, "source_to_state": {"fineweb_edu_10bt_shuffled": {"file_path": "data/fineweb_edu_10bt_shuffled/fineweb_edu_10bt.chunk.00.jsonl", "position": 27639381247, "block_size": 4, "offset": 3, "current_iter": 1}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 286741500864262708623674448239672053530, "inc": 92941856108932518968286621281627530405}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "tiktoken", "path": "tokenizers/cl100k_base.tiktoken"}, "output_seq_len": 4096, "n_views": 2}, "seq_idx": 312, "rng_state": {"bit_generator": "PCG64", "state": {"state": 240889267456687567804847541390924151647, "inc": 66050176413739185524746886687120723265}, "has_uint32": 1, "uinteger": 2653545164}, "batch_size": 8, "prefetch_size": 1024}, "scheduler": {"base_lrs": [0.003], "last_epoch": 57500, "verbose": false, "_step_count": 57501, "_get_lr_called_within_step": false, "_last_lr": [1.5270821910763795e-05], "lr_lambdas": [{}]}}
config.yaml
ADDED
@@ -0,0 +1,127 @@
name: large_lm
dump_dir: ./dump_dir_llama1b2
seed: 777
grad_acc_steps: 2
gc_collect_freq: 1000
probe_freq: null
steps: 60000
data:
  root_dir: ./data
  sources:
    fineweb_edu_10bt_shuffled: 100.0
  batch_size: 8
  seq_len: 4096
  n_views: 2
  seed: 42
  add_bos: true
  add_eos: true
  load_async: true
  prefetch_size: 1024
  tokenizer:
    name: tiktoken
    path: tokenizers/cl100k_base.tiktoken
optim:
  lr: 0.003
  weight_decay: 0.033
  epsilon: 1.0e-08
  beta1: 0.9
  beta2: 0.95
  clip: 1.0
  scheduler: cosine
  warmup: 5000
  lr_min_ratio: 1.0e-06
  cycle_length: 1.0
  cosine_theta: 1.0
  annealing_step: 1000
  decay_fraction: 0.1
  exp_factor: 0.5
model:
  dim: 2048
  n_layers: 25
  head_dim: null
  n_heads: 16
  n_kv_heads: null
  ffn_dim_multiplier: null
  multiple_of: 256
  norm_eps: 1.0e-05
  rope_theta: 10000.0
  init_base_std: null
  init_std_factor: disabled
  rope_type: original
  rope_inv_freq_learnable: false
  max_seqlen: 4096
  use_mla: ''
  q_lora_rank: 1536
  kv_lora_rank: 512
  seed: 42
  vocab_size: 100512
  weight_tying: false
  sliding_window: null
distributed:
  dp_shard: 1
  dp_replicate: 4
  tp_size: 1
  selective_activation_checkpointing: false
  compile: true
  fsdp_type: full_shard
  model_dtype: bf16
  float8_recipe: null
  float8_filter: layers\.[0-9]+\.
  matmul_allow_tf32: true
  detect_anomaly: false
  compile_cache_size_limit: 8
  spawn_method: forkserver
env:
  MKL_SERVICE_FORCE_INTEL: GNU
  OMP_NUM_THREADS: '1'
  MKL_NUM_THREADS: '1'
  ENABLE_INTRA_NODE_COMM: '1'
  TORCH_NCCL_AVOID_RECORD_STREAMS: '1'
  NCCL_IB_TIMEOUT: '22'
  NCCL_DEBUG: INFO
  TORCH_NCCL_ASYNC_ERROR_HANDLING: '1'
checkpoint:
  dump:
    every: 2500
    keep: 3
  eval:
    every: 5000000000
    keep: -1
  path: dump_dir_llama1b2/checkpoints
  init_ckpt_path: null
  continue_training_from_init: false
profiling:
  run: true
  trace_folder: profiling
  mem_warmup: 0
  mem_steps: 4
  profile_warmup: 100
  profile_steps: 4
logging:
  freq: 1
  acc_freq: null
  wandb: null
async_eval_gpus: 1
eval:
  harness:
    tasks:
    - hellaswag
    - task: boolq
      dataset_kwargs:
        trust_remote_code: true
    - piqa
    - task: social_iqa
      dataset_kwargs:
        trust_remote_code: true
    - winogrande
    - openbookqa
    - arc_easy
    - arc_challenge
    - race
    - commonsense_qa
    - copa
  validation:
    max_steps: 1000
  generator:
    max_tokens: 16384
    dtype: bf16
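config.yaml carries the same configuration as params.json, in the YAML form the run was launched with. The checkpoint.dump block (every: 2500, keep: 3) is what leaves exactly the step 52500, 55000 and 57500 directories in checkpoints/. A minimal sketch of reading it, assuming keep means a rolling window of the most recent dumps:

```python
# Sketch: load config.yaml and list the checkpoint steps expected to survive.
# Assumption: "keep: 3" retains only the three most recent periodic dumps.
import yaml  # PyYAML

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

every = cfg["checkpoint"]["dump"]["every"]   # 2500
keep = cfg["checkpoint"]["dump"]["keep"]     # 3
latest = 57500                               # newest directory in this commit
print(sorted(latest - i * every for i in range(keep)))  # [52500, 55000, 57500]
```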
metrics.jsonl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:60ccbed3e27ce72b330634d0bfa6a212eff253136067d0beb8b26cb1934673f1
size 35436667
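metrics.jsonl (~35 MB behind LFS) is the per-step training log written with logging.freq: 1, one JSON object per line. Its field names are not visible from the pointer stub, so the sketch below only streams records generically after `git lfs pull`:

```python
# Sketch: stream metrics.jsonl; key names are unknown from the pointer alone,
# so this just discovers them from the first record.
import json

with open("metrics.jsonl") as f:
    first = json.loads(f.readline())
    print(sorted(first))               # logged field names
    n_records = 1 + sum(1 for _ in f)  # remaining lines
print("records:", n_records)
```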
profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104882.html
ADDED
The diff for this file is too large to render.
profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104883.html
ADDED
The diff for this file is too large to render.
profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104884.html
ADDED
The diff for this file is too large to render.
profiling/memory_trace_plot/000004_stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104885.html
ADDED
The diff for this file is too large to render.
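The four memory_trace_plot HTML files (suffixes 104882-104885, presumably one per local process) are self-contained memory-timeline plots taken over the mem_steps: 4 window. They look like the output of torch.profiler's memory-timeline export, though that is inferred from the profiling settings rather than stated anywhere in this commit; a sketch of producing a comparable plot:

```python
# Sketch: export a memory-timeline HTML similar to the files above.
# Assumes torch >= 2.1 (export_memory_timeline); the model and batch here are
# stand-ins, not the training code from this repository.
import torch
from torch.profiler import profile, ProfilerActivity

model = torch.nn.Linear(4096, 4096).cuda()
batch = torch.randn(8, 4096, device="cuda")

with profile(
    activities=[ProfilerActivity.CPU, ProfilerActivity.CUDA],
    profile_memory=True,
    record_shapes=True,
    with_stack=True,
) as prof:
    model(batch).sum().backward()

prof.export_memory_timeline("example_memory_timeline.html", device="cuda:0")
```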
profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104882.1736675622108438789.pt.trace.json.gz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e194fb23d0de28b2144d65b59b6909603576adff80b2c65fd525dbb50a6c8172
size 1926496
profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104883.1736675622100620970.pt.trace.json.gz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fbd90924612b2a2b89d112772902d8442c4dd6f487a3b06e2ddc89472c2e0f63
size 1933461
profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104884.1736675622099254891.pt.trace.json.gz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0c975ec2b8575f74c6297a1fc762fd9ee8c8938becedae0148f5766b8e059cd6
size 1932617
profiling/profile_CPU_CUDA_000104/stable-diffusion-xl-dev-2-retina-newsroom-gpu-v3-85f5d97fdc6s78_104885.1736675622113109902.pt.trace.json.gz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0bc95d196acb821e88127f671380d311037f68facc50ef41291d148d128493a6
size 1933550
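The profile_CPU_CUDA_000104 directory holds gzip-compressed Chrome/Kineto traces, one per rank, captured around step 104 (consistent with profile_warmup: 100 plus profile_steps: 4). They can be loaded into ui.perfetto.dev or chrome://tracing after decompression; a small sketch of a programmatic peek:

```python
# Sketch: open one compressed trace and count the most frequent event names.
# Chrome-trace JSON keeps its events under the "traceEvents" key.
import collections
import glob
import gzip
import json

path = sorted(glob.glob("profiling/profile_CPU_CUDA_000104/*.pt.trace.json.gz"))[0]
with gzip.open(path, "rt") as f:
    trace = json.load(f)

counts = collections.Counter(e.get("name", "?") for e in trace.get("traceEvents", []))
for name, n in counts.most_common(10):
    print(n, name)
```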
train.log
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bc27de864c79cc208f7490628012f3e8350a66828e1177ff724ecc241c0f35c3
size 12290594