zhiyang1 committed
Commit bbd6ad9 · verified · 1 Parent(s): 20dfc38

Upload folder using huggingface_hub
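The commit message refers to the huggingface_hub Python client. A minimal sketch of how an upload like this is typically produced with `HfApi.upload_folder`; the local folder path and repo id below are placeholders, not taken from this repository:

```python
# Sketch: pushing a local training-output folder as a single commit.
# folder_path and repo_id are hypothetical.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./outputs",            # hypothetical local directory with the checkpoints
    repo_id="username/model-repo",      # hypothetical target model repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```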

Files changed (47)
  1. .gitattributes +2 -0
  2. checkpoint-122000/config.json +79 -0
  3. checkpoint-122000/generation_config.json +6 -0
  4. checkpoint-122000/model.safetensors +3 -0
  5. checkpoint-122000/optimizer.pt +3 -0
  6. checkpoint-122000/rng_state_0.pth +3 -0
  7. checkpoint-122000/rng_state_1.pth +3 -0
  8. checkpoint-122000/rng_state_10.pth +3 -0
  9. checkpoint-122000/rng_state_11.pth +3 -0
  10. checkpoint-122000/rng_state_12.pth +3 -0
  11. checkpoint-122000/rng_state_13.pth +3 -0
  12. checkpoint-122000/rng_state_14.pth +3 -0
  13. checkpoint-122000/rng_state_15.pth +3 -0
  14. checkpoint-122000/rng_state_2.pth +3 -0
  15. checkpoint-122000/rng_state_3.pth +3 -0
  16. checkpoint-122000/rng_state_4.pth +3 -0
  17. checkpoint-122000/rng_state_5.pth +3 -0
  18. checkpoint-122000/rng_state_6.pth +3 -0
  19. checkpoint-122000/rng_state_7.pth +3 -0
  20. checkpoint-122000/rng_state_8.pth +3 -0
  21. checkpoint-122000/rng_state_9.pth +3 -0
  22. checkpoint-122000/scheduler.pt +3 -0
  23. checkpoint-122000/trainer_state.json +3 -0
  24. checkpoint-122000/training_args.bin +3 -0
  25. checkpoint-123000/config.json +79 -0
  26. checkpoint-123000/generation_config.json +6 -0
  27. checkpoint-123000/model.safetensors +3 -0
  28. checkpoint-123000/optimizer.pt +3 -0
  29. checkpoint-123000/rng_state_0.pth +3 -0
  30. checkpoint-123000/rng_state_1.pth +3 -0
  31. checkpoint-123000/rng_state_10.pth +3 -0
  32. checkpoint-123000/rng_state_11.pth +3 -0
  33. checkpoint-123000/rng_state_12.pth +3 -0
  34. checkpoint-123000/rng_state_13.pth +3 -0
  35. checkpoint-123000/rng_state_14.pth +3 -0
  36. checkpoint-123000/rng_state_15.pth +3 -0
  37. checkpoint-123000/rng_state_2.pth +3 -0
  38. checkpoint-123000/rng_state_3.pth +3 -0
  39. checkpoint-123000/rng_state_4.pth +3 -0
  40. checkpoint-123000/rng_state_5.pth +3 -0
  41. checkpoint-123000/rng_state_6.pth +3 -0
  42. checkpoint-123000/rng_state_7.pth +3 -0
  43. checkpoint-123000/rng_state_8.pth +3 -0
  44. checkpoint-123000/rng_state_9.pth +3 -0
  45. checkpoint-123000/scheduler.pt +3 -0
  46. checkpoint-123000/trainer_state.json +3 -0
  47. checkpoint-123000/training_args.bin +3 -0
.gitattributes CHANGED
@@ -52,3 +52,5 @@ checkpoint-110000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-111000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-119000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-120000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-122000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-123000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
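The two added rules route the new trainer_state.json files through Git LFS, so the repository keeps small pointer files while the multi-megabyte JSON payloads live in LFS storage. A small sketch, assuming a locally cloned copy of the repository, that lists which paths .gitattributes marks as LFS-tracked:

```python
# Sketch: list the paths/patterns that .gitattributes routes through Git LFS.
# Assumes the repository is cloned locally and run from its root.
from pathlib import Path

lfs_tracked = []
for line in Path(".gitattributes").read_text().splitlines():
    parts = line.split()
    # A typical LFS rule looks like: <path> filter=lfs diff=lfs merge=lfs -text
    if "filter=lfs" in parts:
        lfs_tracked.append(parts[0])

print(f"{len(lfs_tracked)} LFS-tracked entries")
for pattern in lfs_tracked:
    print(" ", pattern)
```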
checkpoint-122000/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+   "ar_steps": 1,
+   "architectures": [
+     "DiffVLMBaseline"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "condition_layer": -1,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 1536,
+   "image_token_id": 151655,
+   "img_cross_attention_dim": 2048,
+   "img_diffuser_depth": 2,
+   "img_ffn_dim_multiplier": null,
+   "img_hidden_size": 1536,
+   "img_multiple_of": 256,
+   "img_norm_eps": 1e-05,
+   "img_num_attention_heads": 12,
+   "img_num_kv_heads": 12,
+   "img_qk_norm": true,
+   "in_channels": 32,
+   "initializer_range": 0.02,
+   "inject_img_diffuser": false,
+   "input_size": 32,
+   "intermediate_size": 8960,
+   "layer_group_size": 7,
+   "layerwise_start_idx": 0,
+   "lora_alpha": 128,
+   "lora_bias": "none",
+   "lora_dropout": 0.05,
+   "lora_enable": false,
+   "lora_r": 64,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 28,
+   "model_type": "qwen2_vl",
+   "non_linearity": 1,
+   "norm_elementwise_affine": true,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 2,
+   "patch_size": 1,
+   "repa_coeff": 0.1,
+   "repa_layers": "2",
+   "repa_shared": false,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sample_size": 128,
+   "sampling_steps": 28,
+   "sliding_window": null,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.47.0",
+   "use_cache": true,
+   "use_repa": false,
+   "use_residual_attn": false,
+   "use_sliding_window": false,
+   "vae_path": "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers",
+   "video_token_id": 151656,
+   "vision_config": {
+     "hidden_size": 1536,
+     "in_chans": 3,
+     "model_type": "qwen2_vl",
+     "spatial_patch_size": 14
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 151936
+ }
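The config declares a custom `DiffVLMBaseline` architecture built on a Qwen2-VL-style backbone (`model_type: qwen2_vl`, 28 hidden layers, hidden size 1536, with a DC-AE VAE path for the image branch). Instantiating the model therefore needs the project's own code; the config itself can still be inspected directly. A sketch, with a placeholder repo id:

```python
# Sketch: download and read this checkpoint's config.json without loading weights.
# repo_id is a placeholder for wherever this commit lives.
import json
from huggingface_hub import hf_hub_download

config_path = hf_hub_download(
    repo_id="username/model-repo",                 # hypothetical
    filename="checkpoint-122000/config.json",
)
with open(config_path) as f:
    cfg = json.load(f)

print(cfg["architectures"], cfg["hidden_size"], cfg["num_hidden_layers"], cfg["vae_path"])
```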
checkpoint-122000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "transformers_version": "4.47.0"
+ }
checkpoint-122000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:388e145dc36e76c2e457f012334bf98cd43a1b84baf07ab1faac75e9738ce090
+ size 4410723984
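The three lines above are a Git LFS pointer, not the weights themselves; the actual ~4.4 GB safetensors file is resolved from LFS storage at download time. A sketch of fetching and reading it, again with a placeholder repo id:

```python
# Sketch: fetch the LFS-backed weight file and load its tensors.
# repo_id is a placeholder; hf_hub_download resolves the LFS pointer to the real file.
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

weights_path = hf_hub_download(
    repo_id="username/model-repo",                         # hypothetical
    filename="checkpoint-122000/model.safetensors",
)
state_dict = load_file(weights_path)   # dict: tensor name -> torch.Tensor
print(len(state_dict), "tensors")
```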
checkpoint-122000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54babfab3df9fffe0ba5c81fd758f52eabc4210913bfd7c626b82fd1767a1d9d
+ size 6330255386
checkpoint-122000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:552586bf25837b52e5bd160ee4b2f947a2556bac2791fefa840a8dc4a67ea553
+ size 15984
checkpoint-122000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c2b36ed6ab9b33e7f17803ab9adf794611c2ad6490e03d8cdd5dadfd9867efd
+ size 15984
checkpoint-122000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daf502a644978d524ac14f9325de29ad241f094e2d59ee2ef1b1dc6db00d4eba
+ size 15997
checkpoint-122000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6c5165ec0351f7956082c29db662b747ba4f5748132627e60cc8fa57a1c33cc
+ size 15997
checkpoint-122000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5224bb495cc512b66e503a4fd279b6cb1f6622acba78ab2d45ce5f2f32e3975e
+ size 15997
checkpoint-122000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b922ea03bcd8e3cc35b24247f2488a7bb7300e9cffe0083fd31f6323eaba561
+ size 15997
checkpoint-122000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ffdaea5f2178af2fdc6f326adcb78749e2efe626252f38dd61e0e624c0cbf745
+ size 15997
checkpoint-122000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75b80db94f62ea70f24ff6c28dce515ee7e2e006cc1d8ee9de6afcf6795eb873
+ size 15997
checkpoint-122000/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21e3929063b9d231b8ae443ef12c77b428328d2489521cee7de9f84678305fe4
+ size 15984
checkpoint-122000/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31cd3d04455ba68c379b475cdc57b8bdf84a8fc1b0b4b79ec36695d57f55b3a5
+ size 15984
checkpoint-122000/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:796269bf3f55eeef6276d4251e1ca15ba22b6afeb9b156bbb8cd4431a3bf069e
+ size 15984
checkpoint-122000/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c1034346a48b5f4deadf0696a4247a49abe753505366ec4d4f85d6720653454
+ size 15984
checkpoint-122000/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86f7cab497528ac052ac0eef87cb6618041ae10bde2c91d8f602f3bd4958a98e
+ size 15984
checkpoint-122000/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:032e69fec3c8203be8b9af61782435c04604c66f40b536ba3939822fcb129ca1
+ size 15984
checkpoint-122000/rng_state_8.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:519900da08656613ea9e8d8f80663f01623e7e246037dff9a5086905f1db71ab
+ size 15984
checkpoint-122000/rng_state_9.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3a8ea53ccdf8799d0fae6a0aae67056fee9b1c8321e005d434a5732c5c2ff15
+ size 15984
checkpoint-122000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b5b58bf33df20c5fd6485dd15c8c51a385cb0e5ef2f4bec88b55b3714519ed84
+ size 1064
checkpoint-122000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ea5ae382c0d465a777dba65b3991404cf0afea02c71158cc0943ceca7dd2534
+ size 18601829
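trainer_state.json (~18.6 MB here, hence also LFS-tracked) is the Trainer's bookkeeping file and normally carries the global step and the logged metric history. A sketch of pulling recent loss entries from it, assuming a placeholder repo id and the usual `log_history` layout of transformers.Trainer (not verified against this repo):

```python
# Sketch: read the training log history out of trainer_state.json.
# repo_id is a placeholder; the "log_history"/"loss"/"step" keys are the
# standard transformers.Trainer layout and are assumed here.
import json
from huggingface_hub import hf_hub_download

state_path = hf_hub_download(
    repo_id="username/model-repo",                          # hypothetical
    filename="checkpoint-122000/trainer_state.json",
)
with open(state_path) as f:
    state = json.load(f)

print("global_step:", state.get("global_step"))
for entry in state.get("log_history", [])[-5:]:
    if "loss" in entry:
        print(entry.get("step"), entry["loss"])
```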
checkpoint-122000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a32b4ade2f7660c8124474be94d22c205a5d72834858a8e6578f5c1e9ab499f0
+ size 5944
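Together, optimizer.pt, scheduler.pt, the sixteen per-rank rng_state_*.pth files, trainer_state.json and training_args.bin are what transformers.Trainer writes so a run can be resumed in place. A sketch of inspecting the saved arguments from a local copy of this folder (the path is a placeholder; transformers must be installed, since the pickle references TrainingArguments):

```python
# Sketch: training_args.bin is a pickled TrainingArguments object written with
# torch.save; the local path below is a placeholder for a downloaded copy.
import torch

args = torch.load("checkpoint-122000/training_args.bin", weights_only=False)
print(type(args).__name__)
print("learning_rate:", args.learning_rate)
print("per_device_train_batch_size:", args.per_device_train_batch_size)

# To resume, an already-constructed Trainer for the same model would be pointed
# at the folder, e.g. trainer.train(resume_from_checkpoint="checkpoint-122000"),
# which also restores optimizer.pt, scheduler.pt and the rng_state_*.pth files.
```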
checkpoint-123000/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+   "ar_steps": 1,
+   "architectures": [
+     "DiffVLMBaseline"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "condition_layer": -1,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 1536,
+   "image_token_id": 151655,
+   "img_cross_attention_dim": 2048,
+   "img_diffuser_depth": 2,
+   "img_ffn_dim_multiplier": null,
+   "img_hidden_size": 1536,
+   "img_multiple_of": 256,
+   "img_norm_eps": 1e-05,
+   "img_num_attention_heads": 12,
+   "img_num_kv_heads": 12,
+   "img_qk_norm": true,
+   "in_channels": 32,
+   "initializer_range": 0.02,
+   "inject_img_diffuser": false,
+   "input_size": 32,
+   "intermediate_size": 8960,
+   "layer_group_size": 7,
+   "layerwise_start_idx": 0,
+   "lora_alpha": 128,
+   "lora_bias": "none",
+   "lora_dropout": 0.05,
+   "lora_enable": false,
+   "lora_r": 64,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 28,
+   "model_type": "qwen2_vl",
+   "non_linearity": 1,
+   "norm_elementwise_affine": true,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 2,
+   "patch_size": 1,
+   "repa_coeff": 0.1,
+   "repa_layers": "2",
+   "repa_shared": false,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sample_size": 128,
+   "sampling_steps": 28,
+   "sliding_window": null,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.47.0",
+   "use_cache": true,
+   "use_repa": false,
+   "use_residual_attn": false,
+   "use_sliding_window": false,
+   "vae_path": "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers",
+   "video_token_id": 151656,
+   "vision_config": {
+     "hidden_size": 1536,
+     "in_chans": 3,
+     "model_type": "qwen2_vl",
+     "spatial_patch_size": 14
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 151936
+ }
checkpoint-123000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "transformers_version": "4.47.0"
+ }
checkpoint-123000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ee75085e4ba449da883526482993674dc9e76d82530279b3692c0017b63b549
+ size 4410723984
checkpoint-123000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be372ac5fb9d8c8d5d75aa979ca5718c8c8ff3127ed8caed03ee12337b490d9b
+ size 6330255386
checkpoint-123000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6c45b726853317915582945cae82a789e41ea184dc56b74ca8c3cab168478c9
+ size 15984
checkpoint-123000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2cc377ce0bc25d9bf0131eaa58709e3acba8183e9efd829f256773065d86bdad
+ size 15984
checkpoint-123000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35b832d91df17e17ce261897d1edf7b943fabca278a60560f72e89e176256979
+ size 15997
checkpoint-123000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e59b7ebb1685a62fbd0668da4d52352a344aec6b89899235255578d141bca306
+ size 15997
checkpoint-123000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:495190e0335e7b4ac851a956c8318320a688bfa96203c153c5d54a8c5e600b57
+ size 15997
checkpoint-123000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:125cb3d20839ff0e0e62b9f713a6029f77453fd55195c270a1eb884a9461626a
+ size 15997
checkpoint-123000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b41e4135c6266f6c2879365a6b746c502392cfd770f272018cc47deea0e569bd
+ size 15997
checkpoint-123000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e3ea2f4589c6fa5a495f97992c85dfddecd139001b78c59bfaf8e892fa069518
+ size 15997
checkpoint-123000/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:509f63d786fde947682dfa64b5da5180248ce52e1953952595cefb5e1577ea7f
+ size 15984
checkpoint-123000/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37ed9d13b9e7572f71d80eed4383471875a7c63b2672012f2ad0feaf1f50c08c
+ size 15984
checkpoint-123000/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfa7f14a9d94fe42f9a2a8637fb058a21541dd94175cdf90b5f0e653ba73dcb7
+ size 15984
checkpoint-123000/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a4871003ddcbf6d2d37467105983ec2fad035b68185443aa0085b736b1a53ff
+ size 15984
checkpoint-123000/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d7d81d545efb29edf563f8f1ede8d141b6abdf3512ec454c984ce571364bd4d
+ size 15984
checkpoint-123000/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c32ec3466ba086075b0277a61e8a5b8e7ce250ce32fc50cd56acc8d0a9174b7
+ size 15984
checkpoint-123000/rng_state_8.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9db72884f2796e7583cb73d8f8dbdcf92b4b5b916b323148c71618c6d6f66fd9
+ size 15984
checkpoint-123000/rng_state_9.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d92a56af344c8a3efa3475aeb84cd50a7c0aa8147140d6f807003b4f0e348c30
+ size 15984
checkpoint-123000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:852c4cc2b80bc85de5a7ccb1852c9d4338c2449df46e369e5d9d35ab1c85afaf
+ size 1064
checkpoint-123000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72131018d03381c33e4b2c70c8fb62b509751d01065df70867bd120dad4da084
+ size 18755594
checkpoint-123000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e08963069b131157e9fdf5de70029ade904e900663502c1010020837523bb124
+ size 5944