kashif (HF Staff) committed
Commit b0b2701 · verified · 1 Parent(s): 8260067

Fix mrope_section config: [2] -> [1, 1] for Liger kernel compatibility

Files changed (3):
  1. config.json +10 -10
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
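
The mrope_section list splits the rotary embedding dimension into per-axis sections (temporal, height, width in Qwen2.5-VL); in the transformers implementation the sections must sum to head_dim / 2, and the degenerate single-element [2] is presumably what broke the Liger mRoPE kernel for this tiny test config. Below is a minimal sketch of that consistency check, assuming the config.json layout shown in the diff and that text_config carries num_attention_heads (not visible in the diff):

import json

# Minimal sketch (not part of the commit): check that mrope_section
# partitions the rotary half-dimension the way transformers' mRoPE
# expects. num_attention_heads inside text_config is an assumption,
# as it is not shown in the diff below.
with open("config.json") as f:
    cfg = json.load(f)

text_cfg = cfg["text_config"]
head_dim = text_cfg["hidden_size"] // text_cfg["num_attention_heads"]
sections = text_cfg["rope_scaling"]["mrope_section"]

# Rotary embeddings rotate head_dim // 2 (cos, sin) pairs, so the
# per-axis sections must sum to exactly that many pairs.
assert sum(sections) == head_dim // 2, (sections, head_dim)
print(f"mrope_section {sections} partitions head_dim // 2 = {head_dim // 2}")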
config.json CHANGED
@@ -7,20 +7,22 @@
   "dtype": "bfloat16",
   "eos_token_id": 151645,
   "hidden_act": "silu",
-  "hidden_size": 16,
+  "hidden_size": 2048,
   "image_token_id": 151655,
   "initializer_range": 0.02,
   "intermediate_size": 11008,
   "max_position_embeddings": 128000,
   "max_window_layers": 70,
   "model_type": "qwen2_5_vl",
-  "num_attention_heads": 4,
-  "num_hidden_layers": 2,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 36,
   "num_key_value_heads": 2,
   "rms_norm_eps": 1e-06,
   "rope_scaling": {
     "mrope_section": [
-      2
+      16,
+      24,
+      24
     ],
     "rope_type": "default",
     "type": "default"
@@ -28,6 +30,7 @@
   "rope_theta": 1000000.0,
   "sliding_window": 32768,
   "text_config": {
+    "_name_or_path": "Qwen/Qwen2.5-VL-3B-Instruct",
     "architectures": [
       "Qwen2_5_VLForConditionalGeneration"
     ],
@@ -37,7 +40,6 @@
     "eos_token_id": 151645,
     "hidden_act": "silu",
     "hidden_size": 16,
-    "image_token_id": null,
     "initializer_range": 0.02,
     "intermediate_size": 11008,
     "layer_types": [
@@ -53,7 +55,8 @@
     "rms_norm_eps": 1e-06,
     "rope_scaling": {
       "mrope_section": [
-        2
+        1,
+        1
       ],
       "rope_type": "default",
       "type": "default"
@@ -63,13 +66,10 @@
     "tie_word_embeddings": true,
     "use_cache": true,
     "use_sliding_window": false,
-    "video_token_id": null,
-    "vision_end_token_id": 151653,
-    "vision_start_token_id": 151652,
     "vision_token_id": 151654,
     "vocab_size": 151936
   },
-  "transformers_version": "4.56.1",
+  "transformers_version": "4.57.1",
   "use_cache": true,
   "use_sliding_window": false,
   "video_token_id": 151656,
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 151643,
   "eos_token_id": 151645,
-  "transformers_version": "4.56.1"
+  "transformers_version": "4.57.1"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:776cc6705b0d1ea00c79f471c3c673bf6bc575493b8202f5baa094ae1ddb8516
+oid sha256:e9ee6de012cbabb328744133306ae85256f9d1d994145c50a0a341918c7af35c
 size 18086192