Felladrin committed
Commit 857fadb · verified · 1 Parent(s): 5acaf7b

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +151 -0
config.json ADDED
@@ -0,0 +1,151 @@
+{
+  "architectures": [
+    "Phi3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM",
+    "AutoTokenizer": "Xenova/gpt-4o"
+  },
+  "bos_token_id": 199999,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 199999,
+  "full_attn_mod": 1,
+  "hidden_act": "silu",
+  "hidden_size": 3072,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "interpolate_factor": 1,
+  "lm_head_bias": false,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "phi3",
+  "num_attention_heads": 24,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": 199999,
+  "partial_rotary_factor": 0.75,
+  "quantization": {
+    "group_size": 64,
+    "bits": 6
+  },
+  "quantization_config": {
+    "group_size": 64,
+    "bits": 6
+  },
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "long_factor": [
+      1,
+      1.118320672,
+      1.250641126,
+      1.398617824,
+      1.564103225,
+      1.74916897,
+      1.956131817,
+      2.187582649,
+      2.446418898,
+      2.735880826,
+      3.059592084,
+      3.421605075,
+      3.826451687,
+      4.279200023,
+      4.785517845,
+      5.351743533,
+      5.984965424,
+      6.693110555,
+      7.485043894,
+      8.370679318,
+      9.36110372,
+      10.4687158,
+      11.70738129,
+      13.09260651,
+      14.64173252,
+      16.37415215,
+      18.31155283,
+      20.47818807,
+      22.90118105,
+      25.61086418,
+      28.64115884,
+      32.03,
+      32.1,
+      32.13,
+      32.23,
+      32.6,
+      32.61,
+      32.64,
+      32.66,
+      32.7,
+      32.71,
+      32.93,
+      32.97,
+      33.28,
+      33.49,
+      33.5,
+      44.16,
+      47.77
+    ],
+    "short_factor": [
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0
+    ],
+    "type": "longrope"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 262144,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.45.0",
+  "use_cache": true,
+  "vocab_size": 200064
+}
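The file describes a phi3-architecture model: 32 hidden layers, hidden size 3072, 24 attention heads with 8 key/value heads, LongRoPE scaling from the original 4096 positions up to 131072, tied embeddings over a 200064-token vocabulary, and 6-bit, group-size-64 quantization metadata. Below is a minimal sketch of inspecting the uploaded config with transformers; the repo id is a placeholder assumption, and trust_remote_code=True is needed because auto_map points the config and model classes at modules shipped alongside this file rather than the built-in transformers classes.

from transformers import AutoConfig

# Placeholder repo id (assumption) -- substitute the repository this commit belongs to.
repo_id = "Felladrin/<this-repo>"

# trust_remote_code=True lets transformers resolve the auto_map entries
# (configuration_phi3.Phi3Config, modeling_phi3.Phi3ForCausalLM) from the repo itself.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# Quantities implied by the values in config.json above:
head_dim = config.hidden_size // config.num_attention_heads    # 3072 // 24 = 128
rotary_dims = int(head_dim * config.partial_rotary_factor)     # int(128 * 0.75) = 96
long_factors = config.rope_scaling["long_factor"]               # 48 entries, one per rotary frequency (96 / 2)
extension = config.max_position_embeddings // config.original_max_position_embeddings  # 131072 // 4096 = 32x

print(head_dim, rotary_dims, len(long_factors), extension)      # 128 96 48 32

The duplicated "quantization" / "quantization_config" block (group_size 64, bits 6) matches the convention used by MLX exports, so the weights are presumably intended to be loaded with mlx-lm (e.g. mlx_lm.load) rather than dequantized by transformers; that reading is an inference from the keys alone, not something stated in the commit.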