openvino-ci committed
Commit 7b27131
1 Parent(s): 2210167

Upload folder using huggingface_hub

README.md CHANGED
@@ -14,7 +14,7 @@ This is [codegen25-7b-multi](https://huggingface.co/Salesforce/codegen25-7b-mult
Weight compression was performed using `nncf.compress_weights` with the following parameters:

* mode: **int4_asym**
- * ratio: **0.8**
+ * ratio: **1**
* group_size: **128**

For more information on quantization, check the [OpenVINO model optimization guide](https://docs.openvino.ai/2024/openvino-workflow/model-optimization-guide/weight-compression.html).
@@ -24,11 +24,10 @@ For more information on quantization, check the [OpenVINO model optimization gui

The provided OpenVINO™ IR model is compatible with:

- * OpenVINO version 2024.2.0 and higher
- * Optimum Intel 1.19.0 and higher
-
- ## Running Model Inference with [Optimum Intel](https://huggingface.co/docs/optimum/intel/index)
+ * OpenVINO version 2024.4.0 and higher
+ * Optimum Intel 1.20.0 and higher

+ ## Running Model Inference

1. Install packages required for using [Optimum Intel](https://huggingface.co/docs/optimum/intel/index) integration with the OpenVINO backend:

@@ -46,7 +45,7 @@ model_id = "OpenVINO/codegen25-7b-multi-int4-ov"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = OVModelForCausalLM.from_pretrained(model_id)

- inputs = tokenizer("def print_hello_world():", return_tensors="pt")
+ inputs = tokenizer("What is OpenVINO?", return_tensors="pt")

outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]
@@ -55,40 +54,9 @@ print(text)

For more examples and possible optimizations, refer to the [OpenVINO Large Language Model Inference Guide](https://docs.openvino.ai/2024/learn-openvino/llm_inference_guide.html).

- ## Running Model Inference with [OpenVINO GenAI](https://github.com/openvinotoolkit/openvino.genai)
-
- 1. Install packages required for using OpenVINO GenAI.
- ```
- pip install openvino-genai huggingface_hub
- ```
-
- 2. Download model from HuggingFace Hub
-
- ```
- import huggingface_hub as hf_hub
-
- model_id = "OpenVINO/codegen25-7b-multi-int4-ov"
- model_path = "codegen25-7b-multi-int4-ov"
-
- hf_hub.snapshot_download(model_id, local_dir=model_path)
-
- ```
-
- 3. Run model inference:
-
- ```
- import openvino_genai as ov_genai
-
- device = "CPU"
- pipe = ov_genai.LLMPipeline(model_path, device)
- print(pipe.generate("def print_hello_world():", max_length=200))
- ```
-
- More GenAI usage examples can be found in OpenVINO GenAI library [docs](https://github.com/openvinotoolkit/openvino.genai/blob/master/src/README.md) and [samples](https://github.com/openvinotoolkit/openvino.genai?tab=readme-ov-file#openvino-genai-samples)
-
## Limitations

- Check the original model card for [limitations]().
+ Check the [original model card](https://huggingface.co/Salesforce/codegen25-7b-multi) for limitations.

## Legal information

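Note on the `ratio` change recorded above: in `nncf.compress_weights`, `ratio` is the fraction of eligible weight layers that receive the primary INT4 scheme (the remainder stay in a backup 8-bit precision), so moving from **0.8** to **1** compresses all of them to int4_asym. Below is a minimal sketch of how comparable settings could be reproduced; the file paths are illustrative and exact API details may vary between nncf/OpenVINO releases.

```python
# Sketch only: re-applying the card's compression settings with NNCF.
# Paths are placeholders; assumes an FP16/FP32 OpenVINO IR of the model already exists.
import nncf
import openvino as ov

core = ov.Core()
model = core.read_model("codegen25-7b-multi/openvino_model.xml")  # hypothetical source IR

compressed = nncf.compress_weights(
    model,
    mode=nncf.CompressWeightsMode.INT4_ASYM,  # mode: int4_asym
    ratio=1.0,                                # ratio: 1 (was 0.8 before this commit)
    group_size=128,                           # group_size: 128
)

ov.save_model(compressed, "codegen25-7b-multi-int4-ov/openvino_model.xml")
```

A similar export can usually be driven end to end with Optimum Intel's `optimum-cli export openvino` and its 4-bit weight-format options.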
config.json CHANGED
@@ -1,5 +1,5 @@
{
- "_name_or_path": "OpenVINO/codegen25-7b-multi-int4-ov",
+ "_name_or_path": "Salesforce/codegen25-7b-multi",
"architectures": [
"LlamaForCausalLM"
],
@@ -7,6 +7,7 @@
"attention_dropout": 0.0,
"bos_token_id": 50256,
"eos_token_id": 50256,
+ "head_dim": 128,
"hidden_act": "silu",
"hidden_size": 4096,
"initializer_range": 0.02,
@@ -23,7 +24,7 @@
"rope_scaling": null,
"rope_theta": 10000.0,
"tie_word_embeddings": false,
- "transformers_version": "4.41.2",
+ "transformers_version": "4.45.2",
"use_cache": true,
"vocab_size": 51200
}
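The added `head_dim` entry only makes the per-head width explicit; for a Llama-style config it equals `hidden_size / num_attention_heads`. `hidden_size` (4096) is visible in this hunk, while the head count is not, so the value of 32 below is an assumption about this checkpoint rather than something shown in the diff:

```python
# Sanity check for the new "head_dim": 128 entry.
hidden_size = 4096            # visible in the hunk above
num_attention_heads = 32      # assumed; not shown in this diff
assert hidden_size // num_attention_heads == 128
```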
generation_config.json CHANGED
@@ -2,5 +2,5 @@
"_from_model_config": true,
"bos_token_id": 50256,
"eos_token_id": 50256,
- "transformers_version": "4.41.2"
+ "transformers_version": "4.45.2"
}
openvino_detokenizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17900eba810d745a2800e475e41bff8587f51217883ee7680fa3f3000bafdfaf
+ size 535202
openvino_detokenizer.xml ADDED
@@ -0,0 +1,213 @@
1
+ <?xml version="1.0"?>
2
+ <net name="detokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_62216" type="Parameter" version="opset1">
5
+ <data shape="?,?" element_type="i64" />
6
+ <output>
7
+ <port id="0" precision="I64" names="Parameter_62216">
8
+ <dim>-1</dim>
9
+ <dim>-1</dim>
10
+ </port>
11
+ </output>
12
+ </layer>
13
+ <layer id="1" name="Convert_62235" type="Convert" version="opset1">
14
+ <data destination_type="i32" />
15
+ <input>
16
+ <port id="0" precision="I64">
17
+ <dim>-1</dim>
18
+ <dim>-1</dim>
19
+ </port>
20
+ </input>
21
+ <output>
22
+ <port id="1" precision="I32">
23
+ <dim>-1</dim>
24
+ <dim>-1</dim>
25
+ </port>
26
+ </output>
27
+ </layer>
28
+ <layer id="2" name="Constant_62218" type="Const" version="opset1">
29
+ <data element_type="u8" shape="535153" offset="0" size="535153" />
30
+ <output>
31
+ <port id="0" precision="U8">
32
+ <dim>535153</dim>
33
+ </port>
34
+ </output>
35
+ </layer>
36
+ <layer id="3" name="StringTensorUnpack_62219" type="StringTensorUnpack" version="extension">
37
+ <data mode="begins_ends" />
38
+ <input>
39
+ <port id="0" precision="U8">
40
+ <dim>535153</dim>
41
+ </port>
42
+ </input>
43
+ <output>
44
+ <port id="1" precision="I32">
45
+ <dim>-1</dim>
46
+ </port>
47
+ <port id="2" precision="I32">
48
+ <dim>-1</dim>
49
+ </port>
50
+ <port id="3" precision="U8">
51
+ <dim>-1</dim>
52
+ </port>
53
+ </output>
54
+ </layer>
55
+ <layer id="4" name="VocabDecoder_62220" type="VocabDecoder" version="extension">
56
+ <data skip_tokens="50256" />
57
+ <input>
58
+ <port id="0" precision="I32">
59
+ <dim>-1</dim>
60
+ <dim>-1</dim>
61
+ </port>
62
+ <port id="1" precision="I32">
63
+ <dim>-1</dim>
64
+ </port>
65
+ <port id="2" precision="I32">
66
+ <dim>-1</dim>
67
+ </port>
68
+ <port id="3" precision="U8">
69
+ <dim>-1</dim>
70
+ </port>
71
+ </input>
72
+ <output>
73
+ <port id="4" precision="I32">
74
+ <dim>-1</dim>
75
+ </port>
76
+ <port id="5" precision="I32">
77
+ <dim>-1</dim>
78
+ </port>
79
+ <port id="6" precision="I32">
80
+ <dim>-1</dim>
81
+ </port>
82
+ <port id="7" precision="I32">
83
+ <dim>-1</dim>
84
+ </port>
85
+ <port id="8" precision="U8">
86
+ <dim>-1</dim>
87
+ </port>
88
+ </output>
89
+ </layer>
90
+ <layer id="5" name="FuzeRagged_62221" type="FuzeRagged" version="extension">
91
+ <input>
92
+ <port id="0" precision="I32">
93
+ <dim>-1</dim>
94
+ </port>
95
+ <port id="1" precision="I32">
96
+ <dim>-1</dim>
97
+ </port>
98
+ <port id="2" precision="I32">
99
+ <dim>-1</dim>
100
+ </port>
101
+ <port id="3" precision="I32">
102
+ <dim>-1</dim>
103
+ </port>
104
+ </input>
105
+ <output>
106
+ <port id="4" precision="I32">
107
+ <dim>-1</dim>
108
+ </port>
109
+ <port id="5" precision="I32">
110
+ <dim>-1</dim>
111
+ </port>
112
+ </output>
113
+ </layer>
114
+ <layer id="6" name="Constant_62223" type="Const" version="opset1">
115
+ <data element_type="u8" shape="47" offset="535153" size="47" />
116
+ <output>
117
+ <port id="0" precision="U8">
118
+ <dim>47</dim>
119
+ </port>
120
+ </output>
121
+ </layer>
122
+ <layer id="7" name="Constant_62225" type="Const" version="opset1">
123
+ <data element_type="u8" shape="2" offset="535200" size="2" />
124
+ <output>
125
+ <port id="0" precision="U8">
126
+ <dim>2</dim>
127
+ </port>
128
+ </output>
129
+ </layer>
130
+ <layer id="8" name="RegexNormalization_62226" type="RegexNormalization" version="extension">
131
+ <data global_replace="true" />
132
+ <input>
133
+ <port id="0" precision="I32">
134
+ <dim>-1</dim>
135
+ </port>
136
+ <port id="1" precision="I32">
137
+ <dim>-1</dim>
138
+ </port>
139
+ <port id="2" precision="U8">
140
+ <dim>-1</dim>
141
+ </port>
142
+ <port id="3" precision="U8">
143
+ <dim>47</dim>
144
+ </port>
145
+ <port id="4" precision="U8">
146
+ <dim>2</dim>
147
+ </port>
148
+ </input>
149
+ <output>
150
+ <port id="5" precision="I32">
151
+ <dim>-1</dim>
152
+ </port>
153
+ <port id="6" precision="I32">
154
+ <dim>-1</dim>
155
+ </port>
156
+ <port id="7" precision="U8">
157
+ <dim>-1</dim>
158
+ </port>
159
+ </output>
160
+ </layer>
161
+ <layer id="9" name="StringTensorPack_62227" type="StringTensorPack" version="extension">
162
+ <data mode="begins_ends" />
163
+ <input>
164
+ <port id="0" precision="I32">
165
+ <dim>-1</dim>
166
+ </port>
167
+ <port id="1" precision="I32">
168
+ <dim>-1</dim>
169
+ </port>
170
+ <port id="2" precision="U8">
171
+ <dim>-1</dim>
172
+ </port>
173
+ </input>
174
+ <output>
175
+ <port id="3" precision="STRING" names="string_output">
176
+ <dim>-1</dim>
177
+ </port>
178
+ </output>
179
+ </layer>
180
+ <layer id="10" name="Result_62228" type="Result" version="opset1">
181
+ <input>
182
+ <port id="0" precision="STRING">
183
+ <dim>-1</dim>
184
+ </port>
185
+ </input>
186
+ </layer>
187
+ </layers>
188
+ <edges>
189
+ <edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
190
+ <edge from-layer="1" from-port="1" to-layer="4" to-port="0" />
191
+ <edge from-layer="2" from-port="0" to-layer="3" to-port="0" />
192
+ <edge from-layer="3" from-port="1" to-layer="4" to-port="1" />
193
+ <edge from-layer="3" from-port="2" to-layer="4" to-port="2" />
194
+ <edge from-layer="3" from-port="3" to-layer="4" to-port="3" />
195
+ <edge from-layer="4" from-port="7" to-layer="5" to-port="3" />
196
+ <edge from-layer="4" from-port="8" to-layer="8" to-port="2" />
197
+ <edge from-layer="4" from-port="6" to-layer="5" to-port="2" />
198
+ <edge from-layer="4" from-port="5" to-layer="5" to-port="1" />
199
+ <edge from-layer="4" from-port="4" to-layer="5" to-port="0" />
200
+ <edge from-layer="5" from-port="4" to-layer="8" to-port="0" />
201
+ <edge from-layer="5" from-port="5" to-layer="8" to-port="1" />
202
+ <edge from-layer="6" from-port="0" to-layer="8" to-port="3" />
203
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="4" />
204
+ <edge from-layer="8" from-port="5" to-layer="9" to-port="0" />
205
+ <edge from-layer="8" from-port="6" to-layer="9" to-port="1" />
206
+ <edge from-layer="8" from-port="7" to-layer="9" to-port="2" />
207
+ <edge from-layer="9" from-port="3" to-layer="10" to-port="0" />
208
+ </edges>
209
+ <rt_info>
210
+ <eos_token_id value="50256" />
211
+ <original_tokenizer_class value="&lt;class 'transformers_modules.pytorch.tokenization_codegen25.CodeGen25Tokenizer'>" />
212
+ </rt_info>
213
+ </net>
openvino_model.bin CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:a14a2a2f26ff811c95361a9e3919c27039d78362a07c7eed3e2d1396919692d0
- size 4425597304
+ oid sha256:a563bfaa847ac981b85d0e015e2271b3334995110d143bcc9d8064bd3776ae26
+ size 3785290240
openvino_model.xml CHANGED
The diff for this file is too large to render. See raw diff
openvino_tokenizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:348f1a1512e75e2213d7b2d2c2749c15e9458d56bae02b1336f7b502f881d509
+ size 1272940
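The `.bin` entries above are Git LFS pointer files, not the payloads themselves (the INT4 weights are roughly 3.8 GB). To get usable local copies, download a snapshot of the repository, as the README example removed in this commit did:

```python
# Fetch the real files behind the LFS pointers; mirrors the removed README snippet.
import huggingface_hub as hf_hub

model_id = "OpenVINO/codegen25-7b-multi-int4-ov"
model_path = "codegen25-7b-multi-int4-ov"  # local target directory

hf_hub.snapshot_download(model_id, local_dir=model_path)
```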
openvino_tokenizer.xml ADDED
@@ -0,0 +1,714 @@
1
+ <?xml version="1.0"?>
2
+ <net name="tokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_62109" type="Parameter" version="opset1">
5
+ <data shape="?" element_type="string" />
6
+ <output>
7
+ <port id="0" precision="STRING" names="Parameter_62109">
8
+ <dim>-1</dim>
9
+ </port>
10
+ </output>
11
+ </layer>
12
+ <layer id="1" name="Constant_62115" type="Const" version="opset1">
13
+ <data element_type="i64" shape="" offset="0" size="8" />
14
+ <output>
15
+ <port id="0" precision="I64" />
16
+ </output>
17
+ </layer>
18
+ <layer id="2" name="StringTensorUnpack_62110" type="StringTensorUnpack" version="extension">
19
+ <data mode="begins_ends" />
20
+ <input>
21
+ <port id="0" precision="STRING">
22
+ <dim>-1</dim>
23
+ </port>
24
+ </input>
25
+ <output>
26
+ <port id="1" precision="I32">
27
+ <dim>-1</dim>
28
+ </port>
29
+ <port id="2" precision="I32">
30
+ <dim>-1</dim>
31
+ </port>
32
+ <port id="3" precision="U8">
33
+ <dim>-1</dim>
34
+ </port>
35
+ </output>
36
+ </layer>
37
+ <layer id="3" name="ShapeOf_62111" type="ShapeOf" version="opset3">
38
+ <data output_type="i64" />
39
+ <input>
40
+ <port id="0" precision="I32">
41
+ <dim>-1</dim>
42
+ </port>
43
+ </input>
44
+ <output>
45
+ <port id="1" precision="I64">
46
+ <dim>1</dim>
47
+ </port>
48
+ </output>
49
+ </layer>
50
+ <layer id="4" name="Constant_62112" type="Const" version="opset1">
51
+ <data element_type="i64" shape="" offset="0" size="8" />
52
+ <output>
53
+ <port id="0" precision="I64" />
54
+ </output>
55
+ </layer>
56
+ <layer id="5" name="Constant_62113" type="Const" version="opset1">
57
+ <data element_type="i64" shape="" offset="0" size="8" />
58
+ <output>
59
+ <port id="0" precision="I64" />
60
+ </output>
61
+ </layer>
62
+ <layer id="6" name="Gather_62114" type="Gather" version="opset8">
63
+ <data batch_dims="0" />
64
+ <input>
65
+ <port id="0" precision="I64">
66
+ <dim>1</dim>
67
+ </port>
68
+ <port id="1" precision="I64" />
69
+ <port id="2" precision="I64" />
70
+ </input>
71
+ <output>
72
+ <port id="3" precision="I64" />
73
+ </output>
74
+ </layer>
75
+ <layer id="7" name="Constant_62116" type="Const" version="opset1">
76
+ <data element_type="i64" shape="" offset="8" size="8" />
77
+ <output>
78
+ <port id="0" precision="I64" />
79
+ </output>
80
+ </layer>
81
+ <layer id="8" name="Range_62117" type="Range" version="opset4">
82
+ <data output_type="i32" />
83
+ <input>
84
+ <port id="0" precision="I64" />
85
+ <port id="1" precision="I64" />
86
+ <port id="2" precision="I64" />
87
+ </input>
88
+ <output>
89
+ <port id="3" precision="I32">
90
+ <dim>-1</dim>
91
+ </port>
92
+ </output>
93
+ </layer>
94
+ <layer id="9" name="Constant_62118" type="Const" version="opset1">
95
+ <data element_type="i64" shape="" offset="8" size="8" />
96
+ <output>
97
+ <port id="0" precision="I64" />
98
+ </output>
99
+ </layer>
100
+ <layer id="10" name="Constant_62119" type="Const" version="opset1">
101
+ <data element_type="i64" shape="" offset="8" size="8" />
102
+ <output>
103
+ <port id="0" precision="I64" />
104
+ </output>
105
+ </layer>
106
+ <layer id="11" name="Add_62120" type="Add" version="opset1">
107
+ <data auto_broadcast="numpy" />
108
+ <input>
109
+ <port id="0" precision="I64" />
110
+ <port id="1" precision="I64" />
111
+ </input>
112
+ <output>
113
+ <port id="2" precision="I64" />
114
+ </output>
115
+ </layer>
116
+ <layer id="12" name="Constant_62121" type="Const" version="opset1">
117
+ <data element_type="i64" shape="" offset="8" size="8" />
118
+ <output>
119
+ <port id="0" precision="I64" />
120
+ </output>
121
+ </layer>
122
+ <layer id="13" name="Range_62122" type="Range" version="opset4">
123
+ <data output_type="i32" />
124
+ <input>
125
+ <port id="0" precision="I64" />
126
+ <port id="1" precision="I64" />
127
+ <port id="2" precision="I64" />
128
+ </input>
129
+ <output>
130
+ <port id="3" precision="I32">
131
+ <dim>-1</dim>
132
+ </port>
133
+ </output>
134
+ </layer>
135
+ <layer id="14" name="Constant_62184" type="Const" version="opset1">
136
+ <data element_type="u8" shape="19" offset="16" size="19" />
137
+ <output>
138
+ <port id="0" precision="U8">
139
+ <dim>19</dim>
140
+ </port>
141
+ </output>
142
+ </layer>
143
+ <layer id="15" name="SpecialTokensSplit_62185" type="SpecialTokensSplit" version="extension">
144
+ <input>
145
+ <port id="0" precision="I32">
146
+ <dim>-1</dim>
147
+ </port>
148
+ <port id="1" precision="I32">
149
+ <dim>-1</dim>
150
+ </port>
151
+ <port id="2" precision="I32">
152
+ <dim>-1</dim>
153
+ </port>
154
+ <port id="3" precision="I32">
155
+ <dim>-1</dim>
156
+ </port>
157
+ <port id="4" precision="U8">
158
+ <dim>-1</dim>
159
+ </port>
160
+ <port id="5" precision="U8">
161
+ <dim>19</dim>
162
+ </port>
163
+ </input>
164
+ <output>
165
+ <port id="6" precision="I32">
166
+ <dim>-1</dim>
167
+ </port>
168
+ <port id="7" precision="I32">
169
+ <dim>-1</dim>
170
+ </port>
171
+ <port id="8" precision="I32">
172
+ <dim>-1</dim>
173
+ </port>
174
+ <port id="9" precision="I32">
175
+ <dim>-1</dim>
176
+ </port>
177
+ <port id="10" precision="U8">
178
+ <dim>-1</dim>
179
+ </port>
180
+ <port id="11" precision="BOOL">
181
+ <dim>-1</dim>
182
+ </port>
183
+ </output>
184
+ </layer>
185
+ <layer id="16" name="NormalizeUnicode_62186" type="NormalizeUnicode" version="extension">
186
+ <data normalization_form="NFC" />
187
+ <input>
188
+ <port id="0" precision="I32">
189
+ <dim>-1</dim>
190
+ </port>
191
+ <port id="1" precision="I32">
192
+ <dim>-1</dim>
193
+ </port>
194
+ <port id="2" precision="U8">
195
+ <dim>-1</dim>
196
+ </port>
197
+ <port id="3" precision="BOOL">
198
+ <dim>-1</dim>
199
+ </port>
200
+ </input>
201
+ <output>
202
+ <port id="4" precision="I32">
203
+ <dim>-1</dim>
204
+ </port>
205
+ <port id="5" precision="I32">
206
+ <dim>-1</dim>
207
+ </port>
208
+ <port id="6" precision="U8">
209
+ <dim>-1</dim>
210
+ </port>
211
+ <port id="7" precision="BOOL">
212
+ <dim>-1</dim>
213
+ </port>
214
+ </output>
215
+ </layer>
216
+ <layer id="17" name="Constant_62188" type="Const" version="opset1">
217
+ <data element_type="u8" shape="79" offset="35" size="79" />
218
+ <output>
219
+ <port id="0" precision="U8">
220
+ <dim>79</dim>
221
+ </port>
222
+ </output>
223
+ </layer>
224
+ <layer id="18" name="RegexSplit_62189" type="RegexSplit" version="extension">
225
+ <data behaviour="contiguous" invert="false" max_splits="-1" />
226
+ <input>
227
+ <port id="0" precision="I32">
228
+ <dim>-1</dim>
229
+ </port>
230
+ <port id="1" precision="I32">
231
+ <dim>-1</dim>
232
+ </port>
233
+ <port id="2" precision="I32">
234
+ <dim>-1</dim>
235
+ </port>
236
+ <port id="3" precision="I32">
237
+ <dim>-1</dim>
238
+ </port>
239
+ <port id="4" precision="U8">
240
+ <dim>-1</dim>
241
+ </port>
242
+ <port id="5" precision="BOOL">
243
+ <dim>-1</dim>
244
+ </port>
245
+ <port id="6" precision="U8">
246
+ <dim>79</dim>
247
+ </port>
248
+ </input>
249
+ <output>
250
+ <port id="7" precision="I32">
251
+ <dim>-1</dim>
252
+ </port>
253
+ <port id="8" precision="I32">
254
+ <dim>-1</dim>
255
+ </port>
256
+ <port id="9" precision="I32">
257
+ <dim>-1</dim>
258
+ </port>
259
+ <port id="10" precision="I32">
260
+ <dim>-1</dim>
261
+ </port>
262
+ <port id="11" precision="U8">
263
+ <dim>-1</dim>
264
+ </port>
265
+ <port id="12" precision="BOOL">
266
+ <dim>-1</dim>
267
+ </port>
268
+ </output>
269
+ </layer>
270
+ <layer id="19" name="Constant_62191" type="Const" version="opset1">
271
+ <data element_type="u8" shape="535153" offset="114" size="535153" />
272
+ <output>
273
+ <port id="0" precision="U8">
274
+ <dim>535153</dim>
275
+ </port>
276
+ </output>
277
+ </layer>
278
+ <layer id="20" name="StringTensorUnpack_62192" type="StringTensorUnpack" version="extension">
279
+ <data mode="begins_ends" />
280
+ <input>
281
+ <port id="0" precision="U8">
282
+ <dim>535153</dim>
283
+ </port>
284
+ </input>
285
+ <output>
286
+ <port id="1" precision="I32">
287
+ <dim>-1</dim>
288
+ </port>
289
+ <port id="2" precision="I32">
290
+ <dim>-1</dim>
291
+ </port>
292
+ <port id="3" precision="U8">
293
+ <dim>-1</dim>
294
+ </port>
295
+ </output>
296
+ </layer>
297
+ <layer id="21" name="Constant_62197" type="Const" version="opset1">
298
+ <data element_type="u8" shape="367533" offset="535267" size="367533" />
299
+ <output>
300
+ <port id="0" precision="U8">
301
+ <dim>367533</dim>
302
+ </port>
303
+ </output>
304
+ </layer>
305
+ <layer id="22" name="StringTensorUnpack_62198" type="StringTensorUnpack" version="extension">
306
+ <data mode="begins_ends" />
307
+ <input>
308
+ <port id="0" precision="U8">
309
+ <dim>367533</dim>
310
+ </port>
311
+ </input>
312
+ <output>
313
+ <port id="1" precision="I32">
314
+ <dim>-1</dim>
315
+ </port>
316
+ <port id="2" precision="I32">
317
+ <dim>-1</dim>
318
+ </port>
319
+ <port id="3" precision="U8">
320
+ <dim>-1</dim>
321
+ </port>
322
+ </output>
323
+ </layer>
324
+ <layer id="23" name="Constant_62200" type="Const" version="opset1">
325
+ <data element_type="u8" shape="353061" offset="902800" size="353061" />
326
+ <output>
327
+ <port id="0" precision="U8">
328
+ <dim>353061</dim>
329
+ </port>
330
+ </output>
331
+ </layer>
332
+ <layer id="24" name="StringTensorUnpack_62201" type="StringTensorUnpack" version="extension">
333
+ <data mode="begins_ends" />
334
+ <input>
335
+ <port id="0" precision="U8">
336
+ <dim>353061</dim>
337
+ </port>
338
+ </input>
339
+ <output>
340
+ <port id="1" precision="I32">
341
+ <dim>-1</dim>
342
+ </port>
343
+ <port id="2" precision="I32">
344
+ <dim>-1</dim>
345
+ </port>
346
+ <port id="3" precision="U8">
347
+ <dim>-1</dim>
348
+ </port>
349
+ </output>
350
+ </layer>
351
+ <layer id="25" name="Constant_62194" type="Const" version="opset1">
352
+ <data element_type="u8" shape="13303" offset="1255861" size="13303" />
353
+ <output>
354
+ <port id="0" precision="U8">
355
+ <dim>13303</dim>
356
+ </port>
357
+ </output>
358
+ </layer>
359
+ <layer id="26" name="StringTensorUnpack_62195" type="StringTensorUnpack" version="extension">
360
+ <data mode="begins_ends" />
361
+ <input>
362
+ <port id="0" precision="U8">
363
+ <dim>13303</dim>
364
+ </port>
365
+ </input>
366
+ <output>
367
+ <port id="1" precision="I32">
368
+ <dim>-1</dim>
369
+ </port>
370
+ <port id="2" precision="I32">
371
+ <dim>-1</dim>
372
+ </port>
373
+ <port id="3" precision="U8">
374
+ <dim>-1</dim>
375
+ </port>
376
+ </output>
377
+ </layer>
378
+ <layer id="27" name="Constant_62202" type="Const" version="opset1">
379
+ <data element_type="i32" shape="942" offset="1269164" size="3768" />
380
+ <output>
381
+ <port id="0" precision="I32">
382
+ <dim>942</dim>
383
+ </port>
384
+ </output>
385
+ </layer>
386
+ <layer id="28" name="BPETokenizer_62203" type="BPETokenizer" version="extension">
387
+ <data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" cache_capacity="20000" />
388
+ <input>
389
+ <port id="0" precision="I32">
390
+ <dim>-1</dim>
391
+ </port>
392
+ <port id="1" precision="I32">
393
+ <dim>-1</dim>
394
+ </port>
395
+ <port id="2" precision="I32">
396
+ <dim>-1</dim>
397
+ </port>
398
+ <port id="3" precision="I32">
399
+ <dim>-1</dim>
400
+ </port>
401
+ <port id="4" precision="U8">
402
+ <dim>-1</dim>
403
+ </port>
404
+ <port id="5" precision="I32">
405
+ <dim>-1</dim>
406
+ </port>
407
+ <port id="6" precision="I32">
408
+ <dim>-1</dim>
409
+ </port>
410
+ <port id="7" precision="U8">
411
+ <dim>-1</dim>
412
+ </port>
413
+ <port id="8" precision="I32">
414
+ <dim>-1</dim>
415
+ </port>
416
+ <port id="9" precision="I32">
417
+ <dim>-1</dim>
418
+ </port>
419
+ <port id="10" precision="U8">
420
+ <dim>-1</dim>
421
+ </port>
422
+ <port id="11" precision="I32">
423
+ <dim>-1</dim>
424
+ </port>
425
+ <port id="12" precision="I32">
426
+ <dim>-1</dim>
427
+ </port>
428
+ <port id="13" precision="U8">
429
+ <dim>-1</dim>
430
+ </port>
431
+ <port id="14" precision="I32">
432
+ <dim>-1</dim>
433
+ </port>
434
+ <port id="15" precision="I32">
435
+ <dim>-1</dim>
436
+ </port>
437
+ <port id="16" precision="U8">
438
+ <dim>-1</dim>
439
+ </port>
440
+ <port id="17" precision="I32">
441
+ <dim>942</dim>
442
+ </port>
443
+ </input>
444
+ <output>
445
+ <port id="18" precision="I32">
446
+ <dim>-1</dim>
447
+ </port>
448
+ <port id="19" precision="I32">
449
+ <dim>-1</dim>
450
+ </port>
451
+ <port id="20" precision="I32">
452
+ <dim>-1</dim>
453
+ </port>
454
+ </output>
455
+ </layer>
456
+ <layer id="29" name="Subtract_62204" type="Subtract" version="opset1">
457
+ <data auto_broadcast="numpy" />
458
+ <input>
459
+ <port id="0" precision="I32">
460
+ <dim>-1</dim>
461
+ </port>
462
+ <port id="1" precision="I32">
463
+ <dim>-1</dim>
464
+ </port>
465
+ </input>
466
+ <output>
467
+ <port id="2" precision="I32">
468
+ <dim>-1</dim>
469
+ </port>
470
+ </output>
471
+ </layer>
472
+ <layer id="30" name="Constant_62205" type="Const" version="opset1">
473
+ <data element_type="i32" shape="" offset="1272932" size="4" />
474
+ <output>
475
+ <port id="0" precision="I32" />
476
+ </output>
477
+ </layer>
478
+ <layer id="31" name="Minimum_62206" type="Minimum" version="opset1">
479
+ <data auto_broadcast="numpy" />
480
+ <input>
481
+ <port id="0" precision="I32">
482
+ <dim>-1</dim>
483
+ </port>
484
+ <port id="1" precision="I32" />
485
+ </input>
486
+ <output>
487
+ <port id="2" precision="I32">
488
+ <dim>-1</dim>
489
+ </port>
490
+ </output>
491
+ </layer>
492
+ <layer id="32" name="Subtract_62207" type="Subtract" version="opset1">
493
+ <data auto_broadcast="numpy" />
494
+ <input>
495
+ <port id="0" precision="I32">
496
+ <dim>-1</dim>
497
+ </port>
498
+ <port id="1" precision="I32">
499
+ <dim>-1</dim>
500
+ </port>
501
+ </input>
502
+ <output>
503
+ <port id="2" precision="I32">
504
+ <dim>-1</dim>
505
+ </port>
506
+ </output>
507
+ </layer>
508
+ <layer id="33" name="Subtract_62208" type="Subtract" version="opset1">
509
+ <data auto_broadcast="numpy" />
510
+ <input>
511
+ <port id="0" precision="I32">
512
+ <dim>-1</dim>
513
+ </port>
514
+ <port id="1" precision="I32">
515
+ <dim>-1</dim>
516
+ </port>
517
+ </input>
518
+ <output>
519
+ <port id="2" precision="I32">
520
+ <dim>-1</dim>
521
+ </port>
522
+ </output>
523
+ </layer>
524
+ <layer id="34" name="Constant_62209" type="Const" version="opset1">
525
+ <data element_type="i32" shape="" offset="1272936" size="4" />
526
+ <output>
527
+ <port id="0" precision="I32" />
528
+ </output>
529
+ </layer>
530
+ <layer id="35" name="ReduceMax_62210" type="ReduceMax" version="opset1">
531
+ <data keep_dims="false" />
532
+ <input>
533
+ <port id="0" precision="I32">
534
+ <dim>-1</dim>
535
+ </port>
536
+ <port id="1" precision="I32" />
537
+ </input>
538
+ <output>
539
+ <port id="2" precision="I32" />
540
+ </output>
541
+ </layer>
542
+ <layer id="36" name="Constant_62211" type="Const" version="opset1">
543
+ <data element_type="i32" shape="" offset="1272936" size="4" />
544
+ <output>
545
+ <port id="0" precision="I32" />
546
+ </output>
547
+ </layer>
548
+ <layer id="37" name="RaggedToDense_62212" type="RaggedToDense" version="extension">
549
+ <data pad_right="false" />
550
+ <input>
551
+ <port id="0" precision="I32">
552
+ <dim>-1</dim>
553
+ </port>
554
+ <port id="1" precision="I32">
555
+ <dim>-1</dim>
556
+ </port>
557
+ <port id="2" precision="I32">
558
+ <dim>-1</dim>
559
+ </port>
560
+ <port id="3" precision="I32" />
561
+ <port id="4" precision="I32" />
562
+ </input>
563
+ <output>
564
+ <port id="5" precision="I32">
565
+ <dim>-1</dim>
566
+ <dim>-1</dim>
567
+ </port>
568
+ <port id="6" precision="BOOL">
569
+ <dim>-1</dim>
570
+ <dim>-1</dim>
571
+ </port>
572
+ </output>
573
+ </layer>
574
+ <layer id="38" name="Convert_62213" type="Convert" version="opset1">
575
+ <data destination_type="i32" />
576
+ <input>
577
+ <port id="0" precision="BOOL">
578
+ <dim>-1</dim>
579
+ <dim>-1</dim>
580
+ </port>
581
+ </input>
582
+ <output>
583
+ <port id="1" precision="I32">
584
+ <dim>-1</dim>
585
+ <dim>-1</dim>
586
+ </port>
587
+ </output>
588
+ </layer>
589
+ <layer id="39" name="Convert_62213" type="Convert" version="opset1">
590
+ <data destination_type="i64" />
591
+ <input>
592
+ <port id="0" precision="I32">
593
+ <dim>-1</dim>
594
+ <dim>-1</dim>
595
+ </port>
596
+ </input>
597
+ <output>
598
+ <port id="1" precision="I64" names="attention_mask">
599
+ <dim>-1</dim>
600
+ <dim>-1</dim>
601
+ </port>
602
+ </output>
603
+ </layer>
604
+ <layer id="41" name="RaggedToDense_62212.0" type="Convert" version="opset1">
605
+ <data destination_type="i64" />
606
+ <input>
607
+ <port id="0" precision="I32">
608
+ <dim>-1</dim>
609
+ <dim>-1</dim>
610
+ </port>
611
+ </input>
612
+ <output>
613
+ <port id="1" precision="I64" names="input_ids">
614
+ <dim>-1</dim>
615
+ <dim>-1</dim>
616
+ </port>
617
+ </output>
618
+ </layer>
619
+ <layer id="42" name="Result_62214" type="Result" version="opset1">
620
+ <input>
621
+ <port id="0" precision="I64">
622
+ <dim>-1</dim>
623
+ <dim>-1</dim>
624
+ </port>
625
+ </input>
626
+ </layer>
627
+ <layer id="40" name="Result_62215" type="Result" version="opset1">
628
+ <input>
629
+ <port id="0" precision="I64">
630
+ <dim>-1</dim>
631
+ <dim>-1</dim>
632
+ </port>
633
+ </input>
634
+ </layer>
635
+ </layers>
636
+ <edges>
637
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
638
+ <edge from-layer="1" from-port="0" to-layer="8" to-port="0" />
639
+ <edge from-layer="2" from-port="1" to-layer="3" to-port="0" />
640
+ <edge from-layer="2" from-port="3" to-layer="15" to-port="4" />
641
+ <edge from-layer="2" from-port="2" to-layer="15" to-port="3" />
642
+ <edge from-layer="2" from-port="1" to-layer="15" to-port="2" />
643
+ <edge from-layer="3" from-port="1" to-layer="6" to-port="0" />
644
+ <edge from-layer="4" from-port="0" to-layer="6" to-port="1" />
645
+ <edge from-layer="5" from-port="0" to-layer="6" to-port="2" />
646
+ <edge from-layer="6" from-port="3" to-layer="11" to-port="0" />
647
+ <edge from-layer="6" from-port="3" to-layer="8" to-port="1" />
648
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="2" />
649
+ <edge from-layer="8" from-port="3" to-layer="15" to-port="0" />
650
+ <edge from-layer="9" from-port="0" to-layer="13" to-port="0" />
651
+ <edge from-layer="10" from-port="0" to-layer="11" to-port="1" />
652
+ <edge from-layer="11" from-port="2" to-layer="13" to-port="1" />
653
+ <edge from-layer="12" from-port="0" to-layer="13" to-port="2" />
654
+ <edge from-layer="13" from-port="3" to-layer="15" to-port="1" />
655
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="5" />
656
+ <edge from-layer="15" from-port="9" to-layer="16" to-port="1" />
657
+ <edge from-layer="15" from-port="7" to-layer="18" to-port="1" />
658
+ <edge from-layer="15" from-port="6" to-layer="18" to-port="0" />
659
+ <edge from-layer="15" from-port="11" to-layer="16" to-port="3" />
660
+ <edge from-layer="15" from-port="10" to-layer="16" to-port="2" />
661
+ <edge from-layer="15" from-port="8" to-layer="16" to-port="0" />
662
+ <edge from-layer="16" from-port="4" to-layer="18" to-port="2" />
663
+ <edge from-layer="16" from-port="5" to-layer="18" to-port="3" />
664
+ <edge from-layer="16" from-port="6" to-layer="18" to-port="4" />
665
+ <edge from-layer="16" from-port="7" to-layer="18" to-port="5" />
666
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="6" />
667
+ <edge from-layer="18" from-port="11" to-layer="28" to-port="4" />
668
+ <edge from-layer="18" from-port="10" to-layer="28" to-port="3" />
669
+ <edge from-layer="18" from-port="9" to-layer="28" to-port="2" />
670
+ <edge from-layer="18" from-port="8" to-layer="28" to-port="1" />
671
+ <edge from-layer="18" from-port="7" to-layer="28" to-port="0" />
672
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="0" />
673
+ <edge from-layer="20" from-port="1" to-layer="28" to-port="5" />
674
+ <edge from-layer="20" from-port="2" to-layer="28" to-port="6" />
675
+ <edge from-layer="20" from-port="3" to-layer="28" to-port="7" />
676
+ <edge from-layer="21" from-port="0" to-layer="22" to-port="0" />
677
+ <edge from-layer="22" from-port="1" to-layer="28" to-port="8" />
678
+ <edge from-layer="22" from-port="2" to-layer="28" to-port="9" />
679
+ <edge from-layer="22" from-port="3" to-layer="28" to-port="10" />
680
+ <edge from-layer="23" from-port="0" to-layer="24" to-port="0" />
681
+ <edge from-layer="24" from-port="1" to-layer="28" to-port="11" />
682
+ <edge from-layer="24" from-port="2" to-layer="28" to-port="12" />
683
+ <edge from-layer="24" from-port="3" to-layer="28" to-port="13" />
684
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="0" />
685
+ <edge from-layer="26" from-port="1" to-layer="28" to-port="14" />
686
+ <edge from-layer="26" from-port="2" to-layer="28" to-port="15" />
687
+ <edge from-layer="26" from-port="3" to-layer="28" to-port="16" />
688
+ <edge from-layer="27" from-port="0" to-layer="28" to-port="17" />
689
+ <edge from-layer="28" from-port="20" to-layer="37" to-port="2" />
690
+ <edge from-layer="28" from-port="18" to-layer="29" to-port="1" />
691
+ <edge from-layer="28" from-port="19" to-layer="37" to-port="1" />
692
+ <edge from-layer="28" from-port="19" to-layer="33" to-port="0" />
693
+ <edge from-layer="28" from-port="19" to-layer="32" to-port="0" />
694
+ <edge from-layer="28" from-port="19" to-layer="29" to-port="0" />
695
+ <edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
696
+ <edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
697
+ <edge from-layer="31" from-port="2" to-layer="32" to-port="1" />
698
+ <edge from-layer="32" from-port="2" to-layer="33" to-port="1" />
699
+ <edge from-layer="32" from-port="2" to-layer="37" to-port="0" />
700
+ <edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
701
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
702
+ <edge from-layer="35" from-port="2" to-layer="37" to-port="3" />
703
+ <edge from-layer="36" from-port="0" to-layer="37" to-port="4" />
704
+ <edge from-layer="37" from-port="6" to-layer="38" to-port="0" />
705
+ <edge from-layer="37" from-port="5" to-layer="41" to-port="0" />
706
+ <edge from-layer="38" from-port="1" to-layer="39" to-port="0" />
707
+ <edge from-layer="39" from-port="1" to-layer="40" to-port="0" />
708
+ <edge from-layer="41" from-port="1" to-layer="42" to-port="0" />
709
+ </edges>
710
+ <rt_info>
711
+ <eos_token_id value="50256" />
712
+ <original_tokenizer_class value="&lt;class 'transformers_modules.pytorch.tokenization_codegen25.CodeGen25Tokenizer'>" />
713
+ </rt_info>
714
+ </net>
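The two added IR graphs implement tokenization and detokenization as OpenVINO models: the tokenizer ends in `input_ids` and `attention_mask` outputs and the detokenizer in a `string_output`, as the port names above show. A rough sketch of standalone use follows. It assumes the `openvino-tokenizers` package, whose import registers the custom operations these graphs rely on (`StringTensorUnpack`, `BPETokenizer`, `VocabDecoder`, ...); check that project's documentation for the exact calling convention.

```python
# Rough sketch: running the added tokenizer/detokenizer IRs on their own.
# Assumes `pip install openvino openvino-tokenizers` and locally downloaded model files.
import openvino as ov
import openvino_tokenizers  # noqa: F401  # importing registers the custom tokenizer ops

core = ov.Core()
tokenizer = core.compile_model("codegen25-7b-multi-int4-ov/openvino_tokenizer.xml", "CPU")
detokenizer = core.compile_model("codegen25-7b-multi-int4-ov/openvino_detokenizer.xml", "CPU")

encoded = tokenizer(["def print_hello_world():"])   # batched string input
input_ids = encoded["input_ids"]                    # output names match the XML above
print(detokenizer(input_ids)["string_output"])      # round-trip back to text
```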
tokenization_codegen25.py CHANGED
@@ -245,4 +245,4 @@ class CodeGen25Tokenizer(PreTrainedTokenizer):

# has no vocab file
def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None):
- return ()
+ return ()
tokenizer_config.json CHANGED
@@ -18,7 +18,7 @@
},
"clean_up_tokenization_spaces": true,
"eos_token": "<|endoftext|>",
- "model_max_length": 2048,
+ "model_max_length": 1000000000000000019884624838656,
"pad_token": null,
"tokenizer_class": "CodeGen25Tokenizer"
}
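The new `model_max_length` value looks odd, but it is simply `int(1e30)`, the sentinel `transformers` stores when no finite maximum length is configured; the trailing digits come from 1e30 not being exactly representable as a float:

```python
# Where 1000000000000000019884624838656 comes from:
assert int(1e30) == 1000000000000000019884624838656
```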