huseinzol05 committed
Commit • 5eb3f50
Parent(s): ef01ad1
Upload autoawq-mistral-7b.ipynb
Files changed: autoawq-mistral-7b.ipynb (+529, -0)
autoawq-mistral-7b.ipynb
ADDED
@@ -0,0 +1,529 @@
In [1]:
```python
# !pip3 install https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.6/autoawq-0.1.6+cu118-cp310-cp310-linux_x86_64.whl
```
In [2]:
```python
!nvidia-smi
```
Output:
```
Tue Nov  7 13:34:15 2023
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 525.85.12    Driver Version: 525.85.12    CUDA Version: 12.0     |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|                               |                      |               MIG M. |
|===============================+======================+======================|
|   0  NVIDIA A100 80G...  On   | 00000001:00:00.0 Off |                    0 |
| N/A   37C    P0    65W / 300W |   5536MiB / 81920MiB |      0%      Default |
|                               |                      |             Disabled |
+-------------------------------+----------------------+----------------------+

+-----------------------------------------------------------------------------+
| Processes:                                                                  |
|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
|        ID   ID                                                   Usage      |
|=============================================================================|
+-----------------------------------------------------------------------------+
```
In [20]:
```python
from awq import AutoAWQForCausalLM
from transformers import AutoConfig, AwqConfig, AutoTokenizer, AutoModelForCausalLM
import torch

model_path = 'mesolitica/malaysian-mistral-7b-32k-instructions'
```
In [4]:
```python
# !pip3 install transformers==4.35.0
```
In [6]:
```python
# model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype = torch.bfloat16)
```
In [7]:
```python
# model.save_pretrained('./test', safe_serialization = False)
```
In [9]:
```python
model = AutoAWQForCausalLM.from_pretrained('./test')
```
Output:
```
Loading checkpoint shards:   0%|          | 0/3 [00:00<?, ?it/s]
```
In [10]:
```python
quant_path = 'malaysian-mistral-7b-32k-instructions-awq'
quant_config = { "zero_point": True, "q_group_size": 128, "w_bit": 4, "version": "GEMM" }

tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model.quantize(tokenizer, quant_config=quant_config, calib_data = 'mesolitica/malaysian-calibration')
```
Output:
```
Downloading data files:   0%|          | 0/1 [00:00<?, ?it/s]
Downloading data:   0%|          | 0.00/351M [00:00<?, ?B/s]
Extracting data files:   0%|          | 0/1 [00:00<?, ?it/s]
Generating train split: 0 examples [00:00, ? examples/s]
AWQ: 100%|██████████| 32/32 [09:45<00:00, 18.28s/it]
```
In [31]:
```python
model.save_quantized(quant_path, safetensors = False)
tokenizer.save_pretrained(quant_path)
```
Output:
```
WARNING:root:`quant_config.json` is being deprecated in the future in favor of quantization_config in config.json.

('malaysian-mistral-7b-32k-instructions-awq/tokenizer_config.json',
 'malaysian-mistral-7b-32k-instructions-awq/special_tokens_map.json',
 'malaysian-mistral-7b-32k-instructions-awq/tokenizer.model',
 'malaysian-mistral-7b-32k-instructions-awq/added_tokens.json',
 'malaysian-mistral-7b-32k-instructions-awq/tokenizer.json')
```
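The quantized folder can also be reloaded through AutoAWQ itself rather than transformers; a minimal sketch (not part of the original run), assuming AutoAWQ 0.1.x's `from_quantized()` API and the `quant_path` directory written above:

```python
# Sketch only: reload the AWQ checkpoint with AutoAWQ for GPU inference.
from awq import AutoAWQForCausalLM
from transformers import AutoTokenizer

quant_path = 'malaysian-mistral-7b-32k-instructions-awq'
awq_model = AutoAWQForCausalLM.from_quantized(quant_path, fuse_layers=True)
awq_tokenizer = AutoTokenizer.from_pretrained(quant_path)
```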
In [18]:
```python
tokenizer.push_to_hub('mesolitica/malaysian-mistral-7b-32k-instructions-AWQ')
```
Output:
```
tokenizer.model:   0%|          | 0.00/493k [00:00<?, ?B/s]

CommitInfo(commit_url='https://huggingface.co/mesolitica/malaysian-mistral-7b-32k-instructions-AWQ/commit/f7c5657f4c023ba9ca5c04760856998e10f06875', commit_message='Upload tokenizer', commit_description='', oid='f7c5657f4c023ba9ca5c04760856998e10f06875', pr_url=None, pr_revision=None, pr_num=None)
```
In [43]:
```python
AwqConfig()
```
Output:
```
transformers.utils.quantization_config.AwqConfig
```
In [44]:
```python
quantization_config = AwqConfig(
    bits=quant_config['w_bit'],
    group_size=quant_config['q_group_size'],
    zero_point=quant_config['zero_point'],
    backend='autoawq',
    version=quant_config['version'].lower(),
)

config = AutoConfig.from_pretrained(model_path)
config.quantization_config = quantization_config

config.push_to_hub('mesolitica/malaysian-mistral-7b-32k-instructions-AWQ')
```
Output:
```
CommitInfo(commit_url='https://huggingface.co/mesolitica/malaysian-mistral-7b-32k-instructions-AWQ/commit/ef01ad16114f487387c426a41e172df2a2b94341', commit_message='Upload config', commit_description='', oid='ef01ad16114f487387c426a41e172df2a2b94341', pr_url=None, pr_revision=None, pr_num=None)
```
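Writing `quantization_config` into `config.json` is what lets plain transformers recognize the repo as AWQ-quantized. A quick read-back check, as a sketch (not from the original notebook; exact serialized key names depend on the transformers version):

```python
# Sketch: read the pushed config back and inspect the stored AWQ settings.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained('mesolitica/malaysian-mistral-7b-32k-instructions-AWQ')
print(cfg.quantization_config)
# expected to include roughly: quant_method='awq', bits=4, group_size=128,
# zero_point=True, backend='autoawq', version='gemm'
```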
In [19]:
```python
from huggingface_hub import HfApi

api = HfApi()
```
In [32]:
```python
api.upload_file(
    path_or_fileobj='malaysian-mistral-7b-32k-instructions-awq/pytorch_model.bin',
    path_in_repo="pytorch_model.bin",
    repo_id='mesolitica/malaysian-mistral-7b-32k-instructions-AWQ',
    repo_type="model",
)
```
Output:
```
pytorch_model.bin:   0%|          | 0.00/4.15G [00:00<?, ?B/s]

'https://huggingface.co/mesolitica/malaysian-mistral-7b-32k-instructions-AWQ/blob/main/pytorch_model.bin'
```
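Uploading shard by shard with `upload_file` works, but the whole quantized folder could also go up in one call; a sketch assuming `HfApi.upload_folder()`:

```python
# Sketch: push the entire quantized output directory in a single commit.
api.upload_folder(
    folder_path='malaysian-mistral-7b-32k-instructions-awq',
    repo_id='mesolitica/malaysian-mistral-7b-32k-instructions-AWQ',
    repo_type='model',
)
```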
In [45]:
```python
quantized_model = AutoModelForCausalLM.from_pretrained('mesolitica/malaysian-mistral-7b-32k-instructions-AWQ')
_ = quantized_model.cuda()
```
Output:
```
Downloading (…)lve/main/config.json:   0%|          | 0.00/806 [00:00<?, ?B/s]
You have loaded an AWQ model on CPU and have a CUDA device available, make sure to set your model on a GPU device in order to run your model.
```
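The warning above appears because the weights land on CPU first and are only moved to CUDA afterwards. A sketch of loading directly onto the GPU instead (assumes `accelerate` is installed so that `device_map` is accepted):

```python
# Sketch: place the AWQ weights on the GPU at load time to avoid the CPU warning.
quantized_model = AutoModelForCausalLM.from_pretrained(
    'mesolitica/malaysian-mistral-7b-32k-instructions-AWQ',
    device_map='cuda:0',
)
```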
In [46]:
```python
def parse_mistral_chat(messages):

    user_query = messages[-1]['content']

    users, assistants = [], []
    for q in messages[:-1]:
        if q['role'] == 'user':
            users.append(q['content'])
        elif q['role'] == 'assistant':
            assistants.append(q['content'])

    texts = ['<s>']
    for u, a in zip(users, assistants):
        texts.append(f'[INST] {u.strip()} [/INST]{a.strip()}</s> ')

    texts.append(f'[INST] {user_query.strip()} [/INST]')
    prompt = ''.join(texts).strip()
    return prompt
```
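`parse_mistral_chat` rebuilds the Mistral `[INST] ... [/INST]` prompt format by hand. If the tokenizer ships a chat template (an assumption, not checked in this notebook), the same prompt could be produced with `apply_chat_template`:

```python
# Sketch: build the prompt via the tokenizer's chat template instead of by hand.
messages = [
    {'role': 'user', 'content': 'kwsp tu apa'},
]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True,
)
```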
In [47]:
```python
messages = [
    {'role': 'user', 'content': 'kwsp tu apa'}
]
prompt = parse_mistral_chat(messages)
inputs = tokenizer([prompt], return_tensors='pt', add_special_tokens=False).to('cuda')
```
In [49]:
```python
%%time

generate_kwargs = dict(
    inputs,
    max_new_tokens=1024,
    top_p=0.95,
    top_k=50,
    temperature=0.9,
    do_sample=True,
    num_beams=1,
)
r = quantized_model.generate(**generate_kwargs)
tokenizer.decode(r[0])
```
Output:
```
Setting `pad_token_id` to `eos_token_id`:2 for open-end generation.
CPU times: user 2.67 s, sys: 0 ns, total: 2.67 s
Wall time: 2.67 s

'<s> [INST] kwsp tu apa [/INST]kwsp merujuk kepada Kumpulan Wang Simpanan Pekerja, iaitu sebuah organisasi simpanan persaraan yang ditubuhkan oleh kerajaan Malaysia untuk melindungi dan menyediakan simpanan untuk pekerja-pekerja sektor swasta pada akhir penggajian mereka.</s>'
```
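For interactive use, the same generation can be streamed token by token; a sketch using transformers' `TextStreamer` with the sampling settings above:

```python
# Sketch: stream tokens to stdout as they are generated.
from transformers import TextStreamer

streamer = TextStreamer(tokenizer, skip_prompt=True)
_ = quantized_model.generate(
    **inputs,
    streamer=streamer,
    max_new_tokens=1024,
    top_p=0.95,
    top_k=50,
    temperature=0.9,
    do_sample=True,
)
```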
Notebook metadata: kernel "Python 3 (ipykernel)", Python 3.10.12, nbformat 4.