SocialLocalMobile committed on
Commit 9b418bf · verified · 1 Parent(s): 44190f7

Upload Export_Recipe_Llama_3_2_1B_Instruct_QLORA_INT4_EO8.ipynb

Export_Recipe_Llama_3_2_1B_Instruct_QLORA_INT4_EO8.ipynb ADDED
@@ -0,0 +1,1431 @@
+ {
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "provenance": []
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
623
+ "cells": [
624
+ {
625
+ "cell_type": "markdown",
626
+ "source": [
627
+ "**Step 1: Setting Up ExecuTorch**"
628
+ ],
629
+ "metadata": {
630
+ "id": "ZOq6nHdVElC6"
631
+ }
632
+ },
633
+ {
634
+ "cell_type": "code",
635
+ "execution_count": 1,
636
+ "metadata": {
637
+ "colab": {
638
+ "base_uri": "https://localhost:8080/"
639
+ },
640
+ "collapsed": true,
641
+ "id": "EWz6sJfRDQKT",
642
+ "outputId": "743d233a-dfcb-42a6-8101-87c86cf5a740"
643
+ },
644
+ "outputs": [
645
+ {
646
+ "output_type": "stream",
647
+ "name": "stdout",
648
+ "text": [
649
+ "Requirement already satisfied: executorch in /usr/local/lib/python3.11/dist-packages (0.6.0+cpu)\n",
650
+ "Requirement already satisfied: expecttest in /usr/local/lib/python3.11/dist-packages (from executorch) (0.3.0)\n",
651
+ "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.11/dist-packages (from executorch) (25.2.10)\n",
652
+ "Requirement already satisfied: hypothesis in /usr/local/lib/python3.11/dist-packages (from executorch) (6.131.0)\n",
653
+ "Requirement already satisfied: mpmath==1.3.0 in /usr/local/lib/python3.11/dist-packages (from executorch) (1.3.0)\n",
654
+ "Requirement already satisfied: numpy>=2.0.0 in /usr/local/lib/python3.11/dist-packages (from executorch) (2.0.2)\n",
655
+ "Requirement already satisfied: packaging in /usr/local/lib/python3.11/dist-packages (from executorch) (24.2)\n",
656
+ "Requirement already satisfied: pandas>=2.2.2 in /usr/local/lib/python3.11/dist-packages (from executorch) (2.2.2)\n",
657
+ "Requirement already satisfied: parameterized in /usr/local/lib/python3.11/dist-packages (from executorch) (0.9.0)\n",
658
+ "Requirement already satisfied: pytest in /usr/local/lib/python3.11/dist-packages (from executorch) (8.3.5)\n",
659
+ "Requirement already satisfied: pytest-xdist in /usr/local/lib/python3.11/dist-packages (from executorch) (3.6.1)\n",
660
+ "Requirement already satisfied: pytest-rerunfailures in /usr/local/lib/python3.11/dist-packages (from executorch) (15.0)\n",
661
+ "Requirement already satisfied: pyyaml in /usr/local/lib/python3.11/dist-packages (from executorch) (6.0.2)\n",
662
+ "Requirement already satisfied: ruamel.yaml in /usr/local/lib/python3.11/dist-packages (from executorch) (0.18.10)\n",
663
+ "Requirement already satisfied: sympy in /usr/local/lib/python3.11/dist-packages (from executorch) (1.13.3)\n",
664
+ "Requirement already satisfied: tabulate in /usr/local/lib/python3.11/dist-packages (from executorch) (0.9.0)\n",
665
+ "Requirement already satisfied: torchao==0.10.0 in /usr/local/lib/python3.11/dist-packages (from executorch) (0.10.0+cpu)\n",
666
+ "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.11/dist-packages (from executorch) (4.13.1)\n",
667
+ "Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.11/dist-packages (from pandas>=2.2.2->executorch) (2.8.2)\n",
668
+ "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.11/dist-packages (from pandas>=2.2.2->executorch) (2025.2)\n",
669
+ "Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.11/dist-packages (from pandas>=2.2.2->executorch) (2025.2)\n",
670
+ "Requirement already satisfied: attrs>=22.2.0 in /usr/local/lib/python3.11/dist-packages (from hypothesis->executorch) (25.3.0)\n",
671
+ "Requirement already satisfied: sortedcontainers<3.0.0,>=2.1.0 in /usr/local/lib/python3.11/dist-packages (from hypothesis->executorch) (2.4.0)\n",
672
+ "Requirement already satisfied: iniconfig in /usr/local/lib/python3.11/dist-packages (from pytest->executorch) (2.1.0)\n",
673
+ "Requirement already satisfied: pluggy<2,>=1.5 in /usr/local/lib/python3.11/dist-packages (from pytest->executorch) (1.5.0)\n",
674
+ "Requirement already satisfied: execnet>=2.1 in /usr/local/lib/python3.11/dist-packages (from pytest-xdist->executorch) (2.1.1)\n",
675
+ "Requirement already satisfied: ruamel.yaml.clib>=0.2.7 in /usr/local/lib/python3.11/dist-packages (from ruamel.yaml->executorch) (0.2.12)\n",
676
+ "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.11/dist-packages (from python-dateutil>=2.8.2->pandas>=2.2.2->executorch) (1.17.0)\n"
677
+ ]
678
+ }
679
+ ],
680
+ "source": [
681
+ "!pip install executorch\n",
682
+ "# Testing release candidate\n",
683
+ "#!pip install --extra-index-url https://download.pytorch.org/whl/test/cpu executorch==0.6.0 torch==2.7.0 torchaudio==2.7.0 torchvision==0.22.0"
684
+ ]
685
+ },
686
+ {
687
+ "cell_type": "markdown",
688
+ "source": [],
689
+ "metadata": {
690
+ "id": "0DwPYBnLEChh"
691
+ }
692
+ },
693
+ {
694
+ "cell_type": "code",
695
+ "source": [
696
+ "# Installing dependencies for Llama\n",
697
+ "!pip install transformers accelerate sentencepiece huggingface_hub tiktoken torchtune tokenizers snakeviz lm_eval==0.4.5 blobfile"
698
+ ],
699
+ "metadata": {
700
+ "colab": {
701
+ "base_uri": "https://localhost:8080/"
702
+ },
703
+ "collapsed": true,
704
+ "id": "vhSLDN0sDp9-",
705
+ "outputId": "4343173c-4f02-4613-bec5-84c423bcec22"
706
+ },
707
+ "execution_count": 2,
708
+ "outputs": [
709
+ {
710
+ "output_type": "stream",
711
+ "name": "stdout",
712
+ "text": [
713
+ "Requirement already satisfied: transformers in /usr/local/lib/python3.11/dist-packages (4.50.3)\n",
714
+ "Requirement already satisfied: accelerate in /usr/local/lib/python3.11/dist-packages (1.5.2)\n",
715
+ "Requirement already satisfied: sentencepiece in /usr/local/lib/python3.11/dist-packages (0.2.0)\n",
716
+ "Requirement already satisfied: huggingface_hub in /usr/local/lib/python3.11/dist-packages (0.30.1)\n",
717
+ "Requirement already satisfied: tiktoken in /usr/local/lib/python3.11/dist-packages (0.9.0)\n",
718
+ "Requirement already satisfied: torchtune in /usr/local/lib/python3.11/dist-packages (0.6.1)\n",
719
+ "Requirement already satisfied: tokenizers in /usr/local/lib/python3.11/dist-packages (0.21.1)\n",
720
+ "Requirement already satisfied: snakeviz in /usr/local/lib/python3.11/dist-packages (2.2.2)\n",
721
+ "Requirement already satisfied: lm_eval==0.4.5 in /usr/local/lib/python3.11/dist-packages (0.4.5)\n",
722
+ "Requirement already satisfied: blobfile in /usr/local/lib/python3.11/dist-packages (3.0.0)\n",
723
+ "Requirement already satisfied: evaluate in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (0.4.3)\n",
724
+ "Requirement already satisfied: datasets>=2.16.0 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (3.5.0)\n",
725
+ "Requirement already satisfied: jsonlines in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (4.0.0)\n",
726
+ "Requirement already satisfied: numexpr in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (2.10.2)\n",
727
+ "Requirement already satisfied: peft>=0.2.0 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (0.14.0)\n",
728
+ "Requirement already satisfied: pybind11>=2.6.2 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (2.13.6)\n",
729
+ "Requirement already satisfied: pytablewriter in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (1.2.1)\n",
730
+ "Requirement already satisfied: rouge-score>=0.0.4 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (0.1.2)\n",
731
+ "Requirement already satisfied: sacrebleu>=1.5.0 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (2.5.1)\n",
732
+ "Requirement already satisfied: scikit-learn>=0.24.1 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (1.6.1)\n",
733
+ "Requirement already satisfied: sqlitedict in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (2.1.0)\n",
734
+ "Requirement already satisfied: torch>=1.8 in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (2.7.0+cpu)\n",
735
+ "Requirement already satisfied: tqdm-multiprocess in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (0.0.11)\n",
736
+ "Requirement already satisfied: zstandard in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (0.23.0)\n",
737
+ "Requirement already satisfied: dill in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (0.3.8)\n",
738
+ "Requirement already satisfied: word2number in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (1.1)\n",
739
+ "Requirement already satisfied: more-itertools in /usr/local/lib/python3.11/dist-packages (from lm_eval==0.4.5) (10.6.0)\n",
740
+ "Requirement already satisfied: filelock in /usr/local/lib/python3.11/dist-packages (from transformers) (3.18.0)\n",
741
+ "Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.11/dist-packages (from transformers) (2.0.2)\n",
742
+ "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.11/dist-packages (from transformers) (24.2)\n",
743
+ "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.11/dist-packages (from transformers) (6.0.2)\n",
744
+ "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.11/dist-packages (from transformers) (2024.11.6)\n",
745
+ "Requirement already satisfied: requests in /usr/local/lib/python3.11/dist-packages (from transformers) (2.32.3)\n",
746
+ "Requirement already satisfied: safetensors>=0.4.3 in /usr/local/lib/python3.11/dist-packages (from transformers) (0.5.3)\n",
747
+ "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.11/dist-packages (from transformers) (4.67.1)\n",
748
+ "Requirement already satisfied: psutil in /usr/local/lib/python3.11/dist-packages (from accelerate) (5.9.5)\n",
749
+ "Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.11/dist-packages (from huggingface_hub) (2024.12.0)\n",
750
+ "Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.11/dist-packages (from huggingface_hub) (4.13.1)\n",
751
+ "Requirement already satisfied: torchdata==0.11.0 in /usr/local/lib/python3.11/dist-packages (from torchtune) (0.11.0)\n",
752
+ "Requirement already satisfied: kagglehub in /usr/local/lib/python3.11/dist-packages (from torchtune) (0.3.11)\n",
753
+ "Requirement already satisfied: omegaconf in /usr/local/lib/python3.11/dist-packages (from torchtune) (2.3.0)\n",
754
+ "Requirement already satisfied: Pillow>=9.4.0 in /usr/local/lib/python3.11/dist-packages (from torchtune) (11.1.0)\n",
755
+ "Requirement already satisfied: urllib3>=1.25 in /usr/local/lib/python3.11/dist-packages (from torchdata==0.11.0->torchtune) (2.3.0)\n",
756
+ "Requirement already satisfied: tornado>=2.0 in /usr/local/lib/python3.11/dist-packages (from snakeviz) (6.4.2)\n",
757
+ "Requirement already satisfied: pycryptodomex>=3.8 in /usr/local/lib/python3.11/dist-packages (from blobfile) (3.22.0)\n",
758
+ "Requirement already satisfied: lxml>=4.9 in /usr/local/lib/python3.11/dist-packages (from blobfile) (5.3.1)\n",
759
+ "Requirement already satisfied: pyarrow>=15.0.0 in /usr/local/lib/python3.11/dist-packages (from datasets>=2.16.0->lm_eval==0.4.5) (18.1.0)\n",
760
+ "Requirement already satisfied: pandas in /usr/local/lib/python3.11/dist-packages (from datasets>=2.16.0->lm_eval==0.4.5) (2.2.2)\n",
761
+ "Requirement already satisfied: xxhash in /usr/local/lib/python3.11/dist-packages (from datasets>=2.16.0->lm_eval==0.4.5) (3.5.0)\n",
762
+ "Requirement already satisfied: multiprocess<0.70.17 in /usr/local/lib/python3.11/dist-packages (from datasets>=2.16.0->lm_eval==0.4.5) (0.70.16)\n",
763
+ "Requirement already satisfied: aiohttp in /usr/local/lib/python3.11/dist-packages (from datasets>=2.16.0->lm_eval==0.4.5) (3.11.15)\n",
764
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/dist-packages (from requests->transformers) (3.4.1)\n",
765
+ "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/dist-packages (from requests->transformers) (3.10)\n",
766
+ "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/dist-packages (from requests->transformers) (2025.1.31)\n",
767
+ "Requirement already satisfied: absl-py in /usr/local/lib/python3.11/dist-packages (from rouge-score>=0.0.4->lm_eval==0.4.5) (1.4.0)\n",
768
+ "Requirement already satisfied: nltk in /usr/local/lib/python3.11/dist-packages (from rouge-score>=0.0.4->lm_eval==0.4.5) (3.9.1)\n",
769
+ "Requirement already satisfied: six>=1.14.0 in /usr/local/lib/python3.11/dist-packages (from rouge-score>=0.0.4->lm_eval==0.4.5) (1.17.0)\n",
770
+ "Requirement already satisfied: portalocker in /usr/local/lib/python3.11/dist-packages (from sacrebleu>=1.5.0->lm_eval==0.4.5) (3.1.1)\n",
771
+ "Requirement already satisfied: tabulate>=0.8.9 in /usr/local/lib/python3.11/dist-packages (from sacrebleu>=1.5.0->lm_eval==0.4.5) (0.9.0)\n",
772
+ "Requirement already satisfied: colorama in /usr/local/lib/python3.11/dist-packages (from sacrebleu>=1.5.0->lm_eval==0.4.5) (0.4.6)\n",
773
+ "Requirement already satisfied: scipy>=1.6.0 in /usr/local/lib/python3.11/dist-packages (from scikit-learn>=0.24.1->lm_eval==0.4.5) (1.14.1)\n",
774
+ "Requirement already satisfied: joblib>=1.2.0 in /usr/local/lib/python3.11/dist-packages (from scikit-learn>=0.24.1->lm_eval==0.4.5) (1.4.2)\n",
775
+ "Requirement already satisfied: threadpoolctl>=3.1.0 in /usr/local/lib/python3.11/dist-packages (from scikit-learn>=0.24.1->lm_eval==0.4.5) (3.6.0)\n",
776
+ "Requirement already satisfied: sympy>=1.13.3 in /usr/local/lib/python3.11/dist-packages (from torch>=1.8->lm_eval==0.4.5) (1.13.3)\n",
777
+ "Requirement already satisfied: networkx in /usr/local/lib/python3.11/dist-packages (from torch>=1.8->lm_eval==0.4.5) (3.4.2)\n",
778
+ "Requirement already satisfied: jinja2 in /usr/local/lib/python3.11/dist-packages (from torch>=1.8->lm_eval==0.4.5) (3.1.6)\n",
779
+ "Requirement already satisfied: hf-transfer>=0.1.4 in /usr/local/lib/python3.11/dist-packages (from huggingface_hub[hf_transfer]->torchtune) (0.1.9)\n",
780
+ "Requirement already satisfied: attrs>=19.2.0 in /usr/local/lib/python3.11/dist-packages (from jsonlines->lm_eval==0.4.5) (25.3.0)\n",
781
+ "Requirement already satisfied: antlr4-python3-runtime==4.9.* in /usr/local/lib/python3.11/dist-packages (from omegaconf->torchtune) (4.9.3)\n",
782
+ "Requirement already satisfied: setuptools>=38.3.0 in /usr/local/lib/python3.11/dist-packages (from pytablewriter->lm_eval==0.4.5) (75.2.0)\n",
783
+ "Requirement already satisfied: DataProperty<2,>=1.1.0 in /usr/local/lib/python3.11/dist-packages (from pytablewriter->lm_eval==0.4.5) (1.1.0)\n",
784
+ "Requirement already satisfied: mbstrdecoder<2,>=1.0.0 in /usr/local/lib/python3.11/dist-packages (from pytablewriter->lm_eval==0.4.5) (1.1.4)\n",
785
+ "Requirement already satisfied: pathvalidate<4,>=2.3.0 in /usr/local/lib/python3.11/dist-packages (from pytablewriter->lm_eval==0.4.5) (3.2.3)\n",
786
+ "Requirement already satisfied: tabledata<2,>=1.3.1 in /usr/local/lib/python3.11/dist-packages (from pytablewriter->lm_eval==0.4.5) (1.3.4)\n",
787
+ "Requirement already satisfied: tcolorpy<1,>=0.0.5 in /usr/local/lib/python3.11/dist-packages (from pytablewriter->lm_eval==0.4.5) (0.1.7)\n",
788
+ "Requirement already satisfied: typepy<2,>=1.3.2 in /usr/local/lib/python3.11/dist-packages (from typepy[datetime]<2,>=1.3.2->pytablewriter->lm_eval==0.4.5) (1.3.4)\n",
789
+ "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp->datasets>=2.16.0->lm_eval==0.4.5) (2.6.1)\n",
790
+ "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.11/dist-packages (from aiohttp->datasets>=2.16.0->lm_eval==0.4.5) (1.3.2)\n",
791
+ "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.11/dist-packages (from aiohttp->datasets>=2.16.0->lm_eval==0.4.5) (1.5.0)\n",
792
+ "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.11/dist-packages (from aiohttp->datasets>=2.16.0->lm_eval==0.4.5) (6.2.0)\n",
793
+ "Requirement already satisfied: propcache>=0.2.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp->datasets>=2.16.0->lm_eval==0.4.5) (0.3.1)\n",
794
+ "Requirement already satisfied: yarl<2.0,>=1.17.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp->datasets>=2.16.0->lm_eval==0.4.5) (1.18.3)\n",
795
+ "Requirement already satisfied: chardet<6,>=3.0.4 in /usr/local/lib/python3.11/dist-packages (from mbstrdecoder<2,>=1.0.0->pytablewriter->lm_eval==0.4.5) (5.2.0)\n",
796
+ "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /usr/local/lib/python3.11/dist-packages (from sympy>=1.13.3->torch>=1.8->lm_eval==0.4.5) (1.3.0)\n",
797
+ "Requirement already satisfied: python-dateutil<3.0.0,>=2.8.0 in /usr/local/lib/python3.11/dist-packages (from typepy[datetime]<2,>=1.3.2->pytablewriter->lm_eval==0.4.5) (2.8.2)\n",
798
+ "Requirement already satisfied: pytz>=2018.9 in /usr/local/lib/python3.11/dist-packages (from typepy[datetime]<2,>=1.3.2->pytablewriter->lm_eval==0.4.5) (2025.2)\n",
799
+ "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.11/dist-packages (from jinja2->torch>=1.8->lm_eval==0.4.5) (3.0.2)\n",
800
+ "Requirement already satisfied: click in /usr/local/lib/python3.11/dist-packages (from nltk->rouge-score>=0.0.4->lm_eval==0.4.5) (8.1.8)\n",
801
+ "Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.11/dist-packages (from pandas->datasets>=2.16.0->lm_eval==0.4.5) (2025.2)\n"
802
+ ]
803
+ }
804
+ ]
805
+ },
806
+ {
807
+ "cell_type": "markdown",
808
+ "source": [
809
+ "**Step 2. Download Llama 3.2 1B/3B models**"
810
+ ],
811
+ "metadata": {
812
+ "id": "px0lGiHFErF_"
813
+ }
814
+ },
815
+ {
816
+ "cell_type": "code",
817
+ "source": [
818
+ "from huggingface_hub import login\n",
+ "login()"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 17
+ },
+ "id": "fKKfjA_KEDnU",
+ "outputId": "0bbfc57a-ed7a-45f8-d7c7-d9c556c18260"
+ },
+ "execution_count": 3,
+ "outputs": [
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+ "VBox(children=(HTML(value='<center> <img\\nsrc=https://huggingface.co/front/assets/huggingface_logo-noborder.sv…"
+ ],
+ "application/vnd.jupyter.widget-view+json": {
+ "version_major": 2,
+ "version_minor": 0,
+ "model_id": "c3b751d377094dea8b10f2e07a541944"
+ }
+ },
+ "metadata": {}
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!huggingface-cli download meta-llama/Llama-3.2-1B-Instruct-QLORA_INT4_EO8 --local-dir /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8 --local-dir-use-symlinks False"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "collapsed": true,
+ "id": "JJdsEZaSEEFR",
+ "outputId": "df09bfff-4ca3-487c-d47a-26f4345cc50e"
+ },
+ "execution_count": 4,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "/usr/local/lib/python3.11/dist-packages/huggingface_hub/commands/download.py:139: FutureWarning: Ignoring --local-dir-use-symlinks. Downloading to a local directory does not use symlinks anymore.\n",
+ " warnings.warn(\n",
+ "Fetching 6 files: 0% 0/6 [00:00<?, ?it/s]\n",
+ "Downloading 'README.md' to '/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.cache/huggingface/download/Xn7B-BWUGOee2Y6hCZtEhtFu4BE=.310946eb240c90bd6811285fab0d4abfb1ae8326.incomplete'\n",
+ "Downloading 'tokenizer.model' to '/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.cache/huggingface/download/7iVfz3cUOMr-hyjiqqRDHEwVBAM=.82e9d31979e92ab929cd544440f129d9ecd797b69e327f80f17e1c50d5551b55.incomplete'\n",
+ "Downloading 'consolidated.00.pth' to '/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.cache/huggingface/download/_dLw4ih-O1I9AkO57vYC89Z48Os=.70870ec7d0dbf4990e0af4fc9bce853bb757fcbce6a4ff9c7cfd5370435e48b4.incomplete'\n",
+ "Downloading '.gitattributes' to '/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.cache/huggingface/download/wPaCkH-WbT7GsmxMKKrNZTV4nSM=.a6344aac8c09253b3b630fb776ae94478aa0275b.incomplete'\n",
+ "Downloading 'config.json' to '/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.cache/huggingface/download/8_PA_wEVGiVa2goH2H4KQOQpvVY=.79b5f0f5a9dad9941bd9d61aaaac300d86912ef9.incomplete'\n",
+ "Downloading 'params.json' to '/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.cache/huggingface/download/jqHB00sRqBVJXCrFOHz5gDS2Bg8=.e245b3198c0cc85442d63bb51c28c1acc844c8ff.incomplete'\n",
+ "README.md: 100% 40.2k/40.2k [00:00<00:00, 11.9MB/s]\n",
+ "Download complete. Moving file to /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/README.md\n",
+ "config.json: 100% 27.0/27.0 [00:00<00:00, 173kB/s]\n",
+ "Download complete. Moving file to /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/config.json\n",
+ "tokenizer.model: 100% 2.18M/2.18M [00:00<00:00, 39.9MB/s]\n",
+ "Download complete. Moving file to /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/tokenizer.model\n",
+ "params.json: 100% 332/332 [00:00<00:00, 2.20MB/s]\n",
+ "Download complete. Moving file to /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/params.json\n",
+ ".gitattributes: 100% 1.52k/1.52k [00:00<00:00, 10.1MB/s]\n",
+ "Download complete. Moving file to /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/.gitattributes\n",
+ "Fetching 6 files: 17% 1/6 [00:00<00:02, 2.13it/s]\n",
+ "consolidated.00.pth: 1% 10.5M/1.64G [00:00<01:04, 25.3MB/s]\n",
+ "consolidated.00.pth: 99% 1.63G/1.64G [00:28<00:00, 56.6MB/s]\n",
1223
+ "\n",
1224
+ "consolidated.00.pth: 100% 1.64G/1.64G [00:29<00:00, 56.8MB/s]\u001b[A\u001b[A\n",
1225
+ "\n",
1226
+ "consolidated.00.pth: 100% 1.64G/1.64G [00:29<00:00, 56.2MB/s]\n",
1227
+ "Download complete. Moving file to /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/consolidated.00.pth\n",
1228
+ "Fetching 6 files: 100% 6/6 [00:29<00:00, 4.93s/it]\n",
1229
+ "/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8\n"
1230
+ ]
1231
+ }
1232
+ ]
1233
+ },
1234
+ {
1235
+ "cell_type": "markdown",
1236
+ "source": [
1237
+ "**Step 3: Export to ExecuTorch**"
1238
+ ],
1239
+ "metadata": {
1240
+ "id": "XLsl5STwEyEh"
1241
+ }
1242
+ },
1243
+ {
1244
+ "cell_type": "code",
1245
+ "source": [
1246
+ "!cd /content/; python -m executorch.examples.models.llama.export_llama \\\n",
1247
+ " --model \"llama3_2\" \\\n",
1248
+ " --checkpoint /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/consolidated.00.pth \\\n",
1249
+ " --params /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/params.json \\\n",
1250
+ " -qat \\\n",
1251
+ " -lora 16 \\\n",
1252
+ " --preq_mode 8da4w_output_8da8w \\\n",
1253
+ " --preq_group_size 32 \\\n",
1254
+ " --preq_embedding_quantize 8,0 \\\n",
1255
+ " --use_sdpa_with_kv_cache \\\n",
1256
+ " -kv \\\n",
1257
+ " -X \\\n",
1258
+ " --xnnpack-extended-ops \\\n",
1259
+ " -d fp32 \\\n",
1260
+ " --max_seq_length 2048 \\\n",
1261
+ " --max_context_length 2048 \\\n",
1262
+ " --output_name \"Llama-3.2-1B-Instruct-QLORA_INT4_EO8.pte\" \\\n",
1263
+ " --metadata '{\"get_bos_id\":128000, \"get_eos_ids\":[128009, 128001]}'"
1264
+ ],
1265
+ "metadata": {
1266
+ "colab": {
1267
+ "base_uri": "https://localhost:8080/"
1268
+ },
1269
+ "id": "gXLuFtVVEZov",
1270
+ "outputId": "f6804426-aca7-4728-ed61-7bfb09c8b326"
1271
+ },
1272
+ "execution_count": 7,
1273
+ "outputs": [
1274
+ {
1275
+ "output_type": "stream",
1276
+ "name": "stdout",
1277
+ "text": [
1278
+ "[INFO 2025-04-10 15:18:38,244 utils.py:162] NumExpr defaulting to 2 threads.\n",
1279
+ "[INFO 2025-04-10 15:18:39,380 export_llama_lib.py:684] Applying quantizers: []\n",
1280
+ "Mixed dtype model. Dtype of layers.0.attention.wq.adaptor.A.weight: torch.bfloat16. Mismatches in the checkpoint: [('tok_embeddings.weight', torch.int8), ('tok_embeddings.scales', torch.float32), ('layers.0.attention.wq.weight', torch.int8), ('layers.0.attention.wq.scales', torch.float32), ('layers.0.attention.wk.weight', torch.int8), ('layers.0.attention.wk.scales', torch.float32), ('layers.0.attention.wv.weight', torch.int8), ('layers.0.attention.wv.scales', torch.float32), ('layers.0.attention.wo.weight', torch.int8), ('layers.0.attention.wo.scales', torch.float32), ('layers.0.feed_forward.w1.weight', torch.int8), ('layers.0.feed_forward.w1.scales', torch.float32), ('layers.0.feed_forward.w3.weight', torch.int8), ('layers.0.feed_forward.w3.scales', torch.float32), ('layers.0.feed_forward.w2.weight', torch.int8), ('layers.0.feed_forward.w2.scales', torch.float32), ('layers.1.attention.wq.weight', torch.int8), ('layers.1.attention.wq.scales', torch.float32), ('layers.1.attention.wk.weight', torch.int8), ('layers.1.attention.wk.scales', torch.float32), ('layers.1.attention.wv.weight', torch.int8), ('layers.1.attention.wv.scales', torch.float32), ('layers.1.attention.wo.weight', torch.int8), ('layers.1.attention.wo.scales', torch.float32), ('layers.1.feed_forward.w1.weight', torch.int8), ('layers.1.feed_forward.w1.scales', torch.float32), ('layers.1.feed_forward.w3.weight', torch.int8), ('layers.1.feed_forward.w3.scales', torch.float32), ('layers.1.feed_forward.w2.weight', torch.int8), ('layers.1.feed_forward.w2.scales', torch.float32), ('layers.2.attention.wq.weight', torch.int8), ('layers.2.attention.wq.scales', torch.float32), ('layers.2.attention.wk.weight', torch.int8), ('layers.2.attention.wk.scales', torch.float32), ('layers.2.attention.wv.weight', torch.int8), ('layers.2.attention.wv.scales', torch.float32), ('layers.2.attention.wo.weight', torch.int8), ('layers.2.attention.wo.scales', torch.float32), ('layers.2.feed_forward.w1.weight', torch.int8), ('layers.2.feed_forward.w1.scales', torch.float32), ('layers.2.feed_forward.w3.weight', torch.int8), ('layers.2.feed_forward.w3.scales', torch.float32), ('layers.2.feed_forward.w2.weight', torch.int8), ('layers.2.feed_forward.w2.scales', torch.float32), ('layers.3.attention.wq.weight', torch.int8), ('layers.3.attention.wq.scales', torch.float32), ('layers.3.attention.wk.weight', torch.int8), ('layers.3.attention.wk.scales', torch.float32), ('layers.3.attention.wv.weight', torch.int8), ('layers.3.attention.wv.scales', torch.float32), ('layers.3.attention.wo.weight', torch.int8), ('layers.3.attention.wo.scales', torch.float32), ('layers.3.feed_forward.w1.weight', torch.int8), ('layers.3.feed_forward.w1.scales', torch.float32), ('layers.3.feed_forward.w3.weight', torch.int8), ('layers.3.feed_forward.w3.scales', torch.float32), ('layers.3.feed_forward.w2.weight', torch.int8), ('layers.3.feed_forward.w2.scales', torch.float32), ('layers.4.attention.wq.weight', torch.int8), ('layers.4.attention.wq.scales', torch.float32), ('layers.4.attention.wk.weight', torch.int8), ('layers.4.attention.wk.scales', torch.float32), ('layers.4.attention.wv.weight', torch.int8), ('layers.4.attention.wv.scales', torch.float32), ('layers.4.attention.wo.weight', torch.int8), ('layers.4.attention.wo.scales', torch.float32), ('layers.4.feed_forward.w1.weight', torch.int8), ('layers.4.feed_forward.w1.scales', torch.float32), ('layers.4.feed_forward.w3.weight', torch.int8), ('layers.4.feed_forward.w3.scales', torch.float32), ('layers.4.feed_forward.w2.weight', 
torch.int8), ('layers.4.feed_forward.w2.scales', torch.float32), ('layers.5.attention.wq.weight', torch.int8), ('layers.5.attention.wq.scales', torch.float32), ('layers.5.attention.wk.weight', torch.int8), ('layers.5.attention.wk.scales', torch.float32), ('layers.5.attention.wv.weight', torch.int8), ('layers.5.attention.wv.scales', torch.float32), ('layers.5.attention.wo.weight', torch.int8), ('layers.5.attention.wo.scales', torch.float32), ('layers.5.feed_forward.w1.weight', torch.int8), ('layers.5.feed_forward.w1.scales', torch.float32), ('layers.5.feed_forward.w3.weight', torch.int8), ('layers.5.feed_forward.w3.scales', torch.float32), ('layers.5.feed_forward.w2.weight', torch.int8), ('layers.5.feed_forward.w2.scales', torch.float32), ('layers.6.attention.wq.weight', torch.int8), ('layers.6.attention.wq.scales', torch.float32), ('layers.6.attention.wk.weight', torch.int8), ('layers.6.attention.wk.scales', torch.float32), ('layers.6.attention.wv.weight', torch.int8), ('layers.6.attention.wv.scales', torch.float32), ('layers.6.attention.wo.weight', torch.int8), ('layers.6.attention.wo.scales', torch.float32), ('layers.6.feed_forward.w1.weight', torch.int8), ('layers.6.feed_forward.w1.scales', torch.float32), ('layers.6.feed_forward.w3.weight', torch.int8), ('layers.6.feed_forward.w3.scales', torch.float32), ('layers.6.feed_forward.w2.weight', torch.int8), ('layers.6.feed_forward.w2.scales', torch.float32), ('layers.7.attention.wq.weight', torch.int8), ('layers.7.attention.wq.scales', torch.float32), ('layers.7.attention.wk.weight', torch.int8), ('layers.7.attention.wk.scales', torch.float32), ('layers.7.attention.wv.weight', torch.int8), ('layers.7.attention.wv.scales', torch.float32), ('layers.7.attention.wo.weight', torch.int8), ('layers.7.attention.wo.scales', torch.float32), ('layers.7.feed_forward.w1.weight', torch.int8), ('layers.7.feed_forward.w1.scales', torch.float32), ('layers.7.feed_forward.w3.weight', torch.int8), ('layers.7.feed_forward.w3.scales', torch.float32), ('layers.7.feed_forward.w2.weight', torch.int8), ('layers.7.feed_forward.w2.scales', torch.float32), ('layers.8.attention.wq.weight', torch.int8), ('layers.8.attention.wq.scales', torch.float32), ('layers.8.attention.wk.weight', torch.int8), ('layers.8.attention.wk.scales', torch.float32), ('layers.8.attention.wv.weight', torch.int8), ('layers.8.attention.wv.scales', torch.float32), ('layers.8.attention.wo.weight', torch.int8), ('layers.8.attention.wo.scales', torch.float32), ('layers.8.feed_forward.w1.weight', torch.int8), ('layers.8.feed_forward.w1.scales', torch.float32), ('layers.8.feed_forward.w3.weight', torch.int8), ('layers.8.feed_forward.w3.scales', torch.float32), ('layers.8.feed_forward.w2.weight', torch.int8), ('layers.8.feed_forward.w2.scales', torch.float32), ('layers.9.attention.wq.weight', torch.int8), ('layers.9.attention.wq.scales', torch.float32), ('layers.9.attention.wk.weight', torch.int8), ('layers.9.attention.wk.scales', torch.float32), ('layers.9.attention.wv.weight', torch.int8), ('layers.9.attention.wv.scales', torch.float32), ('layers.9.attention.wo.weight', torch.int8), ('layers.9.attention.wo.scales', torch.float32), ('layers.9.feed_forward.w1.weight', torch.int8), ('layers.9.feed_forward.w1.scales', torch.float32), ('layers.9.feed_forward.w3.weight', torch.int8), ('layers.9.feed_forward.w3.scales', torch.float32), ('layers.9.feed_forward.w2.weight', torch.int8), ('layers.9.feed_forward.w2.scales', torch.float32), ('layers.10.attention.wq.weight', torch.int8), 
('layers.10.attention.wq.scales', torch.float32), ('layers.10.attention.wk.weight', torch.int8), ('layers.10.attention.wk.scales', torch.float32), ('layers.10.attention.wv.weight', torch.int8), ('layers.10.attention.wv.scales', torch.float32), ('layers.10.attention.wo.weight', torch.int8), ('layers.10.attention.wo.scales', torch.float32), ('layers.10.feed_forward.w1.weight', torch.int8), ('layers.10.feed_forward.w1.scales', torch.float32), ('layers.10.feed_forward.w3.weight', torch.int8), ('layers.10.feed_forward.w3.scales', torch.float32), ('layers.10.feed_forward.w2.weight', torch.int8), ('layers.10.feed_forward.w2.scales', torch.float32), ('layers.11.attention.wq.weight', torch.int8), ('layers.11.attention.wq.scales', torch.float32), ('layers.11.attention.wk.weight', torch.int8), ('layers.11.attention.wk.scales', torch.float32), ('layers.11.attention.wv.weight', torch.int8), ('layers.11.attention.wv.scales', torch.float32), ('layers.11.attention.wo.weight', torch.int8), ('layers.11.attention.wo.scales', torch.float32), ('layers.11.feed_forward.w1.weight', torch.int8), ('layers.11.feed_forward.w1.scales', torch.float32), ('layers.11.feed_forward.w3.weight', torch.int8), ('layers.11.feed_forward.w3.scales', torch.float32), ('layers.11.feed_forward.w2.weight', torch.int8), ('layers.11.feed_forward.w2.scales', torch.float32), ('layers.12.attention.wq.weight', torch.int8), ('layers.12.attention.wq.scales', torch.float32), ('layers.12.attention.wk.weight', torch.int8), ('layers.12.attention.wk.scales', torch.float32), ('layers.12.attention.wv.weight', torch.int8), ('layers.12.attention.wv.scales', torch.float32), ('layers.12.attention.wo.weight', torch.int8), ('layers.12.attention.wo.scales', torch.float32), ('layers.12.feed_forward.w1.weight', torch.int8), ('layers.12.feed_forward.w1.scales', torch.float32), ('layers.12.feed_forward.w3.weight', torch.int8), ('layers.12.feed_forward.w3.scales', torch.float32), ('layers.12.feed_forward.w2.weight', torch.int8), ('layers.12.feed_forward.w2.scales', torch.float32), ('layers.13.attention.wq.weight', torch.int8), ('layers.13.attention.wq.scales', torch.float32), ('layers.13.attention.wk.weight', torch.int8), ('layers.13.attention.wk.scales', torch.float32), ('layers.13.attention.wv.weight', torch.int8), ('layers.13.attention.wv.scales', torch.float32), ('layers.13.attention.wo.weight', torch.int8), ('layers.13.attention.wo.scales', torch.float32), ('layers.13.feed_forward.w1.weight', torch.int8), ('layers.13.feed_forward.w1.scales', torch.float32), ('layers.13.feed_forward.w3.weight', torch.int8), ('layers.13.feed_forward.w3.scales', torch.float32), ('layers.13.feed_forward.w2.weight', torch.int8), ('layers.13.feed_forward.w2.scales', torch.float32), ('layers.14.attention.wq.weight', torch.int8), ('layers.14.attention.wq.scales', torch.float32), ('layers.14.attention.wk.weight', torch.int8), ('layers.14.attention.wk.scales', torch.float32), ('layers.14.attention.wv.weight', torch.int8), ('layers.14.attention.wv.scales', torch.float32), ('layers.14.attention.wo.weight', torch.int8), ('layers.14.attention.wo.scales', torch.float32), ('layers.14.feed_forward.w1.weight', torch.int8), ('layers.14.feed_forward.w1.scales', torch.float32), ('layers.14.feed_forward.w3.weight', torch.int8), ('layers.14.feed_forward.w3.scales', torch.float32), ('layers.14.feed_forward.w2.weight', torch.int8), ('layers.14.feed_forward.w2.scales', torch.float32), ('layers.15.attention.wq.weight', torch.int8), ('layers.15.attention.wq.scales', torch.float32), 
('layers.15.attention.wk.weight', torch.int8), ('layers.15.attention.wk.scales', torch.float32), ('layers.15.attention.wv.weight', torch.int8), ('layers.15.attention.wv.scales', torch.float32), ('layers.15.attention.wo.weight', torch.int8), ('layers.15.attention.wo.scales', torch.float32), ('layers.15.feed_forward.w1.weight', torch.int8), ('layers.15.feed_forward.w1.scales', torch.float32), ('layers.15.feed_forward.w3.weight', torch.int8), ('layers.15.feed_forward.w3.scales', torch.float32), ('layers.15.feed_forward.w2.weight', torch.int8), ('layers.15.feed_forward.w2.scales', torch.float32), ('output.weight', torch.int8), ('output.scales', torch.float32)]\n",
1281
+ "Using QAT quantization.\n",
1282
+ "[INFO 2025-04-10 15:18:45,157 export_llama_lib.py:649] Checkpoint dtype: torch.bfloat16\n",
1283
+ "[INFO 2025-04-10 15:18:45,157 quantized_kv_cache.py:277] Replacing KVCache with CustomKVCache. This modifies the model in place.\n",
1284
+ "[INFO 2025-04-10 15:18:45,182 custom_ops.py:34] Looking for libcustom_ops_aot_lib.so in /usr/local/lib/python3.11/dist-packages/executorch/extension/llm/custom_ops\n",
1285
+ "[INFO 2025-04-10 15:18:45,183 custom_ops.py:39] Loading custom ops library: /usr/local/lib/python3.11/dist-packages/executorch/extension/llm/custom_ops/libcustom_ops_aot_lib.so\n",
1286
+ "[INFO 2025-04-10 15:18:45,194 builder.py:173] Model after source transforms: Transformer(\n",
1287
+ " (tok_embeddings): QuantizedGroupEmbedding()\n",
1288
+ " (rope): Rope(\n",
1289
+ " (apply_rotary_emb): RotaryEmbedding()\n",
1290
+ " )\n",
1291
+ " (layers): ModuleList(\n",
1292
+ " (0-15): 16 x TransformerBlock(\n",
1293
+ " (attention): AttentionMHA(\n",
1294
+ " (wq): Int8DynActInt4WeightLinearLoRA(\n",
1295
+ " (adaptor): LoRAAdaptorLinear(\n",
1296
+ " (A): Linear(in_features=2048, out_features=16, bias=False)\n",
1297
+ " (B): Linear(in_features=16, out_features=2048, bias=False)\n",
1298
+ " )\n",
1299
+ " )\n",
1300
+ " (wk): Int8DynActInt4WeightLinearLoRA(\n",
1301
+ " (adaptor): LoRAAdaptorLinear(\n",
1302
+ " (A): Linear(in_features=2048, out_features=16, bias=False)\n",
1303
+ " (B): Linear(in_features=16, out_features=512, bias=False)\n",
1304
+ " )\n",
1305
+ " )\n",
1306
+ " (wv): Int8DynActInt4WeightLinearLoRA(\n",
1307
+ " (adaptor): LoRAAdaptorLinear(\n",
1308
+ " (A): Linear(in_features=2048, out_features=16, bias=False)\n",
1309
+ " (B): Linear(in_features=16, out_features=512, bias=False)\n",
1310
+ " )\n",
1311
+ " )\n",
1312
+ " (wo): Int8DynActInt4WeightLinearLoRA(\n",
1313
+ " (adaptor): LoRAAdaptorLinear(\n",
1314
+ " (A): Linear(in_features=2048, out_features=16, bias=False)\n",
1315
+ " (B): Linear(in_features=16, out_features=2048, bias=False)\n",
1316
+ " )\n",
1317
+ " )\n",
1318
+ " (rope): Rope(\n",
1319
+ " (apply_rotary_emb): RotaryEmbedding()\n",
1320
+ " )\n",
1321
+ " (kv_cache): CustomKVCache()\n",
1322
+ " (SDPA): SDPACustom()\n",
1323
+ " )\n",
1324
+ " (feed_forward): FeedForward(\n",
1325
+ " (w1): Int8DynActInt4WeightLinearLoRA(\n",
1326
+ " (adaptor): LoRAAdaptorLinear(\n",
1327
+ " (A): Linear(in_features=2048, out_features=16, bias=False)\n",
1328
+ " (B): Linear(in_features=16, out_features=8192, bias=False)\n",
1329
+ " )\n",
1330
+ " )\n",
1331
+ " (w2): Int8DynActInt4WeightLinearLoRA(\n",
1332
+ " (adaptor): LoRAAdaptorLinear(\n",
1333
+ " (A): Linear(in_features=8192, out_features=16, bias=False)\n",
1334
+ " (B): Linear(in_features=16, out_features=2048, bias=False)\n",
1335
+ " )\n",
1336
+ " )\n",
1337
+ " (w3): Int8DynActInt4WeightLinearLoRA(\n",
1338
+ " (adaptor): LoRAAdaptorLinear(\n",
1339
+ " (A): Linear(in_features=2048, out_features=16, bias=False)\n",
1340
+ " (B): Linear(in_features=16, out_features=8192, bias=False)\n",
1341
+ " )\n",
1342
+ " )\n",
1343
+ " )\n",
1344
+ " (attention_norm): RMSNorm()\n",
1345
+ " (ffn_norm): RMSNorm()\n",
1346
+ " )\n",
1347
+ " )\n",
1348
+ " (norm): RMSNorm()\n",
1349
+ " (output): Int8DynActInt8WeightLinear()\n",
1350
+ ")\n",
1351
+ "[INFO 2025-04-10 15:18:45,227 builder.py:228] Exporting with:\n",
1352
+ "[INFO 2025-04-10 15:18:45,229 builder.py:229] inputs: (tensor([[2, 3, 4]]), {'input_pos': tensor([0])})\n",
1353
+ "[INFO 2025-04-10 15:18:45,229 builder.py:230] kwargs: None\n",
1354
+ "[INFO 2025-04-10 15:18:45,230 builder.py:231] dynamic shapes: ({1: <class 'executorch.extension.llm.export.builder.token_dim'>}, {'input_pos': {0: 1}})\n",
1355
+ "[INFO 2025-04-10 15:19:21,627 builder.py:262] Running canonical pass: RemoveRedundantTransposes\n",
1356
+ "[INFO 2025-04-10 15:19:21,791 export_llama_lib.py:755] Lowering model using following partitioner(s): \n",
1357
+ "[INFO 2025-04-10 15:19:21,791 export_llama_lib.py:757] --> XnnpackDynamicallyQuantizedPartitioner\n",
1358
+ "[INFO 2025-04-10 15:19:21,791 export_llama_lib.py:757] --> XnnpackPartitioner\n",
1359
+ "[INFO 2025-04-10 15:19:21,791 builder.py:348] Using pt2e [] to quantizing the model...\n",
1360
+ "[INFO 2025-04-10 15:19:21,791 builder.py:399] No quantizer provided, passing...\n",
1361
+ "[INFO 2025-04-10 15:19:21,792 builder.py:226] Re-exporting with:\n",
1362
+ "[INFO 2025-04-10 15:19:21,793 builder.py:229] inputs: (tensor([[2, 3, 4]]), {'input_pos': tensor([0])})\n",
1363
+ "[INFO 2025-04-10 15:19:21,793 builder.py:230] kwargs: None\n",
1364
+ "[INFO 2025-04-10 15:19:21,793 builder.py:231] dynamic shapes: ({1: <class 'executorch.extension.llm.export.builder.token_dim'>}, {'input_pos': {0: 1}})\n",
1365
+ "/usr/local/lib/python3.11/dist-packages/executorch/exir/emit/_emitter.py:1592: UserWarning: Mutation on a buffer in the model is detected. ExecuTorch assumes buffers that are mutated in the graph have a meaningless initial state, only the shape and dtype will be serialized, unless a pass which sets meta[\"et_init_buffer\"] to True such as InitializedMutableBufferPass is run.\n",
1366
+ " warnings.warn(\n",
1367
+ "[INFO 2025-04-10 15:27:34,732 builder.py:518] Required memory for activation in bytes: [0, 486891328]\n",
1368
+ "modelname: Llama-3.2-1B-Instruct-QLORA_INT4_EO8\n",
1369
+ "output_file: Llama-3.2-1B-Instruct-QLORA_INT4_EO8.pte\n",
1370
+ "[INFO 2025-04-10 15:27:58,585 utils.py:141] Saved exported program to Llama-3.2-1B-Instruct-QLORA_INT4_EO8.pte\n"
1371
+ ]
1372
+ }
1373
+ ]
1374
+ },
1375
+ {
1376
+ "cell_type": "markdown",
1377
+ "source": [],
1378
+ "metadata": {
1379
+ "id": "d_urCPkvEi98"
1380
+ }
1381
+ },
1382
+ {
1383
+ "cell_type": "code",
1384
+ "source": [
1385
+ "!mv /content/Llama-3.2-1B-Instruct-QLORA_INT4_EO8.pte /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/"
1386
+ ],
1387
+ "metadata": {
1388
+ "id": "XOAVJceLE68b"
1389
+ },
1390
+ "execution_count": 8,
1391
+ "outputs": []
1392
+ },
1393
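+ {
+ "cell_type": "markdown",
+ "source": [
+ "Optional sanity check (an addition to the recipe, not part of the original): confirm the `.pte` landed in the model folder and loads before uploading. The paths come from the cells above; the `executorch.runtime` Python bindings are assumed to be present in this pip install, so the load step is guarded."
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import os\n",
+ "\n",
+ "pte_path = \"/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/Llama-3.2-1B-Instruct-QLORA_INT4_EO8.pte\"\n",
+ "assert os.path.isfile(pte_path), f\"missing export: {pte_path}\"\n",
+ "print(f\"size: {os.path.getsize(pte_path) / 1e9:.2f} GB\")  # upload log below reports ~1.18G\n",
+ "\n",
+ "# Try loading the program with the ExecuTorch Python runtime; skip gracefully\n",
+ "# if these bindings are not available in this build.\n",
+ "try:\n",
+ "    from executorch.runtime import Runtime\n",
+ "    program = Runtime.get().load_program(pte_path)\n",
+ "    program.load_method(\"forward\")\n",
+ "    print(\"forward method loads OK\")\n",
+ "except Exception as e:\n",
+ "    print(f\"runtime check skipped: {e}\")"
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },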
+ {
1394
+ "cell_type": "markdown",
1395
+ "source": [
1396
+ "**Step 4: Upload to HF**"
1397
+ ],
1398
+ "metadata": {
1399
+ "id": "-urRwR_iF0QX"
1400
+ }
1401
+ },
1402
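+ {
+ "cell_type": "markdown",
+ "source": [
+ "The upload needs write access to the target repo, so configure a token first (e.g. `huggingface-cli login` or the `HF_TOKEN` environment variable)."
+ ],
+ "metadata": {}
+ },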
+ {
1403
+ "cell_type": "code",
1404
+ "source": [
1405
+ "!huggingface-cli upload executorch-community/Llama-3.2-1B-Instruct-QLORA_INT4_EO8-ET /content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8/ --exclude=\"*.pth\""
1406
+ ],
1407
+ "metadata": {
1408
+ "colab": {
1409
+ "base_uri": "https://localhost:8080/"
1410
+ },
1411
+ "id": "9YvGfvgxF8sn",
1412
+ "outputId": "77261b89-7f7f-499a-e855-a577b1af6cb7"
1413
+ },
1414
+ "execution_count": 9,
1415
+ "outputs": [
1416
+ {
1417
+ "output_type": "stream",
1418
+ "name": "stdout",
1419
+ "text": [
1420
+ "Consider using `hf_transfer` for faster uploads. This solution comes with some limitations. See https://huggingface.co/docs/huggingface_hub/hf_transfer for more details.\n",
1421
+ "Start hashing 6 files.\n",
1422
+ "Finished hashing 6 files.\n",
1423
+ "Llama-3.2-1B-Instruct-QLORA_INT4_EO8.pte: 100% 1.18G/1.18G [00:30<00:00, 38.3MB/s]\n",
1424
+ "Removing 3 file(s) from commit that have not changed.\n",
1425
+ "https://huggingface.co/executorch-community/Llama-3.2-1B-Instruct-QLORA_INT4_EO8-ET/tree/main/.\n"
1426
+ ]
1427
+ }
1428
+ ]
1429
+ },
1430
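+ {
+ "cell_type": "markdown",
+ "source": [
+ "For scripted runs, the same upload can be done from Python with `huggingface_hub` instead of the CLI. A minimal sketch (repo id and exclude pattern taken from the cell above, token assumed to be configured already):"
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from huggingface_hub import HfApi\n",
+ "\n",
+ "# Mirrors the huggingface-cli call: push the folder, skipping the original .pth checkpoint.\n",
+ "HfApi().upload_folder(\n",
+ "    folder_path=\"/content/models/Llama-3.2-1B-Instruct-QLORA_INT4_EO8\",\n",
+ "    repo_id=\"executorch-community/Llama-3.2-1B-Instruct-QLORA_INT4_EO8-ET\",\n",
+ "    ignore_patterns=[\"*.pth\"],\n",
+ ")"
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ }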
+ ]
1431
+ }