{
"lambdas": [
[
-0.008600689470767975,
-0.27110379934310913,
0.4651559889316559
],
[
-0.03290576487779617,
0.17893846333026886,
0.8713870048522949
],
[
0.15764102339744568,
-0.07096076756715775,
0.1824948787689209
],
[
0.535243809223175,
0.8895148038864136,
0.8248310685157776
],
[
0.32287347316741943,
-0.27368971705436707,
0.4297662377357483
],
[
-0.10271385312080383,
0.1222417801618576,
-0.05851483345031738
],
[
0.5892490744590759,
0.5596224665641785,
-0.09430505335330963
],
[
0.5037448406219482,
0.5673904418945312,
0.44907820224761963
],
[
0.04533325880765915,
-0.012348051182925701,
0.46244335174560547
],
[
0.37038925290107727,
-0.4712038040161133,
0.009666825644671917
],
[
-0.3951989710330963,
0.011475293897092342,
0.7200853228569031
],
[
0.4634708762168884,
0.03281472995877266,
0.6417493224143982
],
[
0.15638893842697144,
0.6444075703620911,
0.33211857080459595
],
[
0.005255121272057295,
0.6458375453948975,
0.5695016980171204
],
[
0.5011530518531799,
-0.9443197250366211,
0.2154204100370407
],
[
0.724826991558075,
-0.6742737293243408,
0.7136117815971375
],
[
1.0802881717681885,
0.3505321145057678,
0.7226514220237732
],
[
0.44733840227127075,
0.49678224325180054,
0.14433912932872772
],
[
-0.41781729459762573,
0.07401419430971146,
1.1384369134902954
],
[
-0.0549161396920681,
0.46900907158851624,
0.10407879203557968
],
[
0.21801219880580902,
0.17864975333213806,
0.0976036787033081
],
[
-0.3787577152252197,
0.5172115564346313,
-0.10581710189580917
],
[
0.42402195930480957,
-0.6051362156867981,
0.8535587787628174
],
[
-0.0035712181124836206,
0.2822836637496948,
-0.20307590067386627
],
[
-0.3496381640434265,
-0.09288095682859421,
-0.007612093351781368
],
[
0.5201497077941895,
-0.51421058177948,
0.01208819355815649
],
[
0.3452093005180359,
0.452639639377594,
0.3317156732082367
],
[
-0.05814714357256889,
0.28733497858047485,
0.2111409306526184
],
[
0.1041894257068634,
0.8851682543754578,
0.7849133610725403
],
[
0.17876780033111572,
1.0249152183532715,
0.8022587299346924
],
[
0.2981276214122772,
0.5915073156356812,
0.7403908371925354
],
[
-0.023820744827389717,
0.5682498812675476,
0.7562360167503357
],
[
0.18472808599472046,
-0.38978439569473267,
0.3726218640804291
],
[
-0.5412111878395081,
-0.8462530374526978,
0.07724694162607193
],
[
-0.002844978356733918,
0.6006382703781128,
-0.13113689422607422
],
[
0.575594961643219,
0.03936400264501572,
0.5742295980453491
],
[
-0.17656289041042328,
0.8008044958114624,
0.5196676850318909
],
[
0.392094224691391,
0.04320698603987694,
0.023837342858314514
],
[
-0.39720162749290466,
-0.491166353225708,
-0.5983253717422485
],
[
-0.11212798207998276,
0.8369597792625427,
0.09399152547121048
],
[
-0.3240719437599182,
0.6937462091445923,
0.5937374830245972
],
[
-0.072868712246418,
-0.4130878448486328,
0.503585696220398
],
[
0.008192034438252449,
1.0617002248764038,
0.41240212321281433
],
[
0.10066752880811691,
-0.949410080909729,
0.20881889760494232
],
[
-0.13509775698184967,
0.740580141544342,
-0.3912171721458435
],
[
-0.6682857275009155,
-0.3668968975543976,
-0.4059883952140808
],
[
-0.23979230225086212,
-0.04724369943141937,
-0.1683410406112671
],
[
-0.030199818313121796,
-0.06801427155733109,
-0.2558162212371826
],
[
0.4532090127468109,
0.51909339427948,
-0.06161358952522278
],
[
0.4220469892024994,
-0.30356091260910034,
1.0478200912475586
],
[
0.3710097670555115,
0.5032548308372498,
-0.3149319291114807
],
[
-0.3288426399230957,
0.7235692143440247,
0.09411199390888214
],
[
0.6833317875862122,
0.5491123795509338,
-0.13000386953353882
],
[
-0.18602947890758514,
0.30868157744407654,
0.4305288791656494
],
[
-0.010125085711479187,
0.28111228346824646,
0.2414601594209671
],
[
-0.39355704188346863,
0.17274145781993866,
0.2901214361190796
],
[
0.4727241098880768,
0.7610338926315308,
0.38114458322525024
],
[
-0.17738765478134155,
0.8454529047012329,
0.49150586128234863
],
[
0.18232625722885132,
0.010407311841845512,
0.8297613263130188
],
[
0.12667511403560638,
0.06083226576447487,
-0.8528534173965454
],
[
0.36976495385169983,
0.9643232226371765,
0.9328480362892151
],
[
-0.15158019959926605,
0.7440004348754883,
0.436826229095459
],
[
-0.08947861939668655,
0.3323182165622711,
0.4373578131198883
],
[
0.16054478287696838,
0.12135281413793564,
0.09848786890506744
],
[
0.12028805911540985,
0.35101237893104553,
0.04929043725132942
],
[
1.0261940956115723,
0.6754860281944275,
-0.07319711148738861
],
[
-0.06602616608142853,
0.4309370219707489,
-0.25024744868278503
],
[
0.28278306126594543,
0.7632055878639221,
-0.4026739001274109
],
[
0.20000706613063812,
-0.05334228649735451,
0.683163583278656
],
[
-0.2022043764591217,
0.3802075982093811,
0.9503597021102905
],
[
-0.36965954303741455,
0.28152400255203247,
-0.019405817613005638
],
[
0.0975869670510292,
0.29181671142578125,
-0.12390828877687454
],
[
-0.211451455950737,
0.004585533868521452,
-0.029675688594579697
],
[
-0.12587527930736542,
0.4573831260204315,
0.22337642312049866
],
[
-0.04365545138716698,
0.69129478931427,
0.5938308238983154
],
[
0.09625856578350067,
-0.5298608541488647,
-0.46198388934135437
],
[
0.9985604882240295,
0.5789576172828674,
-0.38657164573669434
],
[
0.4649677574634552,
0.600328803062439,
0.3077949583530426
],
[
-0.0432770811021328,
0.5958781242370605,
0.5499005913734436
],
[
0.8883084058761597,
1.05435049533844,
0.6483041048049927
],
[
0.3577224314212799,
0.29181671142578125,
0.31332382559776306
],
[
0.3399612605571747,
0.6493778228759766,
-0.11633078008890152
],
[
-0.47325441241264343,
0.23329900205135345,
0.16339211165905
],
[
0.1438535898923874,
0.22306153178215027,
0.20849788188934326
],
[
0.14767497777938843,
-0.06503646820783615,
-0.23767957091331482
],
[
0.35725727677345276,
-0.10453184694051743,
0.5414588451385498
],
[
-0.09145285934209824,
0.3344604969024658,
0.04309385269880295
],
[
1.37043035030365,
-0.7319602370262146,
0.42546531558036804
],
[
-0.08218256384134293,
0.9168795943260193,
-0.34223073720932007
],
[
0.15490470826625824,
0.5449513792991638,
0.010205847211182117
],
[
0.033627722412347794,
0.7240769863128662,
0.21537230908870697
],
[
-0.13714678585529327,
-0.11730996519327164,
-0.053494907915592194
],
[
0.8719881176948547,
0.9111194610595703,
0.49565842747688293
],
[
0.5770467519760132,
0.28903597593307495,
0.9253307580947876
],
[
-0.7643914222717285,
-0.3916642367839813,
0.2070000171661377
],
[
0.7137627601623535,
-0.3188117742538452,
-0.7938769459724426
],
[
0.2149561196565628,
-0.8529192805290222,
-0.840792715549469
],
[
0.18863415718078613,
-0.07081560045480728,
0.10497643798589706
],
[
0.7639882564544678,
0.29181671142578125,
-0.31540560722351074
],
[
0.7296006083488464,
0.7830615639686584,
-0.02962500974535942
],
[
-0.1286885142326355,
-0.4659258723258972,
0.12040185928344727
],
[
-0.20754382014274597,
-0.38997575640678406,
-0.3076478838920593
],
[
0.11249106377363205,
-0.17241854965686798,
0.4551856815814972
],
[
-0.22096848487854004,
-0.30131396651268005,
0.3322945833206177
],
[
-0.2885717451572418,
0.13564074039459229,
0.3038218021392822
],
[
0.5470362901687622,
-0.056911759078502655,
0.3336308002471924
],
[
0.14821100234985352,
1.019626259803772,
0.11912097781896591
],
[
-0.26448413729667664,
0.4463828206062317,
0.5633707642555237
],
[
-0.5019347667694092,
-0.8952313661575317,
0.9968082308769226
],
[
-0.09097509831190109,
-0.5021821856498718,
-0.774272084236145
],
[
0.7782373428344727,
-0.8219204545021057,
-0.6385967135429382
],
[
-0.4179416298866272,
1.0138181447982788,
0.9003906846046448
],
[
-0.7695522904396057,
0.4951356053352356,
0.27838262915611267
],
[
0.1352238953113556,
-0.09653834253549576,
-0.5367135405540466
],
[
0.4316764771938324,
-0.15874578058719635,
-0.5840363502502441
],
[
-0.8944721221923828,
-0.6129788756370544,
0.398253858089447
],
[
0.6038775444030762,
1.230621576309204,
0.23074018955230713
],
[
-0.3480087220668793,
-0.41038715839385986,
-0.33286935091018677
],
[
0.16971904039382935,
-0.7054998874664307,
-0.17762042582035065
],
[
0.10676750540733337,
0.33191439509391785,
0.28654158115386963
],
[
0.8161095976829529,
-0.7738041281700134,
0.4539409875869751
],
[
0.4919089376926422,
0.19382242858409882,
0.13490013778209686
],
[
-0.5341103672981262,
-0.4275410771369934,
0.16920465230941772
],
[
-0.5047714114189148,
0.6682338118553162,
0.9830099940299988
],
[
0.5792328715324402,
0.020967308431863785,
0.16329899430274963
],
[
-0.028551798313856125,
0.29181671142578125,
0.6271538138389587
],
[
1.0009040832519531,
1.0899860858917236,
0.03138141706585884
],
[
0.1733202189207077,
0.2952180504798889,
0.39154383540153503
],
[
1.1995248794555664,
0.49439093470573425,
0.49956780672073364
],
[
0.3402847945690155,
-0.7532049417495728,
0.3707257807254791
],
[
0.16844721138477325,
0.0371837355196476,
0.21008865535259247
],
[
-0.18475691974163055,
0.492800772190094,
1.1090965270996094
],
[
1.37343430519104,
0.6074701547622681,
0.07295160740613937
],
[
-0.2716894745826721,
0.34719541668891907,
0.2974252998828888
],
[
0.6517245173454285,
0.34517648816108704,
-0.6991267800331116
],
[
1.131462574005127,
-0.4181934893131256,
0.014711310155689716
],
[
0.45891329646110535,
0.6425840258598328,
0.5629575848579407
],
[
0.9829176068305969,
0.6837454438209534,
0.5779233574867249
],
[
0.21745261549949646,
0.10289150476455688,
1.0186656713485718
],
[
-0.7066150903701782,
0.2927456200122833,
0.05835003778338432
],
[
-0.29833900928497314,
-0.24320939183235168,
0.33919867873191833
],
[
0.42278122901916504,
-0.6517714858055115,
-0.45446234941482544
],
[
0.1102355569601059,
0.33386605978012085,
-0.047687217593193054
],
[
1.0147796869277954,
0.29181671142578125,
-0.4382956326007843
],
[
0.8252743482589722,
0.2133755087852478,
0.08934953808784485
],
[
0.04747254401445389,
0.3921492397785187,
0.3003828227519989
],
[
0.8993004560470581,
0.732109785079956,
0.5620099902153015
],
[
0.969788670539856,
0.3594958782196045,
0.24025796353816986
],
[
-0.5418591499328613,
0.684735119342804,
0.13651178777217865
],
[
-0.3063417077064514,
-0.7611579895019531,
0.06944245845079422
],
[
-0.899300754070282,
0.0799374058842659,
-0.4000241458415985
],
[
1.050485372543335,
1.190054178237915,
1.5068362951278687
],
[
1.248936653137207,
0.29181671142578125,
-0.10971122980117798
],
[
-0.13933615386486053,
-0.24252121150493622,
0.36267510056495667
],
[
-0.36821112036705017,
0.19332224130630493,
0.5897371768951416
],
[
1.270178198814392,
1.128736138343811,
0.35461029410362244
],
[
0.8342562317848206,
0.7590351700782776,
0.16291548311710358
],
[
-0.21916896104812622,
0.5995560884475708,
0.9344035387039185
],
[
-0.4020785391330719,
0.13320356607437134,
-0.5687698125839233
],
[
-0.3448737859725952,
0.7098224759101868,
0.4886254370212555
],
[
0.7236321568489075,
0.49880942702293396,
0.8195453882217407
],
[
1.111620545387268,
0.29181671142578125,
-0.4977026879787445
],
[
1.1955883502960205,
0.09661023318767548,
0.27732595801353455
],
[
0.47855231165885925,
0.3998045027256012,
0.520326554775238
],
[
0.7978026866912842,
1.0429245233535767,
1.114219069480896
],
[
0.36164867877960205,
0.07844629883766174,
0.984810471534729
],
[
-0.1214253231883049,
0.017159847542643547,
1.3403764963150024
],
[
0.27760541439056396,
-0.9298321008682251,
0.4096109867095947
],
[
-0.004906071349978447,
0.2557479739189148,
-0.4466286301612854
],
[
-0.3781079053878784,
-0.9627765417098999,
1.4935261011123657
],
[
1.2579375505447388,
0.29181671142578125,
-0.3692721724510193
],
[
0.7241488695144653,
-0.08786455541849136,
1.2958627939224243
],
[
0.7014161348342896,
-0.09325847774744034,
-0.25087472796440125
],
[
0.4672619700431824,
0.010848304256796837,
-0.12294646352529526
],
[
0.005329097621142864,
1.0686196088790894,
-0.4747982323169708
],
[
0.6448685526847839,
1.1435459852218628,
-0.4118386507034302
],
[
0.06303134560585022,
0.7841649055480957,
0.20986035466194153
],
[
0.5187354683876038,
-0.7191954851150513,
-0.2346244901418686
],
[
0.5911051630973816,
-0.5409417748451233,
-0.9105331301689148
],
[
-0.030187280848622322,
0.29181671142578125,
1.038795828819275
],
[
0.6818335056304932,
0.6027613282203674,
1.3147859573364258
],
[
0.37735089659690857,
-0.15784670412540436,
0.3256359100341797
],
[
1.1971839666366577,
0.8749886155128479,
0.6059221029281616
],
[
0.6457439064979553,
0.6699243187904358,
0.4733956754207611
],
[
-0.26511242985725403,
0.6644261479377747,
-0.47634589672088623
],
[
0.616598904132843,
0.16727499663829803,
0.7800426483154297
],
[
0.5040062069892883,
-0.14378352463245392,
-0.13610804080963135
],
[
1.2880492210388184,
0.36526361107826233,
-0.38332849740982056
],
[
0.7372151613235474,
0.29181671142578125,
-0.03127037733793259
],
[
-0.17959535121917725,
-0.46408042311668396,
0.40369734168052673
],
[
1.0773673057556152,
0.18843935430049896,
0.31045636534690857
],
[
-0.06575462967157364,
-0.0999893993139267,
-0.3071534037590027
],
[
0.9312586784362793,
0.037244897335767746,
1.262158751487732
],
[
0.5934927463531494,
-0.11001330614089966,
0.4146396219730377
],
[
0.8895318508148193,
0.33912143111228943,
0.441883385181427
],
[
-0.11742016673088074,
0.6659654378890991,
1.0389689207077026
],
[
-0.9391144514083862,
1.1299172639846802,
0.2904535233974457
],
[
-0.19635428488254547,
0.29181671142578125,
1.180428385734558
],
[
0.3635255694389343,
-0.7368738651275635,
-0.44110631942749023
],
[
0.6002898216247559,
0.7115048766136169,
0.4520062804222107
],
[
0.5340331792831421,
0.5105125308036804,
0.6853306293487549
],
[
-0.35422268509864807,
-0.383005291223526,
-0.494064599275589
],
[
1.4476197957992554,
-0.1437377631664276,
0.6719149351119995
],
[
-0.46162307262420654,
0.4841626286506653,
-0.11894477158784866
],
[
1.2744969129562378,
-0.06925814598798752,
-0.4621801972389221
],
[
0.08298911154270172,
-0.1357104778289795,
0.3881130814552307
],
[
0.5402959585189819,
0.2570511996746063,
-0.08939802646636963
],
[
-0.6459614038467407,
-0.25679051876068115,
0.8747705817222595
],
[
0.43076369166374207,
-0.5021677613258362,
0.0363541804254055
],
[
0.31835153698921204,
0.5368619561195374,
0.7120820879936218
],
[
1.3529167175292969,
0.2760588228702545,
0.45914947986602783
],
[
1.0444769859313965,
0.8666611909866333,
1.2023478746414185
],
[
-0.26926925778388977,
-0.8990693092346191,
-0.19023294746875763
],
[
-0.1632995754480362,
-0.41065558791160583,
-0.23155640065670013
],
[
0.5899744629859924,
-0.6720564365386963,
0.04501838609576225
],
[
0.347116082906723,
0.29181671142578125,
0.6329561471939087
],
[
-0.7669775485992432,
0.48225492238998413,
0.566737174987793
],
[
-0.1526595950126648,
-0.08648448437452316,
0.7179281711578369
],
[
0.8701344132423401,
-0.12348105758428574,
-0.07938604801893234
],
[
0.6966987252235413,
0.9275979995727539,
0.018549533560872078
],
[
1.3724595308303833,
-0.5801642537117004,
0.22728000581264496
],
[
-0.28334110975265503,
-0.7934252619743347,
0.10227243602275848
],
[
0.40091943740844727,
0.29383963346481323,
1.2603650093078613
],
[
0.2890244126319885,
1.226073980331421,
-0.7119418978691101
],
[
0.6233953237533569,
1.0033429861068726,
1.032111644744873
],
[
0.49351900815963745,
0.8799840211868286,
0.7926546931266785
],
[
0.34952425956726074,
0.1354638785123825,
-0.4291952848434448
],
[
0.2993246018886566,
0.15255890786647797,
0.37319880723953247
],
[
0.9639967083930969,
0.1397349089384079,
-0.41835474967956543
],
[
0.5237535238265991,
-0.896891713142395,
-1.351672649383545
],
[
-0.3893134593963623,
0.31328412890434265,
-0.08003895729780197
],
[
0.8522402048110962,
0.5729881525039673,
0.18530771136283875
],
[
1.7133067846298218,
0.47896021604537964,
0.16805309057235718
],
[
0.4199868142604828,
0.7454566359519958,
0.15968471765518188
],
[
-0.5626803040504456,
-0.32539764046669006,
1.6168047189712524
],
[
0.4851708710193634,
0.08485821634531021,
0.552808403968811
],
[
0.3033086359500885,
0.3137320876121521,
0.23798324167728424
],
[
0.5072812438011169,
-0.7742474675178528,
0.20723572373390198
],
[
1.5272622108459473,
-0.857026994228363,
-0.3885817527770996
],
[
-0.8498314619064331,
-1.0903066396713257,
1.107202410697937
],
[
-0.9824524521827698,
-0.5545493364334106,
0.6659228801727295
],
[
1.5147898197174072,
0.945969820022583,
0.7589691281318665
],
[
0.6783913969993591,
0.29181671142578125,
0.5737981200218201
],
[
0.2241208702325821,
0.13132338225841522,
1.100775122642517
],
[
0.26957446336746216,
-0.4629915654659271,
-0.8701563477516174
],
[
-0.49690186977386475,
0.5250351428985596,
-0.4097417891025543
],
[
0.5711739659309387,
-0.8684530854225159,
0.45744916796684265
],
[
1.0462424755096436,
-0.7023312449455261,
-0.19748517870903015
],
[
0.030918758362531662,
0.7856431603431702,
-0.3355279564857483
],
[
-0.008804546669125557,
-0.8827769756317139,
-0.4400416314601898
],
[
1.140304446220398,
0.5504470467567444,
1.6465587615966797
],
[
-0.3450998365879059,
0.29181671142578125,
0.30365073680877686
],
[
0.9964448809623718,
-0.62396639585495,
0.5908036828041077
],
[
0.14356864988803864,
-0.23120446503162384,
0.48505157232284546
],
[
0.07708197087049484,
-0.05111119896173477,
-0.20051567256450653
],
[
1.3595986366271973,
-1.1465976238250732,
-0.2699911892414093
],
[
0.4273603558540344,
-0.44986778497695923,
-0.4925510883331299
],
[
1.0969822406768799,
-1.0698773860931396,
-0.3923966884613037
],
[
-0.8404163122177124,
1.4395596981048584,
-0.19648201763629913
],
[
1.3057408332824707,
0.27632415294647217,
0.6152714490890503
],
[
-0.17780931293964386,
0.43932393193244934,
0.6458268165588379
],
[
-0.528653085231781,
-0.14353175461292267,
0.9229506254196167
],
[
1.4555248022079468,
-1.0259952545166016,
-0.4562441408634186
],
[
-0.14756165444850922,
-0.5561291575431824,
1.0768119096755981
],
[
-0.41391754150390625,
-1.1037604808807373,
1.2041736841201782
],
[
1.550291657447815,
-1.0872933864593506,
0.7051491141319275
],
[
-0.04375248774886131,
-0.4118551015853882,
-0.36378324031829834
],
[
0.05724472925066948,
-0.18368586897850037,
0.1453598439693451
],
[
1.626542091369629,
-0.4099309742450714,
1.0515543222427368
],
[
-0.6195567846298218,
0.29181671142578125,
1.736567735671997
],
[
-0.12013274431228638,
-0.9047581553459167,
0.5967634320259094
],
[
-0.39893123507499695,
0.6357282996177673,
-0.6430260539054871
],
[
-0.08661642670631409,
0.6548808813095093,
-0.41377395391464233
],
[
1.4145830869674683,
-0.19026178121566772,
0.8982827067375183
],
[
1.058542013168335,
1.0563756227493286,
0.9325186610221863
],
[
1.911826252937317,
0.45161741971969604,
-0.18585841357707977
],
[
-0.332288920879364,
0.39983323216438293,
-0.491976261138916
],
[
1.5136407613754272,
-0.2972322404384613,
0.957003116607666
],
[
-0.47592005133628845,
0.2637055218219757,
1.2554272413253784
],
[
-0.608280599117279,
0.31998905539512634,
0.8458261489868164
],
[
-0.911689281463623,
0.717431366443634,
0.3810383677482605
],
[
1.7994369268417358,
0.760383665561676,
0.7605606913566589
],
[
0.5412089228630066,
-1.1707699298858643,
-0.17998889088630676
],
[
0.8193005919456482,
0.03770531341433525,
-0.6201762557029724
],
[
-1.4711514711380005,
0.643258273601532,
1.6248557567596436
],
[
-0.4884009063243866,
0.1881939172744751,
1.156332015991211
],
[
-0.8440516591072083,
1.5180282592773438,
1.0642322301864624
],
[
-0.992385983467102,
0.29181671142578125,
-0.8700374960899353
],
[
2.151193857192993,
0.29181671142578125,
-1.2509030103683472
],
[
2.0959925651550293,
-0.8486384153366089,
-1.6925979852676392
],
[
-1.304608941078186,
1.8554006814956665,
-0.2182377427816391
]
],
"model_names": [
"instruct",
"math",
"code"
],
"num_models": 3,
"num_params": 291,
"param_names": [
"model.embed_tokens.weight",
"model.layers.0.self_attn.q_proj.weight",
"model.layers.0.self_attn.k_proj.weight",
"model.layers.0.self_attn.v_proj.weight",
"model.layers.0.self_attn.o_proj.weight",
"model.layers.0.mlp.gate_proj.weight",
"model.layers.0.mlp.up_proj.weight",
"model.layers.0.mlp.down_proj.weight",
"model.layers.0.input_layernorm.weight",
"model.layers.0.post_attention_layernorm.weight",
"model.layers.1.self_attn.q_proj.weight",
"model.layers.1.self_attn.k_proj.weight",
"model.layers.1.self_attn.v_proj.weight",
"model.layers.1.self_attn.o_proj.weight",
"model.layers.1.mlp.gate_proj.weight",
"model.layers.1.mlp.up_proj.weight",
"model.layers.1.mlp.down_proj.weight",
"model.layers.1.input_layernorm.weight",
"model.layers.1.post_attention_layernorm.weight",
"model.layers.2.self_attn.q_proj.weight",
"model.layers.2.self_attn.k_proj.weight",
"model.layers.2.self_attn.v_proj.weight",
"model.layers.2.self_attn.o_proj.weight",
"model.layers.2.mlp.gate_proj.weight",
"model.layers.2.mlp.up_proj.weight",
"model.layers.2.mlp.down_proj.weight",
"model.layers.2.input_layernorm.weight",
"model.layers.2.post_attention_layernorm.weight",
"model.layers.3.self_attn.q_proj.weight",
"model.layers.3.self_attn.k_proj.weight",
"model.layers.3.self_attn.v_proj.weight",
"model.layers.3.self_attn.o_proj.weight",
"model.layers.3.mlp.gate_proj.weight",
"model.layers.3.mlp.up_proj.weight",
"model.layers.3.mlp.down_proj.weight",
"model.layers.3.input_layernorm.weight",
"model.layers.3.post_attention_layernorm.weight",
"model.layers.4.self_attn.q_proj.weight",
"model.layers.4.self_attn.k_proj.weight",
"model.layers.4.self_attn.v_proj.weight",
"model.layers.4.self_attn.o_proj.weight",
"model.layers.4.mlp.gate_proj.weight",
"model.layers.4.mlp.up_proj.weight",
"model.layers.4.mlp.down_proj.weight",
"model.layers.4.input_layernorm.weight",
"model.layers.4.post_attention_layernorm.weight",
"model.layers.5.self_attn.q_proj.weight",
"model.layers.5.self_attn.k_proj.weight",
"model.layers.5.self_attn.v_proj.weight",
"model.layers.5.self_attn.o_proj.weight",
"model.layers.5.mlp.gate_proj.weight",
"model.layers.5.mlp.up_proj.weight",
"model.layers.5.mlp.down_proj.weight",
"model.layers.5.input_layernorm.weight",
"model.layers.5.post_attention_layernorm.weight",
"model.layers.6.self_attn.q_proj.weight",
"model.layers.6.self_attn.k_proj.weight",
"model.layers.6.self_attn.v_proj.weight",
"model.layers.6.self_attn.o_proj.weight",
"model.layers.6.mlp.gate_proj.weight",
"model.layers.6.mlp.up_proj.weight",
"model.layers.6.mlp.down_proj.weight",
"model.layers.6.input_layernorm.weight",
"model.layers.6.post_attention_layernorm.weight",
"model.layers.7.self_attn.q_proj.weight",
"model.layers.7.self_attn.k_proj.weight",
"model.layers.7.self_attn.v_proj.weight",
"model.layers.7.self_attn.o_proj.weight",
"model.layers.7.mlp.gate_proj.weight",
"model.layers.7.mlp.up_proj.weight",
"model.layers.7.mlp.down_proj.weight",
"model.layers.7.input_layernorm.weight",
"model.layers.7.post_attention_layernorm.weight",
"model.layers.8.self_attn.q_proj.weight",
"model.layers.8.self_attn.k_proj.weight",
"model.layers.8.self_attn.v_proj.weight",
"model.layers.8.self_attn.o_proj.weight",
"model.layers.8.mlp.gate_proj.weight",
"model.layers.8.mlp.up_proj.weight",
"model.layers.8.mlp.down_proj.weight",
"model.layers.8.input_layernorm.weight",
"model.layers.8.post_attention_layernorm.weight",
"model.layers.9.self_attn.q_proj.weight",
"model.layers.9.self_attn.k_proj.weight",
"model.layers.9.self_attn.v_proj.weight",
"model.layers.9.self_attn.o_proj.weight",
"model.layers.9.mlp.gate_proj.weight",
"model.layers.9.mlp.up_proj.weight",
"model.layers.9.mlp.down_proj.weight",
"model.layers.9.input_layernorm.weight",
"model.layers.9.post_attention_layernorm.weight",
"model.layers.10.self_attn.q_proj.weight",
"model.layers.10.self_attn.k_proj.weight",
"model.layers.10.self_attn.v_proj.weight",
"model.layers.10.self_attn.o_proj.weight",
"model.layers.10.mlp.gate_proj.weight",
"model.layers.10.mlp.up_proj.weight",
"model.layers.10.mlp.down_proj.weight",
"model.layers.10.input_layernorm.weight",
"model.layers.10.post_attention_layernorm.weight",
"model.layers.11.self_attn.q_proj.weight",
"model.layers.11.self_attn.k_proj.weight",
"model.layers.11.self_attn.v_proj.weight",
"model.layers.11.self_attn.o_proj.weight",
"model.layers.11.mlp.gate_proj.weight",
"model.layers.11.mlp.up_proj.weight",
"model.layers.11.mlp.down_proj.weight",
"model.layers.11.input_layernorm.weight",
"model.layers.11.post_attention_layernorm.weight",
"model.layers.12.self_attn.q_proj.weight",
"model.layers.12.self_attn.k_proj.weight",
"model.layers.12.self_attn.v_proj.weight",
"model.layers.12.self_attn.o_proj.weight",
"model.layers.12.mlp.gate_proj.weight",
"model.layers.12.mlp.up_proj.weight",
"model.layers.12.mlp.down_proj.weight",
"model.layers.12.input_layernorm.weight",
"model.layers.12.post_attention_layernorm.weight",
"model.layers.13.self_attn.q_proj.weight",
"model.layers.13.self_attn.k_proj.weight",
"model.layers.13.self_attn.v_proj.weight",
"model.layers.13.self_attn.o_proj.weight",
"model.layers.13.mlp.gate_proj.weight",
"model.layers.13.mlp.up_proj.weight",
"model.layers.13.mlp.down_proj.weight",
"model.layers.13.input_layernorm.weight",
"model.layers.13.post_attention_layernorm.weight",
"model.layers.14.self_attn.q_proj.weight",
"model.layers.14.self_attn.k_proj.weight",
"model.layers.14.self_attn.v_proj.weight",
"model.layers.14.self_attn.o_proj.weight",
"model.layers.14.mlp.gate_proj.weight",
"model.layers.14.mlp.up_proj.weight",
"model.layers.14.mlp.down_proj.weight",
"model.layers.14.input_layernorm.weight",
"model.layers.14.post_attention_layernorm.weight",
"model.layers.15.self_attn.q_proj.weight",
"model.layers.15.self_attn.k_proj.weight",
"model.layers.15.self_attn.v_proj.weight",
"model.layers.15.self_attn.o_proj.weight",
"model.layers.15.mlp.gate_proj.weight",
"model.layers.15.mlp.up_proj.weight",
"model.layers.15.mlp.down_proj.weight",
"model.layers.15.input_layernorm.weight",
"model.layers.15.post_attention_layernorm.weight",
"model.layers.16.self_attn.q_proj.weight",
"model.layers.16.self_attn.k_proj.weight",
"model.layers.16.self_attn.v_proj.weight",
"model.layers.16.self_attn.o_proj.weight",
"model.layers.16.mlp.gate_proj.weight",
"model.layers.16.mlp.up_proj.weight",
"model.layers.16.mlp.down_proj.weight",
"model.layers.16.input_layernorm.weight",
"model.layers.16.post_attention_layernorm.weight",
"model.layers.17.self_attn.q_proj.weight",
"model.layers.17.self_attn.k_proj.weight",
"model.layers.17.self_attn.v_proj.weight",
"model.layers.17.self_attn.o_proj.weight",
"model.layers.17.mlp.gate_proj.weight",
"model.layers.17.mlp.up_proj.weight",
"model.layers.17.mlp.down_proj.weight",
"model.layers.17.input_layernorm.weight",
"model.layers.17.post_attention_layernorm.weight",
"model.layers.18.self_attn.q_proj.weight",
"model.layers.18.self_attn.k_proj.weight",
"model.layers.18.self_attn.v_proj.weight",
"model.layers.18.self_attn.o_proj.weight",
"model.layers.18.mlp.gate_proj.weight",
"model.layers.18.mlp.up_proj.weight",
"model.layers.18.mlp.down_proj.weight",
"model.layers.18.input_layernorm.weight",
"model.layers.18.post_attention_layernorm.weight",
"model.layers.19.self_attn.q_proj.weight",
"model.layers.19.self_attn.k_proj.weight",
"model.layers.19.self_attn.v_proj.weight",
"model.layers.19.self_attn.o_proj.weight",
"model.layers.19.mlp.gate_proj.weight",
"model.layers.19.mlp.up_proj.weight",
"model.layers.19.mlp.down_proj.weight",
"model.layers.19.input_layernorm.weight",
"model.layers.19.post_attention_layernorm.weight",
"model.layers.20.self_attn.q_proj.weight",
"model.layers.20.self_attn.k_proj.weight",
"model.layers.20.self_attn.v_proj.weight",
"model.layers.20.self_attn.o_proj.weight",
"model.layers.20.mlp.gate_proj.weight",
"model.layers.20.mlp.up_proj.weight",
"model.layers.20.mlp.down_proj.weight",
"model.layers.20.input_layernorm.weight",
"model.layers.20.post_attention_layernorm.weight",
"model.layers.21.self_attn.q_proj.weight",
"model.layers.21.self_attn.k_proj.weight",
"model.layers.21.self_attn.v_proj.weight",
"model.layers.21.self_attn.o_proj.weight",
"model.layers.21.mlp.gate_proj.weight",
"model.layers.21.mlp.up_proj.weight",
"model.layers.21.mlp.down_proj.weight",
"model.layers.21.input_layernorm.weight",
"model.layers.21.post_attention_layernorm.weight",
"model.layers.22.self_attn.q_proj.weight",
"model.layers.22.self_attn.k_proj.weight",
"model.layers.22.self_attn.v_proj.weight",
"model.layers.22.self_attn.o_proj.weight",
"model.layers.22.mlp.gate_proj.weight",
"model.layers.22.mlp.up_proj.weight",
"model.layers.22.mlp.down_proj.weight",
"model.layers.22.input_layernorm.weight",
"model.layers.22.post_attention_layernorm.weight",
"model.layers.23.self_attn.q_proj.weight",
"model.layers.23.self_attn.k_proj.weight",
"model.layers.23.self_attn.v_proj.weight",
"model.layers.23.self_attn.o_proj.weight",
"model.layers.23.mlp.gate_proj.weight",
"model.layers.23.mlp.up_proj.weight",
"model.layers.23.mlp.down_proj.weight",
"model.layers.23.input_layernorm.weight",
"model.layers.23.post_attention_layernorm.weight",
"model.layers.24.self_attn.q_proj.weight",
"model.layers.24.self_attn.k_proj.weight",
"model.layers.24.self_attn.v_proj.weight",
"model.layers.24.self_attn.o_proj.weight",
"model.layers.24.mlp.gate_proj.weight",
"model.layers.24.mlp.up_proj.weight",
"model.layers.24.mlp.down_proj.weight",
"model.layers.24.input_layernorm.weight",
"model.layers.24.post_attention_layernorm.weight",
"model.layers.25.self_attn.q_proj.weight",
"model.layers.25.self_attn.k_proj.weight",
"model.layers.25.self_attn.v_proj.weight",
"model.layers.25.self_attn.o_proj.weight",
"model.layers.25.mlp.gate_proj.weight",
"model.layers.25.mlp.up_proj.weight",
"model.layers.25.mlp.down_proj.weight",
"model.layers.25.input_layernorm.weight",
"model.layers.25.post_attention_layernorm.weight",
"model.layers.26.self_attn.q_proj.weight",
"model.layers.26.self_attn.k_proj.weight",
"model.layers.26.self_attn.v_proj.weight",
"model.layers.26.self_attn.o_proj.weight",
"model.layers.26.mlp.gate_proj.weight",
"model.layers.26.mlp.up_proj.weight",
"model.layers.26.mlp.down_proj.weight",
"model.layers.26.input_layernorm.weight",
"model.layers.26.post_attention_layernorm.weight",
"model.layers.27.self_attn.q_proj.weight",
"model.layers.27.self_attn.k_proj.weight",
"model.layers.27.self_attn.v_proj.weight",
"model.layers.27.self_attn.o_proj.weight",
"model.layers.27.mlp.gate_proj.weight",
"model.layers.27.mlp.up_proj.weight",
"model.layers.27.mlp.down_proj.weight",
"model.layers.27.input_layernorm.weight",
"model.layers.27.post_attention_layernorm.weight",
"model.layers.28.self_attn.q_proj.weight",
"model.layers.28.self_attn.k_proj.weight",
"model.layers.28.self_attn.v_proj.weight",
"model.layers.28.self_attn.o_proj.weight",
"model.layers.28.mlp.gate_proj.weight",
"model.layers.28.mlp.up_proj.weight",
"model.layers.28.mlp.down_proj.weight",
"model.layers.28.input_layernorm.weight",
"model.layers.28.post_attention_layernorm.weight",
"model.layers.29.self_attn.q_proj.weight",
"model.layers.29.self_attn.k_proj.weight",
"model.layers.29.self_attn.v_proj.weight",
"model.layers.29.self_attn.o_proj.weight",
"model.layers.29.mlp.gate_proj.weight",
"model.layers.29.mlp.up_proj.weight",
"model.layers.29.mlp.down_proj.weight",
"model.layers.29.input_layernorm.weight",
"model.layers.29.post_attention_layernorm.weight",
"model.layers.30.self_attn.q_proj.weight",
"model.layers.30.self_attn.k_proj.weight",
"model.layers.30.self_attn.v_proj.weight",
"model.layers.30.self_attn.o_proj.weight",
"model.layers.30.mlp.gate_proj.weight",
"model.layers.30.mlp.up_proj.weight",
"model.layers.30.mlp.down_proj.weight",
"model.layers.30.input_layernorm.weight",
"model.layers.30.post_attention_layernorm.weight",
"model.layers.31.self_attn.q_proj.weight",
"model.layers.31.self_attn.k_proj.weight",
"model.layers.31.self_attn.v_proj.weight",
"model.layers.31.self_attn.o_proj.weight",
"model.layers.31.mlp.gate_proj.weight",
"model.layers.31.mlp.up_proj.weight",
"model.layers.31.mlp.down_proj.weight",
"model.layers.31.input_layernorm.weight",
"model.layers.31.post_attention_layernorm.weight",
"model.norm.weight",
"lm_head.weight"
]
}