{
"lambdas": [
[
0.2534952461719513,
0.30523034930229187,
0.3759872019290924
],
[
0.19381080567836761,
0.14330413937568665,
0.1505417376756668
],
[
0.15468859672546387,
0.11949324607849121,
0.11205987632274628
],
[
0.5130396485328674,
0.12730471789836884,
0.2577188014984131
],
[
0.3262805938720703,
0.12673349678516388,
0.43820610642433167
],
[
0.08854815363883972,
0.17906822264194489,
0.07189840078353882
],
[
0.4790966212749481,
0.4757692217826843,
0.2594115436077118
],
[
0.44506341218948364,
0.4810096323490143,
0.174846813082695
],
[
0.13854311406612396,
0.11476646363735199,
0.45711416006088257
],
[
0.3992047607898712,
0.21270178258419037,
0.19707091152668
],
[
0.2006145864725113,
0.136004239320755,
0.45083877444267273
],
[
0.42933884263038635,
0.48759180307388306,
0.4610913395881653
],
[
0.09757842123508453,
0.09553612023591995,
0.13694429397583008
],
[
0.09377998113632202,
0.4987637996673584,
0.47199103236198425
],
[
0.342789888381958,
0.13183650374412537,
0.11036911606788635
],
[
0.33038148283958435,
0.14878816902637482,
0.26642900705337524
],
[
0.5244519710540771,
0.5266465544700623,
0.5108031034469604
],
[
0.45286843180656433,
0.4926087558269501,
0.15870466828346252
],
[
0.11907289177179337,
0.12675036489963531,
0.47801220417022705
],
[
0.1762814223766327,
0.22741958498954773,
0.08227628469467163
],
[
0.35158514976501465,
0.2241077721118927,
0.12628868222236633
],
[
0.31179118156433105,
0.5237205028533936,
0.221613809466362
],
[
0.05017201974987984,
0.06903746724128723,
0.537602961063385
],
[
0.44471949338912964,
0.4415968954563141,
0.41711723804473877
],
[
0.2558377683162689,
0.39776676893234253,
0.12752346694469452
],
[
0.506903350353241,
0.08512687683105469,
0.5269022583961487
],
[
0.12547433376312256,
0.5467050075531006,
0.2142793983221054
],
[
0.49493587017059326,
0.5380048155784607,
0.3759128749370575
],
[
0.1444317102432251,
0.2716050446033478,
0.40274953842163086
],
[
0.12919534742832184,
0.3601296842098236,
0.4177890717983246
],
[
0.31740885972976685,
0.5108253955841064,
0.4309729039669037
],
[
0.23399466276168823,
0.3505608141422272,
0.5342388153076172
],
[
0.47912299633026123,
0.11666414141654968,
0.4114471971988678
],
[
0.33454301953315735,
0.07829625904560089,
0.08453896641731262
],
[
0.4557119607925415,
0.47959810495376587,
0.4164620339870453
],
[
0.14079050719738007,
0.44912412762641907,
0.4894633889198303
],
[
0.13113750517368317,
0.4795096218585968,
0.5065174102783203
],
[
0.41767609119415283,
0.12207511812448502,
0.28158777952194214
],
[
0.2823454439640045,
0.11494332551956177,
0.14318899810314178
],
[
0.1188262328505516,
0.49996885657310486,
0.3268076479434967
],
[
0.11482632905244827,
0.13078516721725464,
0.4283144474029541
],
[
0.2638767957687378,
0.47975513339042664,
0.5138802528381348
],
[
0.3112858831882477,
0.4337930679321289,
0.38627398014068604
],
[
0.4781522750854492,
0.45803841948509216,
0.4390278458595276
],
[
0.19160860776901245,
0.45856159925460815,
0.1799495369195938
],
[
0.5289384126663208,
0.5018876791000366,
0.08807901293039322
],
[
0.39029595255851746,
0.22166197001934052,
0.05597026273608208
],
[
0.14031341671943665,
0.0284577663987875,
0.039545465260744095
],
[
0.3012852966785431,
0.4547480642795563,
0.36205294728279114
],
[
0.4226453900337219,
0.2290882170200348,
0.5030483603477478
],
[
0.37617990374565125,
0.5257728695869446,
0.2895321547985077
],
[
0.3288835883140564,
0.5261308550834656,
0.40352365374565125
],
[
0.5320791602134705,
0.4879598617553711,
0.47232821583747864
],
[
0.02394048310816288,
0.5648339986801147,
0.5524075627326965
],
[
0.09589371085166931,
0.22865454852581024,
0.24091486632823944
],
[
0.37051576375961304,
0.09366679191589355,
0.2356727421283722
],
[
0.482128769159317,
0.3785671889781952,
0.29638534784317017
],
[
0.0703749731183052,
0.47385674715042114,
0.34347960352897644
],
[
0.34640681743621826,
0.20363754034042358,
0.4984317719936371
],
[
0.12805169820785522,
0.07272768765687943,
0.481533944606781
],
[
0.44757071137428284,
0.44445836544036865,
0.5367795825004578
],
[
0.4767749309539795,
0.2248300313949585,
0.5253691673278809
],
[
0.22370415925979614,
0.042401693761348724,
0.39869654178619385
],
[
0.4211507737636566,
0.26674604415893555,
0.29245680570602417
],
[
0.31515729427337646,
0.22131618857383728,
0.1825154423713684
],
[
0.5393283367156982,
0.43332377076148987,
0.29671788215637207
],
[
0.11622118204832077,
0.11878670006990433,
0.18064448237419128
],
[
0.5045350193977356,
0.47212979197502136,
0.17135772109031677
],
[
0.4371897280216217,
0.28812137246131897,
0.5101091861724854
],
[
0.3035878539085388,
0.10632573068141937,
0.3595532178878784
],
[
0.48716968297958374,
0.2961490750312805,
0.4975534975528717
],
[
0.30991655588150024,
0.29923370480537415,
0.4639711380004883
],
[
0.523935854434967,
0.3574671149253845,
0.04057236388325691
],
[
0.5104513168334961,
0.06938118487596512,
0.4495874047279358
],
[
0.3236958980560303,
0.2927325665950775,
0.3352988362312317
],
[
0.17906847596168518,
0.2763298451900482,
0.14234255254268646
],
[
0.5065359473228455,
0.5002820491790771,
0.4729934632778168
],
[
0.524003803730011,
0.1553145796060562,
0.4596402943134308
],
[
0.4732443690299988,
0.46863853931427,
0.46989449858665466
],
[
0.5130873918533325,
0.46314916014671326,
0.12571106851100922
],
[
0.29410871863365173,
0.29923370480537415,
0.15922415256500244
],
[
0.40763089060783386,
0.44121450185775757,
0.3422219157218933
],
[
0.19644838571548462,
0.3400014042854309,
0.10237600654363632
],
[
0.1363479644060135,
0.1328810602426529,
0.13454383611679077
],
[
0.470550000667572,
0.30152225494384766,
0.46644097566604614
],
[
0.16385038197040558,
0.1695399433374405,
0.4856874346733093
],
[
0.20677362382411957,
0.3881402313709259,
0.5318411588668823
],
[
0.10901015251874924,
0.2707475423812866,
0.3816758990287781
],
[
0.4847850203514099,
0.15239009261131287,
0.45163530111312866
],
[
0.09481005370616913,
0.15819105505943298,
0.12841404974460602
],
[
0.4983784854412079,
0.45718005299568176,
0.08424646407365799
],
[
0.2763046324253082,
0.2063058465719223,
0.3703247904777527
],
[
0.4036531150341034,
0.4484309256076813,
0.24457362294197083
],
[
0.5230464935302734,
0.4396556317806244,
0.3629106879234314
],
[
0.2946498990058899,
0.19224371016025543,
0.18639756739139557
],
[
0.33942845463752747,
0.1070963591337204,
0.49657177925109863
],
[
0.12787079811096191,
0.08263149112462997,
0.1840149164199829
],
[
0.35910454392433167,
0.31310170888900757,
0.11980897933244705
],
[
0.15722618997097015,
0.29923370480537415,
0.407820463180542
],
[
0.12364465743303299,
0.4659133553504944,
0.15633027255535126
],
[
0.28565514087677,
0.2897105813026428,
0.12392035126686096
],
[
0.19736559689044952,
0.1416158825159073,
0.1349979192018509
],
[
0.3496997356414795,
0.4211839437484741,
0.1715952455997467
],
[
0.475223183631897,
0.4130305349826813,
0.4653870165348053
],
[
0.28921404480934143,
0.34739425778388977,
0.11075332015752792
],
[
0.4047956168651581,
0.13861128687858582,
0.16961310803890228
],
[
0.5106709003448486,
0.5673624873161316,
0.0892450362443924
],
[
0.10365679115056992,
0.43958431482315063,
0.48060742020606995
],
[
0.2888166904449463,
0.07966294884681702,
0.36358967423439026
],
[
0.19292157888412476,
0.17309828102588654,
0.05589678883552551
],
[
0.5227929949760437,
0.1855587214231491,
0.11792507767677307
],
[
0.2660389244556427,
0.548773467540741,
0.5037134885787964
],
[
0.057378534227609634,
0.4946551024913788,
0.16494522988796234
],
[
0.48666009306907654,
0.4797813594341278,
0.07033804059028625
],
[
0.5187705159187317,
0.34227901697158813,
0.406067818403244
],
[
0.4448551535606384,
0.10400378704071045,
0.3110816180706024
],
[
0.4698975086212158,
0.2864452302455902,
0.30077314376831055
],
[
0.0703377053141594,
0.14353232085704803,
0.34064897894859314
],
[
0.4335997402667999,
0.38353508710861206,
0.20544759929180145
],
[
0.3803330063819885,
0.44886884093284607,
0.437945157289505
],
[
0.4261038303375244,
0.0924801379442215,
0.49706584215164185
],
[
0.22050204873085022,
0.4724709689617157,
0.18526726961135864
],
[
0.3928665518760681,
0.21280480921268463,
0.09385833889245987
],
[
0.2593844532966614,
0.5017476677894592,
0.4851435124874115
],
[
0.5046696066856384,
0.25496184825897217,
0.4357806444168091
],
[
0.5074482560157776,
0.29923370480537415,
0.06577571481466293
],
[
0.221904918551445,
0.42789438366889954,
0.21795718371868134
],
[
0.38635969161987305,
0.22646084427833557,
0.17344880104064941
],
[
0.5065464973449707,
0.41880306601524353,
0.1801910251379013
],
[
0.30564725399017334,
0.3196573853492737,
0.49566876888275146
],
[
0.09010817855596542,
0.40919071435928345,
0.37821680307388306
],
[
0.47009795904159546,
0.35186442732810974,
0.5456129908561707
],
[
0.162612184882164,
0.10159525275230408,
0.4674549400806427
],
[
0.09660515934228897,
0.12465592473745346,
0.06865537911653519
],
[
0.18723157048225403,
0.3670336604118347,
0.2650076448917389
],
[
0.49288445711135864,
0.09083911031484604,
0.09884956479072571
],
[
0.3524926006793976,
0.4623507559299469,
0.23764653503894806
],
[
0.41735246777534485,
0.2218008041381836,
0.17354455590248108
],
[
0.47854217886924744,
0.3222688138484955,
0.4959508180618286
],
[
0.10266521573066711,
0.30623453855514526,
0.09199462831020355
],
[
0.08731709420681,
0.4694647789001465,
0.5055084824562073
],
[
0.23517507314682007,
0.3065198063850403,
0.05544609948992729
],
[
0.09088436514139175,
0.19915884733200073,
0.07785775512456894
],
[
0.18251122534275055,
0.29923370480537415,
0.4866146147251129
],
[
0.10572174191474915,
0.47605466842651367,
0.5299978256225586
],
[
0.3730151653289795,
0.47881704568862915,
0.20123206079006195
],
[
0.4724118709564209,
0.49485599994659424,
0.49308905005455017
],
[
0.2028992474079132,
0.2132064700126648,
0.22739055752754211
],
[
0.3550381064414978,
0.2551001310348511,
0.3830864429473877
],
[
0.4802624583244324,
0.4192729890346527,
0.4569671154022217
],
[
0.39423421025276184,
0.46593964099884033,
0.1289444863796234
],
[
0.45385563373565674,
0.09339053183794022,
0.3881056606769562
],
[
0.5068311095237732,
0.29923370480537415,
0.10691635310649872
],
[
0.09883947670459747,
0.06944058835506439,
0.5056400895118713
],
[
0.352832168340683,
0.40964841842651367,
0.1477414220571518
],
[
0.5820969939231873,
0.46638599038124084,
0.4030589163303375
],
[
0.43042948842048645,
0.24972397089004517,
0.4475313425064087
],
[
0.1091168001294136,
0.28223493695259094,
0.17356599867343903
],
[
0.1386835128068924,
0.3159557282924652,
0.43098369240760803
],
[
0.5068760514259338,
0.537592887878418,
0.4652017652988434
],
[
0.5192381739616394,
0.09259963035583496,
0.5365279316902161
],
[
0.4348467290401459,
0.29923370480537415,
0.057594649493694305
],
[
0.5597925782203674,
0.35377395153045654,
0.3289499282836914
],
[
0.38250571489334106,
0.45372092723846436,
0.4277156889438629
],
[
0.5085664987564087,
0.5573768615722656,
0.39853039383888245
],
[
0.2629241347312927,
0.3252616822719574,
0.48582205176353455
],
[
0.10790055990219116,
0.023456986993551254,
0.5011544823646545
],
[
0.5336201786994934,
0.1303187757730484,
0.32369309663772583
],
[
0.518532395362854,
0.1121792271733284,
0.328836053609848
],
[
0.19930943846702576,
0.20736496150493622,
0.4669167995452881
],
[
0.5300899744033813,
0.29923370480537415,
0.05071401223540306
],
[
0.5105882287025452,
0.2925345301628113,
0.5228399038314819
],
[
0.3276820480823517,
0.24914789199829102,
0.35004571080207825
],
[
0.48565033078193665,
0.33661529421806335,
0.47143107652664185
],
[
0.37147772312164307,
0.5003097653388977,
0.13270977139472961
],
[
0.5216526389122009,
0.48277392983436584,
0.07973511517047882
],
[
0.21449095010757446,
0.12620313465595245,
0.2667386531829834
],
[
0.4579208791255951,
0.14321710169315338,
0.06148596107959747
],
[
0.4731524586677551,
0.17464078962802887,
0.06310705095529556
],
[
0.23052988946437836,
0.29923370480537415,
0.510108232498169
],
[
0.1353495568037033,
0.5124863386154175,
0.5183387994766235
],
[
0.2910447418689728,
0.08562562614679337,
0.08877898752689362
],
[
0.3357255160808563,
0.01985875889658928,
0.16783209145069122
],
[
0.18995055556297302,
0.46446508169174194,
0.3787825405597687
],
[
0.534010648727417,
0.4986063838005066,
0.24415051937103271
],
[
0.3214907646179199,
0.21466046571731567,
0.2880139648914337
],
[
0.3890703618526459,
0.05805783346295357,
0.09035345166921616
],
[
0.09757407009601593,
0.49493804574012756,
0.49909377098083496
],
[
0.19078117609024048,
0.29923370480537415,
0.14100411534309387
],
[
0.15969878435134888,
0.16440604627132416,
0.15606528520584106
],
[
0.1717253476381302,
0.512469470500946,
0.31309831142425537
],
[
0.33371666073799133,
0.17783179879188538,
0.46897128224372864
],
[
0.5191593170166016,
0.4730057120323181,
0.5315001606941223
],
[
0.45993658900260925,
0.13571709394454956,
0.3396763205528259
],
[
0.2905575633049011,
0.36112692952156067,
0.26040688157081604
],
[
0.1273851990699768,
0.228861004114151,
0.2306375652551651
],
[
0.3950050175189972,
0.4847060441970825,
0.49440380930900574
],
[
0.2743946611881256,
0.29923370480537415,
0.2916116416454315
],
[
0.33875811100006104,
0.12597376108169556,
0.28650468587875366
],
[
0.331350713968277,
0.15663138031959534,
0.31230947375297546
],
[
0.47922152280807495,
0.20223678648471832,
0.4985326826572418
],
[
0.09411115199327469,
0.10990307480096817,
0.10064181685447693
],
[
0.4060930609703064,
0.10882768779993057,
0.46024370193481445
],
[
0.09361258149147034,
0.09751991927623749,
0.1396244913339615
],
[
0.1423683911561966,
0.27079346776008606,
0.03294278681278229
],
[
0.4669570028781891,
0.03322674334049225,
0.1372508853673935
],
[
0.3554062247276306,
0.33211812376976013,
0.1725987195968628
],
[
0.3068210482597351,
0.13354963064193726,
0.5721191167831421
],
[
0.3807375133037567,
0.22847671806812286,
0.33199241757392883
],
[
0.26810795068740845,
0.15397001802921295,
0.28484871983528137
],
[
0.3794940710067749,
0.11301714926958084,
0.2054196447134018
],
[
0.44565343856811523,
0.10809874534606934,
0.5133920907974243
],
[
0.4692825376987457,
0.3971514403820038,
0.4755999743938446
],
[
0.39292535185813904,
0.09397334605455399,
0.06860191375017166
],
[
0.4834848940372467,
0.09435981512069702,
0.17222602665424347
],
[
0.09720193594694138,
0.29923370480537415,
0.510779082775116
],
[
0.10717114061117172,
0.4595472514629364,
0.3745758533477783
],
[
0.1838539093732834,
0.21227142214775085,
0.42954763770103455
],
[
0.36779552698135376,
0.21453768014907837,
0.09934128820896149
],
[
0.3316192030906677,
0.3886072635650635,
0.07977966219186783
],
[
0.5129786729812622,
0.4144686460494995,
0.2253073751926422
],
[
0.07541624456644058,
0.06450625509023666,
0.3642188012599945
],
[
0.5166736841201782,
0.24363957345485687,
0.5150353908538818
],
[
0.498140424489975,
0.41894200444221497,
0.3856343924999237
],
[
0.24936480820178986,
0.47356557846069336,
0.5537445545196533
],
[
0.4409908354282379,
0.28117305040359497,
0.39970213174819946
],
[
0.5267730951309204,
0.1018093004822731,
0.03787040337920189
],
[
0.3677961528301239,
0.0394858680665493,
0.0650714784860611
],
[
0.5265408754348755,
0.13327135145664215,
0.2464272528886795
],
[
0.28510764241218567,
0.10290946811437607,
0.20700719952583313
],
[
0.12357800453901291,
0.15474779903888702,
0.28230687975883484
],
[
0.5179312825202942,
0.4698978364467621,
0.06783566623926163
],
[
0.5200085639953613,
0.3792725205421448,
0.3718236982822418
],
[
0.3375779986381531,
0.5374161005020142,
0.21857187151908875
],
[
0.09756416082382202,
0.34533458948135376,
0.5081673860549927
],
[
0.28710633516311646,
0.07219579070806503,
0.12115068733692169
],
[
0.2413293570280075,
0.4114782512187958,
0.3004918694496155
],
[
0.5400509834289551,
0.02599618397653103,
0.22999224066734314
],
[
0.4863761365413666,
0.09944410622119904,
0.4682898223400116
],
[
0.04367741569876671,
0.3792931139469147,
0.4213159382343292
],
[
0.34282636642456055,
0.04855912923812866,
0.34633585810661316
],
[
0.5270025730133057,
0.5420955419540405,
0.13136616349220276
],
[
0.19974078238010406,
0.3719595968723297,
0.5812338590621948
],
[
0.3951050937175751,
0.520481526851654,
0.43944546580314636
],
[
0.07492329180240631,
0.00783818680793047,
0.19456180930137634
],
[
0.06577272713184357,
0.04675120860338211,
0.06137834116816521
],
[
0.059974879026412964,
0.04614439234137535,
0.30091536045074463
],
[
0.503949761390686,
0.10325167328119278,
0.5292012691497803
],
[
0.3045593798160553,
0.04624379798769951,
0.5069072842597961
],
[
0.4598368704319,
0.10093829780817032,
0.08005744218826294
],
[
0.5855668187141418,
0.5845062136650085,
0.5233513712882996
],
[
0.0390322208404541,
0.29923370480537415,
0.49177414178848267
],
[
0.4870739281177521,
0.4436861276626587,
0.09054065495729446
],
[
0.1031922772526741,
0.08936416357755661,
0.278532475233078
],
[
0.09785640984773636,
0.10036541521549225,
0.07195070385932922
],
[
0.19014137983322144,
0.08649999648332596,
0.184501051902771
],
[
0.5291661620140076,
0.3851234018802643,
0.09130331128835678
],
[
0.5385644435882568,
0.1805342584848404,
0.1751730889081955
],
[
0.4074404835700989,
0.5299202799797058,
0.10594134032726288
],
[
0.5955472588539124,
0.3051132559776306,
0.5478314161300659
],
[
0.05961701646447182,
0.4008893370628357,
0.5330079793930054
],
[
0.11700171232223511,
0.145936518907547,
0.3546701967716217
],
[
0.5655185580253601,
-0.016938162967562675,
0.008070452138781548
],
[
0.4683241844177246,
0.03576419875025749,
0.3765697777271271
],
[
0.16034618020057678,
0.04000109061598778,
0.45461782813072205
],
[
0.47220855951309204,
0.08483608067035675,
0.5206044316291809
],
[
0.4109242856502533,
0.17806819081306458,
0.09369169175624847
],
[
0.22748956084251404,
0.05649997293949127,
0.33820465207099915
],
[
0.48838844895362854,
0.42935165762901306,
0.4283795952796936
],
[
0.015660583972930908,
0.29923370480537415,
0.6172947287559509
],
[
0.28337985277175903,
0.039204973727464676,
0.4324447512626648
],
[
0.586188793182373,
0.5959473252296448,
0.09735152870416641
],
[
0.14806878566741943,
0.5574384331703186,
0.09092339128255844
],
[
0.5625902414321899,
0.30627891421318054,
0.4743388891220093
],
[
0.5635393857955933,
0.4067699909210205,
0.417013019323349
],
[
0.554167628288269,
0.3569263517856598,
0.3360467255115509
],
[
0.056985314935445786,
0.33760568499565125,
0.11646073311567307
],
[
0.5913213491439819,
0.3084571361541748,
0.3540549576282501
],
[
0.07675735652446747,
0.13443472981452942,
0.548639178276062
],
[
0.032978132367134094,
0.14077959954738617,
0.5724940896034241
],
[
0.08972731232643127,
0.12107591331005096,
0.17119447886943817
],
[
0.09382423758506775,
0.24321328103542328,
0.4423363506793976
],
[
0.29118674993515015,
0.16025410592556,
0.41708433628082275
],
[
0.5054580569267273,
0.44674912095069885,
0.43730923533439636
],
[
0.14315062761306763,
0.3252270221710205,
0.5201801061630249
],
[
0.33933597803115845,
0.1201607808470726,
0.340225487947464
],
[
0.17847172915935516,
0.4916016161441803,
0.5548601746559143
],
[
0.43462249636650085,
0.29923370480537415,
0.005099068395793438
],
[
0.22771261632442474,
0.29923370480537415,
0.07144622504711151
],
[
0.5556842088699341,
0.5212759375572205,
0.04692516848444939
],
[
0.0067663975059986115,
0.584466278553009,
0.04930794984102249
]
],
"model_names": [
"instruct",
"math",
"code"
],
"num_models": 3,
"num_params": 291,
"param_names": [
"model.embed_tokens.weight",
"model.layers.0.self_attn.q_proj.weight",
"model.layers.0.self_attn.k_proj.weight",
"model.layers.0.self_attn.v_proj.weight",
"model.layers.0.self_attn.o_proj.weight",
"model.layers.0.mlp.gate_proj.weight",
"model.layers.0.mlp.up_proj.weight",
"model.layers.0.mlp.down_proj.weight",
"model.layers.0.input_layernorm.weight",
"model.layers.0.post_attention_layernorm.weight",
"model.layers.1.self_attn.q_proj.weight",
"model.layers.1.self_attn.k_proj.weight",
"model.layers.1.self_attn.v_proj.weight",
"model.layers.1.self_attn.o_proj.weight",
"model.layers.1.mlp.gate_proj.weight",
"model.layers.1.mlp.up_proj.weight",
"model.layers.1.mlp.down_proj.weight",
"model.layers.1.input_layernorm.weight",
"model.layers.1.post_attention_layernorm.weight",
"model.layers.2.self_attn.q_proj.weight",
"model.layers.2.self_attn.k_proj.weight",
"model.layers.2.self_attn.v_proj.weight",
"model.layers.2.self_attn.o_proj.weight",
"model.layers.2.mlp.gate_proj.weight",
"model.layers.2.mlp.up_proj.weight",
"model.layers.2.mlp.down_proj.weight",
"model.layers.2.input_layernorm.weight",
"model.layers.2.post_attention_layernorm.weight",
"model.layers.3.self_attn.q_proj.weight",
"model.layers.3.self_attn.k_proj.weight",
"model.layers.3.self_attn.v_proj.weight",
"model.layers.3.self_attn.o_proj.weight",
"model.layers.3.mlp.gate_proj.weight",
"model.layers.3.mlp.up_proj.weight",
"model.layers.3.mlp.down_proj.weight",
"model.layers.3.input_layernorm.weight",
"model.layers.3.post_attention_layernorm.weight",
"model.layers.4.self_attn.q_proj.weight",
"model.layers.4.self_attn.k_proj.weight",
"model.layers.4.self_attn.v_proj.weight",
"model.layers.4.self_attn.o_proj.weight",
"model.layers.4.mlp.gate_proj.weight",
"model.layers.4.mlp.up_proj.weight",
"model.layers.4.mlp.down_proj.weight",
"model.layers.4.input_layernorm.weight",
"model.layers.4.post_attention_layernorm.weight",
"model.layers.5.self_attn.q_proj.weight",
"model.layers.5.self_attn.k_proj.weight",
"model.layers.5.self_attn.v_proj.weight",
"model.layers.5.self_attn.o_proj.weight",
"model.layers.5.mlp.gate_proj.weight",
"model.layers.5.mlp.up_proj.weight",
"model.layers.5.mlp.down_proj.weight",
"model.layers.5.input_layernorm.weight",
"model.layers.5.post_attention_layernorm.weight",
"model.layers.6.self_attn.q_proj.weight",
"model.layers.6.self_attn.k_proj.weight",
"model.layers.6.self_attn.v_proj.weight",
"model.layers.6.self_attn.o_proj.weight",
"model.layers.6.mlp.gate_proj.weight",
"model.layers.6.mlp.up_proj.weight",
"model.layers.6.mlp.down_proj.weight",
"model.layers.6.input_layernorm.weight",
"model.layers.6.post_attention_layernorm.weight",
"model.layers.7.self_attn.q_proj.weight",
"model.layers.7.self_attn.k_proj.weight",
"model.layers.7.self_attn.v_proj.weight",
"model.layers.7.self_attn.o_proj.weight",
"model.layers.7.mlp.gate_proj.weight",
"model.layers.7.mlp.up_proj.weight",
"model.layers.7.mlp.down_proj.weight",
"model.layers.7.input_layernorm.weight",
"model.layers.7.post_attention_layernorm.weight",
"model.layers.8.self_attn.q_proj.weight",
"model.layers.8.self_attn.k_proj.weight",
"model.layers.8.self_attn.v_proj.weight",
"model.layers.8.self_attn.o_proj.weight",
"model.layers.8.mlp.gate_proj.weight",
"model.layers.8.mlp.up_proj.weight",
"model.layers.8.mlp.down_proj.weight",
"model.layers.8.input_layernorm.weight",
"model.layers.8.post_attention_layernorm.weight",
"model.layers.9.self_attn.q_proj.weight",
"model.layers.9.self_attn.k_proj.weight",
"model.layers.9.self_attn.v_proj.weight",
"model.layers.9.self_attn.o_proj.weight",
"model.layers.9.mlp.gate_proj.weight",
"model.layers.9.mlp.up_proj.weight",
"model.layers.9.mlp.down_proj.weight",
"model.layers.9.input_layernorm.weight",
"model.layers.9.post_attention_layernorm.weight",
"model.layers.10.self_attn.q_proj.weight",
"model.layers.10.self_attn.k_proj.weight",
"model.layers.10.self_attn.v_proj.weight",
"model.layers.10.self_attn.o_proj.weight",
"model.layers.10.mlp.gate_proj.weight",
"model.layers.10.mlp.up_proj.weight",
"model.layers.10.mlp.down_proj.weight",
"model.layers.10.input_layernorm.weight",
"model.layers.10.post_attention_layernorm.weight",
"model.layers.11.self_attn.q_proj.weight",
"model.layers.11.self_attn.k_proj.weight",
"model.layers.11.self_attn.v_proj.weight",
"model.layers.11.self_attn.o_proj.weight",
"model.layers.11.mlp.gate_proj.weight",
"model.layers.11.mlp.up_proj.weight",
"model.layers.11.mlp.down_proj.weight",
"model.layers.11.input_layernorm.weight",
"model.layers.11.post_attention_layernorm.weight",
"model.layers.12.self_attn.q_proj.weight",
"model.layers.12.self_attn.k_proj.weight",
"model.layers.12.self_attn.v_proj.weight",
"model.layers.12.self_attn.o_proj.weight",
"model.layers.12.mlp.gate_proj.weight",
"model.layers.12.mlp.up_proj.weight",
"model.layers.12.mlp.down_proj.weight",
"model.layers.12.input_layernorm.weight",
"model.layers.12.post_attention_layernorm.weight",
"model.layers.13.self_attn.q_proj.weight",
"model.layers.13.self_attn.k_proj.weight",
"model.layers.13.self_attn.v_proj.weight",
"model.layers.13.self_attn.o_proj.weight",
"model.layers.13.mlp.gate_proj.weight",
"model.layers.13.mlp.up_proj.weight",
"model.layers.13.mlp.down_proj.weight",
"model.layers.13.input_layernorm.weight",
"model.layers.13.post_attention_layernorm.weight",
"model.layers.14.self_attn.q_proj.weight",
"model.layers.14.self_attn.k_proj.weight",
"model.layers.14.self_attn.v_proj.weight",
"model.layers.14.self_attn.o_proj.weight",
"model.layers.14.mlp.gate_proj.weight",
"model.layers.14.mlp.up_proj.weight",
"model.layers.14.mlp.down_proj.weight",
"model.layers.14.input_layernorm.weight",
"model.layers.14.post_attention_layernorm.weight",
"model.layers.15.self_attn.q_proj.weight",
"model.layers.15.self_attn.k_proj.weight",
"model.layers.15.self_attn.v_proj.weight",
"model.layers.15.self_attn.o_proj.weight",
"model.layers.15.mlp.gate_proj.weight",
"model.layers.15.mlp.up_proj.weight",
"model.layers.15.mlp.down_proj.weight",
"model.layers.15.input_layernorm.weight",
"model.layers.15.post_attention_layernorm.weight",
"model.layers.16.self_attn.q_proj.weight",
"model.layers.16.self_attn.k_proj.weight",
"model.layers.16.self_attn.v_proj.weight",
"model.layers.16.self_attn.o_proj.weight",
"model.layers.16.mlp.gate_proj.weight",
"model.layers.16.mlp.up_proj.weight",
"model.layers.16.mlp.down_proj.weight",
"model.layers.16.input_layernorm.weight",
"model.layers.16.post_attention_layernorm.weight",
"model.layers.17.self_attn.q_proj.weight",
"model.layers.17.self_attn.k_proj.weight",
"model.layers.17.self_attn.v_proj.weight",
"model.layers.17.self_attn.o_proj.weight",
"model.layers.17.mlp.gate_proj.weight",
"model.layers.17.mlp.up_proj.weight",
"model.layers.17.mlp.down_proj.weight",
"model.layers.17.input_layernorm.weight",
"model.layers.17.post_attention_layernorm.weight",
"model.layers.18.self_attn.q_proj.weight",
"model.layers.18.self_attn.k_proj.weight",
"model.layers.18.self_attn.v_proj.weight",
"model.layers.18.self_attn.o_proj.weight",
"model.layers.18.mlp.gate_proj.weight",
"model.layers.18.mlp.up_proj.weight",
"model.layers.18.mlp.down_proj.weight",
"model.layers.18.input_layernorm.weight",
"model.layers.18.post_attention_layernorm.weight",
"model.layers.19.self_attn.q_proj.weight",
"model.layers.19.self_attn.k_proj.weight",
"model.layers.19.self_attn.v_proj.weight",
"model.layers.19.self_attn.o_proj.weight",
"model.layers.19.mlp.gate_proj.weight",
"model.layers.19.mlp.up_proj.weight",
"model.layers.19.mlp.down_proj.weight",
"model.layers.19.input_layernorm.weight",
"model.layers.19.post_attention_layernorm.weight",
"model.layers.20.self_attn.q_proj.weight",
"model.layers.20.self_attn.k_proj.weight",
"model.layers.20.self_attn.v_proj.weight",
"model.layers.20.self_attn.o_proj.weight",
"model.layers.20.mlp.gate_proj.weight",
"model.layers.20.mlp.up_proj.weight",
"model.layers.20.mlp.down_proj.weight",
"model.layers.20.input_layernorm.weight",
"model.layers.20.post_attention_layernorm.weight",
"model.layers.21.self_attn.q_proj.weight",
"model.layers.21.self_attn.k_proj.weight",
"model.layers.21.self_attn.v_proj.weight",
"model.layers.21.self_attn.o_proj.weight",
"model.layers.21.mlp.gate_proj.weight",
"model.layers.21.mlp.up_proj.weight",
"model.layers.21.mlp.down_proj.weight",
"model.layers.21.input_layernorm.weight",
"model.layers.21.post_attention_layernorm.weight",
"model.layers.22.self_attn.q_proj.weight",
"model.layers.22.self_attn.k_proj.weight",
"model.layers.22.self_attn.v_proj.weight",
"model.layers.22.self_attn.o_proj.weight",
"model.layers.22.mlp.gate_proj.weight",
"model.layers.22.mlp.up_proj.weight",
"model.layers.22.mlp.down_proj.weight",
"model.layers.22.input_layernorm.weight",
"model.layers.22.post_attention_layernorm.weight",
"model.layers.23.self_attn.q_proj.weight",
"model.layers.23.self_attn.k_proj.weight",
"model.layers.23.self_attn.v_proj.weight",
"model.layers.23.self_attn.o_proj.weight",
"model.layers.23.mlp.gate_proj.weight",
"model.layers.23.mlp.up_proj.weight",
"model.layers.23.mlp.down_proj.weight",
"model.layers.23.input_layernorm.weight",
"model.layers.23.post_attention_layernorm.weight",
"model.layers.24.self_attn.q_proj.weight",
"model.layers.24.self_attn.k_proj.weight",
"model.layers.24.self_attn.v_proj.weight",
"model.layers.24.self_attn.o_proj.weight",
"model.layers.24.mlp.gate_proj.weight",
"model.layers.24.mlp.up_proj.weight",
"model.layers.24.mlp.down_proj.weight",
"model.layers.24.input_layernorm.weight",
"model.layers.24.post_attention_layernorm.weight",
"model.layers.25.self_attn.q_proj.weight",
"model.layers.25.self_attn.k_proj.weight",
"model.layers.25.self_attn.v_proj.weight",
"model.layers.25.self_attn.o_proj.weight",
"model.layers.25.mlp.gate_proj.weight",
"model.layers.25.mlp.up_proj.weight",
"model.layers.25.mlp.down_proj.weight",
"model.layers.25.input_layernorm.weight",
"model.layers.25.post_attention_layernorm.weight",
"model.layers.26.self_attn.q_proj.weight",
"model.layers.26.self_attn.k_proj.weight",
"model.layers.26.self_attn.v_proj.weight",
"model.layers.26.self_attn.o_proj.weight",
"model.layers.26.mlp.gate_proj.weight",
"model.layers.26.mlp.up_proj.weight",
"model.layers.26.mlp.down_proj.weight",
"model.layers.26.input_layernorm.weight",
"model.layers.26.post_attention_layernorm.weight",
"model.layers.27.self_attn.q_proj.weight",
"model.layers.27.self_attn.k_proj.weight",
"model.layers.27.self_attn.v_proj.weight",
"model.layers.27.self_attn.o_proj.weight",
"model.layers.27.mlp.gate_proj.weight",
"model.layers.27.mlp.up_proj.weight",
"model.layers.27.mlp.down_proj.weight",
"model.layers.27.input_layernorm.weight",
"model.layers.27.post_attention_layernorm.weight",
"model.layers.28.self_attn.q_proj.weight",
"model.layers.28.self_attn.k_proj.weight",
"model.layers.28.self_attn.v_proj.weight",
"model.layers.28.self_attn.o_proj.weight",
"model.layers.28.mlp.gate_proj.weight",
"model.layers.28.mlp.up_proj.weight",
"model.layers.28.mlp.down_proj.weight",
"model.layers.28.input_layernorm.weight",
"model.layers.28.post_attention_layernorm.weight",
"model.layers.29.self_attn.q_proj.weight",
"model.layers.29.self_attn.k_proj.weight",
"model.layers.29.self_attn.v_proj.weight",
"model.layers.29.self_attn.o_proj.weight",
"model.layers.29.mlp.gate_proj.weight",
"model.layers.29.mlp.up_proj.weight",
"model.layers.29.mlp.down_proj.weight",
"model.layers.29.input_layernorm.weight",
"model.layers.29.post_attention_layernorm.weight",
"model.layers.30.self_attn.q_proj.weight",
"model.layers.30.self_attn.k_proj.weight",
"model.layers.30.self_attn.v_proj.weight",
"model.layers.30.self_attn.o_proj.weight",
"model.layers.30.mlp.gate_proj.weight",
"model.layers.30.mlp.up_proj.weight",
"model.layers.30.mlp.down_proj.weight",
"model.layers.30.input_layernorm.weight",
"model.layers.30.post_attention_layernorm.weight",
"model.layers.31.self_attn.q_proj.weight",
"model.layers.31.self_attn.k_proj.weight",
"model.layers.31.self_attn.v_proj.weight",
"model.layers.31.self_attn.o_proj.weight",
"model.layers.31.mlp.gate_proj.weight",
"model.layers.31.mlp.up_proj.weight",
"model.layers.31.mlp.down_proj.weight",
"model.layers.31.input_layernorm.weight",
"model.layers.31.post_attention_layernorm.weight",
"model.norm.weight",
"lm_head.weight"
]
}