{
"lambdas": [
[
0.4511440098285675,
0.34568217396736145,
0.4310983717441559
],
[
0.4077865779399872,
0.44282540678977966,
0.3368643522262573
],
[
0.28722721338272095,
0.4189178943634033,
0.36558520793914795
],
[
0.4073631763458252,
0.3240983784198761,
0.38805025815963745
],
[
0.4540562629699707,
0.2138141542673111,
0.2885076403617859
],
[
0.45886796712875366,
0.4091651439666748,
0.4662487804889679
],
[
0.46575337648391724,
0.3446827530860901,
0.4880160689353943
],
[
0.3579305112361908,
0.32643458247184753,
0.43178242444992065
],
[
0.23109537363052368,
0.33650973439216614,
0.40308934450149536
],
[
0.3454071283340454,
0.3584277331829071,
0.27936890721321106
],
[
0.35491490364074707,
0.3612421751022339,
0.20739391446113586
],
[
0.25850096344947815,
0.22921274602413177,
0.2169143110513687
],
[
0.2233383059501648,
0.4430111348628998,
0.4659053683280945
],
[
0.20520174503326416,
0.24486230313777924,
0.23429881036281586
],
[
0.2931152880191803,
0.4284198582172394,
0.42642292380332947
],
[
0.4972710609436035,
0.44421976804733276,
0.38601377606391907
],
[
0.2682636082172394,
0.17335446178913116,
0.41383957862854004
],
[
0.2809653878211975,
0.3548043966293335,
0.25039395689964294
],
[
0.3761029541492462,
0.26408830285072327,
0.22779043018817902
],
[
0.27649909257888794,
0.4074185788631439,
0.25979748368263245
],
[
0.3501487076282501,
0.3458843231201172,
0.2438654899597168
],
[
0.12980426847934723,
0.27279528975486755,
0.12390352785587311
],
[
0.4389776885509491,
0.4472886323928833,
0.15493842959403992
],
[
0.4947451055049896,
0.1215558797121048,
0.46804678440093994
],
[
0.4076070189476013,
0.3182707726955414,
0.36709439754486084
],
[
0.18712618947029114,
0.1271672546863556,
0.19190722703933716
],
[
0.3089291751384735,
0.22327086329460144,
0.14891940355300903
],
[
0.378889799118042,
0.29507654905319214,
0.4630947411060333
],
[
0.14441412687301636,
0.18343178927898407,
0.2112361043691635
],
[
0.2136223167181015,
0.2010475993156433,
0.23488080501556396
],
[
0.1485104262828827,
0.23044322431087494,
0.1440969556570053
],
[
0.40699833631515503,
0.19921451807022095,
0.21528899669647217
],
[
0.16625864803791046,
0.16209496557712555,
0.41099050641059875
],
[
0.4706342816352844,
0.4607185423374176,
0.38538724184036255
],
[
0.15955328941345215,
0.2526731789112091,
0.1258438378572464
],
[
0.27645596861839294,
0.2471887171268463,
0.3655036389827728
],
[
0.48072558641433716,
0.18587033450603485,
0.11874216049909592
],
[
0.32947680354118347,
0.3421177566051483,
0.2826484739780426
],
[
0.22274364531040192,
0.4459400177001953,
0.4375954568386078
],
[
0.16980119049549103,
0.17654569447040558,
0.14438512921333313
],
[
0.3976966142654419,
0.3390005826950073,
0.16800610721111298
],
[
0.3277134597301483,
0.402604877948761,
0.23107308149337769
],
[
0.3207840323448181,
0.3076019585132599,
0.24418573081493378
],
[
0.31352096796035767,
0.2760259211063385,
0.18287041783332825
],
[
0.19400371611118317,
0.2872091233730316,
0.4200877249240875
],
[
0.4157150685787201,
0.16084441542625427,
0.47111842036247253
],
[
0.21216757595539093,
0.21133485436439514,
0.25343239307403564
],
[
0.28364571928977966,
0.30081626772880554,
0.35991379618644714
],
[
0.15380434691905975,
0.29351890087127686,
0.15854692459106445
],
[
0.11507381498813629,
0.1427544206380844,
0.3868664503097534
],
[
0.1273266226053238,
0.10876727104187012,
0.11684992909431458
],
[
0.432993620634079,
0.16948646306991577,
0.3123820126056671
],
[
0.23365753889083862,
0.14285247027873993,
0.117621511220932
],
[
0.3091544508934021,
0.18454627692699432,
0.22001750767230988
],
[
0.20372647047042847,
0.19373637437820435,
0.35549601912498474
],
[
0.4604543447494507,
0.4596346318721771,
0.44899746775627136
],
[
0.283939003944397,
0.21922054886817932,
0.19192583858966827
],
[
0.12823466956615448,
0.2267867624759674,
0.154268279671669
],
[
0.11155832558870316,
0.1461743265390396,
0.21099990606307983
],
[
0.1693519800901413,
0.39470532536506653,
0.11266002804040909
],
[
0.13164618611335754,
0.1770446002483368,
0.15961745381355286
],
[
0.4330594539642334,
0.08785655349493027,
0.18969190120697021
],
[
0.1738586127758026,
0.4532787501811981,
0.3413299322128296
],
[
0.14526906609535217,
0.1388753354549408,
0.2560168504714966
],
[
0.1856858879327774,
0.24550595879554749,
0.22082236409187317
],
[
0.4369717240333557,
0.425886332988739,
0.37043431401252747
],
[
0.35394248366355896,
0.2707023620605469,
0.16234971582889557
],
[
0.17669916152954102,
0.1573595404624939,
0.4139661490917206
],
[
0.2815914750099182,
0.16102179884910583,
0.26710090041160583
],
[
0.29185590147972107,
0.17331469058990479,
0.3098703920841217
],
[
0.4079897701740265,
0.272733211517334,
0.33182939887046814
],
[
0.34515219926834106,
0.29923370480537415,
0.37515610456466675
],
[
0.22567912936210632,
0.42377275228500366,
0.2018435001373291
],
[
0.2051728218793869,
0.11492697149515152,
0.14809797704219818
],
[
0.35379916429519653,
0.2150591015815735,
0.1706886887550354
],
[
0.41625112295150757,
0.4670504331588745,
0.32151931524276733
],
[
0.14866942167282104,
0.24495528638362885,
0.46335816383361816
],
[
0.12006267160177231,
0.34981709718704224,
0.10970097035169601
],
[
0.279428631067276,
0.37996864318847656,
0.12361448258161545
],
[
0.24313272535800934,
0.25561684370040894,
0.13757745921611786
],
[
0.2363184094429016,
0.29923370480537415,
0.13216632604599
],
[
0.2571988105773926,
0.1526828557252884,
0.3124895989894867
],
[
0.41544994711875916,
0.2990501821041107,
0.4286174178123474
],
[
0.2243904322385788,
0.1854301393032074,
0.28971952199935913
],
[
0.22121213376522064,
0.3917357325553894,
0.16811366379261017
],
[
0.20991240441799164,
0.3683031499385834,
0.17156505584716797
],
[
0.4028129279613495,
0.3321937918663025,
0.16572663187980652
],
[
0.1638612598180771,
0.48852384090423584,
0.1588045209646225
],
[
0.24374844133853912,
0.22875361144542694,
0.35431236028671265
],
[
0.2844041883945465,
0.4644933342933655,
0.39079946279525757
],
[
0.1330053210258484,
0.36112070083618164,
0.22321827709674835
],
[
0.39910584688186646,
0.20124942064285278,
0.18411631882190704
],
[
0.4278510510921478,
0.46304693818092346,
0.4296000003814697
],
[
0.22069533169269562,
0.1936628520488739,
0.2681259214878082
],
[
0.44691622257232666,
0.43557244539260864,
0.42169421911239624
],
[
0.31918004155158997,
0.3289930820465088,
0.1294279396533966
],
[
0.145906463265419,
0.13503797352313995,
0.1452464610338211
],
[
0.474565327167511,
0.17498759925365448,
0.08103786408901215
],
[
0.23951378464698792,
0.29923370480537415,
0.12034349143505096
],
[
0.21072536706924438,
0.22345665097236633,
0.41911208629608154
],
[
0.450216144323349,
0.21926067769527435,
0.4052693545818329
],
[
0.43164557218551636,
0.42432743310928345,
0.3966224789619446
],
[
0.44685351848602295,
0.4619434177875519,
0.36645224690437317
],
[
0.13675516843795776,
0.14978928864002228,
0.22030705213546753
],
[
0.12139047682285309,
0.38780781626701355,
0.47421255707740784
],
[
0.46543893218040466,
0.1662176251411438,
0.105337955057621
],
[
0.31978920102119446,
0.353864461183548,
0.4400956928730011
],
[
0.30786028504371643,
0.42015624046325684,
0.3595878481864929
],
[
0.4008002281188965,
0.3998754024505615,
0.16971978545188904
],
[
0.20247533917427063,
0.2598496973514557,
0.31049075722694397
],
[
0.15641634166240692,
0.18051019310951233,
0.2142690122127533
],
[
0.2825934588909149,
0.13605156540870667,
0.18161936104297638
],
[
0.25422942638397217,
0.3929879367351532,
0.33059224486351013
],
[
0.4474930465221405,
0.4542589783668518,
0.4316270351409912
],
[
0.45026838779449463,
0.42323270440101624,
0.3742210865020752
],
[
0.46585074067115784,
0.37513217329978943,
0.24602507054805756
],
[
0.4334811568260193,
0.19446375966072083,
0.16675102710723877
],
[
0.35880813002586365,
0.43830639123916626,
0.3471340835094452
],
[
0.40446630120277405,
0.45337149500846863,
0.2590368986129761
],
[
0.28164413571357727,
0.33701270818710327,
0.38862496614456177
],
[
0.21139982342720032,
0.385542631149292,
0.164750337600708
],
[
0.2833697199821472,
0.1660548895597458,
0.16905063390731812
],
[
0.48628145456314087,
0.39683374762535095,
0.4549018144607544
],
[
0.42255058884620667,
0.12292030453681946,
0.2693982422351837
],
[
0.4466954469680786,
0.4508359134197235,
0.42110398411750793
],
[
0.3344976007938385,
0.29923370480537415,
0.3550164997577667
],
[
0.22152170538902283,
0.25506770610809326,
0.19674324989318848
],
[
0.2944912910461426,
0.40148842334747314,
0.4079500734806061
],
[
0.31500399112701416,
0.264303594827652,
0.24273818731307983
],
[
0.24160929024219513,
0.43865495920181274,
0.17754635214805603
],
[
0.35719066858291626,
0.4569880962371826,
0.10947935283184052
],
[
0.17870254814624786,
0.15666398406028748,
0.1511623114347458
],
[
0.1309283971786499,
0.39916273951530457,
0.35891109704971313
],
[
0.41414639353752136,
0.40458348393440247,
0.4373874068260193
],
[
0.223582461476326,
0.20872588455677032,
0.20849093794822693
],
[
0.15978248417377472,
0.3871738910675049,
0.20173275470733643
],
[
0.40426936745643616,
0.17517821490764618,
0.15639956295490265
],
[
0.2445727288722992,
0.24165970087051392,
0.26430395245552063
],
[
0.27058425545692444,
0.45792025327682495,
0.22470559179782867
],
[
0.1343778669834137,
0.23025086522102356,
0.4482075273990631
],
[
0.301599383354187,
0.4401021897792816,
0.30802470445632935
],
[
0.3678591847419739,
0.41126516461372375,
0.12729883193969727
],
[
0.4265497028827667,
0.3064115643501282,
0.12861239910125732
],
[
0.25590309500694275,
0.29923370480537415,
0.40055426955223083
],
[
0.21432910859584808,
0.35219594836235046,
0.3894593119621277
],
[
0.16293656826019287,
0.20112387835979462,
0.1641329973936081
],
[
0.23716431856155396,
0.23458261787891388,
0.22277328372001648
],
[
0.16283032298088074,
0.24371109902858734,
0.18812552094459534
],
[
0.17482346296310425,
0.27810290455818176,
0.2653261721134186
],
[
0.44124141335487366,
0.4653926193714142,
0.18024171888828278
],
[
0.4573570489883423,
0.23765107989311218,
0.223820760846138
],
[
0.48433545231819153,
0.45144760608673096,
0.2777790427207947
],
[
0.1559525728225708,
0.29923370480537415,
0.4903254210948944
],
[
0.3679139316082001,
0.40922513604164124,
0.14046727120876312
],
[
0.3210708498954773,
0.16060097515583038,
0.435181587934494
],
[
0.2536516785621643,
0.23436033725738525,
0.27494579553604126
],
[
0.14041517674922943,
0.45182326436042786,
0.16119162738323212
],
[
0.14711785316467285,
0.1674029380083084,
0.15063847601413727
],
[
0.36876657605171204,
0.48956817388534546,
0.3209514319896698
],
[
0.3705935776233673,
0.21650540828704834,
0.4331192374229431
],
[
0.4736544191837311,
0.4322001338005066,
0.12574197351932526
],
[
0.245101198554039,
0.29923370480537415,
0.40264415740966797
],
[
0.44746536016464233,
0.33772048354148865,
0.1483197808265686
],
[
0.3145650029182434,
0.44416722655296326,
0.35693588852882385
],
[
0.37297913432121277,
0.18011485040187836,
0.16238778829574585
],
[
0.26034584641456604,
0.4297695755958557,
0.32993659377098083
],
[
0.37756991386413574,
0.44577649235725403,
0.44806650280952454
],
[
0.0845801904797554,
0.23957164585590363,
0.12926174700260162
],
[
0.13752779364585876,
0.18119001388549805,
0.19948138296604156
],
[
0.10815407335758209,
0.46910011768341064,
0.4763980805873871
],
[
0.19809484481811523,
0.29923370480537415,
0.366342157125473
],
[
0.18643337488174438,
0.45779895782470703,
0.48765724897384644
],
[
0.13408148288726807,
0.41982096433639526,
0.19417880475521088
],
[
0.2252083420753479,
0.16844086349010468,
0.26387283205986023
],
[
0.2037154585123062,
0.20130690932273865,
0.19506138563156128
],
[
0.18133416771888733,
0.3889558017253876,
0.17258818447589874
],
[
0.4873719811439514,
0.43230339884757996,
0.21695755422115326
],
[
0.4797218143939972,
0.10397535562515259,
0.12909311056137085
],
[
0.23951224982738495,
0.10424715280532837,
0.45742061734199524
],
[
0.2841903865337372,
0.29923370480537415,
0.30746060609817505
],
[
0.2450842559337616,
0.23246590793132782,
0.48158928751945496
],
[
0.3935782015323639,
0.3982652723789215,
0.44564884901046753
],
[
0.42746949195861816,
0.43627870082855225,
0.41870421171188354
],
[
0.1621244102716446,
0.18475289642810822,
0.1475932002067566
],
[
0.21315304934978485,
0.17851777374744415,
0.4540901780128479
],
[
0.459518700838089,
0.46814388036727905,
0.12805859744548798
],
[
0.4473622441291809,
0.1991201937198639,
0.45949587225914
],
[
0.3688053488731384,
0.2689209282398224,
0.490568071603775
],
[
0.4463302791118622,
0.29923370480537415,
0.363638699054718
],
[
0.07843068242073059,
0.1257428079843521,
0.47206512093544006
],
[
0.2896142303943634,
0.48443368077278137,
0.1995886117219925
],
[
0.23569075763225555,
0.17438194155693054,
0.16555899381637573
],
[
0.3339311480522156,
0.138455331325531,
0.4986150860786438
],
[
0.4561089277267456,
0.13907161355018616,
0.37022289633750916
],
[
0.11571026593446732,
0.1735377013683319,
0.3909721076488495
],
[
0.41012808680534363,
0.12148356437683105,
0.13725464046001434
],
[
0.4633272588253021,
0.1216013953089714,
0.36428725719451904
],
[
0.13796786963939667,
0.29923370480537415,
0.46274399757385254
],
[
0.45114168524742126,
0.14195981621742249,
0.46917733550071716
],
[
0.4262934625148773,
0.11466571688652039,
0.19186291098594666
],
[
0.44846612215042114,
0.14936070144176483,
0.29932039976119995
],
[
0.1297784298658371,
0.36136552691459656,
0.1830536276102066
],
[
0.25918319821357727,
0.3930901885032654,
0.42164939641952515
],
[
0.454575777053833,
0.48423320055007935,
0.18333756923675537
],
[
0.4566199481487274,
0.2628677785396576,
0.07379919290542603
],
[
0.46558135747909546,
0.4114809036254883,
0.467506468296051
],
[
0.3389800786972046,
0.28094711899757385,
0.23497305810451508
],
[
0.46289947628974915,
0.17742177844047546,
0.14021877944469452
],
[
0.09659633785486221,
0.13616475462913513,
0.14357511699199677
],
[
0.350506991147995,
0.10470135509967804,
0.12466616928577423
],
[
0.1182580217719078,
0.42097800970077515,
0.13212710618972778
],
[
0.5112250447273254,
0.15854305028915405,
0.1426139622926712
],
[
0.20451374351978302,
0.4863293766975403,
0.294251412153244
],
[
0.48365139961242676,
0.41005825996398926,
0.11694485694169998
],
[
0.24377882480621338,
0.1884140521287918,
0.48632267117500305
],
[
0.31411013007164,
0.29923370480537415,
0.18183420598506927
],
[
0.19102278351783752,
0.20111915469169617,
0.1339617818593979
],
[
0.26393866539001465,
0.22674188017845154,
0.4447513222694397
],
[
0.1592191904783249,
0.2599426805973053,
0.21397040784358978
],
[
0.10093661397695541,
0.31607648730278015,
0.2935095429420471
],
[
0.4517417848110199,
0.11916311085224152,
0.4487104117870331
],
[
0.414072185754776,
0.4277026653289795,
0.4792312979698181
],
[
0.4864226281642914,
0.37999510765075684,
0.4680911898612976
],
[
0.4162227213382721,
0.496494323015213,
0.4781576097011566
],
[
0.1922696977853775,
0.41444700956344604,
0.457570344209671
],
[
0.47464364767074585,
0.40973153710365295,
0.4741663932800293
],
[
0.39086464047431946,
0.4758678078651428,
0.37202033400535583
],
[
0.45488241314888,
0.38954630494117737,
0.35717692971229553
],
[
0.4128759801387787,
0.18781141936779022,
0.18855643272399902
],
[
0.4449164867401123,
0.24285383522510529,
0.48864275217056274
],
[
0.08543943613767624,
0.4856216013431549,
0.4056006669998169
],
[
0.4745456576347351,
0.08682437986135483,
0.4459170699119568
],
[
0.10274919122457504,
0.48529237508773804,
0.47913098335266113
],
[
0.46872222423553467,
0.18523962795734406,
0.15047748386859894
],
[
0.39252278208732605,
0.39467936754226685,
0.5171827077865601
],
[
0.46476298570632935,
0.47057217359542847,
0.13309550285339355
],
[
0.44297391176223755,
0.23084476590156555,
0.1847122311592102
],
[
0.4511570930480957,
0.37978699803352356,
0.4634334444999695
],
[
0.4489362835884094,
0.4209038019180298,
0.1906983107328415
],
[
0.292703241109848,
0.48317569494247437,
0.4351253807544708
],
[
0.4513009488582611,
0.11310037225484848,
0.41090840101242065
],
[
0.15975885093212128,
0.13482822477817535,
0.4434134364128113
],
[
0.46823757886886597,
0.288373738527298,
0.36955058574676514
],
[
0.32527464628219604,
0.3661269247531891,
0.1951110064983368
],
[
0.4789314270019531,
0.07736703008413315,
0.2624352276325226
],
[
0.3614385724067688,
0.13425564765930176,
0.2201119214296341
],
[
0.13200026750564575,
0.322633296251297,
0.3131023943424225
],
[
0.4229698181152344,
0.175356924533844,
0.18913114070892334
],
[
0.44432491064071655,
0.49774083495140076,
0.22631461918354034
],
[
0.4541386663913727,
0.10495408624410629,
0.43360456824302673
],
[
0.15666554868221283,
0.15558116137981415,
0.2319335788488388
],
[
0.23508504033088684,
0.29923370480537415,
0.1499142199754715
],
[
0.39173614978790283,
0.15001919865608215,
0.1313292533159256
],
[
0.41128212213516235,
0.4478446841239929,
0.3802175223827362
],
[
0.4341309368610382,
0.15578855574131012,
0.20559746026992798
],
[
0.14219129085540771,
0.14060185849666595,
0.20822055637836456
],
[
0.4654639959335327,
0.3616563379764557,
0.3889743983745575
],
[
0.24674555659294128,
0.42591360211372375,
0.09854333102703094
],
[
0.4760911762714386,
0.08101151138544083,
0.44511404633522034
],
[
0.15141984820365906,
0.4241148829460144,
0.4395207166671753
],
[
0.40674036741256714,
0.3935491144657135,
0.1797652244567871
],
[
0.4640699625015259,
0.35606488585472107,
0.48520034551620483
],
[
0.45517098903656006,
0.10837940871715546,
0.3634328544139862
],
[
0.42892348766326904,
0.3894287049770355,
0.4179135262966156
],
[
0.1619490683078766,
0.1275283247232437,
0.4848206639289856
],
[
0.4587709605693817,
0.3761414885520935,
0.4999237358570099
],
[
0.45937579870224,
0.10048946738243103,
0.3620624542236328
],
[
0.1622796505689621,
0.06675637513399124,
0.4629978835582733
],
[
0.14350159466266632,
0.3432171046733856,
0.08686857670545578
],
[
0.2418466955423355,
0.29923370480537415,
0.15336613357067108
],
[
0.10652916133403778,
0.3400610387325287,
0.5012139081954956
],
[
0.470577597618103,
0.4068159759044647,
0.18197748064994812
],
[
0.16513541340827942,
0.3945123255252838,
0.3649037182331085
],
[
0.169801726937294,
0.5046741366386414,
0.4489770233631134
],
[
0.381733775138855,
0.42710191011428833,
0.46337172389030457
],
[
0.2477995753288269,
0.1106489896774292,
0.13583549857139587
],
[
0.09561775624752045,
0.48951175808906555,
0.19908744096755981
],
[
0.13161425292491913,
0.1405383050441742,
0.3670024275779724
],
[
0.27499905228614807,
0.28686726093292236,
0.2851425111293793
],
[
0.10630062967538834,
0.4811836779117584,
0.47763416171073914
],
[
0.11440788954496384,
0.31618937849998474,
0.3314228653907776
],
[
0.19934608042240143,
0.1537989377975464,
0.1396467685699463
],
[
0.12085483968257904,
0.49329978227615356,
0.4896600842475891
],
[
0.1220579594373703,
0.4949476718902588,
0.5103369951248169
],
[
0.4522629380226135,
0.16088415682315826,
0.23696646094322205
],
[
0.4699283838272095,
0.17811758816242218,
0.12464112788438797
],
[
0.11091146618127823,
0.48623692989349365,
0.4849981367588043
],
[
0.4842935800552368,
0.29923370480537415,
0.06224353238940239
],
[
0.06589362770318985,
0.29923370480537415,
0.44713032245635986
],
[
0.5308268070220947,
0.1069527193903923,
0.07115095853805542
],
[
0.510724663734436,
0.4425855875015259,
0.49699831008911133
]
],
"model_names": [
"instruct",
"math",
"code"
],
"num_models": 3,
"num_params": 291,
"param_names": [
"model.embed_tokens.weight",
"model.layers.0.self_attn.q_proj.weight",
"model.layers.0.self_attn.k_proj.weight",
"model.layers.0.self_attn.v_proj.weight",
"model.layers.0.self_attn.o_proj.weight",
"model.layers.0.mlp.gate_proj.weight",
"model.layers.0.mlp.up_proj.weight",
"model.layers.0.mlp.down_proj.weight",
"model.layers.0.input_layernorm.weight",
"model.layers.0.post_attention_layernorm.weight",
"model.layers.1.self_attn.q_proj.weight",
"model.layers.1.self_attn.k_proj.weight",
"model.layers.1.self_attn.v_proj.weight",
"model.layers.1.self_attn.o_proj.weight",
"model.layers.1.mlp.gate_proj.weight",
"model.layers.1.mlp.up_proj.weight",
"model.layers.1.mlp.down_proj.weight",
"model.layers.1.input_layernorm.weight",
"model.layers.1.post_attention_layernorm.weight",
"model.layers.2.self_attn.q_proj.weight",
"model.layers.2.self_attn.k_proj.weight",
"model.layers.2.self_attn.v_proj.weight",
"model.layers.2.self_attn.o_proj.weight",
"model.layers.2.mlp.gate_proj.weight",
"model.layers.2.mlp.up_proj.weight",
"model.layers.2.mlp.down_proj.weight",
"model.layers.2.input_layernorm.weight",
"model.layers.2.post_attention_layernorm.weight",
"model.layers.3.self_attn.q_proj.weight",
"model.layers.3.self_attn.k_proj.weight",
"model.layers.3.self_attn.v_proj.weight",
"model.layers.3.self_attn.o_proj.weight",
"model.layers.3.mlp.gate_proj.weight",
"model.layers.3.mlp.up_proj.weight",
"model.layers.3.mlp.down_proj.weight",
"model.layers.3.input_layernorm.weight",
"model.layers.3.post_attention_layernorm.weight",
"model.layers.4.self_attn.q_proj.weight",
"model.layers.4.self_attn.k_proj.weight",
"model.layers.4.self_attn.v_proj.weight",
"model.layers.4.self_attn.o_proj.weight",
"model.layers.4.mlp.gate_proj.weight",
"model.layers.4.mlp.up_proj.weight",
"model.layers.4.mlp.down_proj.weight",
"model.layers.4.input_layernorm.weight",
"model.layers.4.post_attention_layernorm.weight",
"model.layers.5.self_attn.q_proj.weight",
"model.layers.5.self_attn.k_proj.weight",
"model.layers.5.self_attn.v_proj.weight",
"model.layers.5.self_attn.o_proj.weight",
"model.layers.5.mlp.gate_proj.weight",
"model.layers.5.mlp.up_proj.weight",
"model.layers.5.mlp.down_proj.weight",
"model.layers.5.input_layernorm.weight",
"model.layers.5.post_attention_layernorm.weight",
"model.layers.6.self_attn.q_proj.weight",
"model.layers.6.self_attn.k_proj.weight",
"model.layers.6.self_attn.v_proj.weight",
"model.layers.6.self_attn.o_proj.weight",
"model.layers.6.mlp.gate_proj.weight",
"model.layers.6.mlp.up_proj.weight",
"model.layers.6.mlp.down_proj.weight",
"model.layers.6.input_layernorm.weight",
"model.layers.6.post_attention_layernorm.weight",
"model.layers.7.self_attn.q_proj.weight",
"model.layers.7.self_attn.k_proj.weight",
"model.layers.7.self_attn.v_proj.weight",
"model.layers.7.self_attn.o_proj.weight",
"model.layers.7.mlp.gate_proj.weight",
"model.layers.7.mlp.up_proj.weight",
"model.layers.7.mlp.down_proj.weight",
"model.layers.7.input_layernorm.weight",
"model.layers.7.post_attention_layernorm.weight",
"model.layers.8.self_attn.q_proj.weight",
"model.layers.8.self_attn.k_proj.weight",
"model.layers.8.self_attn.v_proj.weight",
"model.layers.8.self_attn.o_proj.weight",
"model.layers.8.mlp.gate_proj.weight",
"model.layers.8.mlp.up_proj.weight",
"model.layers.8.mlp.down_proj.weight",
"model.layers.8.input_layernorm.weight",
"model.layers.8.post_attention_layernorm.weight",
"model.layers.9.self_attn.q_proj.weight",
"model.layers.9.self_attn.k_proj.weight",
"model.layers.9.self_attn.v_proj.weight",
"model.layers.9.self_attn.o_proj.weight",
"model.layers.9.mlp.gate_proj.weight",
"model.layers.9.mlp.up_proj.weight",
"model.layers.9.mlp.down_proj.weight",
"model.layers.9.input_layernorm.weight",
"model.layers.9.post_attention_layernorm.weight",
"model.layers.10.self_attn.q_proj.weight",
"model.layers.10.self_attn.k_proj.weight",
"model.layers.10.self_attn.v_proj.weight",
"model.layers.10.self_attn.o_proj.weight",
"model.layers.10.mlp.gate_proj.weight",
"model.layers.10.mlp.up_proj.weight",
"model.layers.10.mlp.down_proj.weight",
"model.layers.10.input_layernorm.weight",
"model.layers.10.post_attention_layernorm.weight",
"model.layers.11.self_attn.q_proj.weight",
"model.layers.11.self_attn.k_proj.weight",
"model.layers.11.self_attn.v_proj.weight",
"model.layers.11.self_attn.o_proj.weight",
"model.layers.11.mlp.gate_proj.weight",
"model.layers.11.mlp.up_proj.weight",
"model.layers.11.mlp.down_proj.weight",
"model.layers.11.input_layernorm.weight",
"model.layers.11.post_attention_layernorm.weight",
"model.layers.12.self_attn.q_proj.weight",
"model.layers.12.self_attn.k_proj.weight",
"model.layers.12.self_attn.v_proj.weight",
"model.layers.12.self_attn.o_proj.weight",
"model.layers.12.mlp.gate_proj.weight",
"model.layers.12.mlp.up_proj.weight",
"model.layers.12.mlp.down_proj.weight",
"model.layers.12.input_layernorm.weight",
"model.layers.12.post_attention_layernorm.weight",
"model.layers.13.self_attn.q_proj.weight",
"model.layers.13.self_attn.k_proj.weight",
"model.layers.13.self_attn.v_proj.weight",
"model.layers.13.self_attn.o_proj.weight",
"model.layers.13.mlp.gate_proj.weight",
"model.layers.13.mlp.up_proj.weight",
"model.layers.13.mlp.down_proj.weight",
"model.layers.13.input_layernorm.weight",
"model.layers.13.post_attention_layernorm.weight",
"model.layers.14.self_attn.q_proj.weight",
"model.layers.14.self_attn.k_proj.weight",
"model.layers.14.self_attn.v_proj.weight",
"model.layers.14.self_attn.o_proj.weight",
"model.layers.14.mlp.gate_proj.weight",
"model.layers.14.mlp.up_proj.weight",
"model.layers.14.mlp.down_proj.weight",
"model.layers.14.input_layernorm.weight",
"model.layers.14.post_attention_layernorm.weight",
"model.layers.15.self_attn.q_proj.weight",
"model.layers.15.self_attn.k_proj.weight",
"model.layers.15.self_attn.v_proj.weight",
"model.layers.15.self_attn.o_proj.weight",
"model.layers.15.mlp.gate_proj.weight",
"model.layers.15.mlp.up_proj.weight",
"model.layers.15.mlp.down_proj.weight",
"model.layers.15.input_layernorm.weight",
"model.layers.15.post_attention_layernorm.weight",
"model.layers.16.self_attn.q_proj.weight",
"model.layers.16.self_attn.k_proj.weight",
"model.layers.16.self_attn.v_proj.weight",
"model.layers.16.self_attn.o_proj.weight",
"model.layers.16.mlp.gate_proj.weight",
"model.layers.16.mlp.up_proj.weight",
"model.layers.16.mlp.down_proj.weight",
"model.layers.16.input_layernorm.weight",
"model.layers.16.post_attention_layernorm.weight",
"model.layers.17.self_attn.q_proj.weight",
"model.layers.17.self_attn.k_proj.weight",
"model.layers.17.self_attn.v_proj.weight",
"model.layers.17.self_attn.o_proj.weight",
"model.layers.17.mlp.gate_proj.weight",
"model.layers.17.mlp.up_proj.weight",
"model.layers.17.mlp.down_proj.weight",
"model.layers.17.input_layernorm.weight",
"model.layers.17.post_attention_layernorm.weight",
"model.layers.18.self_attn.q_proj.weight",
"model.layers.18.self_attn.k_proj.weight",
"model.layers.18.self_attn.v_proj.weight",
"model.layers.18.self_attn.o_proj.weight",
"model.layers.18.mlp.gate_proj.weight",
"model.layers.18.mlp.up_proj.weight",
"model.layers.18.mlp.down_proj.weight",
"model.layers.18.input_layernorm.weight",
"model.layers.18.post_attention_layernorm.weight",
"model.layers.19.self_attn.q_proj.weight",
"model.layers.19.self_attn.k_proj.weight",
"model.layers.19.self_attn.v_proj.weight",
"model.layers.19.self_attn.o_proj.weight",
"model.layers.19.mlp.gate_proj.weight",
"model.layers.19.mlp.up_proj.weight",
"model.layers.19.mlp.down_proj.weight",
"model.layers.19.input_layernorm.weight",
"model.layers.19.post_attention_layernorm.weight",
"model.layers.20.self_attn.q_proj.weight",
"model.layers.20.self_attn.k_proj.weight",
"model.layers.20.self_attn.v_proj.weight",
"model.layers.20.self_attn.o_proj.weight",
"model.layers.20.mlp.gate_proj.weight",
"model.layers.20.mlp.up_proj.weight",
"model.layers.20.mlp.down_proj.weight",
"model.layers.20.input_layernorm.weight",
"model.layers.20.post_attention_layernorm.weight",
"model.layers.21.self_attn.q_proj.weight",
"model.layers.21.self_attn.k_proj.weight",
"model.layers.21.self_attn.v_proj.weight",
"model.layers.21.self_attn.o_proj.weight",
"model.layers.21.mlp.gate_proj.weight",
"model.layers.21.mlp.up_proj.weight",
"model.layers.21.mlp.down_proj.weight",
"model.layers.21.input_layernorm.weight",
"model.layers.21.post_attention_layernorm.weight",
"model.layers.22.self_attn.q_proj.weight",
"model.layers.22.self_attn.k_proj.weight",
"model.layers.22.self_attn.v_proj.weight",
"model.layers.22.self_attn.o_proj.weight",
"model.layers.22.mlp.gate_proj.weight",
"model.layers.22.mlp.up_proj.weight",
"model.layers.22.mlp.down_proj.weight",
"model.layers.22.input_layernorm.weight",
"model.layers.22.post_attention_layernorm.weight",
"model.layers.23.self_attn.q_proj.weight",
"model.layers.23.self_attn.k_proj.weight",
"model.layers.23.self_attn.v_proj.weight",
"model.layers.23.self_attn.o_proj.weight",
"model.layers.23.mlp.gate_proj.weight",
"model.layers.23.mlp.up_proj.weight",
"model.layers.23.mlp.down_proj.weight",
"model.layers.23.input_layernorm.weight",
"model.layers.23.post_attention_layernorm.weight",
"model.layers.24.self_attn.q_proj.weight",
"model.layers.24.self_attn.k_proj.weight",
"model.layers.24.self_attn.v_proj.weight",
"model.layers.24.self_attn.o_proj.weight",
"model.layers.24.mlp.gate_proj.weight",
"model.layers.24.mlp.up_proj.weight",
"model.layers.24.mlp.down_proj.weight",
"model.layers.24.input_layernorm.weight",
"model.layers.24.post_attention_layernorm.weight",
"model.layers.25.self_attn.q_proj.weight",
"model.layers.25.self_attn.k_proj.weight",
"model.layers.25.self_attn.v_proj.weight",
"model.layers.25.self_attn.o_proj.weight",
"model.layers.25.mlp.gate_proj.weight",
"model.layers.25.mlp.up_proj.weight",
"model.layers.25.mlp.down_proj.weight",
"model.layers.25.input_layernorm.weight",
"model.layers.25.post_attention_layernorm.weight",
"model.layers.26.self_attn.q_proj.weight",
"model.layers.26.self_attn.k_proj.weight",
"model.layers.26.self_attn.v_proj.weight",
"model.layers.26.self_attn.o_proj.weight",
"model.layers.26.mlp.gate_proj.weight",
"model.layers.26.mlp.up_proj.weight",
"model.layers.26.mlp.down_proj.weight",
"model.layers.26.input_layernorm.weight",
"model.layers.26.post_attention_layernorm.weight",
"model.layers.27.self_attn.q_proj.weight",
"model.layers.27.self_attn.k_proj.weight",
"model.layers.27.self_attn.v_proj.weight",
"model.layers.27.self_attn.o_proj.weight",
"model.layers.27.mlp.gate_proj.weight",
"model.layers.27.mlp.up_proj.weight",
"model.layers.27.mlp.down_proj.weight",
"model.layers.27.input_layernorm.weight",
"model.layers.27.post_attention_layernorm.weight",
"model.layers.28.self_attn.q_proj.weight",
"model.layers.28.self_attn.k_proj.weight",
"model.layers.28.self_attn.v_proj.weight",
"model.layers.28.self_attn.o_proj.weight",
"model.layers.28.mlp.gate_proj.weight",
"model.layers.28.mlp.up_proj.weight",
"model.layers.28.mlp.down_proj.weight",
"model.layers.28.input_layernorm.weight",
"model.layers.28.post_attention_layernorm.weight",
"model.layers.29.self_attn.q_proj.weight",
"model.layers.29.self_attn.k_proj.weight",
"model.layers.29.self_attn.v_proj.weight",
"model.layers.29.self_attn.o_proj.weight",
"model.layers.29.mlp.gate_proj.weight",
"model.layers.29.mlp.up_proj.weight",
"model.layers.29.mlp.down_proj.weight",
"model.layers.29.input_layernorm.weight",
"model.layers.29.post_attention_layernorm.weight",
"model.layers.30.self_attn.q_proj.weight",
"model.layers.30.self_attn.k_proj.weight",
"model.layers.30.self_attn.v_proj.weight",
"model.layers.30.self_attn.o_proj.weight",
"model.layers.30.mlp.gate_proj.weight",
"model.layers.30.mlp.up_proj.weight",
"model.layers.30.mlp.down_proj.weight",
"model.layers.30.input_layernorm.weight",
"model.layers.30.post_attention_layernorm.weight",
"model.layers.31.self_attn.q_proj.weight",
"model.layers.31.self_attn.k_proj.weight",
"model.layers.31.self_attn.v_proj.weight",
"model.layers.31.self_attn.o_proj.weight",
"model.layers.31.mlp.gate_proj.weight",
"model.layers.31.mlp.up_proj.weight",
"model.layers.31.mlp.down_proj.weight",
"model.layers.31.input_layernorm.weight",
"model.layers.31.post_attention_layernorm.weight",
"model.norm.weight",
"lm_head.weight"
]
}