{
"lambdas": [
[
0.01599043607711792,
0.07910382002592087,
0.3448607325553894
],
[
0.17009833455085754,
0.24276399612426758,
0.28520259261131287
],
[
0.22930702567100525,
0.1947651356458664,
0.3797568082809448
],
[
0.5765836834907532,
0.5401663780212402,
0.4292379319667816
],
[
0.4299381375312805,
0.08562182635068893,
0.5532997846603394
],
[
0.03850938007235527,
0.14255663752555847,
-0.09585331380367279
],
[
0.517099142074585,
0.4729280471801758,
-0.11753474175930023
],
[
0.4575115442276001,
0.4921486973762512,
0.07641060650348663
],
[
0.1553262174129486,
0.19444818794727325,
0.43875443935394287
],
[
0.33514660596847534,
0.02114877663552761,
0.202809140086174
],
[
0.14150194823741913,
0.10070247203111649,
0.48707714676856995
],
[
0.5090183615684509,
0.37541118264198303,
0.4838451147079468
],
[
0.08164742588996887,
0.18616284430027008,
0.2003747969865799
],
[
0.11110395938158035,
0.5485895276069641,
0.5204711556434631
],
[
0.26736125349998474,
-0.11140822619199753,
0.16532571613788605
],
[
0.1518910974264145,
-0.19768822193145752,
0.33158326148986816
],
[
0.6766136288642883,
0.6073042750358582,
0.43711960315704346
],
[
0.39417725801467896,
0.5125226974487305,
0.19487713277339935
],
[
0.07722551375627518,
0.08235609531402588,
0.4862894117832184
],
[
-0.01921824924647808,
0.03716010972857475,
-0.005826251581311226
],
[
0.04018884897232056,
-0.05662180110812187,
0.012582519091665745
],
[
-0.14602302014827728,
0.6273214221000671,
-0.1370716392993927
],
[
0.23516058921813965,
0.18687807023525238,
0.7543795108795166
],
[
0.2830682396888733,
0.6818769574165344,
0.14090973138809204
],
[
0.3058266341686249,
0.29882270097732544,
0.2535727322101593
],
[
0.5249281525611877,
0.09992153942584991,
0.5279116034507751
],
[
0.1744370311498642,
0.6006382703781128,
0.16376593708992004
],
[
0.5848185420036316,
0.48536574840545654,
0.2751784920692444
],
[
-0.07011879980564117,
0.5549236536026001,
0.5478190779685974
],
[
0.0915273055434227,
0.540806233882904,
0.5738257169723511
],
[
0.17367717623710632,
0.6487545371055603,
0.4134986996650696
],
[
-0.05588354170322418,
0.4881119728088379,
0.6522910594940186
],
[
0.43857619166374207,
-0.2079947143793106,
0.17159408330917358
],
[
-0.10169821232557297,
-0.17214111983776093,
0.32555773854255676
],
[
0.38798969984054565,
0.24356767535209656,
0.09040752798318863
],
[
0.2490927129983902,
0.4746534824371338,
0.5482431650161743
],
[
-0.06856263428926468,
0.4576064348220825,
0.48140206933021545
],
[
0.5001192688941956,
0.334839791059494,
0.17152617871761322
],
[
0.2056734412908554,
0.16234035789966583,
0.19932730495929718
],
[
0.0014050715835765004,
0.4758378267288208,
0.23230105638504028
],
[
0.0058401841670274734,
0.39217662811279297,
0.4583430290222168
],
[
0.18868501484394073,
0.12274983525276184,
0.49226075410842896
],
[
0.12423153966665268,
0.3423975110054016,
0.21390224993228912
],
[
0.34903889894485474,
-0.16818591952323914,
-0.058767449110746384
],
[
0.24379467964172363,
0.3844958245754242,
0.0544603168964386
],
[
-0.029136497527360916,
0.3949006199836731,
0.06975358724594116
],
[
-0.1446920484304428,
-0.041572753340005875,
-0.0007185045396909118
],
[
-0.13842879235744476,
-0.15981432795524597,
-0.09082823991775513
],
[
0.2527516186237335,
0.337313711643219,
0.3083096146583557
],
[
0.547559380531311,
0.12810300290584564,
0.7151609063148499
],
[
0.12702472507953644,
0.21924181282520294,
0.14363932609558105
],
[
0.01562615856528282,
0.6548745632171631,
0.5009778738021851
],
[
0.5593827962875366,
0.6098013520240784,
-0.0461578443646431
],
[
0.013694017194211483,
0.6636450290679932,
0.6194273233413696
],
[
0.2987560033798218,
0.022782018408179283,
0.07610537111759186
],
[
-0.12981519103050232,
-0.19013537466526031,
-0.07035347819328308
],
[
0.3291417062282562,
0.20864982903003693,
0.05053253844380379
],
[
-0.04699275642633438,
0.503038763999939,
0.6677936911582947
],
[
0.1437007337808609,
0.2261950671672821,
0.5974223613739014
],
[
0.04695993289351463,
-0.07446567714214325,
-0.1129576712846756
],
[
0.6254064440727234,
0.5017024874687195,
0.07904713600873947
],
[
0.0019202864496037364,
0.43504422903060913,
0.6323918700218201
],
[
0.4403229355812073,
0.004568339325487614,
0.18745370209217072
],
[
0.0014599814312532544,
0.22413475811481476,
0.33116215467453003
],
[
0.06887754052877426,
0.0065323784947395325,
0.021494783461093903
],
[
0.6652426719665527,
0.35264232754707336,
0.06482046842575073
],
[
0.07228405028581619,
0.17254772782325745,
0.4340400993824005
],
[
0.3716700077056885,
0.5772385001182556,
0.10676165670156479
],
[
0.5635396242141724,
-0.11767765879631042,
0.40892335772514343
],
[
0.020552344620227814,
0.16470174491405487,
0.4842602014541626
],
[
0.07673037797212601,
0.6421074867248535,
0.3806331753730774
],
[
0.2110418677330017,
0.29749801754951477,
0.3663078844547272
],
[
0.38431379199028015,
0.4754885733127594,
0.09653961658477783
],
[
0.08547362685203552,
0.04304228350520134,
0.23581258952617645
],
[
0.18672442436218262,
0.5017492771148682,
0.5003490447998047
],
[
0.10371429473161697,
0.09120243042707443,
0.07684166729450226
],
[
0.5001019239425659,
0.5860259532928467,
0.09642764925956726
],
[
0.5609427094459534,
0.4382409155368805,
-0.02284425124526024
],
[
0.21434681117534637,
0.46153926849365234,
0.3080545961856842
],
[
0.4944053888320923,
0.45209163427352905,
0.12842531502246857
],
[
0.3099152743816376,
0.29749801754951477,
0.08832375705242157
],
[
0.31177574396133423,
0.5996876358985901,
0.13919484615325928
],
[
-0.09139897674322128,
-0.09113945066928864,
-0.21073846518993378
],
[
-0.05761115252971649,
-0.025762978941202164,
-0.013335811905562878
],
[
0.2858734726905823,
0.4327617585659027,
0.250955730676651
],
[
-0.052565380930900574,
0.16300855576992035,
0.5123083591461182
],
[
0.2560293674468994,
0.14097511768341064,
0.5452496409416199
],
[
0.46375420689582825,
0.06282729655504227,
0.23315195739269257
],
[
0.32732337713241577,
0.5193568468093872,
0.015725215896964073
],
[
-0.010673001408576965,
0.374300092458725,
0.10821487754583359
],
[
0.2633635103702545,
0.5545069575309753,
0.2372552752494812
],
[
0.1483982801437378,
0.012030037119984627,
-0.07985374331474304
],
[
0.04111429676413536,
0.11877942830324173,
0.0476335845887661
],
[
0.5204341411590576,
0.5176655650138855,
0.15289345383644104
],
[
-0.03464260324835777,
0.15774759650230408,
0.20076815783977509
],
[
0.3218705952167511,
0.020254574716091156,
-0.03559993579983711
],
[
-0.02109401673078537,
-0.26779305934906006,
-0.09109269082546234
],
[
0.3816053867340088,
0.04293779283761978,
0.29983797669410706
],
[
0.3110748827457428,
0.29749801754951477,
0.13367198407649994
],
[
0.3082156181335449,
0.5470995306968689,
0.14599701762199402
],
[
-0.05987169221043587,
0.17502745985984802,
-0.11723871529102325
],
[
-0.052537936717271805,
-0.10716339200735092,
-0.14999830722808838
],
[
0.21884188055992126,
0.2631400525569916,
0.25435885787010193
],
[
0.051594823598861694,
0.25865405797958374,
0.21938708424568176
],
[
0.05851895734667778,
0.5311360955238342,
-0.04055311530828476
],
[
0.21702082455158234,
0.49768388271331787,
0.03630099445581436
],
[
0.6262938380241394,
0.41813990473747253,
0.06634730100631714
],
[
-0.07463525980710983,
0.22270584106445312,
0.5984657406806946
],
[
-0.12694045901298523,
-0.23482294380664825,
0.6486899852752686
],
[
-0.15358315408229828,
-0.1360277682542801,
-0.22152650356292725
],
[
0.6764119267463684,
-0.1499798744916916,
-0.16230666637420654
],
[
-0.06610509008169174,
0.7990362048149109,
0.5687921047210693
],
[
-0.2541526257991791,
0.7196478843688965,
0.5128430724143982
],
[
0.09824393689632416,
-0.016661161556839943,
-0.163141131401062
],
[
0.6214092373847961,
0.05031277611851692,
-0.27262213826179504
],
[
-0.16061092913150787,
0.3333853781223297,
0.3791975677013397
],
[
0.7842614650726318,
0.4856654107570648,
0.08772221207618713
],
[
-0.1150759756565094,
0.17284426093101501,
0.09132510423660278
],
[
0.25243014097213745,
-0.2290802001953125,
0.3636671304702759
],
[
0.21612147986888885,
0.666143536567688,
0.6417580246925354
],
[
0.7855614423751831,
-0.08744312822818756,
0.6597111821174622
],
[
0.1127760261297226,
0.28794223070144653,
0.2611285448074341
],
[
-0.13818934559822083,
0.11706602573394775,
-0.07560122758150101
],
[
-0.14191126823425293,
0.5854519009590149,
0.4225197732448578
],
[
0.3990960419178009,
0.26778239011764526,
0.38506439328193665
],
[
0.5892043113708496,
0.29749801754951477,
0.0606595017015934
],
[
0.5404478907585144,
0.5369660258293152,
0.07990983128547668
],
[
0.1300719827413559,
-0.004840303678065538,
0.4391452670097351
],
[
0.7454240322113037,
0.5084694623947144,
0.4036027491092682
],
[
0.5027218461036682,
-0.18492116034030914,
0.6051274538040161
],
[
0.040944598615169525,
0.3362106680870056,
0.2611672580242157
],
[
0.13193345069885254,
0.5204035043716431,
0.5407533049583435
],
[
0.5361729264259338,
-0.06258770078420639,
-0.09143968671560287
],
[
0.0044425795786082745,
0.32727351784706116,
-0.1788080632686615
],
[
0.46037060022354126,
0.3551003932952881,
-0.21465198695659637
],
[
0.6152193546295166,
-0.16117939352989197,
0.346040815114975
],
[
0.37264683842658997,
0.501764178276062,
0.36462652683258057
],
[
0.5563479661941528,
0.4359903931617737,
0.3975433111190796
],
[
0.3388786315917969,
0.06975376605987549,
0.5966251492500305
],
[
-0.07709570229053497,
0.3281635046005249,
-0.14663462340831757
],
[
-0.11623705178499222,
-0.20639938116073608,
0.3000643849372864
],
[
-0.09680212289094925,
-0.1651865690946579,
-0.22462184727191925
],
[
-0.023799890652298927,
0.23095734417438507,
-0.17398789525032043
],
[
0.3015843629837036,
0.29749801754951477,
0.24261680245399475
],
[
0.47933879494667053,
-0.1380712240934372,
0.5735412240028381
],
[
0.1501915007829666,
0.5119481086730957,
0.5339949727058411
],
[
0.6020656824111938,
0.5287696123123169,
0.5046725869178772
],
[
0.40036115050315857,
0.8000597953796387,
0.41855588555336
],
[
-0.12582387030124664,
0.66556316614151,
0.24566808342933655
],
[
0.03229839354753494,
-0.12308546900749207,
0.18237566947937012
],
[
-0.20814543962478638,
0.2598685324192047,
0.02552688866853714
],
[
0.44924426078796387,
0.24572095274925232,
0.6801377534866333
],
[
0.8236588835716248,
0.29749801754951477,
0.038368936628103256
],
[
-0.011815950274467468,
-0.061023954302072525,
0.724912166595459
],
[
0.12601391971111298,
0.5528285503387451,
0.7473504543304443
],
[
0.9336178302764893,
0.8735527992248535,
0.7013804912567139
],
[
0.5510903000831604,
0.21719542145729065,
0.021330880001187325
],
[
-0.018494797870516777,
0.40933048725128174,
0.4681331515312195
],
[
-0.24285200238227844,
0.2250830978155136,
-0.20424440503120422
],
[
0.20800316333770752,
0.7496346235275269,
0.29539358615875244
],
[
0.537553071975708,
-0.01649906113743782,
0.5864800214767456
],
[
0.8672025799751282,
0.29749801754951477,
-0.2473604679107666
],
[
0.6353182792663574,
0.09113934636116028,
0.17883849143981934
],
[
0.3175722062587738,
0.587150514125824,
0.6702712178230286
],
[
0.6020896434783936,
0.7407205700874329,
0.6961850523948669
],
[
0.5461270809173584,
0.2971365451812744,
0.6156988143920898
],
[
0.004322703927755356,
-0.014298546127974987,
0.758113443851471
],
[
0.595248818397522,
-0.12689098715782166,
0.5269378423690796
],
[
0.20496299862861633,
-0.13315002620220184,
-0.2005455046892166
],
[
0.11705457419157028,
-0.20519837737083435,
0.6399083137512207
],
[
0.7902838587760925,
0.29749801754951477,
-0.15674856305122375
],
[
0.7074562311172485,
0.23388278484344482,
0.7495532035827637
],
[
0.6721110343933105,
0.0237455815076828,
0.2975677251815796
],
[
0.45000895857810974,
0.21478034555912018,
0.245432510972023
],
[
0.15746952593326569,
0.6868487596511841,
-0.1827555000782013
],
[
0.6097325086593628,
0.6833253502845764,
-0.18156245350837708
],
[
0.5533154010772705,
0.3557550609111786,
-0.19717878103256226
],
[
0.6758953928947449,
-0.22753627598285675,
-0.15693435072898865
],
[
0.30093079805374146,
-0.2000143676996231,
-0.2889554500579834
],
[
0.2660248875617981,
0.29749801754951477,
0.7251256704330444
],
[
0.5735198259353638,
0.6195103526115417,
0.7168455719947815
],
[
0.6466394662857056,
-0.1328648030757904,
0.3343120515346527
],
[
0.5526488423347473,
0.29530736804008484,
0.7730970978736877
],
[
0.2184007465839386,
0.5285938382148743,
0.34033167362213135
],
[
0.4040486812591553,
0.3532559275627136,
-0.022606950253248215
],
[
0.43881240487098694,
0.20694898068904877,
0.7183454632759094
],
[
0.0980994775891304,
-0.21123932301998138,
0.0609867125749588
],
[
0.811890184879303,
0.7597039341926575,
0.2846825122833252
],
[
0.6745463609695435,
0.29749801754951477,
-0.013324028812348843
],
[
-0.1549432873725891,
-0.3104512095451355,
0.6881526708602905
],
[
0.6744190454483032,
0.5039845108985901,
0.16317816078662872
],
[
0.10261714458465576,
0.20150357484817505,
-0.044558774679899216
],
[
0.8403182625770569,
0.3964232802391052,
0.8573811650276184
],
[
0.004042636603116989,
-0.11467218399047852,
0.0020725629292428493
],
[
0.31779393553733826,
0.19405393302440643,
0.7433959245681763
],
[
0.4772237241268158,
0.6787571310997009,
0.5434707403182983
],
[
0.09663095325231552,
0.9135802984237671,
0.5454173684120178
],
[
0.21718333661556244,
0.29749801754951477,
0.7073388695716858
],
[
-0.005789556074887514,
-0.1133071705698967,
0.05186580494046211
],
[
0.44210121035575867,
0.4564633071422577,
0.5316054821014404
],
[
0.575492799282074,
0.5081076622009277,
0.670264720916748
],
[
-0.10280992090702057,
-0.2002638876438141,
-0.16899052262306213
],
[
0.8498543500900269,
-0.11671660095453262,
0.6663899421691895
],
[
0.13588984310626984,
-0.1464170217514038,
-0.1629866063594818
],
[
0.6222835779190063,
0.102676622569561,
-0.24524657428264618
],
[
0.32089030742645264,
-0.19922597706317902,
0.5342713594436646
],
[
0.5251973271369934,
0.17669403553009033,
-0.06222068890929222
],
[
-0.24343685805797577,
0.5095407962799072,
0.80568927526474
],
[
0.41881364583969116,
0.005490098148584366,
0.4252149164676666
],
[
0.4128798246383667,
0.35789409279823303,
0.3850858807563782
],
[
0.8962928056716919,
0.516810953617096,
0.32145845890045166
],
[
0.8185632824897766,
0.5714494585990906,
0.6912118792533875
],
[
0.12971827387809753,
-0.13721288740634918,
-0.2538377046585083
],
[
0.48897865414619446,
-0.23255352675914764,
-0.1063329428434372
],
[
0.6273745894432068,
-0.18879573047161102,
-0.16811954975128174
],
[
0.2424761801958084,
0.29749801754951477,
0.36899450421333313
],
[
-0.2593151926994324,
0.39464235305786133,
0.6507261395454407
],
[
-0.04506643861532211,
0.08662794530391693,
0.5787338614463806
],
[
0.568374514579773,
-0.10791207104921341,
-0.03072788193821907
],
[
0.21234413981437683,
0.6341139078140259,
-0.069466732442379
],
[
0.7911196947097778,
-0.20284181833267212,
-0.02067982591688633
],
[
0.1715625524520874,
-0.07432891428470612,
0.49321284890174866
],
[
0.7599884271621704,
0.4262634515762329,
0.5917020440101624
],
[
0.5973278880119324,
0.7194172739982605,
-0.2999279797077179
],
[
0.36043447256088257,
0.5750863552093506,
0.7785177826881409
],
[
0.27919894456863403,
0.40940025448799133,
0.5622883439064026
],
[
0.3695577085018158,
0.08054511249065399,
0.1326419860124588
],
[
0.19545520842075348,
0.13208957016468048,
0.1535075306892395
],
[
0.5919494032859802,
0.3429125249385834,
0.17981788516044617
],
[
0.1910606175661087,
-0.1356583684682846,
-0.2599838376045227
],
[
0.33339598774909973,
0.688002347946167,
0.4338204860687256
],
[
0.7345695495605469,
0.6212841272354126,
-0.0999615415930748
],
[
0.8091265559196472,
0.2597179114818573,
0.2931313216686249
],
[
0.15012915432453156,
0.48616594076156616,
0.4497654438018799
],
[
-0.1743641495704651,
0.12134350091218948,
0.9147934913635254
],
[
0.48907652497291565,
-0.14691315591335297,
0.3915908932685852
],
[
0.3643360733985901,
0.29856422543525696,
0.3871447741985321
],
[
0.5638335943222046,
-0.2574458420276642,
0.05555402860045433
],
[
0.6228417754173279,
-0.1526241898536682,
0.04258164390921593
],
[
-0.229490727186203,
-0.2857581675052643,
0.5720612406730652
],
[
-0.12484939396381378,
-0.2719227373600006,
0.5750134587287903
],
[
0.9652711749076843,
0.8682180643081665,
0.5889125466346741
],
[
0.4702938497066498,
0.29749801754951477,
0.6427656412124634
],
[
0.5798051357269287,
0.30665135383605957,
0.18816451728343964
],
[
0.11984176933765411,
-0.21717390418052673,
-0.14504815638065338
],
[
-0.33105841279029846,
0.4773516058921814,
-0.23491303622722626
],
[
0.6318503022193909,
-0.23607271909713745,
0.6774725914001465
],
[
0.8063805103302002,
-0.3163478374481201,
0.3729407489299774
],
[
0.2512238621711731,
0.5095811486244202,
0.1339169442653656
],
[
0.5197060704231262,
-0.0028830973897129297,
-0.11290308088064194
],
[
0.8138087391853333,
0.67933589220047,
0.8770127296447754
],
[
0.03774372115731239,
0.29749801754951477,
0.3368535041809082
],
[
0.694883406162262,
0.02835400402545929,
0.20498327910900116
],
[
0.0388697125017643,
-0.03738120198249817,
0.45987847447395325
],
[
0.09020638465881348,
0.04108214005827904,
-0.03813562169671059
],
[
0.741973876953125,
-0.327504426240921,
0.04978661984205246
],
[
0.6137462854385376,
0.1334371268749237,
-0.10636506974697113
],
[
0.9661648869514465,
0.07778263092041016,
-0.2380676567554474
],
[
-0.06368500739336014,
0.8562915921211243,
-0.046653393656015396
],
[
0.7861021161079407,
0.5866456031799316,
0.5126196146011353
],
[
-0.0629267767071724,
0.3183724284172058,
0.6920751929283142
],
[
-0.017139675095677376,
-0.09968429058790207,
0.7182760834693909
],
[
0.8493861556053162,
-0.35104507207870483,
-0.25400182604789734
],
[
-0.18676061928272247,
-0.19935616850852966,
0.9451103210449219
],
[
-0.09611842781305313,
-0.23422367870807648,
0.7284772396087646
],
[
0.7778447270393372,
-0.3390900492668152,
0.7546222805976868
],
[
0.1439206302165985,
-0.03861633315682411,
0.1929101049900055
],
[
0.2531692683696747,
0.12696722149848938,
-0.15549466013908386
],
[
0.9167981147766113,
-0.07732019573450089,
0.5214565396308899
],
[
0.028374074026942253,
0.29749801754951477,
0.942882239818573
],
[
-0.12227128446102142,
-0.1600886583328247,
0.5848357081413269
],
[
0.5874069929122925,
0.887337863445282,
-0.09928861260414124
],
[
0.05977584049105644,
-0.02208755351603031,
-0.05060272663831711
],
[
0.9418545961380005,
0.039121922105550766,
0.7819925546646118
],
[
0.8918648362159729,
0.6931926608085632,
0.5870084762573242
],
[
0.9627878069877625,
0.8483216166496277,
-0.18061578273773193
],
[
-0.2402106076478958,
-0.05498563498258591,
-0.04466891661286354
],
[
0.8309476375579834,
-0.023677941411733627,
0.49072879552841187
],
[
0.07876493781805038,
0.2422550916671753,
0.7482385635375977
],
[
-0.12777724862098694,
0.5849539041519165,
0.6678881049156189
],
[
0.18864518404006958,
0.012955310754477978,
0.24239203333854675
],
[
0.5322927236557007,
0.7108901143074036,
0.4764990508556366
],
[
0.25263598561286926,
-0.28333842754364014,
0.2874367833137512
],
[
0.7813484072685242,
0.5830729007720947,
0.03487842530012131
],
[
0.03489123284816742,
0.4412636160850525,
0.9310927987098694
],
[
-0.35273435711860657,
-0.07631612569093704,
0.7809179425239563
],
[
-0.1988491415977478,
0.8520939946174622,
0.760961651802063
],
[
-0.13238301873207092,
0.29749801754951477,
-0.2438860684633255
],
[
0.9222155809402466,
0.29749801754951477,
-0.29746559262275696
],
[
0.9672008752822876,
0.660213828086853,
-0.396340012550354
],
[
-0.29582199454307556,
0.8106338381767273,
-0.33686840534210205
]
],
"model_names": [
"instruct",
"math",
"code"
],
"num_models": 3,
"num_params": 291,
"param_names": [
"model.embed_tokens.weight",
"model.layers.0.self_attn.q_proj.weight",
"model.layers.0.self_attn.k_proj.weight",
"model.layers.0.self_attn.v_proj.weight",
"model.layers.0.self_attn.o_proj.weight",
"model.layers.0.mlp.gate_proj.weight",
"model.layers.0.mlp.up_proj.weight",
"model.layers.0.mlp.down_proj.weight",
"model.layers.0.input_layernorm.weight",
"model.layers.0.post_attention_layernorm.weight",
"model.layers.1.self_attn.q_proj.weight",
"model.layers.1.self_attn.k_proj.weight",
"model.layers.1.self_attn.v_proj.weight",
"model.layers.1.self_attn.o_proj.weight",
"model.layers.1.mlp.gate_proj.weight",
"model.layers.1.mlp.up_proj.weight",
"model.layers.1.mlp.down_proj.weight",
"model.layers.1.input_layernorm.weight",
"model.layers.1.post_attention_layernorm.weight",
"model.layers.2.self_attn.q_proj.weight",
"model.layers.2.self_attn.k_proj.weight",
"model.layers.2.self_attn.v_proj.weight",
"model.layers.2.self_attn.o_proj.weight",
"model.layers.2.mlp.gate_proj.weight",
"model.layers.2.mlp.up_proj.weight",
"model.layers.2.mlp.down_proj.weight",
"model.layers.2.input_layernorm.weight",
"model.layers.2.post_attention_layernorm.weight",
"model.layers.3.self_attn.q_proj.weight",
"model.layers.3.self_attn.k_proj.weight",
"model.layers.3.self_attn.v_proj.weight",
"model.layers.3.self_attn.o_proj.weight",
"model.layers.3.mlp.gate_proj.weight",
"model.layers.3.mlp.up_proj.weight",
"model.layers.3.mlp.down_proj.weight",
"model.layers.3.input_layernorm.weight",
"model.layers.3.post_attention_layernorm.weight",
"model.layers.4.self_attn.q_proj.weight",
"model.layers.4.self_attn.k_proj.weight",
"model.layers.4.self_attn.v_proj.weight",
"model.layers.4.self_attn.o_proj.weight",
"model.layers.4.mlp.gate_proj.weight",
"model.layers.4.mlp.up_proj.weight",
"model.layers.4.mlp.down_proj.weight",
"model.layers.4.input_layernorm.weight",
"model.layers.4.post_attention_layernorm.weight",
"model.layers.5.self_attn.q_proj.weight",
"model.layers.5.self_attn.k_proj.weight",
"model.layers.5.self_attn.v_proj.weight",
"model.layers.5.self_attn.o_proj.weight",
"model.layers.5.mlp.gate_proj.weight",
"model.layers.5.mlp.up_proj.weight",
"model.layers.5.mlp.down_proj.weight",
"model.layers.5.input_layernorm.weight",
"model.layers.5.post_attention_layernorm.weight",
"model.layers.6.self_attn.q_proj.weight",
"model.layers.6.self_attn.k_proj.weight",
"model.layers.6.self_attn.v_proj.weight",
"model.layers.6.self_attn.o_proj.weight",
"model.layers.6.mlp.gate_proj.weight",
"model.layers.6.mlp.up_proj.weight",
"model.layers.6.mlp.down_proj.weight",
"model.layers.6.input_layernorm.weight",
"model.layers.6.post_attention_layernorm.weight",
"model.layers.7.self_attn.q_proj.weight",
"model.layers.7.self_attn.k_proj.weight",
"model.layers.7.self_attn.v_proj.weight",
"model.layers.7.self_attn.o_proj.weight",
"model.layers.7.mlp.gate_proj.weight",
"model.layers.7.mlp.up_proj.weight",
"model.layers.7.mlp.down_proj.weight",
"model.layers.7.input_layernorm.weight",
"model.layers.7.post_attention_layernorm.weight",
"model.layers.8.self_attn.q_proj.weight",
"model.layers.8.self_attn.k_proj.weight",
"model.layers.8.self_attn.v_proj.weight",
"model.layers.8.self_attn.o_proj.weight",
"model.layers.8.mlp.gate_proj.weight",
"model.layers.8.mlp.up_proj.weight",
"model.layers.8.mlp.down_proj.weight",
"model.layers.8.input_layernorm.weight",
"model.layers.8.post_attention_layernorm.weight",
"model.layers.9.self_attn.q_proj.weight",
"model.layers.9.self_attn.k_proj.weight",
"model.layers.9.self_attn.v_proj.weight",
"model.layers.9.self_attn.o_proj.weight",
"model.layers.9.mlp.gate_proj.weight",
"model.layers.9.mlp.up_proj.weight",
"model.layers.9.mlp.down_proj.weight",
"model.layers.9.input_layernorm.weight",
"model.layers.9.post_attention_layernorm.weight",
"model.layers.10.self_attn.q_proj.weight",
"model.layers.10.self_attn.k_proj.weight",
"model.layers.10.self_attn.v_proj.weight",
"model.layers.10.self_attn.o_proj.weight",
"model.layers.10.mlp.gate_proj.weight",
"model.layers.10.mlp.up_proj.weight",
"model.layers.10.mlp.down_proj.weight",
"model.layers.10.input_layernorm.weight",
"model.layers.10.post_attention_layernorm.weight",
"model.layers.11.self_attn.q_proj.weight",
"model.layers.11.self_attn.k_proj.weight",
"model.layers.11.self_attn.v_proj.weight",
"model.layers.11.self_attn.o_proj.weight",
"model.layers.11.mlp.gate_proj.weight",
"model.layers.11.mlp.up_proj.weight",
"model.layers.11.mlp.down_proj.weight",
"model.layers.11.input_layernorm.weight",
"model.layers.11.post_attention_layernorm.weight",
"model.layers.12.self_attn.q_proj.weight",
"model.layers.12.self_attn.k_proj.weight",
"model.layers.12.self_attn.v_proj.weight",
"model.layers.12.self_attn.o_proj.weight",
"model.layers.12.mlp.gate_proj.weight",
"model.layers.12.mlp.up_proj.weight",
"model.layers.12.mlp.down_proj.weight",
"model.layers.12.input_layernorm.weight",
"model.layers.12.post_attention_layernorm.weight",
"model.layers.13.self_attn.q_proj.weight",
"model.layers.13.self_attn.k_proj.weight",
"model.layers.13.self_attn.v_proj.weight",
"model.layers.13.self_attn.o_proj.weight",
"model.layers.13.mlp.gate_proj.weight",
"model.layers.13.mlp.up_proj.weight",
"model.layers.13.mlp.down_proj.weight",
"model.layers.13.input_layernorm.weight",
"model.layers.13.post_attention_layernorm.weight",
"model.layers.14.self_attn.q_proj.weight",
"model.layers.14.self_attn.k_proj.weight",
"model.layers.14.self_attn.v_proj.weight",
"model.layers.14.self_attn.o_proj.weight",
"model.layers.14.mlp.gate_proj.weight",
"model.layers.14.mlp.up_proj.weight",
"model.layers.14.mlp.down_proj.weight",
"model.layers.14.input_layernorm.weight",
"model.layers.14.post_attention_layernorm.weight",
"model.layers.15.self_attn.q_proj.weight",
"model.layers.15.self_attn.k_proj.weight",
"model.layers.15.self_attn.v_proj.weight",
"model.layers.15.self_attn.o_proj.weight",
"model.layers.15.mlp.gate_proj.weight",
"model.layers.15.mlp.up_proj.weight",
"model.layers.15.mlp.down_proj.weight",
"model.layers.15.input_layernorm.weight",
"model.layers.15.post_attention_layernorm.weight",
"model.layers.16.self_attn.q_proj.weight",
"model.layers.16.self_attn.k_proj.weight",
"model.layers.16.self_attn.v_proj.weight",
"model.layers.16.self_attn.o_proj.weight",
"model.layers.16.mlp.gate_proj.weight",
"model.layers.16.mlp.up_proj.weight",
"model.layers.16.mlp.down_proj.weight",
"model.layers.16.input_layernorm.weight",
"model.layers.16.post_attention_layernorm.weight",
"model.layers.17.self_attn.q_proj.weight",
"model.layers.17.self_attn.k_proj.weight",
"model.layers.17.self_attn.v_proj.weight",
"model.layers.17.self_attn.o_proj.weight",
"model.layers.17.mlp.gate_proj.weight",
"model.layers.17.mlp.up_proj.weight",
"model.layers.17.mlp.down_proj.weight",
"model.layers.17.input_layernorm.weight",
"model.layers.17.post_attention_layernorm.weight",
"model.layers.18.self_attn.q_proj.weight",
"model.layers.18.self_attn.k_proj.weight",
"model.layers.18.self_attn.v_proj.weight",
"model.layers.18.self_attn.o_proj.weight",
"model.layers.18.mlp.gate_proj.weight",
"model.layers.18.mlp.up_proj.weight",
"model.layers.18.mlp.down_proj.weight",
"model.layers.18.input_layernorm.weight",
"model.layers.18.post_attention_layernorm.weight",
"model.layers.19.self_attn.q_proj.weight",
"model.layers.19.self_attn.k_proj.weight",
"model.layers.19.self_attn.v_proj.weight",
"model.layers.19.self_attn.o_proj.weight",
"model.layers.19.mlp.gate_proj.weight",
"model.layers.19.mlp.up_proj.weight",
"model.layers.19.mlp.down_proj.weight",
"model.layers.19.input_layernorm.weight",
"model.layers.19.post_attention_layernorm.weight",
"model.layers.20.self_attn.q_proj.weight",
"model.layers.20.self_attn.k_proj.weight",
"model.layers.20.self_attn.v_proj.weight",
"model.layers.20.self_attn.o_proj.weight",
"model.layers.20.mlp.gate_proj.weight",
"model.layers.20.mlp.up_proj.weight",
"model.layers.20.mlp.down_proj.weight",
"model.layers.20.input_layernorm.weight",
"model.layers.20.post_attention_layernorm.weight",
"model.layers.21.self_attn.q_proj.weight",
"model.layers.21.self_attn.k_proj.weight",
"model.layers.21.self_attn.v_proj.weight",
"model.layers.21.self_attn.o_proj.weight",
"model.layers.21.mlp.gate_proj.weight",
"model.layers.21.mlp.up_proj.weight",
"model.layers.21.mlp.down_proj.weight",
"model.layers.21.input_layernorm.weight",
"model.layers.21.post_attention_layernorm.weight",
"model.layers.22.self_attn.q_proj.weight",
"model.layers.22.self_attn.k_proj.weight",
"model.layers.22.self_attn.v_proj.weight",
"model.layers.22.self_attn.o_proj.weight",
"model.layers.22.mlp.gate_proj.weight",
"model.layers.22.mlp.up_proj.weight",
"model.layers.22.mlp.down_proj.weight",
"model.layers.22.input_layernorm.weight",
"model.layers.22.post_attention_layernorm.weight",
"model.layers.23.self_attn.q_proj.weight",
"model.layers.23.self_attn.k_proj.weight",
"model.layers.23.self_attn.v_proj.weight",
"model.layers.23.self_attn.o_proj.weight",
"model.layers.23.mlp.gate_proj.weight",
"model.layers.23.mlp.up_proj.weight",
"model.layers.23.mlp.down_proj.weight",
"model.layers.23.input_layernorm.weight",
"model.layers.23.post_attention_layernorm.weight",
"model.layers.24.self_attn.q_proj.weight",
"model.layers.24.self_attn.k_proj.weight",
"model.layers.24.self_attn.v_proj.weight",
"model.layers.24.self_attn.o_proj.weight",
"model.layers.24.mlp.gate_proj.weight",
"model.layers.24.mlp.up_proj.weight",
"model.layers.24.mlp.down_proj.weight",
"model.layers.24.input_layernorm.weight",
"model.layers.24.post_attention_layernorm.weight",
"model.layers.25.self_attn.q_proj.weight",
"model.layers.25.self_attn.k_proj.weight",
"model.layers.25.self_attn.v_proj.weight",
"model.layers.25.self_attn.o_proj.weight",
"model.layers.25.mlp.gate_proj.weight",
"model.layers.25.mlp.up_proj.weight",
"model.layers.25.mlp.down_proj.weight",
"model.layers.25.input_layernorm.weight",
"model.layers.25.post_attention_layernorm.weight",
"model.layers.26.self_attn.q_proj.weight",
"model.layers.26.self_attn.k_proj.weight",
"model.layers.26.self_attn.v_proj.weight",
"model.layers.26.self_attn.o_proj.weight",
"model.layers.26.mlp.gate_proj.weight",
"model.layers.26.mlp.up_proj.weight",
"model.layers.26.mlp.down_proj.weight",
"model.layers.26.input_layernorm.weight",
"model.layers.26.post_attention_layernorm.weight",
"model.layers.27.self_attn.q_proj.weight",
"model.layers.27.self_attn.k_proj.weight",
"model.layers.27.self_attn.v_proj.weight",
"model.layers.27.self_attn.o_proj.weight",
"model.layers.27.mlp.gate_proj.weight",
"model.layers.27.mlp.up_proj.weight",
"model.layers.27.mlp.down_proj.weight",
"model.layers.27.input_layernorm.weight",
"model.layers.27.post_attention_layernorm.weight",
"model.layers.28.self_attn.q_proj.weight",
"model.layers.28.self_attn.k_proj.weight",
"model.layers.28.self_attn.v_proj.weight",
"model.layers.28.self_attn.o_proj.weight",
"model.layers.28.mlp.gate_proj.weight",
"model.layers.28.mlp.up_proj.weight",
"model.layers.28.mlp.down_proj.weight",
"model.layers.28.input_layernorm.weight",
"model.layers.28.post_attention_layernorm.weight",
"model.layers.29.self_attn.q_proj.weight",
"model.layers.29.self_attn.k_proj.weight",
"model.layers.29.self_attn.v_proj.weight",
"model.layers.29.self_attn.o_proj.weight",
"model.layers.29.mlp.gate_proj.weight",
"model.layers.29.mlp.up_proj.weight",
"model.layers.29.mlp.down_proj.weight",
"model.layers.29.input_layernorm.weight",
"model.layers.29.post_attention_layernorm.weight",
"model.layers.30.self_attn.q_proj.weight",
"model.layers.30.self_attn.k_proj.weight",
"model.layers.30.self_attn.v_proj.weight",
"model.layers.30.self_attn.o_proj.weight",
"model.layers.30.mlp.gate_proj.weight",
"model.layers.30.mlp.up_proj.weight",
"model.layers.30.mlp.down_proj.weight",
"model.layers.30.input_layernorm.weight",
"model.layers.30.post_attention_layernorm.weight",
"model.layers.31.self_attn.q_proj.weight",
"model.layers.31.self_attn.k_proj.weight",
"model.layers.31.self_attn.v_proj.weight",
"model.layers.31.self_attn.o_proj.weight",
"model.layers.31.mlp.gate_proj.weight",
"model.layers.31.mlp.up_proj.weight",
"model.layers.31.mlp.down_proj.weight",
"model.layers.31.input_layernorm.weight",
"model.layers.31.post_attention_layernorm.weight",
"model.norm.weight",
"lm_head.weight"
]
}