{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.615222974550549,
  "eval_steps": 500,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009339248190520663,
      "grad_norm": 6.638877692627699,
      "learning_rate": 9.345794392523364e-07,
      "loss": 9.2917,
      "step": 1
    },
    {
      "epoch": 0.009339248190520663,
      "grad_norm": 1.1560921335705272,
      "learning_rate": 9.345794392523365e-06,
      "loss": 9.0876,
      "step": 10
    },
    {
      "epoch": 0.018678496381041326,
      "grad_norm": 0.8415132296956432,
      "learning_rate": 1.869158878504673e-05,
      "loss": 8.2164,
      "step": 20
    },
    {
      "epoch": 0.02801774457156199,
      "grad_norm": 0.45381630992958155,
      "learning_rate": 2.8037383177570094e-05,
      "loss": 7.5184,
      "step": 30
    },
    {
      "epoch": 0.03735699276208265,
      "grad_norm": 0.8400636107958425,
      "learning_rate": 3.738317757009346e-05,
      "loss": 6.6507,
      "step": 40
    },
    {
      "epoch": 0.046696240952603316,
      "grad_norm": 0.557696240829066,
      "learning_rate": 4.672897196261683e-05,
      "loss": 5.8909,
      "step": 50
    },
    {
      "epoch": 0.05603548914312398,
      "grad_norm": 0.3971996057467842,
      "learning_rate": 5.607476635514019e-05,
      "loss": 5.4127,
      "step": 60
    },
    {
      "epoch": 0.06537473733364464,
      "grad_norm": 0.2932710540265688,
      "learning_rate": 6.542056074766355e-05,
      "loss": 5.0106,
      "step": 70
    },
    {
      "epoch": 0.0747139855241653,
      "grad_norm": 0.3682690443551033,
      "learning_rate": 7.476635514018692e-05,
      "loss": 4.6042,
      "step": 80
    },
    {
      "epoch": 0.08405323371468597,
      "grad_norm": 0.3132971920011515,
      "learning_rate": 8.411214953271028e-05,
      "loss": 4.2031,
      "step": 90
    },
    {
      "epoch": 0.09339248190520663,
      "grad_norm": 0.6731868159213446,
      "learning_rate": 9.345794392523365e-05,
      "loss": 3.9423,
      "step": 100
    },
    {
      "epoch": 0.1027317300957273,
      "grad_norm": 0.27848867836763197,
      "learning_rate": 0.000102803738317757,
      "loss": 3.7157,
      "step": 110
    },
    {
      "epoch": 0.11207097828624796,
      "grad_norm": 0.24642109032991807,
      "learning_rate": 0.00011214953271028037,
      "loss": 3.4516,
      "step": 120
    },
    {
      "epoch": 0.12141022647676862,
      "grad_norm": 0.25717384664029797,
      "learning_rate": 0.00012149532710280373,
      "loss": 3.2167,
      "step": 130
    },
    {
      "epoch": 0.13074947466728928,
      "grad_norm": 0.20912922668565637,
      "learning_rate": 0.0001308411214953271,
      "loss": 3.0237,
      "step": 140
    },
    {
      "epoch": 0.14008872285780993,
      "grad_norm": 0.15805888388706113,
      "learning_rate": 0.00014018691588785047,
      "loss": 2.8529,
      "step": 150
    },
    {
      "epoch": 0.1494279710483306,
      "grad_norm": 0.23370349497479534,
      "learning_rate": 0.00014953271028037384,
      "loss": 2.7078,
      "step": 160
    },
    {
      "epoch": 0.15876721923885126,
      "grad_norm": 0.1802138633012483,
      "learning_rate": 0.0001588785046728972,
      "loss": 2.6115,
      "step": 170
    },
    {
      "epoch": 0.16810646742937194,
      "grad_norm": 0.13354347610039718,
      "learning_rate": 0.00016822429906542056,
      "loss": 2.5309,
      "step": 180
    },
    {
      "epoch": 0.17744571561989259,
      "grad_norm": 0.09414865188086892,
      "learning_rate": 0.00017757009345794393,
      "loss": 2.4452,
      "step": 190
    },
    {
      "epoch": 0.18678496381041326,
      "grad_norm": 0.08333601554768896,
      "learning_rate": 0.0001869158878504673,
      "loss": 2.3832,
      "step": 200
    },
    {
      "epoch": 0.1961242120009339,
      "grad_norm": 0.15926414699806835,
      "learning_rate": 0.00019626168224299065,
      "loss": 2.3492,
      "step": 210
    },
    {
      "epoch": 0.2054634601914546,
      "grad_norm": 0.09492820761057012,
      "learning_rate": 0.0001999989254250208,
      "loss": 2.323,
      "step": 220
    },
    {
      "epoch": 0.21480270838197524,
      "grad_norm": 0.0801349259356147,
      "learning_rate": 0.00019999235866155886,
      "loss": 2.2731,
      "step": 230
    },
    {
      "epoch": 0.22414195657249592,
      "grad_norm": 0.12210960524693895,
      "learning_rate": 0.00019997982251228469,
      "loss": 2.2433,
      "step": 240
    },
    {
      "epoch": 0.23348120476301656,
      "grad_norm": 3.14289498732125,
      "learning_rate": 0.00019996131772558666,
      "loss": 3.2769,
      "step": 250
    },
    {
      "epoch": 0.24282045295353724,
      "grad_norm": 1.632940983166179,
      "learning_rate": 0.00019993684540617132,
      "loss": 4.9343,
      "step": 260
    },
    {
      "epoch": 0.2521597011440579,
      "grad_norm": 3.4831252230225416,
      "learning_rate": 0.00019990640701499736,
      "loss": 4.2768,
      "step": 270
    },
    {
      "epoch": 0.26149894933457857,
      "grad_norm": 1.6069045920523788,
      "learning_rate": 0.00019987000436918874,
      "loss": 5.9581,
      "step": 280
    },
    {
      "epoch": 0.27083819752509924,
      "grad_norm": 0.2220907936615993,
      "learning_rate": 0.00019982763964192585,
      "loss": 3.8228,
      "step": 290
    },
    {
      "epoch": 0.28017744571561987,
      "grad_norm": 0.24737284913291765,
      "learning_rate": 0.00019977931536231596,
      "loss": 3.1413,
      "step": 300
    },
    {
      "epoch": 0.28951669390614054,
      "grad_norm": 4.010404518241152,
      "learning_rate": 0.00019972503441524224,
      "loss": 2.8432,
      "step": 310
    },
    {
      "epoch": 0.2988559420966612,
      "grad_norm": 0.1515583580811596,
      "learning_rate": 0.00019966480004119142,
      "loss": 2.7859,
      "step": 320
    },
    {
      "epoch": 0.3081951902871819,
      "grad_norm": 0.11259395750650594,
      "learning_rate": 0.00019959861583606045,
      "loss": 2.5821,
      "step": 330
    },
    {
      "epoch": 0.3175344384777025,
      "grad_norm": 0.22514797814956813,
      "learning_rate": 0.00019952648575094183,
      "loss": 2.4517,
      "step": 340
    },
    {
      "epoch": 0.3268736866682232,
      "grad_norm": 0.08040136172033542,
      "learning_rate": 0.00019944841409188767,
      "loss": 2.3794,
      "step": 350
    },
    {
      "epoch": 0.3362129348587439,
      "grad_norm": 0.054758073593565354,
      "learning_rate": 0.00019936440551965263,
      "loss": 2.3232,
      "step": 360
    },
    {
      "epoch": 0.34555218304926455,
      "grad_norm": 0.06742998909645591,
      "learning_rate": 0.00019927446504941577,
      "loss": 2.2776,
      "step": 370
    },
    {
      "epoch": 0.35489143123978517,
      "grad_norm": 0.048780907584876736,
      "learning_rate": 0.00019917859805048096,
      "loss": 2.2376,
      "step": 380
    },
    {
      "epoch": 0.36423067943030585,
      "grad_norm": 0.0475325963052214,
      "learning_rate": 0.00019907681024595663,
      "loss": 2.2191,
      "step": 390
    },
    {
      "epoch": 0.3735699276208265,
      "grad_norm": 0.054089563211590065,
      "learning_rate": 0.00019896910771241387,
      "loss": 2.1961,
      "step": 400
    },
    {
      "epoch": 0.3829091758113472,
      "grad_norm": 0.21798406131864823,
      "learning_rate": 0.00019885549687952372,
      "loss": 2.2078,
      "step": 410
    },
    {
      "epoch": 0.3922484240018678,
      "grad_norm": 0.8673185709111124,
      "learning_rate": 0.00019873598452967338,
      "loss": 2.3731,
      "step": 420
    },
    {
      "epoch": 0.4015876721923885,
      "grad_norm": 0.22424350669971718,
      "learning_rate": 0.0001986105777975613,
      "loss": 2.6195,
      "step": 430
    },
    {
      "epoch": 0.4109269203829092,
      "grad_norm": 0.307418135168262,
      "learning_rate": 0.00019847928416977126,
      "loss": 2.3624,
      "step": 440
    },
    {
      "epoch": 0.42026616857342985,
      "grad_norm": 0.07944722668080402,
      "learning_rate": 0.00019834211148432536,
      "loss": 2.2799,
      "step": 450
    },
    {
      "epoch": 0.4296054167639505,
      "grad_norm": 0.18146933758664588,
      "learning_rate": 0.00019819906793021614,
      "loss": 2.2177,
      "step": 460
    },
    {
      "epoch": 0.43894466495447115,
      "grad_norm": 0.07035825837333018,
      "learning_rate": 0.0001980501620469178,
      "loss": 2.1767,
      "step": 470
    },
    {
      "epoch": 0.44828391314499183,
      "grad_norm": 0.04596186944454228,
      "learning_rate": 0.0001978954027238763,
      "loss": 2.1598,
      "step": 480
    },
    {
      "epoch": 0.4576231613355125,
      "grad_norm": 0.041342347745088055,
      "learning_rate": 0.0001977347991999786,
      "loss": 2.131,
      "step": 490
    },
    {
      "epoch": 0.46696240952603313,
      "grad_norm": 0.04172063219841485,
      "learning_rate": 0.00019756836106300137,
      "loss": 2.1231,
      "step": 500
    },
    {
      "epoch": 0.4763016577165538,
      "grad_norm": 0.03373646457711144,
      "learning_rate": 0.00019739609824903843,
      "loss": 2.1146,
      "step": 510
    },
    {
      "epoch": 0.4856409059070745,
      "grad_norm": 0.03736871030676605,
      "learning_rate": 0.00019721802104190748,
      "loss": 2.1003,
      "step": 520
    },
    {
      "epoch": 0.49498015409759516,
      "grad_norm": 0.033931028038211034,
      "learning_rate": 0.00019703414007253645,
      "loss": 2.0983,
      "step": 530
    },
    {
      "epoch": 0.5043194022881158,
      "grad_norm": 0.03790055446070549,
      "learning_rate": 0.00019684446631832868,
      "loss": 2.092,
      "step": 540
    },
    {
      "epoch": 0.5136586504786365,
      "grad_norm": 0.030956192803893078,
      "learning_rate": 0.00019664901110250758,
      "loss": 2.0807,
      "step": 550
    },
    {
      "epoch": 0.5229978986691571,
      "grad_norm": 0.03542530209935129,
      "learning_rate": 0.00019644778609344068,
      "loss": 2.0773,
      "step": 560
    },
    {
      "epoch": 0.5323371468596778,
      "grad_norm": 0.040947757568902336,
      "learning_rate": 0.00019624080330394306,
      "loss": 2.0649,
      "step": 570
    },
    {
      "epoch": 0.5416763950501985,
      "grad_norm": 0.034273415973688146,
      "learning_rate": 0.00019602807509056018,
      "loss": 2.0479,
      "step": 580
    },
    {
      "epoch": 0.5510156432407192,
      "grad_norm": 0.031427481498873144,
      "learning_rate": 0.00019580961415283028,
      "loss": 2.0563,
      "step": 590
    },
    {
      "epoch": 0.5603548914312397,
      "grad_norm": 0.03141549752041532,
      "learning_rate": 0.00019558543353252611,
      "loss": 2.0503,
      "step": 600
    },
    {
      "epoch": 0.5696941396217604,
      "grad_norm": 0.033012392726428204,
      "learning_rate": 0.00019535554661287652,
      "loss": 2.0389,
      "step": 610
    },
    {
      "epoch": 0.5790333878122811,
      "grad_norm": 0.02913261992661444,
      "learning_rate": 0.0001951199671177673,
      "loss": 2.036,
      "step": 620
    },
    {
      "epoch": 0.5883726360028018,
      "grad_norm": 0.030543903708435332,
      "learning_rate": 0.00019487870911092214,
      "loss": 2.0326,
      "step": 630
    },
    {
      "epoch": 0.5977118841933224,
      "grad_norm": 0.03215005545393897,
      "learning_rate": 0.00019463178699506277,
      "loss": 2.0231,
      "step": 640
    },
    {
      "epoch": 0.6070511323838431,
      "grad_norm": 0.03823630791937631,
      "learning_rate": 0.00019437921551104933,
      "loss": 2.0293,
      "step": 650
    },
    {
      "epoch": 0.6163903805743638,
      "grad_norm": 0.03200103149471209,
      "learning_rate": 0.00019412100973700038,
      "loss": 2.017,
      "step": 660
    },
    {
      "epoch": 0.6257296287648845,
      "grad_norm": 0.03841804153577787,
      "learning_rate": 0.00019385718508739262,
      "loss": 2.0135,
      "step": 670
    },
    {
      "epoch": 0.635068876955405,
      "grad_norm": 0.03052396655271533,
      "learning_rate": 0.0001935877573121407,
      "loss": 2.0237,
      "step": 680
    },
    {
      "epoch": 0.6444081251459257,
      "grad_norm": 0.033817837533771815,
      "learning_rate": 0.00019331274249565717,
      "loss": 2.0069,
      "step": 690
    },
    {
      "epoch": 0.6537473733364464,
      "grad_norm": 0.028286999650643876,
      "learning_rate": 0.00019303215705589194,
      "loss": 2.0112,
      "step": 700
    },
    {
      "epoch": 0.6630866215269671,
      "grad_norm": 0.030693948626257357,
      "learning_rate": 0.00019274601774335243,
      "loss": 2.0,
      "step": 710
    },
    {
      "epoch": 0.6724258697174877,
      "grad_norm": 0.029984615635953022,
      "learning_rate": 0.0001924543416401035,
      "loss": 2.0028,
      "step": 720
    },
    {
      "epoch": 0.6817651179080084,
      "grad_norm": 0.02915985673921391,
      "learning_rate": 0.00019215714615874755,
      "loss": 2.0031,
      "step": 730
    },
    {
      "epoch": 0.6911043660985291,
      "grad_norm": 0.028305399777245336,
      "learning_rate": 0.00019185444904138528,
      "loss": 1.9924,
      "step": 740
    },
    {
      "epoch": 0.7004436142890498,
      "grad_norm": 0.036720505429756495,
      "learning_rate": 0.00019154626835855628,
      "loss": 1.9981,
      "step": 750
    },
    {
      "epoch": 0.7097828624795703,
      "grad_norm": 0.0287124048917296,
      "learning_rate": 0.00019123262250816034,
      "loss": 1.9868,
      "step": 760
    },
    {
      "epoch": 0.719122110670091,
      "grad_norm": 0.03318092492837997,
      "learning_rate": 0.00019091353021435915,
      "loss": 1.9943,
      "step": 770
    },
    {
      "epoch": 0.7284613588606117,
      "grad_norm": 0.054035272137015325,
      "learning_rate": 0.00019058901052645844,
      "loss": 1.9838,
      "step": 780
    },
    {
      "epoch": 0.7378006070511324,
      "grad_norm": 0.03184392761983255,
      "learning_rate": 0.00019025908281777078,
      "loss": 1.982,
      "step": 790
    },
    {
      "epoch": 0.747139855241653,
      "grad_norm": 0.029409948164434735,
      "learning_rate": 0.00018992376678445908,
      "loss": 1.9693,
      "step": 800
    },
    {
      "epoch": 0.7564791034321737,
      "grad_norm": 0.029656963043919016,
      "learning_rate": 0.00018958308244436064,
      "loss": 1.9914,
      "step": 810
    },
    {
      "epoch": 0.7658183516226944,
      "grad_norm": 0.030843610865326686,
      "learning_rate": 0.00018923705013579233,
      "loss": 1.9749,
      "step": 820
    },
    {
      "epoch": 0.7751575998132151,
      "grad_norm": 0.0377584286045999,
      "learning_rate": 0.00018888569051633613,
      "loss": 1.9606,
      "step": 830
    },
    {
      "epoch": 0.7844968480037356,
      "grad_norm": 0.03147699916274391,
      "learning_rate": 0.00018852902456160616,
      "loss": 1.9696,
      "step": 840
    },
    {
      "epoch": 0.7938360961942563,
      "grad_norm": 0.029193153251471263,
      "learning_rate": 0.0001881670735639963,
      "loss": 1.9687,
      "step": 850
    },
    {
      "epoch": 0.803175344384777,
      "grad_norm": 0.030201895228156087,
      "learning_rate": 0.00018779985913140924,
      "loss": 1.9678,
      "step": 860
    },
    {
      "epoch": 0.8125145925752977,
      "grad_norm": 0.029272319485493213,
      "learning_rate": 0.00018742740318596632,
      "loss": 1.9697,
      "step": 870
    },
    {
      "epoch": 0.8218538407658184,
      "grad_norm": 0.033740176465285654,
      "learning_rate": 0.000187049727962699,
      "loss": 1.9647,
      "step": 880
    },
    {
      "epoch": 0.831193088956339,
      "grad_norm": 0.029539399251208593,
      "learning_rate": 0.0001866668560082213,
      "loss": 1.9627,
      "step": 890
    },
    {
      "epoch": 0.8405323371468597,
      "grad_norm": 0.028666077337951026,
      "learning_rate": 0.0001862788101793839,
      "loss": 1.9529,
      "step": 900
    },
    {
      "epoch": 0.8498715853373804,
      "grad_norm": 0.030280792492665805,
      "learning_rate": 0.0001858856136419097,
      "loss": 1.9536,
      "step": 910
    },
    {
      "epoch": 0.859210833527901,
      "grad_norm": 0.03921536105057096,
      "learning_rate": 0.0001854872898690106,
      "loss": 1.9474,
      "step": 920
    },
    {
      "epoch": 0.8685500817184216,
      "grad_norm": 0.030632523637038354,
      "learning_rate": 0.0001850838626399865,
      "loss": 1.9423,
      "step": 930
    },
    {
      "epoch": 0.8778893299089423,
      "grad_norm": 0.04615147601979514,
      "learning_rate": 0.00018467535603880548,
      "loss": 1.946,
      "step": 940
    },
    {
      "epoch": 0.887228578099463,
      "grad_norm": 0.028216236017006333,
      "learning_rate": 0.00018426179445266616,
      "loss": 1.9408,
      "step": 950
    },
    {
      "epoch": 0.8965678262899837,
      "grad_norm": 0.0282407563402959,
      "learning_rate": 0.00018384320257054177,
      "loss": 1.9447,
      "step": 960
    },
    {
      "epoch": 0.9059070744805043,
      "grad_norm": 0.029365880854687894,
      "learning_rate": 0.0001834196053817062,
      "loss": 1.9389,
      "step": 970
    },
    {
      "epoch": 0.915246322671025,
      "grad_norm": 0.02855404439130719,
      "learning_rate": 0.00018299102817424234,
      "loss": 1.9425,
      "step": 980
    },
    {
      "epoch": 0.9245855708615457,
      "grad_norm": 0.03238310984070135,
      "learning_rate": 0.00018255749653353225,
      "loss": 1.9392,
      "step": 990
    },
    {
      "epoch": 0.9339248190520663,
      "grad_norm": 0.030115350805430388,
      "learning_rate": 0.00018211903634072983,
      "loss": 1.956,
      "step": 1000
    },
    {
      "epoch": 0.9432640672425869,
      "grad_norm": 0.039090564032501135,
      "learning_rate": 0.0001816756737712158,
      "loss": 1.9358,
      "step": 1010
    },
    {
      "epoch": 0.9526033154331076,
      "grad_norm": 0.028567954476327023,
      "learning_rate": 0.000181227435293035,
      "loss": 1.9342,
      "step": 1020
    },
    {
      "epoch": 0.9619425636236283,
      "grad_norm": 0.028158653241284505,
      "learning_rate": 0.00018077434766531624,
      "loss": 1.9287,
      "step": 1030
    },
    {
      "epoch": 0.971281811814149,
      "grad_norm": 0.02927795717651538,
      "learning_rate": 0.00018031643793667504,
      "loss": 1.9298,
      "step": 1040
    },
    {
      "epoch": 0.9806210600046696,
      "grad_norm": 0.06298340584032344,
      "learning_rate": 0.0001798537334435986,
      "loss": 1.9303,
      "step": 1050
    },
    {
      "epoch": 0.9899603081951903,
      "grad_norm": 0.03627278406983255,
      "learning_rate": 0.00017938626180881407,
      "loss": 1.9285,
      "step": 1060
    },
    {
      "epoch": 0.999299556385711,
      "grad_norm": 0.03916823421329747,
      "learning_rate": 0.00017891405093963938,
      "loss": 1.9239,
      "step": 1070
    },
    {
      "epoch": 1.0087555451786132,
      "grad_norm": 0.03383539251115568,
      "learning_rate": 0.00017843712902631723,
      "loss": 1.8855,
      "step": 1080
    },
    {
      "epoch": 1.0180947933691338,
      "grad_norm": 0.02931021936560147,
      "learning_rate": 0.00017795552454033224,
      "loss": 1.8004,
      "step": 1090
    },
    {
      "epoch": 1.0274340415596543,
      "grad_norm": 0.029173295095350292,
      "learning_rate": 0.0001774692662327113,
      "loss": 1.7912,
      "step": 1100
    },
    {
      "epoch": 1.0367732897501751,
      "grad_norm": 0.035210065642974735,
      "learning_rate": 0.000176978383132307,
      "loss": 1.7902,
      "step": 1110
    },
    {
      "epoch": 1.0461125379406957,
      "grad_norm": 0.029794447659573477,
      "learning_rate": 0.00017648290454406475,
      "loss": 1.8072,
      "step": 1120
    },
    {
      "epoch": 1.0554517861312165,
      "grad_norm": 0.03194584938279939,
      "learning_rate": 0.0001759828600472734,
      "loss": 1.803,
      "step": 1130
    },
    {
      "epoch": 1.064791034321737,
      "grad_norm": 0.031028415312581603,
      "learning_rate": 0.00017547827949379924,
      "loss": 1.7945,
      "step": 1140
    },
    {
      "epoch": 1.0741302825122578,
      "grad_norm": 0.03313245828751237,
      "learning_rate": 0.00017496919300630403,
      "loss": 1.8139,
      "step": 1150
    },
    {
      "epoch": 1.0834695307027784,
      "grad_norm": 0.03049431724979126,
      "learning_rate": 0.00017445563097644664,
      "loss": 1.8031,
      "step": 1160
    },
    {
      "epoch": 1.0928087788932992,
      "grad_norm": 0.02880548705343715,
      "learning_rate": 0.00017393762406306878,
      "loss": 1.7974,
      "step": 1170
    },
    {
      "epoch": 1.1021480270838198,
      "grad_norm": 0.03200427895977668,
      "learning_rate": 0.00017341520319036469,
      "loss": 1.7994,
      "step": 1180
    },
    {
      "epoch": 1.1114872752743403,
      "grad_norm": 0.031293532815600045,
      "learning_rate": 0.00017288839954603496,
      "loss": 1.8073,
      "step": 1190
    },
    {
      "epoch": 1.1208265234648611,
      "grad_norm": 0.032367211109345505,
      "learning_rate": 0.00017235724457942468,
      "loss": 1.7944,
      "step": 1200
    },
    {
      "epoch": 1.1301657716553817,
      "grad_norm": 0.037783793923191374,
      "learning_rate": 0.0001718217699996462,
      "loss": 1.7948,
      "step": 1210
    },
    {
      "epoch": 1.1395050198459025,
      "grad_norm": 0.02881083117349317,
      "learning_rate": 0.00017128200777368567,
      "loss": 1.8029,
      "step": 1220
    },
    {
      "epoch": 1.148844268036423,
      "grad_norm": 0.032997408408175985,
      "learning_rate": 0.00017073799012449524,
      "loss": 1.7914,
      "step": 1230
    },
    {
      "epoch": 1.1581835162269438,
      "grad_norm": 0.030263615801190885,
      "learning_rate": 0.00017018974952906884,
      "loss": 1.792,
      "step": 1240
    },
    {
      "epoch": 1.1675227644174644,
      "grad_norm": 0.03286310429098484,
      "learning_rate": 0.00016963731871650378,
      "loss": 1.8149,
      "step": 1250
    },
    {
      "epoch": 1.1768620126079852,
      "grad_norm": 0.031111621587597126,
      "learning_rate": 0.00016908073066604663,
      "loss": 1.8092,
      "step": 1260
    },
    {
      "epoch": 1.1862012607985057,
      "grad_norm": 0.03372950172075241,
      "learning_rate": 0.0001685200186051246,
      "loss": 1.818,
      "step": 1270
    },
    {
      "epoch": 1.1955405089890263,
      "grad_norm": 0.040782040436567434,
      "learning_rate": 0.00016795521600736164,
      "loss": 1.7999,
      "step": 1280
    },
    {
      "epoch": 1.204879757179547,
      "grad_norm": 0.03451605168178924,
      "learning_rate": 0.00016738635659058044,
      "loss": 1.7945,
      "step": 1290
    },
    {
      "epoch": 1.2142190053700677,
      "grad_norm": 0.03235681588882673,
      "learning_rate": 0.00016681347431478933,
      "loss": 1.8087,
      "step": 1300
    },
    {
      "epoch": 1.2235582535605884,
      "grad_norm": 0.030750745605971932,
      "learning_rate": 0.00016623660338015487,
      "loss": 1.7995,
      "step": 1310
    },
    {
      "epoch": 1.232897501751109,
      "grad_norm": 0.029444668665577274,
      "learning_rate": 0.00016565577822496042,
      "loss": 1.8025,
      "step": 1320
    },
    {
      "epoch": 1.2422367499416298,
      "grad_norm": 0.038528856709584745,
      "learning_rate": 0.00016507103352354996,
      "loss": 1.7954,
      "step": 1330
    },
    {
      "epoch": 1.2515759981321504,
      "grad_norm": 0.034217088004383035,
      "learning_rate": 0.00016448240418425814,
      "loss": 1.7962,
      "step": 1340
    },
    {
      "epoch": 1.260915246322671,
      "grad_norm": 0.030205405393195585,
      "learning_rate": 0.00016388992534732645,
      "loss": 1.7973,
      "step": 1350
    },
    {
      "epoch": 1.2702544945131917,
      "grad_norm": 0.029082218516562994,
      "learning_rate": 0.00016329363238280528,
      "loss": 1.796,
      "step": 1360
    },
    {
      "epoch": 1.2795937427037123,
      "grad_norm": 0.029003887688766505,
      "learning_rate": 0.00016269356088844238,
      "loss": 1.7946,
      "step": 1370
    },
    {
      "epoch": 1.288932990894233,
      "grad_norm": 0.03341157363649238,
      "learning_rate": 0.00016208974668755779,
      "loss": 1.7972,
      "step": 1380
    },
    {
      "epoch": 1.2982722390847536,
      "grad_norm": 0.030614480844663026,
      "learning_rate": 0.00016148222582690517,
      "loss": 1.7973,
      "step": 1390
    },
    {
      "epoch": 1.3076114872752744,
      "grad_norm": 0.029741346740467405,
      "learning_rate": 0.00016087103457452,
      "loss": 1.8076,
      "step": 1400
    },
    {
      "epoch": 1.316950735465795,
      "grad_norm": 0.029569313554185597,
      "learning_rate": 0.00016025620941755424,
      "loss": 1.8043,
      "step": 1410
    },
    {
      "epoch": 1.3262899836563156,
      "grad_norm": 0.02947637404374054,
      "learning_rate": 0.0001596377870600983,
      "loss": 1.797,
      "step": 1420
    },
    {
      "epoch": 1.3356292318468364,
      "grad_norm": 0.031005062093959545,
      "learning_rate": 0.00015901580442098968,
      "loss": 1.8086,
      "step": 1430
    },
    {
      "epoch": 1.344968480037357,
      "grad_norm": 0.029493792984873927,
      "learning_rate": 0.00015839029863160922,
      "loss": 1.8026,
      "step": 1440
    },
    {
      "epoch": 1.3543077282278777,
      "grad_norm": 0.0288068155951218,
      "learning_rate": 0.0001577613070336641,
      "loss": 1.7951,
      "step": 1450
    },
    {
      "epoch": 1.3636469764183983,
      "grad_norm": 0.03380404824627639,
      "learning_rate": 0.00015712886717695885,
      "loss": 1.7938,
      "step": 1460
    },
    {
      "epoch": 1.372986224608919,
      "grad_norm": 0.039744706189693335,
      "learning_rate": 0.0001564930168171536,
      "loss": 1.8016,
      "step": 1470
    },
    {
      "epoch": 1.3823254727994396,
      "grad_norm": 0.030565530594285437,
      "learning_rate": 0.00015585379391351012,
      "loss": 1.7984,
      "step": 1480
    },
    {
      "epoch": 1.3916647209899602,
      "grad_norm": 0.04009392805554255,
      "learning_rate": 0.00015521123662662567,
      "loss": 1.7999,
      "step": 1490
    },
    {
      "epoch": 1.401003969180481,
      "grad_norm": 0.03516196009586836,
      "learning_rate": 0.000154565383316155,
      "loss": 1.7979,
      "step": 1500
    },
    {
      "epoch": 1.4103432173710018,
      "grad_norm": 0.03534161399054556,
      "learning_rate": 0.0001539162725385202,
      "loss": 1.8057,
      "step": 1510
    },
    {
      "epoch": 1.4196824655615223,
      "grad_norm": 0.028488879438601067,
      "learning_rate": 0.000153263943044609,
      "loss": 1.792,
      "step": 1520
    },
    {
      "epoch": 1.429021713752043,
      "grad_norm": 0.03125154490954804,
      "learning_rate": 0.00015260843377746147,
      "loss": 1.8008,
      "step": 1530
    },
    {
      "epoch": 1.4383609619425637,
      "grad_norm": 0.030194357488801882,
      "learning_rate": 0.00015194978386994507,
      "loss": 1.7948,
      "step": 1540
    },
    {
      "epoch": 1.4477002101330843,
      "grad_norm": 0.03049246845786265,
      "learning_rate": 0.00015128803264241852,
      "loss": 1.7967,
      "step": 1550
    },
    {
      "epoch": 1.4570394583236048,
      "grad_norm": 0.030497211097258083,
      "learning_rate": 0.0001506232196003844,
      "loss": 1.7894,
      "step": 1560
    },
    {
      "epoch": 1.4663787065141256,
      "grad_norm": 0.028748806119737205,
      "learning_rate": 0.00014995538443213094,
      "loss": 1.806,
      "step": 1570
    },
    {
      "epoch": 1.4757179547046464,
      "grad_norm": 0.036423750322912396,
      "learning_rate": 0.00014928456700636237,
      "loss": 1.7995,
      "step": 1580
    },
    {
      "epoch": 1.485057202895167,
      "grad_norm": 0.039101516109204065,
      "learning_rate": 0.00014861080736981906,
      "loss": 1.8028,
      "step": 1590
    },
    {
      "epoch": 1.4943964510856875,
      "grad_norm": 0.031368399541673815,
      "learning_rate": 0.00014793414574488663,
      "loss": 1.8005,
      "step": 1600
    },
    {
      "epoch": 1.5037356992762083,
      "grad_norm": 0.029788484702512056,
      "learning_rate": 0.00014725462252719495,
      "loss": 1.7963,
      "step": 1610
    },
    {
      "epoch": 1.5130749474667289,
      "grad_norm": 0.029719041811636312,
      "learning_rate": 0.00014657227828320635,
      "loss": 1.7957,
      "step": 1620
    },
    {
      "epoch": 1.5224141956572494,
      "grad_norm": 0.02820041575417432,
      "learning_rate": 0.00014588715374779407,
      "loss": 1.7986,
      "step": 1630
    },
    {
      "epoch": 1.5317534438477702,
      "grad_norm": 1.8322544897261024,
      "learning_rate": 0.0001451992898218102,
      "loss": 2.016,
      "step": 1640
    },
    {
      "epoch": 1.541092692038291,
      "grad_norm": 0.8676995793107466,
      "learning_rate": 0.0001445087275696443,
      "loss": 2.1095,
      "step": 1650
    },
    {
      "epoch": 1.5504319402288116,
      "grad_norm": 0.08688193434631736,
      "learning_rate": 0.00014381550821677155,
      "loss": 2.0497,
      "step": 1660
    },
    {
      "epoch": 1.5597711884193322,
      "grad_norm": 0.18321381224589608,
      "learning_rate": 0.0001431196731472921,
      "loss": 1.9023,
      "step": 1670
    },
    {
      "epoch": 1.569110436609853,
      "grad_norm": 0.058214343698110564,
      "learning_rate": 0.00014242126390145998,
      "loss": 1.8566,
      "step": 1680
    },
    {
      "epoch": 1.5784496848003737,
      "grad_norm": 0.03965578147557666,
      "learning_rate": 0.0001417203221732036,
      "loss": 1.8206,
      "step": 1690
    },
    {
      "epoch": 1.587788932990894,
      "grad_norm": 0.03131802880017099,
      "learning_rate": 0.00014101688980763658,
      "loss": 1.8272,
      "step": 1700
    },
    {
      "epoch": 1.5971281811814149,
      "grad_norm": 0.02788722706683908,
      "learning_rate": 0.00014031100879855968,
      "loss": 1.8145,
      "step": 1710
    },
    {
      "epoch": 1.6064674293719357,
      "grad_norm": 0.02781057130092059,
      "learning_rate": 0.00013960272128595372,
      "loss": 1.8122,
      "step": 1720
    },
    {
      "epoch": 1.6158066775624562,
      "grad_norm": 0.032067383734154756,
      "learning_rate": 0.00013889206955346403,
      "loss": 1.8064,
      "step": 1730
    },
    {
      "epoch": 1.6251459257529768,
      "grad_norm": 0.03448807133884858,
      "learning_rate": 0.00013817909602587613,
      "loss": 1.8128,
      "step": 1740
    },
    {
      "epoch": 1.6344851739434976,
      "grad_norm": 0.04127395288271696,
      "learning_rate": 0.00013746384326658305,
      "loss": 1.8041,
      "step": 1750
    },
    {
      "epoch": 1.6438244221340184,
      "grad_norm": 0.030243192787820814,
      "learning_rate": 0.00013674635397504427,
      "loss": 1.803,
      "step": 1760
    },
    {
      "epoch": 1.653163670324539,
      "grad_norm": 0.032183525625428915,
      "learning_rate": 0.00013602667098423687,
      "loss": 1.8092,
      "step": 1770
    },
    {
      "epoch": 1.6625029185150595,
      "grad_norm": 0.030391044676815377,
      "learning_rate": 0.00013530483725809818,
      "loss": 1.8039,
      "step": 1780
    },
    {
      "epoch": 1.6718421667055803,
      "grad_norm": 0.03204756818238517,
      "learning_rate": 0.000134580895888961,
      "loss": 1.8017,
      "step": 1790
    },
    {
      "epoch": 1.6811814148961008,
      "grad_norm": 0.031117112662107062,
      "learning_rate": 0.00013385489009498124,
      "loss": 1.8017,
      "step": 1800
    },
    {
      "epoch": 1.6905206630866214,
      "grad_norm": 0.028389293445401805,
      "learning_rate": 0.00013312686321755761,
      "loss": 1.811,
      "step": 1810
    },
    {
      "epoch": 1.6998599112771422,
      "grad_norm": 4.908185097372493,
      "learning_rate": 0.0001323968587187443,
      "loss": 2.194,
      "step": 1820
    },
    {
      "epoch": 1.709199159467663,
      "grad_norm": 0.2849452041194025,
      "learning_rate": 0.00013166492017865637,
      "loss": 2.0785,
      "step": 1830
    },
    {
      "epoch": 1.7185384076581836,
      "grad_norm": 0.10989252058989733,
      "learning_rate": 0.0001309310912928682,
      "loss": 1.986,
      "step": 1840
    },
    {
      "epoch": 1.7278776558487041,
      "grad_norm": 0.057475612656740484,
      "learning_rate": 0.00013019541586980463,
      "loss": 1.8614,
      "step": 1850
    },
    {
      "epoch": 1.737216904039225,
      "grad_norm": 0.034908181734207726,
      "learning_rate": 0.000129457937828126,
      "loss": 1.8326,
      "step": 1860
    },
    {
      "epoch": 1.7465561522297455,
      "grad_norm": 0.02892836681897248,
      "learning_rate": 0.00012871870119410614,
      "loss": 1.8243,
      "step": 1870
    },
    {
      "epoch": 1.755895400420266,
      "grad_norm": 0.03311636981729384,
      "learning_rate": 0.00012797775009900397,
      "loss": 1.8183,
      "step": 1880
    },
    {
      "epoch": 1.7652346486107868,
      "grad_norm": 0.03151917952358458,
      "learning_rate": 0.00012723512877642904,
      "loss": 1.8034,
      "step": 1890
    },
    {
      "epoch": 1.7745738968013076,
      "grad_norm": 0.028109921832296925,
      "learning_rate": 0.000126490881559701,
      "loss": 1.8129,
      "step": 1900
    },
    {
      "epoch": 1.7839131449918282,
      "grad_norm": 0.030350462454962698,
      "learning_rate": 0.00012574505287920259,
      "loss": 1.8003,
      "step": 1910
    },
    {
      "epoch": 1.7932523931823487,
      "grad_norm": 0.03131380630103849,
      "learning_rate": 0.00012499768725972754,
      "loss": 1.814,
      "step": 1920
    },
    {
      "epoch": 1.8025916413728695,
      "grad_norm": 0.029450198273050322,
      "learning_rate": 0.00012424882931782243,
      "loss": 1.7998,
      "step": 1930
    },
    {
      "epoch": 1.81193088956339,
      "grad_norm": 0.0310524261453681,
      "learning_rate": 0.0001234985237591231,
      "loss": 1.8078,
      "step": 1940
    },
    {
      "epoch": 1.8212701377539107,
      "grad_norm": 0.029362038478982822,
      "learning_rate": 0.00012274681537568585,
      "loss": 1.8014,
      "step": 1950
    },
    {
      "epoch": 1.8306093859444315,
      "grad_norm": 0.027166816460226118,
      "learning_rate": 0.00012199374904331337,
      "loss": 1.8021,
      "step": 1960
    },
    {
      "epoch": 1.8399486341349522,
      "grad_norm": 0.03680509283276228,
      "learning_rate": 0.00012123936971887578,
      "loss": 1.7973,
      "step": 1970
    },
    {
      "epoch": 1.8492878823254728,
      "grad_norm": 0.03135174840346185,
      "learning_rate": 0.0001204837224376267,
      "loss": 1.7874,
      "step": 1980
    },
    {
      "epoch": 1.8586271305159934,
      "grad_norm": 0.02644533256389969,
      "learning_rate": 0.0001197268523105148,
      "loss": 1.798,
      "step": 1990
    },
    {
      "epoch": 1.8679663787065142,
      "grad_norm": 0.02999453651649614,
      "learning_rate": 0.00011896880452149077,
      "loss": 1.7957,
      "step": 2000
    },
    {
      "epoch": 1.877305626897035,
      "grad_norm": 0.026905209700322272,
      "learning_rate": 0.00011820962432480985,
      "loss": 1.793,
      "step": 2010
    },
    {
      "epoch": 1.8866448750875553,
      "grad_norm": 0.027263640323285022,
      "learning_rate": 0.00011744935704233005,
      "loss": 1.7974,
      "step": 2020
    },
    {
      "epoch": 1.895984123278076,
      "grad_norm": 0.030479226063932337,
      "learning_rate": 0.00011668804806080693,
      "loss": 1.7898,
      "step": 2030
    },
    {
      "epoch": 1.9053233714685969,
      "grad_norm": 0.030129902025534238,
      "learning_rate": 0.00011592574282918369,
      "loss": 1.7856,
      "step": 2040
    },
    {
      "epoch": 1.9146626196591174,
      "grad_norm": 0.027884976674153635,
      "learning_rate": 0.00011516248685587814,
      "loss": 1.7858,
      "step": 2050
    },
    {
      "epoch": 1.924001867849638,
      "grad_norm": 0.02925266011156687,
      "learning_rate": 0.00011439832570606586,
      "loss": 1.7876,
      "step": 2060
    },
    {
      "epoch": 1.9333411160401588,
      "grad_norm": 0.028472914828616754,
      "learning_rate": 0.00011363330499895997,
      "loss": 1.7834,
      "step": 2070
    },
    {
      "epoch": 1.9426803642306796,
      "grad_norm": 0.025877740032137875,
      "learning_rate": 0.00011286747040508789,
      "loss": 1.7955,
      "step": 2080
    },
    {
      "epoch": 1.9520196124212001,
      "grad_norm": 0.02605295620697312,
      "learning_rate": 0.0001121008676435648,
      "loss": 1.7877,
      "step": 2090
    },
    {
      "epoch": 1.9613588606117207,
      "grad_norm": 0.026887649929567867,
      "learning_rate": 0.00011133354247936423,
      "loss": 1.773,
      "step": 2100
    },
    {
      "epoch": 1.9706981088022415,
      "grad_norm": 0.027982045915154026,
      "learning_rate": 0.00011056554072058596,
      "loss": 1.7762,
      "step": 2110
    },
    {
      "epoch": 1.980037356992762,
      "grad_norm": 0.028077027059053006,
      "learning_rate": 0.0001097969082157215,
      "loss": 1.7963,
      "step": 2120
    },
    {
      "epoch": 1.9893766051832826,
      "grad_norm": 0.027877078975954036,
      "learning_rate": 0.00010902769085091686,
      "loss": 1.7787,
      "step": 2130
    },
    {
      "epoch": 1.9987158533738034,
      "grad_norm": 0.026120077388738373,
      "learning_rate": 0.00010825793454723325,
      "loss": 1.7842,
      "step": 2140
    },
    {
      "epoch": 2.0081718421667056,
      "grad_norm": 0.044810283079268924,
      "learning_rate": 0.00010748768525790569,
      "loss": 1.6591,
      "step": 2150
    },
    {
      "epoch": 2.0175110903572264,
      "grad_norm": 0.03431848280739808,
      "learning_rate": 0.00010671698896559968,
      "loss": 1.5599,
      "step": 2160
    },
    {
      "epoch": 2.0268503385477468,
      "grad_norm": 0.04234332973849956,
      "learning_rate": 0.00010594589167966606,
      "loss": 1.5494,
      "step": 2170
    },
    {
      "epoch": 2.0361895867382676,
      "grad_norm": 0.03260321438171042,
      "learning_rate": 0.00010517443943339438,
      "loss": 1.5473,
      "step": 2180
    },
    {
      "epoch": 2.0455288349287883,
      "grad_norm": 0.034116901745609114,
      "learning_rate": 0.00010440267828126478,
      "loss": 1.5464,
      "step": 2190
    },
    {
      "epoch": 2.0548680831193087,
      "grad_norm": 0.030992757239375807,
      "learning_rate": 0.00010363065429619858,
      "loss": 1.5514,
      "step": 2200
    },
    {
      "epoch": 2.0642073313098295,
      "grad_norm": 0.03365516197786113,
      "learning_rate": 0.0001028584135668077,
      "loss": 1.5493,
      "step": 2210
    },
    {
      "epoch": 2.0735465795003503,
      "grad_norm": 0.033293307482261586,
      "learning_rate": 0.00010208600219464355,
      "loss": 1.5426,
      "step": 2220
    },
    {
      "epoch": 2.082885827690871,
      "grad_norm": 0.03653097737467338,
      "learning_rate": 0.00010131346629144451,
      "loss": 1.5471,
      "step": 2230
    },
    {
      "epoch": 2.0922250758813914,
      "grad_norm": 0.03390291511697895,
      "learning_rate": 0.0001005408519763833,
      "loss": 1.5568,
      "step": 2240
    },
    {
      "epoch": 2.101564324071912,
      "grad_norm": 0.03192694661852283,
      "learning_rate": 9.976820537331374e-05,
      "loss": 1.5452,
      "step": 2250
    },
    {
      "epoch": 2.110903572262433,
      "grad_norm": 0.03561740193515691,
      "learning_rate": 9.899557260801707e-05,
      "loss": 1.546,
      "step": 2260
    },
    {
      "epoch": 2.1202428204529538,
      "grad_norm": 0.029803732953068658,
      "learning_rate": 9.822299980544862e-05,
      "loss": 1.5533,
      "step": 2270
    },
    {
      "epoch": 2.129582068643474,
      "grad_norm": 0.031232417271289125,
      "learning_rate": 9.745053308698392e-05,
      "loss": 1.5469,
      "step": 2280
    },
    {
      "epoch": 2.138921316833995,
      "grad_norm": 0.032434793780181034,
      "learning_rate": 9.667821856766548e-05,
      "loss": 1.5514,
      "step": 2290
    },
    {
      "epoch": 2.1482605650245157,
      "grad_norm": 0.03579370906405582,
      "learning_rate": 9.590610235344972e-05,
      "loss": 1.5577,
      "step": 2300
    },
    {
      "epoch": 2.157599813215036,
      "grad_norm": 0.029662202478648328,
      "learning_rate": 9.51342305384546e-05,
      "loss": 1.5543,
      "step": 2310
    },
    {
      "epoch": 2.166939061405557,
      "grad_norm": 0.03178715913934592,
      "learning_rate": 9.436264920220781e-05,
      "loss": 1.5579,
      "step": 2320
    },
    {
      "epoch": 2.1762783095960776,
      "grad_norm": 0.03384008887051677,
      "learning_rate": 9.359140440689601e-05,
      "loss": 1.5595,
      "step": 2330
    },
    {
      "epoch": 2.1856175577865984,
      "grad_norm": 0.03316450664408166,
      "learning_rate": 9.282054219461475e-05,
      "loss": 1.5556,
      "step": 2340
    },
    {
      "epoch": 2.1949568059771187,
      "grad_norm": 0.032176305552558876,
      "learning_rate": 9.205010858462007e-05,
      "loss": 1.5638,
      "step": 2350
    },
    {
      "epoch": 2.2042960541676395,
      "grad_norm": 0.031134335256756362,
      "learning_rate": 9.128014957058109e-05,
      "loss": 1.5629,
      "step": 2360
    },
    {
      "epoch": 2.2136353023581603,
      "grad_norm": 0.032205851810441756,
      "learning_rate": 9.051071111783436e-05,
      "loss": 1.5613,
      "step": 2370
    },
    {
      "epoch": 2.2229745505486807,
      "grad_norm": 0.029048245476020442,
      "learning_rate": 8.974183916063968e-05,
      "loss": 1.5594,
      "step": 2380
    },
    {
      "epoch": 2.2323137987392014,
      "grad_norm": 0.03183166054573621,
      "learning_rate": 8.897357959943795e-05,
      "loss": 1.5606,
      "step": 2390
    },
    {
      "epoch": 2.2416530469297222,
      "grad_norm": 0.032309922117136916,
      "learning_rate": 8.820597829811109e-05,
      "loss": 1.5524,
      "step": 2400
    },
    {
      "epoch": 2.250992295120243,
      "grad_norm": 0.03598922231958808,
      "learning_rate": 8.743908108124388e-05,
      "loss": 1.5604,
      "step": 2410
    },
    {
      "epoch": 2.2603315433107634,
      "grad_norm": 0.03046424735786346,
      "learning_rate": 8.667293373138835e-05,
      "loss": 1.5598,
      "step": 2420
    },
    {
      "epoch": 2.269670791501284,
      "grad_norm": 0.030995453538377543,
      "learning_rate": 8.59075819863307e-05,
      "loss": 1.5652,
      "step": 2430
    },
    {
      "epoch": 2.279010039691805,
      "grad_norm": 0.029309020623010097,
      "learning_rate": 8.514307153636077e-05,
      "loss": 1.5651,
      "step": 2440
    },
    {
      "epoch": 2.2883492878823253,
      "grad_norm": 0.03158721106736763,
      "learning_rate": 8.437944802154434e-05,
      "loss": 1.5581,
      "step": 2450
    },
    {
      "epoch": 2.297688536072846,
      "grad_norm": 0.03168229084049938,
      "learning_rate": 8.361675702899871e-05,
      "loss": 1.5671,
      "step": 2460
    },
    {
      "epoch": 2.307027784263367,
      "grad_norm": 0.031335222148495136,
      "learning_rate": 8.2855044090171e-05,
      "loss": 1.5675,
      "step": 2470
    },
    {
      "epoch": 2.3163670324538876,
      "grad_norm": 0.031073847325941303,
      "learning_rate": 8.209435467811998e-05,
      "loss": 1.5624,
      "step": 2480
    },
    {
      "epoch": 2.325706280644408,
      "grad_norm": 0.030099100631045844,
      "learning_rate": 8.133473420480161e-05,
      "loss": 1.5606,
      "step": 2490
    },
    {
      "epoch": 2.3350455288349288,
      "grad_norm": 0.034147632950361176,
      "learning_rate": 8.057622801835788e-05,
      "loss": 1.5703,
      "step": 2500
    },
    {
      "epoch": 2.3443847770254496,
      "grad_norm": 0.03051580784550685,
      "learning_rate": 7.981888140040955e-05,
      "loss": 1.5731,
      "step": 2510
    },
    {
      "epoch": 2.3537240252159704,
      "grad_norm": 0.03068917065832597,
      "learning_rate": 7.9062739563353e-05,
      "loss": 1.5723,
      "step": 2520
    },
    {
      "epoch": 2.3630632734064907,
      "grad_norm": 0.02899547641705554,
      "learning_rate": 7.830784764766118e-05,
      "loss": 1.5691,
      "step": 2530
    },
    {
      "epoch": 2.3724025215970115,
      "grad_norm": 0.030965383166701443,
      "learning_rate": 7.755425071918858e-05,
      "loss": 1.5627,
      "step": 2540
    },
    {
      "epoch": 2.3817417697875323,
      "grad_norm": 0.03252440018336625,
      "learning_rate": 7.680199376648108e-05,
      "loss": 1.5536,
      "step": 2550
    },
    {
      "epoch": 2.3910810179780526,
      "grad_norm": 0.031720485449340044,
      "learning_rate": 7.605112169809008e-05,
      "loss": 1.5617,
      "step": 2560
    },
    {
      "epoch": 2.4004202661685734,
      "grad_norm": 0.031796658969132544,
      "learning_rate": 7.530167933989161e-05,
      "loss": 1.5595,
      "step": 2570
    },
    {
      "epoch": 2.409759514359094,
      "grad_norm": 0.03218288097429844,
      "learning_rate": 7.45537114324102e-05,
      "loss": 1.5628,
      "step": 2580
    },
    {
      "epoch": 2.4190987625496145,
      "grad_norm": 0.0305713183075559,
      "learning_rate": 7.380726262814814e-05,
      "loss": 1.5717,
      "step": 2590
    },
    {
      "epoch": 2.4284380107401353,
      "grad_norm": 0.06879342166341705,
      "learning_rate": 7.30623774889195e-05,
      "loss": 1.5726,
      "step": 2600
    },
    {
      "epoch": 2.437777258930656,
      "grad_norm": 0.04101428600237338,
      "learning_rate": 7.231910048319011e-05,
      "loss": 1.5679,
      "step": 2610
    },
    {
      "epoch": 2.447116507121177,
      "grad_norm": 0.031060002638443395,
      "learning_rate": 7.157747598342274e-05,
      "loss": 1.562,
      "step": 2620
    },
    {
      "epoch": 2.4564557553116972,
      "grad_norm": 0.032302829437386466,
      "learning_rate": 7.083754826342816e-05,
      "loss": 1.5767,
      "step": 2630
    },
    {
      "epoch": 2.465795003502218,
      "grad_norm": 0.03111462744196413,
      "learning_rate": 7.009936149572205e-05,
      "loss": 1.5672,
      "step": 2640
    },
    {
      "epoch": 2.475134251692739,
      "grad_norm": 0.031134083521743777,
      "learning_rate": 6.936295974888807e-05,
      "loss": 1.5665,
      "step": 2650
    },
    {
      "epoch": 2.4844734998832596,
      "grad_norm": 0.030961556373721985,
      "learning_rate": 6.862838698494693e-05,
      "loss": 1.5608,
      "step": 2660
    },
    {
      "epoch": 2.49381274807378,
      "grad_norm": 0.03168121700432082,
      "learning_rate": 6.789568705673183e-05,
      "loss": 1.566,
      "step": 2670
    },
    {
      "epoch": 2.5031519962643007,
      "grad_norm": 0.030850372541726772,
      "learning_rate": 6.716490370527081e-05,
      "loss": 1.5651,
      "step": 2680
    },
    {
      "epoch": 2.5124912444548215,
      "grad_norm": 0.03076635908430861,
      "learning_rate": 6.643608055717519e-05,
      "loss": 1.5596,
      "step": 2690
    },
    {
      "epoch": 2.521830492645342,
      "grad_norm": 0.031897253741779714,
      "learning_rate": 6.570926112203528e-05,
      "loss": 1.5716,
      "step": 2700
    },
    {
      "epoch": 2.5311697408358627,
      "grad_norm": 0.03085546721246857,
      "learning_rate": 6.498448878982291e-05,
      "loss": 1.5647,
      "step": 2710
    },
    {
      "epoch": 2.5405089890263834,
      "grad_norm": 0.03127518794548787,
      "learning_rate": 6.426180682830107e-05,
      "loss": 1.5573,
      "step": 2720
    },
    {
      "epoch": 2.549848237216904,
      "grad_norm": 0.03196649247686066,
      "learning_rate": 6.354125838044098e-05,
      "loss": 1.5597,
      "step": 2730
    },
    {
      "epoch": 2.5591874854074246,
      "grad_norm": 0.030359755432035333,
      "learning_rate": 6.282288646184638e-05,
      "loss": 1.5625,
      "step": 2740
    },
    {
      "epoch": 2.5685267335979454,
      "grad_norm": 0.03030640438940187,
      "learning_rate": 6.210673395818571e-05,
      "loss": 1.5717,
      "step": 2750
    },
    {
      "epoch": 2.577865981788466,
      "grad_norm": 0.032197470232298186,
      "learning_rate": 6.139284362263185e-05,
      "loss": 1.5663,
      "step": 2760
    },
    {
      "epoch": 2.587205229978987,
      "grad_norm": 0.030983733397891462,
      "learning_rate": 6.0681258073309756e-05,
      "loss": 1.5657,
      "step": 2770
    },
    {
      "epoch": 2.5965444781695073,
      "grad_norm": 0.030427577702286164,
      "learning_rate": 5.9972019790752385e-05,
      "loss": 1.5708,
      "step": 2780
    },
    {
      "epoch": 2.605883726360028,
      "grad_norm": 0.032761226318855745,
      "learning_rate": 5.9265171115364495e-05,
      "loss": 1.5641,
      "step": 2790
    },
    {
      "epoch": 2.615222974550549,
      "grad_norm": 0.0317533648622182,
      "learning_rate": 5.856075424489511e-05,
      "loss": 1.5613,
      "step": 2800
    }
  ],
  "logging_steps": 10,
  "max_steps": 4280,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.267163894821054e+20,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
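
A note for readers, appended after the JSON: the object above is the checkpoint state that the Hugging Face `transformers` Trainer writes out (by default as `trainer_state.json`), with one `log_history` record per `logging_steps` (10) optimizer steps. The sketch below shows one way to load it and flag the loss spikes visible in the log (e.g. around steps 250-310, 1640-1680, and 1820-1850); the file name and the 10% spike threshold are illustrative assumptions, not part of the checkpoint itself.

# A minimal sketch, standard library only; assumes the state was saved
# under the Trainer's default name "trainer_state.json".
import json

with open("trainer_state.json") as f:
    state = json.load(f)

records = state["log_history"]
print(f"{len(records)} entries, steps {records[0]['step']}..{records[-1]['step']}")

# Flag entries whose loss exceeds the previously logged loss by more than 10%
# (an assumed threshold), together with the gradient norm at that step.
for prev, cur in zip(records, records[1:]):
    if cur["loss"] > 1.1 * prev["loss"]:
        print(f"step {cur['step']:>5}: loss {prev['loss']:.4f} -> {cur['loss']:.4f}, "
              f"grad_norm {cur['grad_norm']:.3f}")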