Training in progress, step 3000, checkpoint
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.4752098843655948,
"eval_steps": 500,
"global_step": 3000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.001584032947885316,
"grad_norm": 0.1950516402721405,
"learning_rate": 3e-06,
"loss": 1.8238,
"step": 10
},
{
"epoch": 0.003168065895770632,
"grad_norm": 0.26157480478286743,
"learning_rate": 6.333333333333334e-06,
"loss": 1.6558,
"step": 20
},
{
"epoch": 0.004752098843655948,
"grad_norm": 0.2209806740283966,
"learning_rate": 9.666666666666667e-06,
"loss": 1.6879,
"step": 30
},
{
"epoch": 0.006336131791541264,
"grad_norm": 0.2419026494026184,
"learning_rate": 1.3000000000000001e-05,
"loss": 1.6009,
"step": 40
},
{
"epoch": 0.00792016473942658,
"grad_norm": 0.3198487162590027,
"learning_rate": 1.6333333333333335e-05,
"loss": 1.5071,
"step": 50
},
{
"epoch": 0.009504197687311896,
"grad_norm": 0.19524769484996796,
"learning_rate": 1.9666666666666666e-05,
"loss": 1.4839,
"step": 60
},
{
"epoch": 0.011088230635197212,
"grad_norm": 0.25131815671920776,
"learning_rate": 2.3000000000000003e-05,
"loss": 1.5845,
"step": 70
},
{
"epoch": 0.012672263583082528,
"grad_norm": 0.22962462902069092,
"learning_rate": 2.633333333333333e-05,
"loss": 1.4563,
"step": 80
},
{
"epoch": 0.014256296530967844,
"grad_norm": 0.2426823377609253,
"learning_rate": 2.9666666666666672e-05,
"loss": 1.3567,
"step": 90
},
{
"epoch": 0.01584032947885316,
"grad_norm": 0.2379242181777954,
"learning_rate": 3.3e-05,
"loss": 1.4356,
"step": 100
},
{
"epoch": 0.017424362426738477,
"grad_norm": 0.26871052384376526,
"learning_rate": 3.633333333333333e-05,
"loss": 1.4511,
"step": 110
},
{
"epoch": 0.01900839537462379,
"grad_norm": 0.2720854580402374,
"learning_rate": 3.966666666666667e-05,
"loss": 1.4477,
"step": 120
},
{
"epoch": 0.02059242832250911,
"grad_norm": 0.253899484872818,
"learning_rate": 4.3e-05,
"loss": 1.3941,
"step": 130
},
{
"epoch": 0.022176461270394424,
"grad_norm": 0.27712586522102356,
"learning_rate": 4.633333333333333e-05,
"loss": 1.3824,
"step": 140
},
{
"epoch": 0.02376049421827974,
"grad_norm": 0.2928788959980011,
"learning_rate": 4.966666666666667e-05,
"loss": 1.3998,
"step": 150
},
{
"epoch": 0.025344527166165056,
"grad_norm": 0.24572506546974182,
"learning_rate": 5.300000000000001e-05,
"loss": 1.4438,
"step": 160
},
{
"epoch": 0.02692856011405037,
"grad_norm": 0.23786427080631256,
"learning_rate": 5.633333333333334e-05,
"loss": 1.4539,
"step": 170
},
{
"epoch": 0.02851259306193569,
"grad_norm": 0.2809438705444336,
"learning_rate": 5.966666666666667e-05,
"loss": 1.4591,
"step": 180
},
{
"epoch": 0.030096626009821003,
"grad_norm": 0.23395936191082,
"learning_rate": 6.3e-05,
"loss": 1.3964,
"step": 190
},
{
"epoch": 0.03168065895770632,
"grad_norm": 0.22326090931892395,
"learning_rate": 6.633333333333334e-05,
"loss": 1.414,
"step": 200
},
{
"epoch": 0.033264691905591635,
"grad_norm": 0.24155190587043762,
"learning_rate": 6.966666666666668e-05,
"loss": 1.428,
"step": 210
},
{
"epoch": 0.03484872485347695,
"grad_norm": 0.30839428305625916,
"learning_rate": 7.3e-05,
"loss": 1.4577,
"step": 220
},
{
"epoch": 0.03643275780136227,
"grad_norm": 0.3329930603504181,
"learning_rate": 7.633333333333334e-05,
"loss": 1.4346,
"step": 230
},
{
"epoch": 0.03801679074924758,
"grad_norm": 0.38281047344207764,
"learning_rate": 7.966666666666666e-05,
"loss": 1.3822,
"step": 240
},
{
"epoch": 0.0396008236971329,
"grad_norm": 0.35989031195640564,
"learning_rate": 8.3e-05,
"loss": 1.3283,
"step": 250
},
{
"epoch": 0.04118485664501822,
"grad_norm": 0.29035285115242004,
"learning_rate": 8.633333333333334e-05,
"loss": 1.4289,
"step": 260
},
{
"epoch": 0.04276888959290353,
"grad_norm": 0.4185609519481659,
"learning_rate": 8.966666666666666e-05,
"loss": 1.3676,
"step": 270
},
{
"epoch": 0.04435292254078885,
"grad_norm": 0.3571210205554962,
"learning_rate": 9.300000000000001e-05,
"loss": 1.4014,
"step": 280
},
{
"epoch": 0.045936955488674165,
"grad_norm": 0.3920586407184601,
"learning_rate": 9.633333333333335e-05,
"loss": 1.4603,
"step": 290
},
{
"epoch": 0.04752098843655948,
"grad_norm": 0.40256696939468384,
"learning_rate": 9.966666666666667e-05,
"loss": 1.3409,
"step": 300
},
{
"epoch": 0.049105021384444794,
"grad_norm": 0.3628294765949249,
"learning_rate": 9.999725846827562e-05,
"loss": 1.3767,
"step": 310
},
{
"epoch": 0.05068905433233011,
"grad_norm": 0.4101487398147583,
"learning_rate": 9.998778195446311e-05,
"loss": 1.4544,
"step": 320
},
{
"epoch": 0.05227308728021543,
"grad_norm": 0.40901538729667664,
"learning_rate": 9.997153789515461e-05,
"loss": 1.406,
"step": 330
},
{
"epoch": 0.05385712022810074,
"grad_norm": 0.40885481238365173,
"learning_rate": 9.994852848953574e-05,
"loss": 1.4384,
"step": 340
},
{
"epoch": 0.05544115317598606,
"grad_norm": 0.433713436126709,
"learning_rate": 9.991875685271168e-05,
"loss": 1.4379,
"step": 350
},
{
"epoch": 0.05702518612387138,
"grad_norm": 0.41924989223480225,
"learning_rate": 9.988222701528547e-05,
"loss": 1.3935,
"step": 360
},
{
"epoch": 0.058609219071756695,
"grad_norm": 0.48709481954574585,
"learning_rate": 9.983894392281237e-05,
"loss": 1.3913,
"step": 370
},
{
"epoch": 0.060193252019642006,
"grad_norm": 0.44844549894332886,
"learning_rate": 9.978891343513023e-05,
"loss": 1.3975,
"step": 380
},
{
"epoch": 0.061777284967527324,
"grad_norm": 0.510023295879364,
"learning_rate": 9.973214232556622e-05,
"loss": 1.3778,
"step": 390
},
{
"epoch": 0.06336131791541263,
"grad_norm": 0.5296265482902527,
"learning_rate": 9.966863828001982e-05,
"loss": 1.4633,
"step": 400
},
{
"epoch": 0.06494535086329796,
"grad_norm": 0.5563903450965881,
"learning_rate": 9.959840989592226e-05,
"loss": 1.4098,
"step": 410
},
{
"epoch": 0.06652938381118327,
"grad_norm": 0.6422920227050781,
"learning_rate": 9.952146668107254e-05,
"loss": 1.3916,
"step": 420
},
{
"epoch": 0.06811341675906858,
"grad_norm": 0.5075757503509521,
"learning_rate": 9.94378190523503e-05,
"loss": 1.4111,
"step": 430
},
{
"epoch": 0.0696974497069539,
"grad_norm": 0.6157119274139404,
"learning_rate": 9.934747833430547e-05,
"loss": 1.4315,
"step": 440
},
{
"epoch": 0.07128148265483922,
"grad_norm": 0.5845485925674438,
"learning_rate": 9.925045675762514e-05,
"loss": 1.3969,
"step": 450
},
{
"epoch": 0.07286551560272454,
"grad_norm": 0.49031880497932434,
"learning_rate": 9.914676745747772e-05,
"loss": 1.3132,
"step": 460
},
{
"epoch": 0.07444954855060985,
"grad_norm": 0.643332302570343,
"learning_rate": 9.903642447173465e-05,
"loss": 1.4596,
"step": 470
},
{
"epoch": 0.07603358149849516,
"grad_norm": 0.604245662689209,
"learning_rate": 9.891944273906986e-05,
"loss": 1.4994,
"step": 480
},
{
"epoch": 0.07761761444638049,
"grad_norm": 0.4713222086429596,
"learning_rate": 9.879583809693738e-05,
"loss": 1.372,
"step": 490
},
{
"epoch": 0.0792016473942658,
"grad_norm": 0.47081106901168823,
"learning_rate": 9.866562727942714e-05,
"loss": 1.5145,
"step": 500
},
{
"epoch": 0.08078568034215111,
"grad_norm": 0.6371116638183594,
"learning_rate": 9.85288279149995e-05,
"loss": 1.4835,
"step": 510
},
{
"epoch": 0.08236971329003644,
"grad_norm": 0.6258746981620789,
"learning_rate": 9.838545852409857e-05,
"loss": 1.4214,
"step": 520
},
{
"epoch": 0.08395374623792175,
"grad_norm": 0.7464697360992432,
"learning_rate": 9.823553851664489e-05,
"loss": 1.4559,
"step": 530
},
{
"epoch": 0.08553777918580706,
"grad_norm": 0.5535822510719299,
"learning_rate": 9.807908818940761e-05,
"loss": 1.4096,
"step": 540
},
{
"epoch": 0.08712181213369238,
"grad_norm": 0.5659494400024414,
"learning_rate": 9.791612872325667e-05,
"loss": 1.4298,
"step": 550
},
{
"epoch": 0.0887058450815777,
"grad_norm": 0.5127139091491699,
"learning_rate": 9.77466821802952e-05,
"loss": 1.339,
"step": 560
},
{
"epoch": 0.09028987802946302,
"grad_norm": 0.5496402382850647,
"learning_rate": 9.75707715008727e-05,
"loss": 1.4232,
"step": 570
},
{
"epoch": 0.09187391097734833,
"grad_norm": 0.6117046475410461,
"learning_rate": 9.73884205004793e-05,
"loss": 1.4693,
"step": 580
},
{
"epoch": 0.09345794392523364,
"grad_norm": 0.5658081769943237,
"learning_rate": 9.719965386652141e-05,
"loss": 1.3002,
"step": 590
},
{
"epoch": 0.09504197687311897,
"grad_norm": 0.7319624423980713,
"learning_rate": 9.700449715497961e-05,
"loss": 1.5359,
"step": 600
},
{
"epoch": 0.09662600982100428,
"grad_norm": 0.7383710741996765,
"learning_rate": 9.680297678694867e-05,
"loss": 1.5258,
"step": 610
},
{
"epoch": 0.09821004276888959,
"grad_norm": 0.6048529148101807,
"learning_rate": 9.659512004506057e-05,
"loss": 1.3593,
"step": 620
},
{
"epoch": 0.09979407571677491,
"grad_norm": 0.6163527369499207,
"learning_rate": 9.63809550697909e-05,
"loss": 1.3932,
"step": 630
},
{
"epoch": 0.10137810866466022,
"grad_norm": 0.6468575596809387,
"learning_rate": 9.616051085564906e-05,
"loss": 1.4886,
"step": 640
},
{
"epoch": 0.10296214161254554,
"grad_norm": 0.642622709274292,
"learning_rate": 9.593381724725285e-05,
"loss": 1.412,
"step": 650
},
{
"epoch": 0.10454617456043086,
"grad_norm": 0.6709442138671875,
"learning_rate": 9.570090493528809e-05,
"loss": 1.4161,
"step": 660
},
{
"epoch": 0.10613020750831617,
"grad_norm": 0.6280019283294678,
"learning_rate": 9.546180545235344e-05,
"loss": 1.4344,
"step": 670
},
{
"epoch": 0.10771424045620148,
"grad_norm": 0.5947321057319641,
"learning_rate": 9.52165511686915e-05,
"loss": 1.3838,
"step": 680
},
{
"epoch": 0.10929827340408681,
"grad_norm": 0.7332488894462585,
"learning_rate": 9.496517528780637e-05,
"loss": 1.3477,
"step": 690
},
{
"epoch": 0.11088230635197212,
"grad_norm": 0.6739678978919983,
"learning_rate": 9.47077118419684e-05,
"loss": 1.487,
"step": 700
},
{
"epoch": 0.11246633929985744,
"grad_norm": 0.6523484587669373,
"learning_rate": 9.444419568760684e-05,
"loss": 1.484,
"step": 710
},
{
"epoch": 0.11405037224774275,
"grad_norm": 0.6200110912322998,
"learning_rate": 9.417466250059073e-05,
"loss": 1.3793,
"step": 720
},
{
"epoch": 0.11563440519562806,
"grad_norm": 0.6055252552032471,
"learning_rate": 9.389914877139903e-05,
"loss": 1.3878,
"step": 730
},
{
"epoch": 0.11721843814351339,
"grad_norm": 0.620250940322876,
"learning_rate": 9.361769180018038e-05,
"loss": 1.3316,
"step": 740
},
{
"epoch": 0.1188024710913987,
"grad_norm": 0.590551495552063,
"learning_rate": 9.333032969170326e-05,
"loss": 1.3479,
"step": 750
},
{
"epoch": 0.12038650403928401,
"grad_norm": 0.6573076844215393,
"learning_rate": 9.30371013501972e-05,
"loss": 1.3434,
"step": 760
},
{
"epoch": 0.12197053698716934,
"grad_norm": 0.6856533288955688,
"learning_rate": 9.273804647408575e-05,
"loss": 1.3815,
"step": 770
},
{
"epoch": 0.12355456993505465,
"grad_norm": 0.6879425644874573,
"learning_rate": 9.243320555061205e-05,
"loss": 1.3747,
"step": 780
},
{
"epoch": 0.12513860288293996,
"grad_norm": 0.5395861864089966,
"learning_rate": 9.212261985035739e-05,
"loss": 1.4633,
"step": 790
},
{
"epoch": 0.12672263583082527,
"grad_norm": 0.66850346326828,
"learning_rate": 9.180633142165384e-05,
"loss": 1.4978,
"step": 800
},
{
"epoch": 0.1283066687787106,
"grad_norm": 0.6203956007957458,
"learning_rate": 9.148438308489168e-05,
"loss": 1.3428,
"step": 810
},
{
"epoch": 0.12989070172659592,
"grad_norm": 0.8913874626159668,
"learning_rate": 9.11568184267221e-05,
"loss": 1.4052,
"step": 820
},
{
"epoch": 0.13147473467448123,
"grad_norm": 0.745405375957489,
"learning_rate": 9.082368179415632e-05,
"loss": 1.3781,
"step": 830
},
{
"epoch": 0.13305876762236654,
"grad_norm": 0.7052398324012756,
"learning_rate": 9.04850182885617e-05,
"loss": 1.378,
"step": 840
},
{
"epoch": 0.13464280057025185,
"grad_norm": 0.7111234664916992,
"learning_rate": 9.014087375955573e-05,
"loss": 1.4304,
"step": 850
},
{
"epoch": 0.13622683351813716,
"grad_norm": 0.620119571685791,
"learning_rate": 8.979129479879873e-05,
"loss": 1.3285,
"step": 860
},
{
"epoch": 0.1378108664660225,
"grad_norm": 0.7514825463294983,
"learning_rate": 8.943632873368611e-05,
"loss": 1.3782,
"step": 870
},
{
"epoch": 0.1393948994139078,
"grad_norm": 0.6254695057868958,
"learning_rate": 8.907602362094094e-05,
"loss": 1.4062,
"step": 880
},
{
"epoch": 0.14097893236179312,
"grad_norm": 0.6469830870628357,
"learning_rate": 8.871042824010791e-05,
"loss": 1.2769,
"step": 890
},
{
"epoch": 0.14256296530967844,
"grad_norm": 0.7647883296012878,
"learning_rate": 8.833959208694929e-05,
"loss": 1.3646,
"step": 900
},
{
"epoch": 0.14414699825756375,
"grad_norm": 0.6833468675613403,
"learning_rate": 8.796356536674403e-05,
"loss": 1.3508,
"step": 910
},
{
"epoch": 0.14573103120544909,
"grad_norm": 0.6526575088500977,
"learning_rate": 8.758239898749085e-05,
"loss": 1.341,
"step": 920
},
{
"epoch": 0.1473150641533344,
"grad_norm": 0.6929855346679688,
"learning_rate": 8.719614455301593e-05,
"loss": 1.3153,
"step": 930
},
{
"epoch": 0.1488990971012197,
"grad_norm": 0.7150231599807739,
"learning_rate": 8.680485435598673e-05,
"loss": 1.3874,
"step": 940
},
{
"epoch": 0.15048313004910502,
"grad_norm": 0.7030635476112366,
"learning_rate": 8.640858137083232e-05,
"loss": 1.4539,
"step": 950
},
{
"epoch": 0.15206716299699033,
"grad_norm": 0.8606137037277222,
"learning_rate": 8.600737924657156e-05,
"loss": 1.3539,
"step": 960
},
{
"epoch": 0.15365119594487564,
"grad_norm": 0.6643692851066589,
"learning_rate": 8.560130229954984e-05,
"loss": 1.3639,
"step": 970
},
{
"epoch": 0.15523522889276098,
"grad_norm": 0.7612583637237549,
"learning_rate": 8.519040550608546e-05,
"loss": 1.4779,
"step": 980
},
{
"epoch": 0.1568192618406463,
"grad_norm": 0.7252205610275269,
"learning_rate": 8.477474449502682e-05,
"loss": 1.2725,
"step": 990
},
{
"epoch": 0.1584032947885316,
"grad_norm": 0.8104795217514038,
"learning_rate": 8.435437554022115e-05,
"loss": 1.3696,
"step": 1000
},
{
"epoch": 0.1599873277364169,
"grad_norm": 0.7129403948783875,
"learning_rate": 8.392935555289584e-05,
"loss": 1.3503,
"step": 1010
},
{
"epoch": 0.16157136068430222,
"grad_norm": 0.6620548963546753,
"learning_rate": 8.349974207395366e-05,
"loss": 1.4173,
"step": 1020
},
{
"epoch": 0.16315539363218756,
"grad_norm": 0.7386293411254883,
"learning_rate": 8.306559326618259e-05,
"loss": 1.3785,
"step": 1030
},
{
"epoch": 0.16473942658007287,
"grad_norm": 0.6005551218986511,
"learning_rate": 8.26269679063816e-05,
"loss": 1.4535,
"step": 1040
},
{
"epoch": 0.16632345952795818,
"grad_norm": 0.6504058241844177,
"learning_rate": 8.218392537740305e-05,
"loss": 1.3744,
"step": 1050
},
{
"epoch": 0.1679074924758435,
"grad_norm": 0.6573604345321655,
"learning_rate": 8.173652566011338e-05,
"loss": 1.4504,
"step": 1060
},
{
"epoch": 0.1694915254237288,
"grad_norm": 0.8271021246910095,
"learning_rate": 8.128482932527255e-05,
"loss": 1.3817,
"step": 1070
},
{
"epoch": 0.17107555837161412,
"grad_norm": 0.7833305597305298,
"learning_rate": 8.082889752533375e-05,
"loss": 1.3311,
"step": 1080
},
{
"epoch": 0.17265959131949946,
"grad_norm": 0.7202659845352173,
"learning_rate": 8.036879198616434e-05,
"loss": 1.3852,
"step": 1090
},
{
"epoch": 0.17424362426738477,
"grad_norm": 0.8033239841461182,
"learning_rate": 7.990457499868919e-05,
"loss": 1.3402,
"step": 1100
},
{
"epoch": 0.17582765721527008,
"grad_norm": 0.6961438059806824,
"learning_rate": 7.943630941045744e-05,
"loss": 1.4112,
"step": 1110
},
{
"epoch": 0.1774116901631554,
"grad_norm": 0.6666472554206848,
"learning_rate": 7.896405861713394e-05,
"loss": 1.4063,
"step": 1120
},
{
"epoch": 0.1789957231110407,
"grad_norm": 0.7580092549324036,
"learning_rate": 7.848788655391658e-05,
"loss": 1.359,
"step": 1130
},
{
"epoch": 0.18057975605892604,
"grad_norm": 0.9506746530532837,
"learning_rate": 7.800785768688035e-05,
"loss": 1.3162,
"step": 1140
},
{
"epoch": 0.18216378900681135,
"grad_norm": 0.7477443218231201,
"learning_rate": 7.752403700424979e-05,
"loss": 1.46,
"step": 1150
},
{
"epoch": 0.18374782195469666,
"grad_norm": 0.7236498594284058,
"learning_rate": 7.703649000760053e-05,
"loss": 1.3986,
"step": 1160
},
{
"epoch": 0.18533185490258197,
"grad_norm": 0.6435336470603943,
"learning_rate": 7.654528270299154e-05,
"loss": 1.4204,
"step": 1170
},
{
"epoch": 0.18691588785046728,
"grad_norm": 0.6321675181388855,
"learning_rate": 7.605048159202883e-05,
"loss": 1.2533,
"step": 1180
},
{
"epoch": 0.1884999207983526,
"grad_norm": 0.6827548742294312,
"learning_rate": 7.555215366286227e-05,
"loss": 1.4564,
"step": 1190
},
{
"epoch": 0.19008395374623793,
"grad_norm": 0.865310788154602,
"learning_rate": 7.505036638111648e-05,
"loss": 1.4417,
"step": 1200
},
{
"epoch": 0.19166798669412324,
"grad_norm": 0.7004736065864563,
"learning_rate": 7.454518768075704e-05,
"loss": 1.367,
"step": 1210
},
{
"epoch": 0.19325201964200855,
"grad_norm": 0.8335785269737244,
"learning_rate": 7.403668595489333e-05,
"loss": 1.3212,
"step": 1220
},
{
"epoch": 0.19483605258989387,
"grad_norm": 0.989263117313385,
"learning_rate": 7.352493004651916e-05,
"loss": 1.3552,
"step": 1230
},
{
"epoch": 0.19642008553777918,
"grad_norm": 0.6966485381126404,
"learning_rate": 7.300998923919259e-05,
"loss": 1.3778,
"step": 1240
},
{
"epoch": 0.19800411848566452,
"grad_norm": 0.7453016042709351,
"learning_rate": 7.249193324765599e-05,
"loss": 1.3938,
"step": 1250
},
{
"epoch": 0.19958815143354983,
"grad_norm": 0.9610586762428284,
"learning_rate": 7.197083220839785e-05,
"loss": 1.3999,
"step": 1260
},
{
"epoch": 0.20117218438143514,
"grad_norm": 0.802983820438385,
"learning_rate": 7.14467566701573e-05,
"loss": 1.364,
"step": 1270
},
{
"epoch": 0.20275621732932045,
"grad_norm": 1.156919240951538,
"learning_rate": 7.091977758437311e-05,
"loss": 1.3004,
"step": 1280
},
{
"epoch": 0.20434025027720576,
"grad_norm": 0.8025760650634766,
"learning_rate": 7.038996629557783e-05,
"loss": 1.3141,
"step": 1290
},
{
"epoch": 0.20592428322509107,
"grad_norm": 0.8748348355293274,
"learning_rate": 6.985739453173903e-05,
"loss": 1.4061,
"step": 1300
},
{
"epoch": 0.2075083161729764,
"grad_norm": 0.7871893644332886,
"learning_rate": 6.932213439454837e-05,
"loss": 1.4073,
"step": 1310
},
{
"epoch": 0.20909234912086172,
"grad_norm": 0.7885726094245911,
"learning_rate": 6.87842583496602e-05,
"loss": 1.3923,
"step": 1320
},
{
"epoch": 0.21067638206874703,
"grad_norm": 0.6420142650604248,
"learning_rate": 6.824383921688098e-05,
"loss": 1.373,
"step": 1330
},
{
"epoch": 0.21226041501663234,
"grad_norm": 0.9715414643287659,
"learning_rate": 6.77009501603105e-05,
"loss": 1.3522,
"step": 1340
},
{
"epoch": 0.21384444796451765,
"grad_norm": 0.9413536190986633,
"learning_rate": 6.71556646784367e-05,
"loss": 1.4675,
"step": 1350
},
{
"epoch": 0.21542848091240296,
"grad_norm": 0.7030996680259705,
"learning_rate": 6.660805659418516e-05,
"loss": 1.3469,
"step": 1360
},
{
"epoch": 0.2170125138602883,
"grad_norm": 0.852599561214447,
"learning_rate": 6.605820004492467e-05,
"loss": 1.4319,
"step": 1370
},
{
"epoch": 0.21859654680817361,
"grad_norm": 0.7496755123138428,
"learning_rate": 6.550616947243009e-05,
"loss": 1.4509,
"step": 1380
},
{
"epoch": 0.22018057975605893,
"grad_norm": 0.8560190200805664,
"learning_rate": 6.495203961280434e-05,
"loss": 1.467,
"step": 1390
},
{
"epoch": 0.22176461270394424,
"grad_norm": 0.6567692160606384,
"learning_rate": 6.439588548636016e-05,
"loss": 1.2536,
"step": 1400
},
{
"epoch": 0.22334864565182955,
"grad_norm": 0.9644126296043396,
"learning_rate": 6.38377823874636e-05,
"loss": 1.3045,
"step": 1410
},
{
"epoch": 0.22493267859971489,
"grad_norm": 0.716455340385437,
"learning_rate": 6.327780587434044e-05,
"loss": 1.333,
"step": 1420
},
{
"epoch": 0.2265167115476002,
"grad_norm": 0.7941800951957703,
"learning_rate": 6.27160317588467e-05,
"loss": 1.4361,
"step": 1430
},
{
"epoch": 0.2281007444954855,
"grad_norm": 0.8310253024101257,
"learning_rate": 6.215253609620498e-05,
"loss": 1.4649,
"step": 1440
},
{
"epoch": 0.22968477744337082,
"grad_norm": 0.7695097923278809,
"learning_rate": 6.158739517470786e-05,
"loss": 1.3969,
"step": 1450
},
{
"epoch": 0.23126881039125613,
"grad_norm": 0.7473721504211426,
"learning_rate": 6.102068550538962e-05,
"loss": 1.3001,
"step": 1460
},
{
"epoch": 0.23285284333914144,
"grad_norm": 0.7866524457931519,
"learning_rate": 6.045248381166783e-05,
"loss": 1.3603,
"step": 1470
},
{
"epoch": 0.23443687628702678,
"grad_norm": 0.6364443898200989,
"learning_rate": 5.988286701895631e-05,
"loss": 1.285,
"step": 1480
},
{
"epoch": 0.2360209092349121,
"grad_norm": 0.8835400342941284,
"learning_rate": 5.9311912244250675e-05,
"loss": 1.4436,
"step": 1490
},
{
"epoch": 0.2376049421827974,
"grad_norm": 0.7543832063674927,
"learning_rate": 5.873969678568784e-05,
"loss": 1.3405,
"step": 1500
},
{
"epoch": 0.2391889751306827,
"grad_norm": 0.8088416457176208,
"learning_rate": 5.816629811208112e-05,
"loss": 1.3383,
"step": 1510
},
{
"epoch": 0.24077300807856802,
"grad_norm": 0.7820030450820923,
"learning_rate": 5.759179385243224e-05,
"loss": 1.4342,
"step": 1520
},
{
"epoch": 0.24235704102645336,
"grad_norm": 0.7880681157112122,
"learning_rate": 5.701626178542158e-05,
"loss": 1.2435,
"step": 1530
},
{
"epoch": 0.24394107397433867,
"grad_norm": 0.7700749635696411,
"learning_rate": 5.643977982887815e-05,
"loss": 1.3295,
"step": 1540
},
{
"epoch": 0.24552510692222398,
"grad_norm": 0.8325817584991455,
"learning_rate": 5.586242602923081e-05,
"loss": 1.3995,
"step": 1550
},
{
"epoch": 0.2471091398701093,
"grad_norm": 0.9384058117866516,
"learning_rate": 5.528427855094206e-05,
"loss": 1.4681,
"step": 1560
},
{
"epoch": 0.2486931728179946,
"grad_norm": 0.8473599553108215,
"learning_rate": 5.470541566592573e-05,
"loss": 1.4149,
"step": 1570
},
{
"epoch": 0.2502772057658799,
"grad_norm": 0.8693270683288574,
"learning_rate": 5.4125915742950275e-05,
"loss": 1.3144,
"step": 1580
},
{
"epoch": 0.25186123871376526,
"grad_norm": 0.9569868445396423,
"learning_rate": 5.354585723702893e-05,
"loss": 1.4269,
"step": 1590
},
{
"epoch": 0.25344527166165054,
"grad_norm": 1.0059610605239868,
"learning_rate": 5.296531867879809e-05,
"loss": 1.404,
"step": 1600
},
{
"epoch": 0.2550293046095359,
"grad_norm": 0.7387624979019165,
"learning_rate": 5.2384378663885545e-05,
"loss": 1.3275,
"step": 1610
},
{
"epoch": 0.2566133375574212,
"grad_norm": 0.7581918239593506,
"learning_rate": 5.180311584226991e-05,
"loss": 1.3166,
"step": 1620
},
{
"epoch": 0.2581973705053065,
"grad_norm": 0.9148341417312622,
"learning_rate": 5.1221608907632665e-05,
"loss": 1.3702,
"step": 1630
},
{
"epoch": 0.25978140345319184,
"grad_norm": 0.7988713383674622,
"learning_rate": 5.063993658670425e-05,
"loss": 1.2571,
"step": 1640
},
{
"epoch": 0.2613654364010771,
"grad_norm": 0.6659321188926697,
"learning_rate": 5.0058177628605795e-05,
"loss": 1.345,
"step": 1650
},
{
"epoch": 0.26294946934896246,
"grad_norm": 0.9008516073226929,
"learning_rate": 4.947641079418773e-05,
"loss": 1.3583,
"step": 1660
},
{
"epoch": 0.2645335022968478,
"grad_norm": 0.8323536515235901,
"learning_rate": 4.889471484536672e-05,
"loss": 1.3591,
"step": 1670
},
{
"epoch": 0.2661175352447331,
"grad_norm": 0.7164818644523621,
"learning_rate": 4.83131685344628e-05,
"loss": 1.3439,
"step": 1680
},
{
"epoch": 0.2677015681926184,
"grad_norm": 0.7881910800933838,
"learning_rate": 4.773185059353732e-05,
"loss": 1.3651,
"step": 1690
},
{
"epoch": 0.2692856011405037,
"grad_norm": 0.9037622213363647,
"learning_rate": 4.715083972373401e-05,
"loss": 1.3871,
"step": 1700
},
{
"epoch": 0.27086963408838904,
"grad_norm": 0.8563185930252075,
"learning_rate": 4.657021458462409e-05,
"loss": 1.4886,
"step": 1710
},
{
"epoch": 0.2724536670362743,
"grad_norm": 0.8638001084327698,
"learning_rate": 4.599005378355706e-05,
"loss": 1.4762,
"step": 1720
},
{
"epoch": 0.27403769998415967,
"grad_norm": 0.7504866123199463,
"learning_rate": 4.541043586501842e-05,
"loss": 1.2971,
"step": 1730
},
{
"epoch": 0.275621732932045,
"grad_norm": 0.786354660987854,
"learning_rate": 4.4831439299996084e-05,
"loss": 1.3203,
"step": 1740
},
{
"epoch": 0.2772057658799303,
"grad_norm": 0.7911379337310791,
"learning_rate": 4.425314247535668e-05,
"loss": 1.3526,
"step": 1750
},
{
"epoch": 0.2787897988278156,
"grad_norm": 0.9542713165283203,
"learning_rate": 4.3675623683233135e-05,
"loss": 1.2415,
"step": 1760
},
{
"epoch": 0.2803738317757009,
"grad_norm": 0.7208961844444275,
"learning_rate": 4.309896111042529e-05,
"loss": 1.3313,
"step": 1770
},
{
"epoch": 0.28195786472358625,
"grad_norm": 0.8950613141059875,
"learning_rate": 4.252323282781453e-05,
"loss": 1.3802,
"step": 1780
},
{
"epoch": 0.2835418976714716,
"grad_norm": 0.7467291951179504,
"learning_rate": 4.1948516779794364e-05,
"loss": 1.4616,
"step": 1790
},
{
"epoch": 0.28512593061935687,
"grad_norm": 0.9284554719924927,
"learning_rate": 4.137489077371787e-05,
"loss": 1.3483,
"step": 1800
},
{
"epoch": 0.2867099635672422,
"grad_norm": 0.6686108708381653,
"learning_rate": 4.080243246936399e-05,
"loss": 1.3667,
"step": 1810
},
{
"epoch": 0.2882939965151275,
"grad_norm": 0.7982807159423828,
"learning_rate": 4.0231219368423466e-05,
"loss": 1.3307,
"step": 1820
},
{
"epoch": 0.28987802946301283,
"grad_norm": 0.9453319907188416,
"learning_rate": 3.9661328804006475e-05,
"loss": 1.2947,
"step": 1830
},
{
"epoch": 0.29146206241089817,
"grad_norm": 0.7439360022544861,
"learning_rate": 3.9092837930172884e-05,
"loss": 1.3138,
"step": 1840
},
{
"epoch": 0.29304609535878345,
"grad_norm": 0.8020259141921997,
"learning_rate": 3.852582371148687e-05,
"loss": 1.295,
"step": 1850
},
{
"epoch": 0.2946301283066688,
"grad_norm": 0.8272374868392944,
"learning_rate": 3.796036291259718e-05,
"loss": 1.3511,
"step": 1860
},
{
"epoch": 0.2962141612545541,
"grad_norm": 0.7449880838394165,
"learning_rate": 3.739653208784432e-05,
"loss": 1.4048,
"step": 1870
},
{
"epoch": 0.2977981942024394,
"grad_norm": 0.7538899779319763,
"learning_rate": 3.683440757089646e-05,
"loss": 1.2801,
"step": 1880
},
{
"epoch": 0.29938222715032475,
"grad_norm": 0.7433683276176453,
"learning_rate": 3.627406546441494e-05,
"loss": 1.3446,
"step": 1890
},
{
"epoch": 0.30096626009821004,
"grad_norm": 1.033248782157898,
"learning_rate": 3.5715581629751326e-05,
"loss": 1.4297,
"step": 1900
},
{
"epoch": 0.3025502930460954,
"grad_norm": 0.8158900737762451,
"learning_rate": 3.515903167667686e-05,
"loss": 1.3286,
"step": 1910
},
{
"epoch": 0.30413432599398066,
"grad_norm": 0.7832969427108765,
"learning_rate": 3.460449095314621e-05,
"loss": 1.4322,
"step": 1920
},
{
"epoch": 0.305718358941866,
"grad_norm": 1.0758531093597412,
"learning_rate": 3.40520345350965e-05,
"loss": 1.4076,
"step": 1930
},
{
"epoch": 0.3073023918897513,
"grad_norm": 0.8335056900978088,
"learning_rate": 3.35017372162833e-05,
"loss": 1.3312,
"step": 1940
},
{
"epoch": 0.3088864248376366,
"grad_norm": 0.8425313830375671,
"learning_rate": 3.295367349815469e-05,
"loss": 1.3194,
"step": 1950
},
{
"epoch": 0.31047045778552196,
"grad_norm": 1.0642461776733398,
"learning_rate": 3.240791757976491e-05,
"loss": 1.3145,
"step": 1960
},
{
"epoch": 0.31205449073340724,
"grad_norm": 0.8336025476455688,
"learning_rate": 3.186454334772916e-05,
"loss": 1.3895,
"step": 1970
},
{
"epoch": 0.3136385236812926,
"grad_norm": 0.8160053491592407,
"learning_rate": 3.132362436622035e-05,
"loss": 1.3814,
"step": 1980
},
{
"epoch": 0.31522255662917786,
"grad_norm": 0.848243772983551,
"learning_rate": 3.078523386700982e-05,
"loss": 1.2523,
"step": 1990
},
{
"epoch": 0.3168065895770632,
"grad_norm": 0.7689033150672913,
"learning_rate": 3.0249444739552844e-05,
"loss": 1.3768,
"step": 2000
},
{
"epoch": 0.31839062252494854,
"grad_norm": 0.9181113243103027,
"learning_rate": 2.971632952112066e-05,
"loss": 1.3851,
"step": 2010
},
{
"epoch": 0.3199746554728338,
"grad_norm": 0.7203896641731262,
"learning_rate": 2.918596038697995e-05,
"loss": 1.3689,
"step": 2020
},
{
"epoch": 0.32155868842071916,
"grad_norm": 0.8374003767967224,
"learning_rate": 2.86584091406216e-05,
"loss": 1.3121,
"step": 2030
},
{
"epoch": 0.32314272136860445,
"grad_norm": 0.7389172315597534,
"learning_rate": 2.8133747204039574e-05,
"loss": 1.343,
"step": 2040
},
{
"epoch": 0.3247267543164898,
"grad_norm": 0.8278707265853882,
"learning_rate": 2.761204560806152e-05,
"loss": 1.4285,
"step": 2050
},
{
"epoch": 0.3263107872643751,
"grad_norm": 0.7910729646682739,
"learning_rate": 2.709337498273243e-05,
"loss": 1.3735,
"step": 2060
},
{
"epoch": 0.3278948202122604,
"grad_norm": 0.7963743209838867,
"learning_rate": 2.65778055477523e-05,
"loss": 1.3698,
"step": 2070
},
{
"epoch": 0.32947885316014575,
"grad_norm": 0.9226623177528381,
"learning_rate": 2.6065407102969664e-05,
"loss": 1.2923,
"step": 2080
},
{
"epoch": 0.33106288610803103,
"grad_norm": 0.8737735152244568,
"learning_rate": 2.555624901893171e-05,
"loss": 1.3832,
"step": 2090
},
{
"epoch": 0.33264691905591637,
"grad_norm": 0.6608365178108215,
"learning_rate": 2.505040022749265e-05,
"loss": 1.2165,
"step": 2100
},
{
"epoch": 0.3342309520038017,
"grad_norm": 1.0949828624725342,
"learning_rate": 2.4547929212481435e-05,
"loss": 1.2863,
"step": 2110
},
{
"epoch": 0.335814984951687,
"grad_norm": 0.6609100103378296,
"learning_rate": 2.404890400043023e-05,
"loss": 1.2751,
"step": 2120
},
{
"epoch": 0.33739901789957233,
"grad_norm": 0.7404264807701111,
"learning_rate": 2.3553392151364536e-05,
"loss": 1.3004,
"step": 2130
},
{
"epoch": 0.3389830508474576,
"grad_norm": 0.8399338722229004,
"learning_rate": 2.3061460749656844e-05,
"loss": 1.4249,
"step": 2140
},
{
"epoch": 0.34056708379534295,
"grad_norm": 0.9257438778877258,
"learning_rate": 2.2573176394944328e-05,
"loss": 1.4161,
"step": 2150
},
{
"epoch": 0.34215111674322823,
"grad_norm": 0.8689798712730408,
"learning_rate": 2.2088605193112383e-05,
"loss": 1.2699,
"step": 2160
},
{
"epoch": 0.3437351496911136,
"grad_norm": 0.7370318174362183,
"learning_rate": 2.160781274734495e-05,
"loss": 1.3336,
"step": 2170
},
{
"epoch": 0.3453191826389989,
"grad_norm": 0.7378177046775818,
"learning_rate": 2.1130864149242878e-05,
"loss": 1.3478,
"step": 2180
},
{
"epoch": 0.3469032155868842,
"grad_norm": 0.7509024143218994,
"learning_rate": 2.0657823970011618e-05,
"loss": 1.3091,
"step": 2190
},
{
"epoch": 0.34848724853476953,
"grad_norm": 0.7333298921585083,
"learning_rate": 2.0188756251719203e-05,
"loss": 1.3382,
"step": 2200
},
{
"epoch": 0.3500712814826548,
"grad_norm": 0.729038655757904,
"learning_rate": 1.9723724498626105e-05,
"loss": 1.2599,
"step": 2210
},
{
"epoch": 0.35165531443054016,
"grad_norm": 1.0300594568252563,
"learning_rate": 1.9262791668587676e-05,
"loss": 1.332,
"step": 2220
},
{
"epoch": 0.3532393473784255,
"grad_norm": 1.1736242771148682,
"learning_rate": 1.8806020164530702e-05,
"loss": 1.2849,
"step": 2230
},
{
"epoch": 0.3548233803263108,
"grad_norm": 0.758115828037262,
"learning_rate": 1.8353471826005036e-05,
"loss": 1.4054,
"step": 2240
},
{
"epoch": 0.3564074132741961,
"grad_norm": 0.9238961935043335,
"learning_rate": 1.7905207920811572e-05,
"loss": 1.4045,
"step": 2250
},
{
"epoch": 0.3579914462220814,
"grad_norm": 0.8140641450881958,
"learning_rate": 1.746128913670746e-05,
"loss": 1.3212,
"step": 2260
},
{
"epoch": 0.35957547916996674,
"grad_norm": 0.690086305141449,
"learning_rate": 1.7021775573190013e-05,
"loss": 1.3071,
"step": 2270
},
{
"epoch": 0.3611595121178521,
"grad_norm": 0.7119179368019104,
"learning_rate": 1.6586726733360237e-05,
"loss": 1.1963,
"step": 2280
},
{
"epoch": 0.36274354506573736,
"grad_norm": 0.90193772315979,
"learning_rate": 1.615620151586697e-05,
"loss": 1.3169,
"step": 2290
},
{
"epoch": 0.3643275780136227,
"grad_norm": 0.791403591632843,
"learning_rate": 1.5730258206933025e-05,
"loss": 1.3956,
"step": 2300
},
{
"epoch": 0.365911610961508,
"grad_norm": 0.9166008234024048,
"learning_rate": 1.530895447246411e-05,
"loss": 1.2468,
"step": 2310
},
{
"epoch": 0.3674956439093933,
"grad_norm": 0.8105589151382446,
"learning_rate": 1.4892347350241881e-05,
"loss": 1.2856,
"step": 2320
},
{
"epoch": 0.3690796768572786,
"grad_norm": 0.7731050252914429,
"learning_rate": 1.448049324220181e-05,
"loss": 1.32,
"step": 2330
},
{
"epoch": 0.37066370980516394,
"grad_norm": 0.6832409501075745,
"learning_rate": 1.4073447906797376e-05,
"loss": 1.4165,
"step": 2340
},
{
"epoch": 0.3722477427530493,
"grad_norm": 0.7237154841423035,
"learning_rate": 1.367126645145121e-05,
"loss": 1.3432,
"step": 2350
},
{
"epoch": 0.37383177570093457,
"grad_norm": 0.8293668031692505,
"learning_rate": 1.327400332509442e-05,
"loss": 1.3723,
"step": 2360
},
{
"epoch": 0.3754158086488199,
"grad_norm": 0.9197924733161926,
"learning_rate": 1.2881712310795118e-05,
"loss": 1.2893,
"step": 2370
},
{
"epoch": 0.3769998415967052,
"grad_norm": 0.725936233997345,
"learning_rate": 1.2494446518477022e-05,
"loss": 1.2668,
"step": 2380
},
{
"epoch": 0.3785838745445905,
"grad_norm": 0.768979012966156,
"learning_rate": 1.2112258377729274e-05,
"loss": 1.347,
"step": 2390
},
{
"epoch": 0.38016790749247587,
"grad_norm": 0.7712375521659851,
"learning_rate": 1.1735199630708222e-05,
"loss": 1.2925,
"step": 2400
},
{
"epoch": 0.38175194044036115,
"grad_norm": 0.7395961284637451,
"learning_rate": 1.1363321325132447e-05,
"loss": 1.3992,
"step": 2410
},
{
"epoch": 0.3833359733882465,
"grad_norm": 0.8247182369232178,
"learning_rate": 1.0996673807371677e-05,
"loss": 1.3175,
"step": 2420
},
{
"epoch": 0.38492000633613177,
"grad_norm": 0.8275562524795532,
"learning_rate": 1.0635306715630682e-05,
"loss": 1.3007,
"step": 2430
},
{
"epoch": 0.3865040392840171,
"grad_norm": 0.768756628036499,
"learning_rate": 1.0279268973229089e-05,
"loss": 1.3922,
"step": 2440
},
{
"epoch": 0.38808807223190245,
"grad_norm": 0.804603636264801,
"learning_rate": 9.928608781977966e-06,
"loss": 1.3236,
"step": 2450
},
{
"epoch": 0.38967210517978773,
"grad_norm": 0.7628573775291443,
"learning_rate": 9.583373615653978e-06,
"loss": 1.3312,
"step": 2460
},
{
"epoch": 0.39125613812767307,
"grad_norm": 0.8191434741020203,
"learning_rate": 9.243610213572285e-06,
"loss": 1.2456,
"step": 2470
},
{
"epoch": 0.39284017107555835,
"grad_norm": 0.9523441195487976,
"learning_rate": 8.909364574258793e-06,
"loss": 1.343,
"step": 2480
},
{
"epoch": 0.3944242040234437,
"grad_norm": 0.730061948299408,
"learning_rate": 8.580681949222568e-06,
"loss": 1.3723,
"step": 2490
},
{
"epoch": 0.39600823697132903,
"grad_norm": 0.7538689374923706,
"learning_rate": 8.257606836829678e-06,
"loss": 1.2467,
"step": 2500
},
{
"epoch": 0.3975922699192143,
"grad_norm": 1.1191153526306152,
"learning_rate": 7.940182976278692e-06,
"loss": 1.3746,
"step": 2510
},
{
"epoch": 0.39917630286709965,
"grad_norm": 0.7855995297431946,
"learning_rate": 7.6284533416791814e-06,
"loss": 1.2963,
"step": 2520
},
{
"epoch": 0.40076033581498494,
"grad_norm": 0.7669951319694519,
"learning_rate": 7.322460136233622e-06,
"loss": 1.2613,
"step": 2530
},
{
"epoch": 0.4023443687628703,
"grad_norm": 0.8027651906013489,
"learning_rate": 7.02224478652388e-06,
"loss": 1.2966,
"step": 2540
},
{
"epoch": 0.40392840171075556,
"grad_norm": 0.9282720685005188,
"learning_rate": 6.727847936902543e-06,
"loss": 1.3406,
"step": 2550
},
{
"epoch": 0.4055124346586409,
"grad_norm": 0.8110417127609253,
"learning_rate": 6.439309443990532e-06,
"loss": 1.2994,
"step": 2560
},
{
"epoch": 0.40709646760652624,
"grad_norm": 0.6770531535148621,
"learning_rate": 6.1566683712809824e-06,
"loss": 1.3193,
"step": 2570
},
{
"epoch": 0.4086805005544115,
"grad_norm": 0.8474519848823547,
"learning_rate": 5.879962983850745e-06,
"loss": 1.3126,
"step": 2580
},
{
"epoch": 0.41026453350229686,
"grad_norm": 0.9139429926872253,
"learning_rate": 5.6092307431799384e-06,
"loss": 1.3506,
"step": 2590
},
{
"epoch": 0.41184856645018214,
"grad_norm": 0.7944324016571045,
"learning_rate": 5.34450830208017e-06,
"loss": 1.4193,
"step": 2600
},
{
"epoch": 0.4134325993980675,
"grad_norm": 0.7817209959030151,
"learning_rate": 5.08583149973243e-06,
"loss": 1.222,
"step": 2610
},
{
"epoch": 0.4150166323459528,
"grad_norm": 0.8059713840484619,
"learning_rate": 4.833235356834959e-06,
"loss": 1.261,
"step": 2620
},
{
"epoch": 0.4166006652938381,
"grad_norm": 0.7949492335319519,
"learning_rate": 4.586754070862099e-06,
"loss": 1.3544,
"step": 2630
},
{
"epoch": 0.41818469824172344,
"grad_norm": 0.879030168056488,
"learning_rate": 4.346421011434382e-06,
"loss": 1.2447,
"step": 2640
},
{
"epoch": 0.4197687311896087,
"grad_norm": 0.7277154922485352,
"learning_rate": 4.112268715800943e-06,
"loss": 1.4064,
"step": 2650
},
{
"epoch": 0.42135276413749406,
"grad_norm": 0.8153344392776489,
"learning_rate": 3.884328884434402e-06,
"loss": 1.4184,
"step": 2660
},
{
"epoch": 0.4229367970853794,
"grad_norm": 0.9249367117881775,
"learning_rate": 3.6626323767391777e-06,
"loss": 1.2782,
"step": 2670
},
{
"epoch": 0.4245208300332647,
"grad_norm": 0.7652601003646851,
"learning_rate": 3.4472092068735916e-06,
"loss": 1.3451,
"step": 2680
},
{
"epoch": 0.42610486298115,
"grad_norm": 0.944959282875061,
"learning_rate": 3.238088539686451e-06,
"loss": 1.2975,
"step": 2690
},
{
"epoch": 0.4276888959290353,
"grad_norm": 0.740173876285553,
"learning_rate": 3.0352986867686007e-06,
"loss": 1.2348,
"step": 2700
},
{
"epoch": 0.42927292887692065,
"grad_norm": 0.7749059796333313,
"learning_rate": 2.8388671026199522e-06,
"loss": 1.254,
"step": 2710
},
{
"epoch": 0.43085696182480593,
"grad_norm": 0.7517712712287903,
"learning_rate": 2.6488203809326207e-06,
"loss": 1.3453,
"step": 2720
},
{
"epoch": 0.43244099477269127,
"grad_norm": 0.7981254458427429,
"learning_rate": 2.4651842509905487e-06,
"loss": 1.3381,
"step": 2730
},
{
"epoch": 0.4340250277205766,
"grad_norm": 0.6932268738746643,
"learning_rate": 2.2879835741861586e-06,
"loss": 1.2535,
"step": 2740
},
{
"epoch": 0.4356090606684619,
"grad_norm": 0.8161441683769226,
"learning_rate": 2.1172423406545516e-06,
"loss": 1.2834,
"step": 2750
},
{
"epoch": 0.43719309361634723,
"grad_norm": 0.848377525806427,
"learning_rate": 1.9529836660256096e-06,
"loss": 1.3685,
"step": 2760
},
{
"epoch": 0.4387771265642325,
"grad_norm": 0.7809950113296509,
"learning_rate": 1.7952297882945003e-06,
"loss": 1.3941,
"step": 2770
},
{
"epoch": 0.44036115951211785,
"grad_norm": 0.7642554044723511,
"learning_rate": 1.6440020648110067e-06,
"loss": 1.3021,
"step": 2780
},
{
"epoch": 0.4419451924600032,
"grad_norm": 0.6984448432922363,
"learning_rate": 1.4993209693881183e-06,
"loss": 1.339,
"step": 2790
},
{
"epoch": 0.44352922540788847,
"grad_norm": 0.9434962272644043,
"learning_rate": 1.3612060895301759e-06,
"loss": 1.4138,
"step": 2800
},
{
"epoch": 0.4451132583557738,
"grad_norm": 0.9491485953330994,
"learning_rate": 1.2296761237810207e-06,
"loss": 1.3785,
"step": 2810
},
{
"epoch": 0.4466972913036591,
"grad_norm": 0.8270556330680847,
"learning_rate": 1.104748879192552e-06,
"loss": 1.2682,
"step": 2820
},
{
"epoch": 0.44828132425154443,
"grad_norm": 0.7466399669647217,
"learning_rate": 9.864412689139123e-07,
"loss": 1.2793,
"step": 2830
},
{
"epoch": 0.44986535719942977,
"grad_norm": 0.796255350112915,
"learning_rate": 8.747693099017129e-07,
"loss": 1.2731,
"step": 2840
},
{
"epoch": 0.45144939014731505,
"grad_norm": 0.6511625647544861,
"learning_rate": 7.697481207516289e-07,
"loss": 1.3496,
"step": 2850
},
{
"epoch": 0.4530334230952004,
"grad_norm": 0.8428515195846558,
"learning_rate": 6.713919196515317e-07,
"loss": 1.3259,
"step": 2860
},
{
"epoch": 0.4546174560430857,
"grad_norm": 0.7594891786575317,
"learning_rate": 5.797140224566122e-07,
"loss": 1.3121,
"step": 2870
},
{
"epoch": 0.456201488990971,
"grad_norm": 0.8366693258285522,
"learning_rate": 4.947268408866113e-07,
"loss": 1.4236,
"step": 2880
},
{
"epoch": 0.45778552193885635,
"grad_norm": 0.7590285539627075,
"learning_rate": 4.1644188084548063e-07,
"loss": 1.3011,
"step": 2890
},
{
"epoch": 0.45936955488674164,
"grad_norm": 0.8987306952476501,
"learning_rate": 3.4486974086366253e-07,
"loss": 1.2998,
"step": 2900
},
{
"epoch": 0.460953587834627,
"grad_norm": 0.7959816455841064,
"learning_rate": 2.800201106632205e-07,
"loss": 1.3055,
"step": 2910
},
{
"epoch": 0.46253762078251226,
"grad_norm": 0.9299723505973816,
"learning_rate": 2.219017698460002e-07,
"loss": 1.3327,
"step": 2920
},
{
"epoch": 0.4641216537303976,
"grad_norm": 0.9437219500541687,
"learning_rate": 1.7052258670501308e-07,
"loss": 1.3535,
"step": 2930
},
{
"epoch": 0.4657056866782829,
"grad_norm": 0.858355700969696,
"learning_rate": 1.2588951715921116e-07,
"loss": 1.456,
"step": 2940
},
{
"epoch": 0.4672897196261682,
"grad_norm": 0.8487655520439148,
"learning_rate": 8.800860381173448e-08,
"loss": 1.1843,
"step": 2950
},
{
"epoch": 0.46887375257405356,
"grad_norm": 0.7240117788314819,
"learning_rate": 5.688497513188229e-08,
"loss": 1.352,
"step": 2960
},
{
"epoch": 0.47045778552193884,
"grad_norm": 0.7505178451538086,
"learning_rate": 3.2522844760762836e-08,
"loss": 1.3472,
"step": 2970
},
{
"epoch": 0.4720418184698242,
"grad_norm": 0.8503928184509277,
"learning_rate": 1.4925510940844156e-08,
"loss": 1.3577,
"step": 2980
},
{
"epoch": 0.47362585141770946,
"grad_norm": 0.8528128862380981,
"learning_rate": 4.095356069439005e-09,
"loss": 1.4115,
"step": 2990
},
{
"epoch": 0.4752098843655948,
"grad_norm": 0.9052889943122864,
"learning_rate": 3.384637615733155e-11,
"loss": 1.3026,
"step": 3000
}
],
"logging_steps": 10,
"max_steps": 3000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 300,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 9.8780760244224e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}
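
For reference, the lines below are a minimal sketch and not part of the checkpoint itself: they assume the JSON above has been saved locally as trainer_state.json (the filename is an assumption) and use only the Python standard library to summarize the logged loss and learning-rate schedule.

# Minimal sketch: load the trainer state above (assumed saved as "trainer_state.json")
# and summarize the logged training loss and learning-rate schedule.
import json

with open("trainer_state.json") as f:  # hypothetical local path
    state = json.load(f)

history = state["log_history"]                     # one dict per logged step (logging_steps = 10)
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

print(f"logged steps : {steps[0]}..{steps[-1]} ({len(steps)} entries)")
print(f"first loss   : {losses[0]:.4f}")
print(f"final loss   : {losses[-1]:.4f}")
print(f"min loss     : {min(losses):.4f} at step {steps[losses.index(min(losses))]}")
print(f"peak lr      : {max(lrs):.3e} at step {steps[lrs.index(max(lrs))]}")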