sla-cpt-base / q2.5-zh / checkpoint-1500 / trainer_state.json
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0190314358538657,
"eval_steps": 500,
"global_step": 1500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0006796941376380628,
"grad_norm": 9.122925758361816,
"learning_rate": 0.0,
"loss": 6.6013,
"step": 1
},
{
"epoch": 0.0013593882752761257,
"grad_norm": 9.549845695495605,
"learning_rate": 6.756756756756758e-07,
"loss": 6.7658,
"step": 2
},
{
"epoch": 0.0027187765505522514,
"grad_norm": 8.209335327148438,
"learning_rate": 2.0270270270270273e-06,
"loss": 6.5902,
"step": 4
},
{
"epoch": 0.0040781648258283775,
"grad_norm": 6.113947868347168,
"learning_rate": 3.3783783783783788e-06,
"loss": 6.5818,
"step": 6
},
{
"epoch": 0.005437553101104503,
"grad_norm": 6.703476428985596,
"learning_rate": 4.72972972972973e-06,
"loss": 6.5232,
"step": 8
},
{
"epoch": 0.006796941376380629,
"grad_norm": 13.405858039855957,
"learning_rate": 6.081081081081082e-06,
"loss": 6.5521,
"step": 10
},
{
"epoch": 0.008156329651656755,
"grad_norm": 11.226860046386719,
"learning_rate": 7.432432432432433e-06,
"loss": 6.584,
"step": 12
},
{
"epoch": 0.009515717926932881,
"grad_norm": 7.006751537322998,
"learning_rate": 8.783783783783785e-06,
"loss": 6.518,
"step": 14
},
{
"epoch": 0.010875106202209005,
"grad_norm": 6.206234455108643,
"learning_rate": 1.0135135135135136e-05,
"loss": 6.4523,
"step": 16
},
{
"epoch": 0.012234494477485132,
"grad_norm": 3.75468111038208,
"learning_rate": 1.1486486486486488e-05,
"loss": 6.4488,
"step": 18
},
{
"epoch": 0.013593882752761258,
"grad_norm": 3.5509755611419678,
"learning_rate": 1.2837837837837838e-05,
"loss": 6.3353,
"step": 20
},
{
"epoch": 0.014953271028037384,
"grad_norm": 2.838531494140625,
"learning_rate": 1.4189189189189189e-05,
"loss": 6.2737,
"step": 22
},
{
"epoch": 0.01631265930331351,
"grad_norm": 2.849353790283203,
"learning_rate": 1.554054054054054e-05,
"loss": 6.2386,
"step": 24
},
{
"epoch": 0.017672047578589634,
"grad_norm": 3.192340850830078,
"learning_rate": 1.6891891891891892e-05,
"loss": 6.1459,
"step": 26
},
{
"epoch": 0.019031435853865762,
"grad_norm": 3.079922914505005,
"learning_rate": 1.8243243243243244e-05,
"loss": 6.1783,
"step": 28
},
{
"epoch": 0.020390824129141887,
"grad_norm": 3.689027786254883,
"learning_rate": 1.9594594594594595e-05,
"loss": 5.9851,
"step": 30
},
{
"epoch": 0.02175021240441801,
"grad_norm": 2.39050555229187,
"learning_rate": 2.0945945945945947e-05,
"loss": 6.0281,
"step": 32
},
{
"epoch": 0.02310960067969414,
"grad_norm": 2.3905773162841797,
"learning_rate": 2.2297297297297298e-05,
"loss": 6.0399,
"step": 34
},
{
"epoch": 0.024468988954970263,
"grad_norm": 2.676403045654297,
"learning_rate": 2.364864864864865e-05,
"loss": 6.0026,
"step": 36
},
{
"epoch": 0.025828377230246388,
"grad_norm": 2.220277786254883,
"learning_rate": 2.5e-05,
"loss": 5.9469,
"step": 38
},
{
"epoch": 0.027187765505522515,
"grad_norm": 3.7453274726867676,
"learning_rate": 2.635135135135135e-05,
"loss": 5.804,
"step": 40
},
{
"epoch": 0.02854715378079864,
"grad_norm": 4.522032737731934,
"learning_rate": 2.7702702702702704e-05,
"loss": 5.8153,
"step": 42
},
{
"epoch": 0.029906542056074768,
"grad_norm": 3.07928204536438,
"learning_rate": 2.9054054054054052e-05,
"loss": 5.8357,
"step": 44
},
{
"epoch": 0.031265930331350895,
"grad_norm": 3.2400898933410645,
"learning_rate": 3.0405405405405407e-05,
"loss": 5.8705,
"step": 46
},
{
"epoch": 0.03262531860662702,
"grad_norm": 5.057046890258789,
"learning_rate": 3.175675675675676e-05,
"loss": 5.6707,
"step": 48
},
{
"epoch": 0.033984706881903144,
"grad_norm": 4.462399005889893,
"learning_rate": 3.310810810810811e-05,
"loss": 5.7012,
"step": 50
},
{
"epoch": 0.03534409515717927,
"grad_norm": 3.095761299133301,
"learning_rate": 3.445945945945946e-05,
"loss": 5.6685,
"step": 52
},
{
"epoch": 0.03670348343245539,
"grad_norm": 3.478303909301758,
"learning_rate": 3.581081081081081e-05,
"loss": 5.6353,
"step": 54
},
{
"epoch": 0.038062871707731524,
"grad_norm": 4.6464433670043945,
"learning_rate": 3.7162162162162165e-05,
"loss": 5.6277,
"step": 56
},
{
"epoch": 0.03942225998300765,
"grad_norm": 4.2293572425842285,
"learning_rate": 3.851351351351351e-05,
"loss": 5.5346,
"step": 58
},
{
"epoch": 0.04078164825828377,
"grad_norm": 4.188422679901123,
"learning_rate": 3.986486486486487e-05,
"loss": 5.5544,
"step": 60
},
{
"epoch": 0.0421410365335599,
"grad_norm": 3.0673420429229736,
"learning_rate": 4.1216216216216216e-05,
"loss": 5.53,
"step": 62
},
{
"epoch": 0.04350042480883602,
"grad_norm": 3.3032662868499756,
"learning_rate": 4.256756756756757e-05,
"loss": 5.5605,
"step": 64
},
{
"epoch": 0.044859813084112146,
"grad_norm": 3.896825075149536,
"learning_rate": 4.391891891891892e-05,
"loss": 5.4221,
"step": 66
},
{
"epoch": 0.04621920135938828,
"grad_norm": 4.151010990142822,
"learning_rate": 4.5270270270270274e-05,
"loss": 5.3967,
"step": 68
},
{
"epoch": 0.0475785896346644,
"grad_norm": 3.938117265701294,
"learning_rate": 4.662162162162162e-05,
"loss": 5.4716,
"step": 70
},
{
"epoch": 0.048937977909940526,
"grad_norm": 3.1217191219329834,
"learning_rate": 4.797297297297298e-05,
"loss": 5.4567,
"step": 72
},
{
"epoch": 0.05029736618521665,
"grad_norm": 3.293020725250244,
"learning_rate": 4.9324324324324325e-05,
"loss": 5.4291,
"step": 74
},
{
"epoch": 0.051656754460492775,
"grad_norm": 3.9366047382354736,
"learning_rate": 5.067567567567568e-05,
"loss": 5.378,
"step": 76
},
{
"epoch": 0.053016142735768906,
"grad_norm": 4.825038909912109,
"learning_rate": 5.202702702702703e-05,
"loss": 5.3462,
"step": 78
},
{
"epoch": 0.05437553101104503,
"grad_norm": 4.513136386871338,
"learning_rate": 5.337837837837838e-05,
"loss": 5.4209,
"step": 80
},
{
"epoch": 0.055734919286321155,
"grad_norm": 4.524239540100098,
"learning_rate": 5.472972972972973e-05,
"loss": 5.3715,
"step": 82
},
{
"epoch": 0.05709430756159728,
"grad_norm": 5.1905317306518555,
"learning_rate": 5.6081081081081086e-05,
"loss": 5.2334,
"step": 84
},
{
"epoch": 0.058453695836873404,
"grad_norm": 4.657945156097412,
"learning_rate": 5.7432432432432434e-05,
"loss": 5.2899,
"step": 86
},
{
"epoch": 0.059813084112149535,
"grad_norm": 3.7982685565948486,
"learning_rate": 5.878378378378379e-05,
"loss": 5.2191,
"step": 88
},
{
"epoch": 0.06117247238742566,
"grad_norm": 3.5835001468658447,
"learning_rate": 6.013513513513514e-05,
"loss": 5.1858,
"step": 90
},
{
"epoch": 0.06253186066270179,
"grad_norm": 4.594094276428223,
"learning_rate": 6.14864864864865e-05,
"loss": 5.2013,
"step": 92
},
{
"epoch": 0.06389124893797792,
"grad_norm": 3.8048019409179688,
"learning_rate": 6.283783783783784e-05,
"loss": 5.1493,
"step": 94
},
{
"epoch": 0.06525063721325404,
"grad_norm": 3.9920341968536377,
"learning_rate": 6.41891891891892e-05,
"loss": 5.0612,
"step": 96
},
{
"epoch": 0.06661002548853016,
"grad_norm": 3.4856226444244385,
"learning_rate": 6.554054054054054e-05,
"loss": 5.1978,
"step": 98
},
{
"epoch": 0.06796941376380629,
"grad_norm": 3.485684871673584,
"learning_rate": 6.68918918918919e-05,
"loss": 5.2438,
"step": 100
},
{
"epoch": 0.06932880203908241,
"grad_norm": 2.92802095413208,
"learning_rate": 6.824324324324325e-05,
"loss": 5.0203,
"step": 102
},
{
"epoch": 0.07068819031435854,
"grad_norm": 3.472078561782837,
"learning_rate": 6.95945945945946e-05,
"loss": 5.1175,
"step": 104
},
{
"epoch": 0.07204757858963466,
"grad_norm": 3.5529918670654297,
"learning_rate": 7.094594594594594e-05,
"loss": 5.1952,
"step": 106
},
{
"epoch": 0.07340696686491079,
"grad_norm": 5.627261638641357,
"learning_rate": 7.229729729729731e-05,
"loss": 5.0469,
"step": 108
},
{
"epoch": 0.07476635514018691,
"grad_norm": 4.3943305015563965,
"learning_rate": 7.364864864864865e-05,
"loss": 5.0147,
"step": 110
},
{
"epoch": 0.07612574341546305,
"grad_norm": 2.405991792678833,
"learning_rate": 7.500000000000001e-05,
"loss": 5.0281,
"step": 112
},
{
"epoch": 0.07748513169073917,
"grad_norm": 3.361250162124634,
"learning_rate": 7.635135135135135e-05,
"loss": 4.9389,
"step": 114
},
{
"epoch": 0.0788445199660153,
"grad_norm": 3.5558111667633057,
"learning_rate": 7.77027027027027e-05,
"loss": 4.9327,
"step": 116
},
{
"epoch": 0.08020390824129142,
"grad_norm": 3.6313676834106445,
"learning_rate": 7.905405405405406e-05,
"loss": 4.9751,
"step": 118
},
{
"epoch": 0.08156329651656755,
"grad_norm": 3.766629219055176,
"learning_rate": 8.040540540540541e-05,
"loss": 4.9362,
"step": 120
},
{
"epoch": 0.08292268479184367,
"grad_norm": 3.8239798545837402,
"learning_rate": 8.175675675675675e-05,
"loss": 4.9906,
"step": 122
},
{
"epoch": 0.0842820730671198,
"grad_norm": 3.1650514602661133,
"learning_rate": 8.310810810810811e-05,
"loss": 4.8465,
"step": 124
},
{
"epoch": 0.08564146134239592,
"grad_norm": 3.0690271854400635,
"learning_rate": 8.445945945945946e-05,
"loss": 4.8147,
"step": 126
},
{
"epoch": 0.08700084961767204,
"grad_norm": 3.1290276050567627,
"learning_rate": 8.581081081081082e-05,
"loss": 4.8495,
"step": 128
},
{
"epoch": 0.08836023789294817,
"grad_norm": 3.1333677768707275,
"learning_rate": 8.716216216216216e-05,
"loss": 4.8112,
"step": 130
},
{
"epoch": 0.08971962616822429,
"grad_norm": 2.8959381580352783,
"learning_rate": 8.851351351351352e-05,
"loss": 4.7989,
"step": 132
},
{
"epoch": 0.09107901444350043,
"grad_norm": 2.715139389038086,
"learning_rate": 8.986486486486487e-05,
"loss": 4.7689,
"step": 134
},
{
"epoch": 0.09243840271877656,
"grad_norm": 2.3525729179382324,
"learning_rate": 9.121621621621623e-05,
"loss": 4.7503,
"step": 136
},
{
"epoch": 0.09379779099405268,
"grad_norm": 2.5053319931030273,
"learning_rate": 9.256756756756757e-05,
"loss": 4.8267,
"step": 138
},
{
"epoch": 0.0951571792693288,
"grad_norm": 3.2830920219421387,
"learning_rate": 9.391891891891892e-05,
"loss": 4.734,
"step": 140
},
{
"epoch": 0.09651656754460493,
"grad_norm": 3.367637872695923,
"learning_rate": 9.527027027027028e-05,
"loss": 4.6487,
"step": 142
},
{
"epoch": 0.09787595581988105,
"grad_norm": 4.157845973968506,
"learning_rate": 9.662162162162163e-05,
"loss": 4.7186,
"step": 144
},
{
"epoch": 0.09923534409515718,
"grad_norm": 3.549011707305908,
"learning_rate": 9.797297297297297e-05,
"loss": 4.7013,
"step": 146
},
{
"epoch": 0.1005947323704333,
"grad_norm": 2.438737392425537,
"learning_rate": 9.932432432432433e-05,
"loss": 4.6463,
"step": 148
},
{
"epoch": 0.10195412064570943,
"grad_norm": 2.62125301361084,
"learning_rate": 9.999996843793759e-05,
"loss": 4.6266,
"step": 150
},
{
"epoch": 0.10331350892098555,
"grad_norm": 2.5557775497436523,
"learning_rate": 9.999971594167742e-05,
"loss": 4.6659,
"step": 152
},
{
"epoch": 0.10467289719626169,
"grad_norm": 2.435065746307373,
"learning_rate": 9.999921095043215e-05,
"loss": 4.6833,
"step": 154
},
{
"epoch": 0.10603228547153781,
"grad_norm": 2.715564012527466,
"learning_rate": 9.999845346675197e-05,
"loss": 4.6256,
"step": 156
},
{
"epoch": 0.10739167374681394,
"grad_norm": 2.129850149154663,
"learning_rate": 9.999744349446207e-05,
"loss": 4.4834,
"step": 158
},
{
"epoch": 0.10875106202209006,
"grad_norm": 2.3702259063720703,
"learning_rate": 9.99961810386628e-05,
"loss": 4.5664,
"step": 160
},
{
"epoch": 0.11011045029736619,
"grad_norm": 1.9370046854019165,
"learning_rate": 9.999466610572944e-05,
"loss": 4.5847,
"step": 162
},
{
"epoch": 0.11146983857264231,
"grad_norm": 2.4077095985412598,
"learning_rate": 9.999289870331232e-05,
"loss": 4.6685,
"step": 164
},
{
"epoch": 0.11282922684791843,
"grad_norm": 3.0132172107696533,
"learning_rate": 9.999087884033666e-05,
"loss": 4.5605,
"step": 166
},
{
"epoch": 0.11418861512319456,
"grad_norm": 3.6081573963165283,
"learning_rate": 9.998860652700263e-05,
"loss": 4.4315,
"step": 168
},
{
"epoch": 0.11554800339847068,
"grad_norm": 2.687088966369629,
"learning_rate": 9.998608177478525e-05,
"loss": 4.5634,
"step": 170
},
{
"epoch": 0.11690739167374681,
"grad_norm": 2.3163015842437744,
"learning_rate": 9.998330459643437e-05,
"loss": 4.3725,
"step": 172
},
{
"epoch": 0.11826677994902295,
"grad_norm": 2.576303720474243,
"learning_rate": 9.998027500597451e-05,
"loss": 4.4502,
"step": 174
},
{
"epoch": 0.11962616822429907,
"grad_norm": 3.0173189640045166,
"learning_rate": 9.997699301870488e-05,
"loss": 4.4904,
"step": 176
},
{
"epoch": 0.1209855564995752,
"grad_norm": 1.8845309019088745,
"learning_rate": 9.99734586511993e-05,
"loss": 4.4285,
"step": 178
},
{
"epoch": 0.12234494477485132,
"grad_norm": 1.8597114086151123,
"learning_rate": 9.996967192130606e-05,
"loss": 4.4114,
"step": 180
},
{
"epoch": 0.12370433305012744,
"grad_norm": 1.9403643608093262,
"learning_rate": 9.996563284814788e-05,
"loss": 4.3586,
"step": 182
},
{
"epoch": 0.12506372132540358,
"grad_norm": 2.1628377437591553,
"learning_rate": 9.99613414521218e-05,
"loss": 4.4004,
"step": 184
},
{
"epoch": 0.1264231096006797,
"grad_norm": 2.213683843612671,
"learning_rate": 9.995679775489906e-05,
"loss": 4.4017,
"step": 186
},
{
"epoch": 0.12778249787595583,
"grad_norm": 1.9236798286437988,
"learning_rate": 9.995200177942499e-05,
"loss": 4.3356,
"step": 188
},
{
"epoch": 0.12914188615123195,
"grad_norm": 2.8310718536376953,
"learning_rate": 9.994695354991892e-05,
"loss": 4.2476,
"step": 190
},
{
"epoch": 0.13050127442650808,
"grad_norm": 2.613215446472168,
"learning_rate": 9.994165309187406e-05,
"loss": 4.4249,
"step": 192
},
{
"epoch": 0.1318606627017842,
"grad_norm": 3.2933475971221924,
"learning_rate": 9.993610043205735e-05,
"loss": 4.359,
"step": 194
},
{
"epoch": 0.13322005097706033,
"grad_norm": 2.660553455352783,
"learning_rate": 9.993029559850932e-05,
"loss": 4.3591,
"step": 196
},
{
"epoch": 0.13457943925233645,
"grad_norm": 2.223825693130493,
"learning_rate": 9.992423862054397e-05,
"loss": 4.2638,
"step": 198
},
{
"epoch": 0.13593882752761258,
"grad_norm": 1.6391338109970093,
"learning_rate": 9.991792952874857e-05,
"loss": 4.2506,
"step": 200
},
{
"epoch": 0.1372982158028887,
"grad_norm": 1.568050742149353,
"learning_rate": 9.991136835498363e-05,
"loss": 4.1789,
"step": 202
},
{
"epoch": 0.13865760407816483,
"grad_norm": 1.8366698026657104,
"learning_rate": 9.990455513238257e-05,
"loss": 4.2361,
"step": 204
},
{
"epoch": 0.14001699235344095,
"grad_norm": 2.0478951930999756,
"learning_rate": 9.98974898953517e-05,
"loss": 4.2613,
"step": 206
},
{
"epoch": 0.14137638062871707,
"grad_norm": 1.7681331634521484,
"learning_rate": 9.989017267956994e-05,
"loss": 4.2437,
"step": 208
},
{
"epoch": 0.1427357689039932,
"grad_norm": 2.2257468700408936,
"learning_rate": 9.988260352198872e-05,
"loss": 4.1724,
"step": 210
},
{
"epoch": 0.14409515717926932,
"grad_norm": 1.6590179204940796,
"learning_rate": 9.987478246083175e-05,
"loss": 4.1619,
"step": 212
},
{
"epoch": 0.14545454545454545,
"grad_norm": 2.029710292816162,
"learning_rate": 9.986670953559482e-05,
"loss": 4.2611,
"step": 214
},
{
"epoch": 0.14681393372982157,
"grad_norm": 1.7355066537857056,
"learning_rate": 9.985838478704563e-05,
"loss": 4.222,
"step": 216
},
{
"epoch": 0.1481733220050977,
"grad_norm": 1.91265869140625,
"learning_rate": 9.984980825722356e-05,
"loss": 4.0887,
"step": 218
},
{
"epoch": 0.14953271028037382,
"grad_norm": 2.1522412300109863,
"learning_rate": 9.984097998943947e-05,
"loss": 4.1331,
"step": 220
},
{
"epoch": 0.15089209855564995,
"grad_norm": 1.7838095426559448,
"learning_rate": 9.983190002827546e-05,
"loss": 4.0928,
"step": 222
},
{
"epoch": 0.1522514868309261,
"grad_norm": 1.8782153129577637,
"learning_rate": 9.982256841958472e-05,
"loss": 4.2071,
"step": 224
},
{
"epoch": 0.15361087510620222,
"grad_norm": 2.179396390914917,
"learning_rate": 9.981298521049118e-05,
"loss": 4.0642,
"step": 226
},
{
"epoch": 0.15497026338147835,
"grad_norm": 2.1441640853881836,
"learning_rate": 9.980315044938939e-05,
"loss": 4.0892,
"step": 228
},
{
"epoch": 0.15632965165675447,
"grad_norm": 2.6898701190948486,
"learning_rate": 9.979306418594417e-05,
"loss": 4.1155,
"step": 230
},
{
"epoch": 0.1576890399320306,
"grad_norm": 2.3028266429901123,
"learning_rate": 9.97827264710904e-05,
"loss": 4.1381,
"step": 232
},
{
"epoch": 0.15904842820730672,
"grad_norm": 1.8704326152801514,
"learning_rate": 9.977213735703283e-05,
"loss": 4.1299,
"step": 234
},
{
"epoch": 0.16040781648258284,
"grad_norm": 1.5334903001785278,
"learning_rate": 9.976129689724574e-05,
"loss": 4.1585,
"step": 236
},
{
"epoch": 0.16176720475785897,
"grad_norm": 1.5391136407852173,
"learning_rate": 9.975020514647267e-05,
"loss": 4.0774,
"step": 238
},
{
"epoch": 0.1631265930331351,
"grad_norm": 1.731969952583313,
"learning_rate": 9.973886216072614e-05,
"loss": 4.1801,
"step": 240
},
{
"epoch": 0.16448598130841122,
"grad_norm": 1.355950117111206,
"learning_rate": 9.972726799728744e-05,
"loss": 4.1208,
"step": 242
},
{
"epoch": 0.16584536958368734,
"grad_norm": 1.6355708837509155,
"learning_rate": 9.971542271470625e-05,
"loss": 4.0135,
"step": 244
},
{
"epoch": 0.16720475785896347,
"grad_norm": 1.612067461013794,
"learning_rate": 9.970332637280041e-05,
"loss": 4.008,
"step": 246
},
{
"epoch": 0.1685641461342396,
"grad_norm": 1.5609122514724731,
"learning_rate": 9.969097903265558e-05,
"loss": 3.9615,
"step": 248
},
{
"epoch": 0.16992353440951571,
"grad_norm": 2.1877589225769043,
"learning_rate": 9.967838075662495e-05,
"loss": 4.0187,
"step": 250
},
{
"epoch": 0.17128292268479184,
"grad_norm": 2.0836243629455566,
"learning_rate": 9.966553160832889e-05,
"loss": 4.0108,
"step": 252
},
{
"epoch": 0.17264231096006796,
"grad_norm": 1.8262373208999634,
"learning_rate": 9.96524316526547e-05,
"loss": 3.9729,
"step": 254
},
{
"epoch": 0.1740016992353441,
"grad_norm": 1.4357279539108276,
"learning_rate": 9.96390809557562e-05,
"loss": 3.9418,
"step": 256
},
{
"epoch": 0.1753610875106202,
"grad_norm": 1.4747521877288818,
"learning_rate": 9.962547958505346e-05,
"loss": 4.0073,
"step": 258
},
{
"epoch": 0.17672047578589634,
"grad_norm": 1.5109456777572632,
"learning_rate": 9.961162760923244e-05,
"loss": 4.0114,
"step": 260
},
{
"epoch": 0.17807986406117246,
"grad_norm": 1.6962803602218628,
"learning_rate": 9.959752509824462e-05,
"loss": 3.8997,
"step": 262
},
{
"epoch": 0.17943925233644858,
"grad_norm": 1.2874037027359009,
"learning_rate": 9.958317212330665e-05,
"loss": 3.9746,
"step": 264
},
{
"epoch": 0.18079864061172474,
"grad_norm": 1.4089356660842896,
"learning_rate": 9.956856875690006e-05,
"loss": 3.8799,
"step": 266
},
{
"epoch": 0.18215802888700086,
"grad_norm": 1.4761899709701538,
"learning_rate": 9.95537150727708e-05,
"loss": 3.9084,
"step": 268
},
{
"epoch": 0.18351741716227699,
"grad_norm": 1.2963216304779053,
"learning_rate": 9.953861114592889e-05,
"loss": 3.884,
"step": 270
},
{
"epoch": 0.1848768054375531,
"grad_norm": 1.2376818656921387,
"learning_rate": 9.952325705264806e-05,
"loss": 3.9434,
"step": 272
},
{
"epoch": 0.18623619371282923,
"grad_norm": 1.6393024921417236,
"learning_rate": 9.950765287046543e-05,
"loss": 3.9175,
"step": 274
},
{
"epoch": 0.18759558198810536,
"grad_norm": 1.2873233556747437,
"learning_rate": 9.949179867818099e-05,
"loss": 3.9513,
"step": 276
},
{
"epoch": 0.18895497026338148,
"grad_norm": 1.3314156532287598,
"learning_rate": 9.947569455585726e-05,
"loss": 3.9345,
"step": 278
},
{
"epoch": 0.1903143585386576,
"grad_norm": 1.392342448234558,
"learning_rate": 9.945934058481892e-05,
"loss": 3.8092,
"step": 280
},
{
"epoch": 0.19167374681393373,
"grad_norm": 1.4349101781845093,
"learning_rate": 9.944273684765235e-05,
"loss": 3.8548,
"step": 282
},
{
"epoch": 0.19303313508920986,
"grad_norm": 1.2190157175064087,
"learning_rate": 9.942588342820521e-05,
"loss": 3.9121,
"step": 284
},
{
"epoch": 0.19439252336448598,
"grad_norm": 1.4537711143493652,
"learning_rate": 9.94087804115861e-05,
"loss": 3.8502,
"step": 286
},
{
"epoch": 0.1957519116397621,
"grad_norm": 1.6733758449554443,
"learning_rate": 9.939142788416398e-05,
"loss": 3.8743,
"step": 288
},
{
"epoch": 0.19711129991503823,
"grad_norm": 1.4261025190353394,
"learning_rate": 9.937382593356793e-05,
"loss": 3.8947,
"step": 290
},
{
"epoch": 0.19847068819031435,
"grad_norm": 1.6536645889282227,
"learning_rate": 9.93559746486865e-05,
"loss": 3.9158,
"step": 292
},
{
"epoch": 0.19983007646559048,
"grad_norm": 1.71151864528656,
"learning_rate": 9.933787411966742e-05,
"loss": 3.8466,
"step": 294
},
{
"epoch": 0.2011894647408666,
"grad_norm": 1.8195589780807495,
"learning_rate": 9.931952443791703e-05,
"loss": 3.8113,
"step": 296
},
{
"epoch": 0.20254885301614273,
"grad_norm": 1.5555843114852905,
"learning_rate": 9.930092569609996e-05,
"loss": 3.8505,
"step": 298
},
{
"epoch": 0.20390824129141885,
"grad_norm": 1.402797818183899,
"learning_rate": 9.928207798813849e-05,
"loss": 3.8856,
"step": 300
},
{
"epoch": 0.20526762956669498,
"grad_norm": 1.33147394657135,
"learning_rate": 9.926298140921221e-05,
"loss": 3.8581,
"step": 302
},
{
"epoch": 0.2066270178419711,
"grad_norm": 1.1469197273254395,
"learning_rate": 9.924363605575746e-05,
"loss": 3.8449,
"step": 304
},
{
"epoch": 0.20798640611724725,
"grad_norm": 1.3006025552749634,
"learning_rate": 9.922404202546691e-05,
"loss": 3.8268,
"step": 306
},
{
"epoch": 0.20934579439252338,
"grad_norm": 1.4287155866622925,
"learning_rate": 9.9204199417289e-05,
"loss": 3.7724,
"step": 308
},
{
"epoch": 0.2107051826677995,
"grad_norm": 1.5455858707427979,
"learning_rate": 9.918410833142748e-05,
"loss": 3.7289,
"step": 310
},
{
"epoch": 0.21206457094307563,
"grad_norm": 2.112565517425537,
"learning_rate": 9.91637688693409e-05,
"loss": 3.7394,
"step": 312
},
{
"epoch": 0.21342395921835175,
"grad_norm": 1.6835887432098389,
"learning_rate": 9.914318113374208e-05,
"loss": 3.7487,
"step": 314
},
{
"epoch": 0.21478334749362787,
"grad_norm": 1.4296996593475342,
"learning_rate": 9.912234522859761e-05,
"loss": 3.7731,
"step": 316
},
{
"epoch": 0.216142735768904,
"grad_norm": 1.7142002582550049,
"learning_rate": 9.910126125912733e-05,
"loss": 3.8279,
"step": 318
},
{
"epoch": 0.21750212404418012,
"grad_norm": 1.5620222091674805,
"learning_rate": 9.907992933180376e-05,
"loss": 3.8279,
"step": 320
},
{
"epoch": 0.21886151231945625,
"grad_norm": 1.3424922227859497,
"learning_rate": 9.905834955435162e-05,
"loss": 3.7903,
"step": 322
},
{
"epoch": 0.22022090059473237,
"grad_norm": 1.4565094709396362,
"learning_rate": 9.903652203574722e-05,
"loss": 3.793,
"step": 324
},
{
"epoch": 0.2215802888700085,
"grad_norm": 1.7999119758605957,
"learning_rate": 9.901444688621801e-05,
"loss": 3.7354,
"step": 326
},
{
"epoch": 0.22293967714528462,
"grad_norm": 1.4900187253952026,
"learning_rate": 9.899212421724187e-05,
"loss": 3.7323,
"step": 328
},
{
"epoch": 0.22429906542056074,
"grad_norm": 1.4624853134155273,
"learning_rate": 9.896955414154669e-05,
"loss": 3.7207,
"step": 330
},
{
"epoch": 0.22565845369583687,
"grad_norm": 1.7633172273635864,
"learning_rate": 9.894673677310972e-05,
"loss": 3.7566,
"step": 332
},
{
"epoch": 0.227017841971113,
"grad_norm": 1.172234296798706,
"learning_rate": 9.892367222715709e-05,
"loss": 3.7376,
"step": 334
},
{
"epoch": 0.22837723024638912,
"grad_norm": 1.537023901939392,
"learning_rate": 9.890036062016306e-05,
"loss": 3.7157,
"step": 336
},
{
"epoch": 0.22973661852166524,
"grad_norm": 1.3012125492095947,
"learning_rate": 9.887680206984959e-05,
"loss": 3.6776,
"step": 338
},
{
"epoch": 0.23109600679694137,
"grad_norm": 1.1854647397994995,
"learning_rate": 9.885299669518569e-05,
"loss": 3.6635,
"step": 340
},
{
"epoch": 0.2324553950722175,
"grad_norm": 1.1112992763519287,
"learning_rate": 9.882894461638676e-05,
"loss": 3.7341,
"step": 342
},
{
"epoch": 0.23381478334749362,
"grad_norm": 1.1130858659744263,
"learning_rate": 9.88046459549141e-05,
"loss": 3.6422,
"step": 344
},
{
"epoch": 0.23517417162276974,
"grad_norm": 1.441116213798523,
"learning_rate": 9.878010083347419e-05,
"loss": 3.6886,
"step": 346
},
{
"epoch": 0.2365335598980459,
"grad_norm": 1.3626590967178345,
"learning_rate": 9.875530937601816e-05,
"loss": 3.7735,
"step": 348
},
{
"epoch": 0.23789294817332202,
"grad_norm": 1.2444162368774414,
"learning_rate": 9.873027170774109e-05,
"loss": 3.7312,
"step": 350
},
{
"epoch": 0.23925233644859814,
"grad_norm": 1.3234375715255737,
"learning_rate": 9.87049879550814e-05,
"loss": 3.748,
"step": 352
},
{
"epoch": 0.24061172472387427,
"grad_norm": 1.333979606628418,
"learning_rate": 9.867945824572024e-05,
"loss": 3.6207,
"step": 354
},
{
"epoch": 0.2419711129991504,
"grad_norm": 1.0207340717315674,
"learning_rate": 9.865368270858082e-05,
"loss": 3.7018,
"step": 356
},
{
"epoch": 0.24333050127442651,
"grad_norm": 1.098137378692627,
"learning_rate": 9.862766147382774e-05,
"loss": 3.6689,
"step": 358
},
{
"epoch": 0.24468988954970264,
"grad_norm": 1.1118202209472656,
"learning_rate": 9.860139467286638e-05,
"loss": 3.7185,
"step": 360
},
{
"epoch": 0.24604927782497876,
"grad_norm": 1.4026211500167847,
"learning_rate": 9.857488243834219e-05,
"loss": 3.6949,
"step": 362
},
{
"epoch": 0.2474086661002549,
"grad_norm": 1.528132677078247,
"learning_rate": 9.85481249041401e-05,
"loss": 3.5872,
"step": 364
},
{
"epoch": 0.248768054375531,
"grad_norm": 1.4865642786026,
"learning_rate": 9.852112220538367e-05,
"loss": 3.6044,
"step": 366
},
{
"epoch": 0.25012744265080716,
"grad_norm": 1.1037031412124634,
"learning_rate": 9.849387447843467e-05,
"loss": 3.7614,
"step": 368
},
{
"epoch": 0.25148683092608326,
"grad_norm": 1.0702588558197021,
"learning_rate": 9.846638186089214e-05,
"loss": 3.6226,
"step": 370
},
{
"epoch": 0.2528462192013594,
"grad_norm": 0.970947802066803,
"learning_rate": 9.843864449159182e-05,
"loss": 3.6127,
"step": 372
},
{
"epoch": 0.2542056074766355,
"grad_norm": 1.1656701564788818,
"learning_rate": 9.841066251060543e-05,
"loss": 3.694,
"step": 374
},
{
"epoch": 0.25556499575191166,
"grad_norm": 1.2996894121170044,
"learning_rate": 9.838243605924001e-05,
"loss": 3.6226,
"step": 376
},
{
"epoch": 0.25692438402718776,
"grad_norm": 1.392196536064148,
"learning_rate": 9.835396528003707e-05,
"loss": 3.6542,
"step": 378
},
{
"epoch": 0.2582837723024639,
"grad_norm": 1.3840879201889038,
"learning_rate": 9.832525031677205e-05,
"loss": 3.6416,
"step": 380
},
{
"epoch": 0.25964316057774,
"grad_norm": 1.5829066038131714,
"learning_rate": 9.829629131445342e-05,
"loss": 3.6383,
"step": 382
},
{
"epoch": 0.26100254885301616,
"grad_norm": 1.260533332824707,
"learning_rate": 9.826708841932209e-05,
"loss": 3.6034,
"step": 384
},
{
"epoch": 0.26236193712829226,
"grad_norm": 1.3130146265029907,
"learning_rate": 9.823764177885059e-05,
"loss": 3.5935,
"step": 386
},
{
"epoch": 0.2637213254035684,
"grad_norm": 1.4189637899398804,
"learning_rate": 9.820795154174235e-05,
"loss": 3.6792,
"step": 388
},
{
"epoch": 0.2650807136788445,
"grad_norm": 1.3872414827346802,
"learning_rate": 9.817801785793092e-05,
"loss": 3.6149,
"step": 390
},
{
"epoch": 0.26644010195412066,
"grad_norm": 1.487898826599121,
"learning_rate": 9.814784087857927e-05,
"loss": 3.6161,
"step": 392
},
{
"epoch": 0.26779949022939675,
"grad_norm": 1.374002456665039,
"learning_rate": 9.8117420756079e-05,
"loss": 3.6394,
"step": 394
},
{
"epoch": 0.2691588785046729,
"grad_norm": 0.9476630091667175,
"learning_rate": 9.808675764404953e-05,
"loss": 3.5447,
"step": 396
},
{
"epoch": 0.270518266779949,
"grad_norm": 1.2160744667053223,
"learning_rate": 9.805585169733738e-05,
"loss": 3.552,
"step": 398
},
{
"epoch": 0.27187765505522515,
"grad_norm": 1.2432382106781006,
"learning_rate": 9.802470307201538e-05,
"loss": 3.5518,
"step": 400
},
{
"epoch": 0.27323704333050125,
"grad_norm": 1.0426836013793945,
"learning_rate": 9.799331192538185e-05,
"loss": 3.6109,
"step": 402
},
{
"epoch": 0.2745964316057774,
"grad_norm": 1.1359163522720337,
"learning_rate": 9.796167841595986e-05,
"loss": 3.563,
"step": 404
},
{
"epoch": 0.2759558198810535,
"grad_norm": 1.2553869485855103,
"learning_rate": 9.792980270349633e-05,
"loss": 3.6323,
"step": 406
},
{
"epoch": 0.27731520815632965,
"grad_norm": 1.0180846452713013,
"learning_rate": 9.789768494896132e-05,
"loss": 3.4775,
"step": 408
},
{
"epoch": 0.2786745964316058,
"grad_norm": 1.2593415975570679,
"learning_rate": 9.786532531454722e-05,
"loss": 3.6039,
"step": 410
},
{
"epoch": 0.2800339847068819,
"grad_norm": 1.2103174924850464,
"learning_rate": 9.783272396366784e-05,
"loss": 3.6399,
"step": 412
},
{
"epoch": 0.28139337298215805,
"grad_norm": 1.3131142854690552,
"learning_rate": 9.77998810609577e-05,
"loss": 3.6405,
"step": 414
},
{
"epoch": 0.28275276125743415,
"grad_norm": 1.4719713926315308,
"learning_rate": 9.77667967722711e-05,
"loss": 3.6138,
"step": 416
},
{
"epoch": 0.2841121495327103,
"grad_norm": 1.4857118129730225,
"learning_rate": 9.773347126468128e-05,
"loss": 3.5811,
"step": 418
},
{
"epoch": 0.2854715378079864,
"grad_norm": 1.424742579460144,
"learning_rate": 9.769990470647974e-05,
"loss": 3.5766,
"step": 420
},
{
"epoch": 0.28683092608326255,
"grad_norm": 1.399685263633728,
"learning_rate": 9.766609726717515e-05,
"loss": 3.5816,
"step": 422
},
{
"epoch": 0.28819031435853865,
"grad_norm": 1.0692592859268188,
"learning_rate": 9.763204911749267e-05,
"loss": 3.5316,
"step": 424
},
{
"epoch": 0.2895497026338148,
"grad_norm": 0.9437915682792664,
"learning_rate": 9.759776042937302e-05,
"loss": 3.5464,
"step": 426
},
{
"epoch": 0.2909090909090909,
"grad_norm": 1.3703209161758423,
"learning_rate": 9.756323137597159e-05,
"loss": 3.5578,
"step": 428
},
{
"epoch": 0.29226847918436705,
"grad_norm": 1.31071138381958,
"learning_rate": 9.752846213165767e-05,
"loss": 3.6392,
"step": 430
},
{
"epoch": 0.29362786745964314,
"grad_norm": 0.9742053747177124,
"learning_rate": 9.749345287201343e-05,
"loss": 3.5328,
"step": 432
},
{
"epoch": 0.2949872557349193,
"grad_norm": 0.9459298253059387,
"learning_rate": 9.745820377383314e-05,
"loss": 3.4811,
"step": 434
},
{
"epoch": 0.2963466440101954,
"grad_norm": 1.5522956848144531,
"learning_rate": 9.74227150151222e-05,
"loss": 3.5842,
"step": 436
},
{
"epoch": 0.29770603228547154,
"grad_norm": 1.2024612426757812,
"learning_rate": 9.738698677509632e-05,
"loss": 3.5591,
"step": 438
},
{
"epoch": 0.29906542056074764,
"grad_norm": 1.2312219142913818,
"learning_rate": 9.735101923418054e-05,
"loss": 3.5347,
"step": 440
},
{
"epoch": 0.3004248088360238,
"grad_norm": 1.215908169746399,
"learning_rate": 9.731481257400838e-05,
"loss": 3.5233,
"step": 442
},
{
"epoch": 0.3017841971112999,
"grad_norm": 1.025688886642456,
"learning_rate": 9.727836697742086e-05,
"loss": 3.4792,
"step": 444
},
{
"epoch": 0.30314358538657604,
"grad_norm": 0.8059235215187073,
"learning_rate": 9.724168262846566e-05,
"loss": 3.4884,
"step": 446
},
{
"epoch": 0.3045029736618522,
"grad_norm": 1.061319351196289,
"learning_rate": 9.720475971239609e-05,
"loss": 3.5429,
"step": 448
},
{
"epoch": 0.3058623619371283,
"grad_norm": 1.2992972135543823,
"learning_rate": 9.716759841567025e-05,
"loss": 3.4973,
"step": 450
},
{
"epoch": 0.30722175021240444,
"grad_norm": 1.206768274307251,
"learning_rate": 9.713019892595003e-05,
"loss": 3.5063,
"step": 452
},
{
"epoch": 0.30858113848768054,
"grad_norm": 0.9437035918235779,
"learning_rate": 9.709256143210015e-05,
"loss": 3.4601,
"step": 454
},
{
"epoch": 0.3099405267629567,
"grad_norm": 1.4230656623840332,
"learning_rate": 9.705468612418727e-05,
"loss": 3.5248,
"step": 456
},
{
"epoch": 0.3112999150382328,
"grad_norm": 1.3039008378982544,
"learning_rate": 9.701657319347902e-05,
"loss": 3.3868,
"step": 458
},
{
"epoch": 0.31265930331350894,
"grad_norm": 1.0053726434707642,
"learning_rate": 9.69782228324429e-05,
"loss": 3.4689,
"step": 460
},
{
"epoch": 0.31401869158878504,
"grad_norm": 1.0519505739212036,
"learning_rate": 9.693963523474554e-05,
"loss": 3.5104,
"step": 462
},
{
"epoch": 0.3153780798640612,
"grad_norm": 1.2941850423812866,
"learning_rate": 9.690081059525154e-05,
"loss": 3.4889,
"step": 464
},
{
"epoch": 0.3167374681393373,
"grad_norm": 1.2811554670333862,
"learning_rate": 9.686174911002253e-05,
"loss": 3.5412,
"step": 466
},
{
"epoch": 0.31809685641461344,
"grad_norm": 1.0608913898468018,
"learning_rate": 9.682245097631622e-05,
"loss": 3.5063,
"step": 468
},
{
"epoch": 0.31945624468988953,
"grad_norm": 0.7853614687919617,
"learning_rate": 9.678291639258537e-05,
"loss": 3.5436,
"step": 470
},
{
"epoch": 0.3208156329651657,
"grad_norm": 1.3205409049987793,
"learning_rate": 9.674314555847682e-05,
"loss": 3.5236,
"step": 472
},
{
"epoch": 0.3221750212404418,
"grad_norm": 1.1064730882644653,
"learning_rate": 9.670313867483041e-05,
"loss": 3.4756,
"step": 474
},
{
"epoch": 0.32353440951571794,
"grad_norm": 0.9121582508087158,
"learning_rate": 9.666289594367803e-05,
"loss": 3.5151,
"step": 476
},
{
"epoch": 0.32489379779099403,
"grad_norm": 0.9929459691047668,
"learning_rate": 9.662241756824261e-05,
"loss": 3.4621,
"step": 478
},
{
"epoch": 0.3262531860662702,
"grad_norm": 1.1451283693313599,
"learning_rate": 9.658170375293703e-05,
"loss": 3.4464,
"step": 480
},
{
"epoch": 0.3276125743415463,
"grad_norm": 1.161496639251709,
"learning_rate": 9.654075470336317e-05,
"loss": 3.4189,
"step": 482
},
{
"epoch": 0.32897196261682243,
"grad_norm": 1.0276612043380737,
"learning_rate": 9.649957062631078e-05,
"loss": 3.4705,
"step": 484
},
{
"epoch": 0.33033135089209853,
"grad_norm": 1.061078667640686,
"learning_rate": 9.645815172975649e-05,
"loss": 3.4573,
"step": 486
},
{
"epoch": 0.3316907391673747,
"grad_norm": 1.14658522605896,
"learning_rate": 9.641649822286278e-05,
"loss": 3.4265,
"step": 488
},
{
"epoch": 0.33305012744265083,
"grad_norm": 1.1178537607192993,
"learning_rate": 9.637461031597686e-05,
"loss": 3.4919,
"step": 490
},
{
"epoch": 0.33440951571792693,
"grad_norm": 1.1290613412857056,
"learning_rate": 9.633248822062968e-05,
"loss": 3.5057,
"step": 492
},
{
"epoch": 0.3357689039932031,
"grad_norm": 0.9476689696311951,
"learning_rate": 9.629013214953478e-05,
"loss": 3.4467,
"step": 494
},
{
"epoch": 0.3371282922684792,
"grad_norm": 1.0658568143844604,
"learning_rate": 9.624754231658731e-05,
"loss": 3.4675,
"step": 496
},
{
"epoch": 0.33848768054375533,
"grad_norm": 0.9237107634544373,
"learning_rate": 9.620471893686287e-05,
"loss": 3.5161,
"step": 498
},
{
"epoch": 0.33984706881903143,
"grad_norm": 1.1166672706604004,
"learning_rate": 9.616166222661646e-05,
"loss": 3.4474,
"step": 500
},
{
"epoch": 0.3412064570943076,
"grad_norm": 1.3173065185546875,
"learning_rate": 9.611837240328138e-05,
"loss": 3.4247,
"step": 502
},
{
"epoch": 0.3425658453695837,
"grad_norm": 0.8932580947875977,
"learning_rate": 9.607484968546813e-05,
"loss": 3.425,
"step": 504
},
{
"epoch": 0.34392523364485983,
"grad_norm": 1.3925460577011108,
"learning_rate": 9.603109429296333e-05,
"loss": 3.4579,
"step": 506
},
{
"epoch": 0.3452846219201359,
"grad_norm": 1.265743374824524,
"learning_rate": 9.598710644672859e-05,
"loss": 3.584,
"step": 508
},
{
"epoch": 0.3466440101954121,
"grad_norm": 1.1315666437149048,
"learning_rate": 9.594288636889936e-05,
"loss": 3.3599,
"step": 510
},
{
"epoch": 0.3480033984706882,
"grad_norm": 1.1435672044754028,
"learning_rate": 9.589843428278388e-05,
"loss": 3.4324,
"step": 512
},
{
"epoch": 0.3493627867459643,
"grad_norm": 1.0228195190429688,
"learning_rate": 9.5853750412862e-05,
"loss": 3.3526,
"step": 514
},
{
"epoch": 0.3507221750212404,
"grad_norm": 0.9447354674339294,
"learning_rate": 9.580883498478406e-05,
"loss": 3.4243,
"step": 516
},
{
"epoch": 0.3520815632965166,
"grad_norm": 0.9552397131919861,
"learning_rate": 9.576368822536976e-05,
"loss": 3.3066,
"step": 518
},
{
"epoch": 0.35344095157179267,
"grad_norm": 1.1417309045791626,
"learning_rate": 9.571831036260699e-05,
"loss": 3.3925,
"step": 520
},
{
"epoch": 0.3548003398470688,
"grad_norm": 1.0355608463287354,
"learning_rate": 9.567270162565073e-05,
"loss": 3.4224,
"step": 522
},
{
"epoch": 0.3561597281223449,
"grad_norm": 0.9213873744010925,
"learning_rate": 9.562686224482182e-05,
"loss": 3.4091,
"step": 524
},
{
"epoch": 0.3575191163976211,
"grad_norm": 0.97687828540802,
"learning_rate": 9.558079245160584e-05,
"loss": 3.3404,
"step": 526
},
{
"epoch": 0.35887850467289717,
"grad_norm": 1.0606272220611572,
"learning_rate": 9.553449247865199e-05,
"loss": 3.4489,
"step": 528
},
{
"epoch": 0.3602378929481733,
"grad_norm": 1.0698935985565186,
"learning_rate": 9.548796255977175e-05,
"loss": 3.4235,
"step": 530
},
{
"epoch": 0.3615972812234495,
"grad_norm": 1.0898542404174805,
"learning_rate": 9.544120292993795e-05,
"loss": 3.4447,
"step": 532
},
{
"epoch": 0.36295666949872557,
"grad_norm": 1.1248620748519897,
"learning_rate": 9.539421382528331e-05,
"loss": 3.4496,
"step": 534
},
{
"epoch": 0.3643160577740017,
"grad_norm": 1.1177901029586792,
"learning_rate": 9.534699548309948e-05,
"loss": 3.3291,
"step": 536
},
{
"epoch": 0.3656754460492778,
"grad_norm": 0.8826277256011963,
"learning_rate": 9.529954814183572e-05,
"loss": 3.4179,
"step": 538
},
{
"epoch": 0.36703483432455397,
"grad_norm": 0.8299089074134827,
"learning_rate": 9.525187204109767e-05,
"loss": 3.2932,
"step": 540
},
{
"epoch": 0.36839422259983007,
"grad_norm": 1.1234545707702637,
"learning_rate": 9.520396742164624e-05,
"loss": 3.3519,
"step": 542
},
{
"epoch": 0.3697536108751062,
"grad_norm": 0.8834955096244812,
"learning_rate": 9.515583452539633e-05,
"loss": 3.3694,
"step": 544
},
{
"epoch": 0.3711129991503823,
"grad_norm": 1.1295942068099976,
"learning_rate": 9.510747359541562e-05,
"loss": 3.4234,
"step": 546
},
{
"epoch": 0.37247238742565847,
"grad_norm": 1.1070072650909424,
"learning_rate": 9.505888487592333e-05,
"loss": 3.2804,
"step": 548
},
{
"epoch": 0.37383177570093457,
"grad_norm": 0.8992867469787598,
"learning_rate": 9.501006861228903e-05,
"loss": 3.3935,
"step": 550
},
{
"epoch": 0.3751911639762107,
"grad_norm": 0.9362753033638,
"learning_rate": 9.496102505103135e-05,
"loss": 3.4367,
"step": 552
},
{
"epoch": 0.3765505522514868,
"grad_norm": 0.997207760810852,
"learning_rate": 9.491175443981677e-05,
"loss": 3.3499,
"step": 554
},
{
"epoch": 0.37790994052676297,
"grad_norm": 0.9916525483131409,
"learning_rate": 9.486225702745833e-05,
"loss": 3.3746,
"step": 556
},
{
"epoch": 0.37926932880203906,
"grad_norm": 0.9965975284576416,
"learning_rate": 9.481253306391445e-05,
"loss": 3.2899,
"step": 558
},
{
"epoch": 0.3806287170773152,
"grad_norm": 0.9485024213790894,
"learning_rate": 9.476258280028753e-05,
"loss": 3.2999,
"step": 560
},
{
"epoch": 0.3819881053525913,
"grad_norm": 0.9940890669822693,
"learning_rate": 9.471240648882288e-05,
"loss": 3.2989,
"step": 562
},
{
"epoch": 0.38334749362786746,
"grad_norm": 0.8807222843170166,
"learning_rate": 9.466200438290724e-05,
"loss": 3.3442,
"step": 564
},
{
"epoch": 0.38470688190314356,
"grad_norm": 0.923343300819397,
"learning_rate": 9.461137673706768e-05,
"loss": 3.3407,
"step": 566
},
{
"epoch": 0.3860662701784197,
"grad_norm": 1.0074143409729004,
"learning_rate": 9.456052380697015e-05,
"loss": 3.3164,
"step": 568
},
{
"epoch": 0.3874256584536958,
"grad_norm": 0.9250668883323669,
"learning_rate": 9.450944584941831e-05,
"loss": 3.3366,
"step": 570
},
{
"epoch": 0.38878504672897196,
"grad_norm": 0.9320377707481384,
"learning_rate": 9.44581431223522e-05,
"loss": 3.2337,
"step": 572
},
{
"epoch": 0.3901444350042481,
"grad_norm": 0.8707028031349182,
"learning_rate": 9.440661588484691e-05,
"loss": 3.3565,
"step": 574
},
{
"epoch": 0.3915038232795242,
"grad_norm": 0.9949326515197754,
"learning_rate": 9.43548643971113e-05,
"loss": 3.3029,
"step": 576
},
{
"epoch": 0.39286321155480036,
"grad_norm": 0.7374237775802612,
"learning_rate": 9.430288892048666e-05,
"loss": 3.3016,
"step": 578
},
{
"epoch": 0.39422259983007646,
"grad_norm": 1.2260855436325073,
"learning_rate": 9.425068971744547e-05,
"loss": 3.4076,
"step": 580
},
{
"epoch": 0.3955819881053526,
"grad_norm": 1.042262315750122,
"learning_rate": 9.419826705158994e-05,
"loss": 3.4558,
"step": 582
},
{
"epoch": 0.3969413763806287,
"grad_norm": 0.9673371911048889,
"learning_rate": 9.414562118765077e-05,
"loss": 3.3727,
"step": 584
},
{
"epoch": 0.39830076465590486,
"grad_norm": 0.8633317947387695,
"learning_rate": 9.40927523914858e-05,
"loss": 3.3029,
"step": 586
},
{
"epoch": 0.39966015293118096,
"grad_norm": 1.0391029119491577,
"learning_rate": 9.40396609300787e-05,
"loss": 3.3455,
"step": 588
},
{
"epoch": 0.4010195412064571,
"grad_norm": 0.9045621752738953,
"learning_rate": 9.398634707153752e-05,
"loss": 3.365,
"step": 590
},
{
"epoch": 0.4023789294817332,
"grad_norm": 1.0436588525772095,
"learning_rate": 9.393281108509342e-05,
"loss": 3.3427,
"step": 592
},
{
"epoch": 0.40373831775700936,
"grad_norm": 0.9874547719955444,
"learning_rate": 9.387905324109934e-05,
"loss": 3.2496,
"step": 594
},
{
"epoch": 0.40509770603228545,
"grad_norm": 0.7234767079353333,
"learning_rate": 9.382507381102849e-05,
"loss": 3.2806,
"step": 596
},
{
"epoch": 0.4064570943075616,
"grad_norm": 0.7901805639266968,
"learning_rate": 9.377087306747315e-05,
"loss": 3.2479,
"step": 598
},
{
"epoch": 0.4078164825828377,
"grad_norm": 0.8309746980667114,
"learning_rate": 9.37164512841432e-05,
"loss": 3.2532,
"step": 600
},
{
"epoch": 0.40917587085811385,
"grad_norm": 0.7645075917243958,
"learning_rate": 9.366180873586475e-05,
"loss": 3.3121,
"step": 602
},
{
"epoch": 0.41053525913338995,
"grad_norm": 0.8399624824523926,
"learning_rate": 9.360694569857873e-05,
"loss": 3.2899,
"step": 604
},
{
"epoch": 0.4118946474086661,
"grad_norm": 0.7422559857368469,
"learning_rate": 9.355186244933959e-05,
"loss": 3.3512,
"step": 606
},
{
"epoch": 0.4132540356839422,
"grad_norm": 0.8472399115562439,
"learning_rate": 9.349655926631375e-05,
"loss": 3.3467,
"step": 608
},
{
"epoch": 0.41461342395921835,
"grad_norm": 0.745278000831604,
"learning_rate": 9.344103642877837e-05,
"loss": 3.2806,
"step": 610
},
{
"epoch": 0.4159728122344945,
"grad_norm": 0.9684063196182251,
"learning_rate": 9.338529421711977e-05,
"loss": 3.2831,
"step": 612
},
{
"epoch": 0.4173322005097706,
"grad_norm": 0.8448832631111145,
"learning_rate": 9.332933291283215e-05,
"loss": 3.2381,
"step": 614
},
{
"epoch": 0.41869158878504675,
"grad_norm": 0.9104022979736328,
"learning_rate": 9.327315279851605e-05,
"loss": 3.3765,
"step": 616
},
{
"epoch": 0.42005097706032285,
"grad_norm": 0.9372128248214722,
"learning_rate": 9.321675415787707e-05,
"loss": 3.3079,
"step": 618
},
{
"epoch": 0.421410365335599,
"grad_norm": 0.8389849066734314,
"learning_rate": 9.316013727572429e-05,
"loss": 3.3161,
"step": 620
},
{
"epoch": 0.4227697536108751,
"grad_norm": 0.982002854347229,
"learning_rate": 9.31033024379689e-05,
"loss": 3.2549,
"step": 622
},
{
"epoch": 0.42412914188615125,
"grad_norm": 0.9176488518714905,
"learning_rate": 9.304624993162276e-05,
"loss": 3.2436,
"step": 624
},
{
"epoch": 0.42548853016142735,
"grad_norm": 0.8668674826622009,
"learning_rate": 9.298898004479697e-05,
"loss": 3.2616,
"step": 626
},
{
"epoch": 0.4268479184367035,
"grad_norm": 0.9916755557060242,
"learning_rate": 9.293149306670032e-05,
"loss": 3.2799,
"step": 628
},
{
"epoch": 0.4282073067119796,
"grad_norm": 0.843906819820404,
"learning_rate": 9.287378928763798e-05,
"loss": 3.2975,
"step": 630
},
{
"epoch": 0.42956669498725575,
"grad_norm": 0.9572488069534302,
"learning_rate": 9.281586899900985e-05,
"loss": 3.2525,
"step": 632
},
{
"epoch": 0.43092608326253184,
"grad_norm": 0.9162222743034363,
"learning_rate": 9.275773249330927e-05,
"loss": 3.3031,
"step": 634
},
{
"epoch": 0.432285471537808,
"grad_norm": 1.137613296508789,
"learning_rate": 9.269938006412142e-05,
"loss": 3.2803,
"step": 636
},
{
"epoch": 0.4336448598130841,
"grad_norm": 0.9166907072067261,
"learning_rate": 9.26408120061219e-05,
"loss": 3.3167,
"step": 638
},
{
"epoch": 0.43500424808836025,
"grad_norm": 0.8309205174446106,
"learning_rate": 9.258202861507518e-05,
"loss": 3.2414,
"step": 640
},
{
"epoch": 0.43636363636363634,
"grad_norm": 0.9707709550857544,
"learning_rate": 9.252303018783324e-05,
"loss": 3.3043,
"step": 642
},
{
"epoch": 0.4377230246389125,
"grad_norm": 0.7566142678260803,
"learning_rate": 9.246381702233385e-05,
"loss": 3.2545,
"step": 644
},
{
"epoch": 0.4390824129141886,
"grad_norm": 1.1095722913742065,
"learning_rate": 9.240438941759926e-05,
"loss": 3.3787,
"step": 646
},
{
"epoch": 0.44044180118946474,
"grad_norm": 0.8704729080200195,
"learning_rate": 9.234474767373465e-05,
"loss": 3.2712,
"step": 648
},
{
"epoch": 0.44180118946474084,
"grad_norm": 0.7083353400230408,
"learning_rate": 9.228489209192652e-05,
"loss": 3.3055,
"step": 650
},
{
"epoch": 0.443160577740017,
"grad_norm": 0.8583936095237732,
"learning_rate": 9.222482297444131e-05,
"loss": 3.2406,
"step": 652
},
{
"epoch": 0.44451996601529314,
"grad_norm": 0.9450501203536987,
"learning_rate": 9.216454062462374e-05,
"loss": 3.2526,
"step": 654
},
{
"epoch": 0.44587935429056924,
"grad_norm": 0.7373863458633423,
"learning_rate": 9.210404534689536e-05,
"loss": 3.2554,
"step": 656
},
{
"epoch": 0.4472387425658454,
"grad_norm": 1.0712296962738037,
"learning_rate": 9.2043337446753e-05,
"loss": 3.3109,
"step": 658
},
{
"epoch": 0.4485981308411215,
"grad_norm": 0.8983728885650635,
"learning_rate": 9.198241723076719e-05,
"loss": 3.2369,
"step": 660
},
{
"epoch": 0.44995751911639764,
"grad_norm": 0.8862040042877197,
"learning_rate": 9.192128500658068e-05,
"loss": 3.2532,
"step": 662
},
{
"epoch": 0.45131690739167374,
"grad_norm": 0.8523765206336975,
"learning_rate": 9.185994108290682e-05,
"loss": 3.2559,
"step": 664
},
{
"epoch": 0.4526762956669499,
"grad_norm": 0.9425597190856934,
"learning_rate": 9.179838576952802e-05,
"loss": 3.2814,
"step": 666
},
{
"epoch": 0.454035683942226,
"grad_norm": 0.7966018915176392,
"learning_rate": 9.173661937729421e-05,
"loss": 3.2376,
"step": 668
},
{
"epoch": 0.45539507221750214,
"grad_norm": 0.7609128952026367,
"learning_rate": 9.167464221812126e-05,
"loss": 3.1981,
"step": 670
},
{
"epoch": 0.45675446049277824,
"grad_norm": 0.7252809405326843,
"learning_rate": 9.161245460498936e-05,
"loss": 3.3001,
"step": 672
},
{
"epoch": 0.4581138487680544,
"grad_norm": 0.5679906606674194,
"learning_rate": 9.155005685194152e-05,
"loss": 3.2215,
"step": 674
},
{
"epoch": 0.4594732370433305,
"grad_norm": 0.8006565570831299,
"learning_rate": 9.148744927408193e-05,
"loss": 3.2696,
"step": 676
},
{
"epoch": 0.46083262531860664,
"grad_norm": 0.9909971952438354,
"learning_rate": 9.142463218757437e-05,
"loss": 3.3098,
"step": 678
},
{
"epoch": 0.46219201359388273,
"grad_norm": 0.729555606842041,
"learning_rate": 9.136160590964063e-05,
"loss": 3.188,
"step": 680
},
{
"epoch": 0.4635514018691589,
"grad_norm": 0.9540812969207764,
"learning_rate": 9.129837075855887e-05,
"loss": 3.2464,
"step": 682
},
{
"epoch": 0.464910790144435,
"grad_norm": 0.9037480354309082,
"learning_rate": 9.123492705366212e-05,
"loss": 3.3134,
"step": 684
},
{
"epoch": 0.46627017841971113,
"grad_norm": 0.7540014386177063,
"learning_rate": 9.117127511533654e-05,
"loss": 3.2704,
"step": 686
},
{
"epoch": 0.46762956669498723,
"grad_norm": 0.7283887267112732,
"learning_rate": 9.110741526501982e-05,
"loss": 3.147,
"step": 688
},
{
"epoch": 0.4689889549702634,
"grad_norm": 0.7288716435432434,
"learning_rate": 9.104334782519969e-05,
"loss": 3.2257,
"step": 690
},
{
"epoch": 0.4703483432455395,
"grad_norm": 0.8213950991630554,
"learning_rate": 9.097907311941208e-05,
"loss": 3.1973,
"step": 692
},
{
"epoch": 0.47170773152081563,
"grad_norm": 0.773909866809845,
"learning_rate": 9.091459147223968e-05,
"loss": 3.2165,
"step": 694
},
{
"epoch": 0.4730671197960918,
"grad_norm": 0.9411752820014954,
"learning_rate": 9.08499032093102e-05,
"loss": 3.2191,
"step": 696
},
{
"epoch": 0.4744265080713679,
"grad_norm": 0.8191618919372559,
"learning_rate": 9.078500865729471e-05,
"loss": 3.1823,
"step": 698
},
{
"epoch": 0.47578589634664403,
"grad_norm": 0.7489372491836548,
"learning_rate": 9.071990814390606e-05,
"loss": 3.1833,
"step": 700
},
{
"epoch": 0.47714528462192013,
"grad_norm": 0.7682393193244934,
"learning_rate": 9.065460199789719e-05,
"loss": 3.2761,
"step": 702
},
{
"epoch": 0.4785046728971963,
"grad_norm": 0.9797720313072205,
"learning_rate": 9.058909054905946e-05,
"loss": 3.2743,
"step": 704
},
{
"epoch": 0.4798640611724724,
"grad_norm": 0.9919414520263672,
"learning_rate": 9.052337412822096e-05,
"loss": 3.2964,
"step": 706
},
{
"epoch": 0.48122344944774853,
"grad_norm": 0.8961633443832397,
"learning_rate": 9.045745306724495e-05,
"loss": 3.2386,
"step": 708
},
{
"epoch": 0.4825828377230246,
"grad_norm": 0.78994220495224,
"learning_rate": 9.039132769902808e-05,
"loss": 3.3232,
"step": 710
},
{
"epoch": 0.4839422259983008,
"grad_norm": 0.7165075540542603,
"learning_rate": 9.032499835749868e-05,
"loss": 3.1898,
"step": 712
},
{
"epoch": 0.4853016142735769,
"grad_norm": 0.6712597608566284,
"learning_rate": 9.02584653776152e-05,
"loss": 3.2442,
"step": 714
},
{
"epoch": 0.48666100254885303,
"grad_norm": 0.7340356707572937,
"learning_rate": 9.019172909536441e-05,
"loss": 3.2368,
"step": 716
},
{
"epoch": 0.4880203908241291,
"grad_norm": 0.9872474074363708,
"learning_rate": 9.012478984775977e-05,
"loss": 3.2627,
"step": 718
},
{
"epoch": 0.4893797790994053,
"grad_norm": 0.8943246006965637,
"learning_rate": 9.005764797283969e-05,
"loss": 3.1771,
"step": 720
},
{
"epoch": 0.4907391673746814,
"grad_norm": 0.8461526036262512,
"learning_rate": 8.99903038096658e-05,
"loss": 3.2448,
"step": 722
},
{
"epoch": 0.4920985556499575,
"grad_norm": 0.9530143737792969,
"learning_rate": 8.992275769832135e-05,
"loss": 3.1837,
"step": 724
},
{
"epoch": 0.4934579439252336,
"grad_norm": 0.8631094098091125,
"learning_rate": 8.985500997990933e-05,
"loss": 3.2388,
"step": 726
},
{
"epoch": 0.4948173322005098,
"grad_norm": 0.7376362681388855,
"learning_rate": 8.978706099655088e-05,
"loss": 3.2585,
"step": 728
},
{
"epoch": 0.49617672047578587,
"grad_norm": 0.7359983325004578,
"learning_rate": 8.97189110913835e-05,
"loss": 3.2278,
"step": 730
},
{
"epoch": 0.497536108751062,
"grad_norm": 0.8055249452590942,
"learning_rate": 8.965056060855931e-05,
"loss": 3.2128,
"step": 732
},
{
"epoch": 0.4988954970263382,
"grad_norm": 0.8565118908882141,
"learning_rate": 8.95820098932434e-05,
"loss": 3.2337,
"step": 734
},
{
"epoch": 0.5002548853016143,
"grad_norm": 0.7358447313308716,
"learning_rate": 8.951325929161191e-05,
"loss": 3.2258,
"step": 736
},
{
"epoch": 0.5016142735768904,
"grad_norm": 0.8792275786399841,
"learning_rate": 8.944430915085051e-05,
"loss": 3.2875,
"step": 738
},
{
"epoch": 0.5029736618521665,
"grad_norm": 0.9467582106590271,
"learning_rate": 8.937515981915245e-05,
"loss": 3.2911,
"step": 740
},
{
"epoch": 0.5043330501274427,
"grad_norm": 0.7510040998458862,
"learning_rate": 8.930581164571692e-05,
"loss": 3.2342,
"step": 742
},
{
"epoch": 0.5056924384027188,
"grad_norm": 0.9511517882347107,
"learning_rate": 8.92362649807472e-05,
"loss": 3.2036,
"step": 744
},
{
"epoch": 0.5070518266779949,
"grad_norm": 0.7870407104492188,
"learning_rate": 8.916652017544899e-05,
"loss": 3.2493,
"step": 746
},
{
"epoch": 0.508411214953271,
"grad_norm": 0.8817398548126221,
"learning_rate": 8.909657758202857e-05,
"loss": 3.1132,
"step": 748
},
{
"epoch": 0.5097706032285472,
"grad_norm": 0.8300465941429138,
"learning_rate": 8.9026437553691e-05,
"loss": 3.2223,
"step": 750
},
{
"epoch": 0.5111299915038233,
"grad_norm": 0.7648670077323914,
"learning_rate": 8.89561004446384e-05,
"loss": 3.2196,
"step": 752
},
{
"epoch": 0.5124893797790994,
"grad_norm": 0.6600602865219116,
"learning_rate": 8.888556661006818e-05,
"loss": 3.1552,
"step": 754
},
{
"epoch": 0.5138487680543755,
"grad_norm": 0.7653377652168274,
"learning_rate": 8.881483640617112e-05,
"loss": 3.1453,
"step": 756
},
{
"epoch": 0.5152081563296517,
"grad_norm": 0.793511688709259,
"learning_rate": 8.874391019012967e-05,
"loss": 3.1663,
"step": 758
},
{
"epoch": 0.5165675446049278,
"grad_norm": 0.6547530889511108,
"learning_rate": 8.867278832011617e-05,
"loss": 3.212,
"step": 760
},
{
"epoch": 0.5179269328802039,
"grad_norm": 0.7647628784179688,
"learning_rate": 8.860147115529096e-05,
"loss": 3.1953,
"step": 762
},
{
"epoch": 0.51928632115548,
"grad_norm": 0.6164153218269348,
"learning_rate": 8.852995905580063e-05,
"loss": 3.1791,
"step": 764
},
{
"epoch": 0.5206457094307562,
"grad_norm": 0.7801313996315002,
"learning_rate": 8.845825238277614e-05,
"loss": 3.1899,
"step": 766
},
{
"epoch": 0.5220050977060323,
"grad_norm": 0.8477320075035095,
"learning_rate": 8.838635149833106e-05,
"loss": 3.2452,
"step": 768
},
{
"epoch": 0.5233644859813084,
"grad_norm": 0.69857257604599,
"learning_rate": 8.831425676555971e-05,
"loss": 3.218,
"step": 770
},
{
"epoch": 0.5247238742565845,
"grad_norm": 0.8784200549125671,
"learning_rate": 8.824196854853533e-05,
"loss": 3.1743,
"step": 772
},
{
"epoch": 0.5260832625318607,
"grad_norm": 0.7521477937698364,
"learning_rate": 8.816948721230822e-05,
"loss": 3.204,
"step": 774
},
{
"epoch": 0.5274426508071368,
"grad_norm": 0.7114652395248413,
"learning_rate": 8.809681312290397e-05,
"loss": 3.1783,
"step": 776
},
{
"epoch": 0.528802039082413,
"grad_norm": 0.7126603126525879,
"learning_rate": 8.802394664732152e-05,
"loss": 3.1708,
"step": 778
},
{
"epoch": 0.530161427357689,
"grad_norm": 0.7315563559532166,
"learning_rate": 8.795088815353135e-05,
"loss": 3.1273,
"step": 780
},
{
"epoch": 0.5315208156329652,
"grad_norm": 0.7203145623207092,
"learning_rate": 8.78776380104736e-05,
"loss": 3.1287,
"step": 782
},
{
"epoch": 0.5328802039082413,
"grad_norm": 0.7698382139205933,
"learning_rate": 8.780419658805625e-05,
"loss": 3.1671,
"step": 784
},
{
"epoch": 0.5342395921835175,
"grad_norm": 0.7925199866294861,
"learning_rate": 8.773056425715326e-05,
"loss": 3.089,
"step": 786
},
{
"epoch": 0.5355989804587935,
"grad_norm": 0.6636077761650085,
"learning_rate": 8.765674138960261e-05,
"loss": 3.2514,
"step": 788
},
{
"epoch": 0.5369583687340697,
"grad_norm": 0.785798966884613,
"learning_rate": 8.758272835820451e-05,
"loss": 3.2389,
"step": 790
},
{
"epoch": 0.5383177570093458,
"grad_norm": 0.7094962000846863,
"learning_rate": 8.750852553671942e-05,
"loss": 3.2141,
"step": 792
},
{
"epoch": 0.539677145284622,
"grad_norm": 0.7799323797225952,
"learning_rate": 8.743413329986632e-05,
"loss": 3.1314,
"step": 794
},
{
"epoch": 0.541036533559898,
"grad_norm": 0.6692888736724854,
"learning_rate": 8.735955202332065e-05,
"loss": 3.1683,
"step": 796
},
{
"epoch": 0.5423959218351742,
"grad_norm": 0.7615490555763245,
"learning_rate": 8.728478208371256e-05,
"loss": 3.2109,
"step": 798
},
{
"epoch": 0.5437553101104503,
"grad_norm": 0.7399694919586182,
"learning_rate": 8.720982385862483e-05,
"loss": 3.1026,
"step": 800
},
{
"epoch": 0.5451146983857265,
"grad_norm": 0.6580244898796082,
"learning_rate": 8.713467772659118e-05,
"loss": 3.1839,
"step": 802
},
{
"epoch": 0.5464740866610025,
"grad_norm": 0.7819783091545105,
"learning_rate": 8.705934406709417e-05,
"loss": 3.132,
"step": 804
},
{
"epoch": 0.5478334749362787,
"grad_norm": 0.7655234336853027,
"learning_rate": 8.69838232605634e-05,
"loss": 3.1175,
"step": 806
},
{
"epoch": 0.5491928632115548,
"grad_norm": 0.7018226385116577,
"learning_rate": 8.690811568837352e-05,
"loss": 3.0879,
"step": 808
},
{
"epoch": 0.550552251486831,
"grad_norm": 0.5926535129547119,
"learning_rate": 8.683222173284236e-05,
"loss": 3.1408,
"step": 810
},
{
"epoch": 0.551911639762107,
"grad_norm": 0.7037495970726013,
"learning_rate": 8.675614177722895e-05,
"loss": 3.1749,
"step": 812
},
{
"epoch": 0.5532710280373832,
"grad_norm": 0.7136229872703552,
"learning_rate": 8.667987620573163e-05,
"loss": 3.1631,
"step": 814
},
{
"epoch": 0.5546304163126593,
"grad_norm": 0.7994447946548462,
"learning_rate": 8.660342540348606e-05,
"loss": 3.149,
"step": 816
},
{
"epoch": 0.5559898045879355,
"grad_norm": 0.5606616735458374,
"learning_rate": 8.652678975656335e-05,
"loss": 3.1668,
"step": 818
},
{
"epoch": 0.5573491928632116,
"grad_norm": 0.8005346059799194,
"learning_rate": 8.644996965196797e-05,
"loss": 3.1792,
"step": 820
},
{
"epoch": 0.5587085811384876,
"grad_norm": 0.6956243515014648,
"learning_rate": 8.637296547763601e-05,
"loss": 3.1572,
"step": 822
},
{
"epoch": 0.5600679694137638,
"grad_norm": 0.7457271218299866,
"learning_rate": 8.6295777622433e-05,
"loss": 3.2719,
"step": 824
},
{
"epoch": 0.56142735768904,
"grad_norm": 0.7460895776748657,
"learning_rate": 8.621840647615207e-05,
"loss": 3.1218,
"step": 826
},
{
"epoch": 0.5627867459643161,
"grad_norm": 0.7265388369560242,
"learning_rate": 8.614085242951201e-05,
"loss": 3.1595,
"step": 828
},
{
"epoch": 0.5641461342395921,
"grad_norm": 0.7856431007385254,
"learning_rate": 8.606311587415518e-05,
"loss": 3.16,
"step": 830
},
{
"epoch": 0.5655055225148683,
"grad_norm": 0.7534942030906677,
"learning_rate": 8.598519720264562e-05,
"loss": 3.1308,
"step": 832
},
{
"epoch": 0.5668649107901444,
"grad_norm": 0.6927157640457153,
"learning_rate": 8.590709680846702e-05,
"loss": 3.1319,
"step": 834
},
{
"epoch": 0.5682242990654206,
"grad_norm": 0.7308480739593506,
"learning_rate": 8.58288150860208e-05,
"loss": 3.2368,
"step": 836
},
{
"epoch": 0.5695836873406966,
"grad_norm": 0.691619336605072,
"learning_rate": 8.575035243062407e-05,
"loss": 3.0438,
"step": 838
},
{
"epoch": 0.5709430756159728,
"grad_norm": 0.6600204110145569,
"learning_rate": 8.567170923850759e-05,
"loss": 3.1309,
"step": 840
},
{
"epoch": 0.572302463891249,
"grad_norm": 0.6632899641990662,
"learning_rate": 8.559288590681387e-05,
"loss": 3.0793,
"step": 842
},
{
"epoch": 0.5736618521665251,
"grad_norm": 0.7121036052703857,
"learning_rate": 8.551388283359507e-05,
"loss": 3.1492,
"step": 844
},
{
"epoch": 0.5750212404418011,
"grad_norm": 0.9197178483009338,
"learning_rate": 8.543470041781107e-05,
"loss": 3.3029,
"step": 846
},
{
"epoch": 0.5763806287170773,
"grad_norm": 0.6388615369796753,
"learning_rate": 8.535533905932738e-05,
"loss": 3.169,
"step": 848
},
{
"epoch": 0.5777400169923534,
"grad_norm": 0.687680184841156,
"learning_rate": 8.527579915891321e-05,
"loss": 3.074,
"step": 850
},
{
"epoch": 0.5790994052676296,
"grad_norm": 0.6250393390655518,
"learning_rate": 8.519608111823931e-05,
"loss": 3.1493,
"step": 852
},
{
"epoch": 0.5804587935429056,
"grad_norm": 0.6668142676353455,
"learning_rate": 8.511618533987613e-05,
"loss": 3.236,
"step": 854
},
{
"epoch": 0.5818181818181818,
"grad_norm": 0.7983154058456421,
"learning_rate": 8.50361122272916e-05,
"loss": 3.1506,
"step": 856
},
{
"epoch": 0.5831775700934579,
"grad_norm": 0.8851556181907654,
"learning_rate": 8.495586218484923e-05,
"loss": 3.1296,
"step": 858
},
{
"epoch": 0.5845369583687341,
"grad_norm": 0.8044039607048035,
"learning_rate": 8.487543561780595e-05,
"loss": 3.1169,
"step": 860
},
{
"epoch": 0.5858963466440102,
"grad_norm": 0.8117210865020752,
"learning_rate": 8.479483293231019e-05,
"loss": 3.1419,
"step": 862
},
{
"epoch": 0.5872557349192863,
"grad_norm": 0.7853211164474487,
"learning_rate": 8.471405453539974e-05,
"loss": 3.1049,
"step": 864
},
{
"epoch": 0.5886151231945624,
"grad_norm": 0.7936228513717651,
"learning_rate": 8.463310083499971e-05,
"loss": 3.2174,
"step": 866
},
{
"epoch": 0.5899745114698386,
"grad_norm": 0.69145268201828,
"learning_rate": 8.45519722399205e-05,
"loss": 3.1685,
"step": 868
},
{
"epoch": 0.5913338997451147,
"grad_norm": 0.8211259841918945,
"learning_rate": 8.447066915985568e-05,
"loss": 3.1633,
"step": 870
},
{
"epoch": 0.5926932880203908,
"grad_norm": 0.7517729997634888,
"learning_rate": 8.438919200538003e-05,
"loss": 3.1548,
"step": 872
},
{
"epoch": 0.5940526762956669,
"grad_norm": 0.7419262528419495,
"learning_rate": 8.43075411879473e-05,
"loss": 3.1094,
"step": 874
},
{
"epoch": 0.5954120645709431,
"grad_norm": 0.7104995846748352,
"learning_rate": 8.42257171198883e-05,
"loss": 3.1426,
"step": 876
},
{
"epoch": 0.5967714528462192,
"grad_norm": 0.6559504270553589,
"learning_rate": 8.414372021440868e-05,
"loss": 3.1492,
"step": 878
},
{
"epoch": 0.5981308411214953,
"grad_norm": 0.734095573425293,
"learning_rate": 8.406155088558698e-05,
"loss": 3.1309,
"step": 880
},
{
"epoch": 0.5994902293967714,
"grad_norm": 0.5765071511268616,
"learning_rate": 8.397920954837242e-05,
"loss": 3.1223,
"step": 882
},
{
"epoch": 0.6008496176720476,
"grad_norm": 0.73897385597229,
"learning_rate": 8.389669661858284e-05,
"loss": 3.1529,
"step": 884
},
{
"epoch": 0.6022090059473237,
"grad_norm": 0.6814929842948914,
"learning_rate": 8.381401251290264e-05,
"loss": 3.0561,
"step": 886
},
{
"epoch": 0.6035683942225998,
"grad_norm": 0.730022132396698,
"learning_rate": 8.373115764888062e-05,
"loss": 3.0702,
"step": 888
},
{
"epoch": 0.6049277824978759,
"grad_norm": 0.6274394392967224,
"learning_rate": 8.36481324449279e-05,
"loss": 3.1253,
"step": 890
},
{
"epoch": 0.6062871707731521,
"grad_norm": 0.7467242479324341,
"learning_rate": 8.356493732031586e-05,
"loss": 3.0619,
"step": 892
},
{
"epoch": 0.6076465590484282,
"grad_norm": 0.6906052231788635,
"learning_rate": 8.34815726951739e-05,
"loss": 3.2207,
"step": 894
},
{
"epoch": 0.6090059473237044,
"grad_norm": 0.7023007273674011,
"learning_rate": 8.339803899048737e-05,
"loss": 3.1186,
"step": 896
},
{
"epoch": 0.6103653355989804,
"grad_norm": 0.644953727722168,
"learning_rate": 8.331433662809555e-05,
"loss": 3.1045,
"step": 898
},
{
"epoch": 0.6117247238742566,
"grad_norm": 0.6254658102989197,
"learning_rate": 8.323046603068934e-05,
"loss": 3.0299,
"step": 900
},
{
"epoch": 0.6130841121495327,
"grad_norm": 0.9075002074241638,
"learning_rate": 8.314642762180927e-05,
"loss": 3.1854,
"step": 902
},
{
"epoch": 0.6144435004248089,
"grad_norm": 0.8213093280792236,
"learning_rate": 8.306222182584328e-05,
"loss": 3.2035,
"step": 904
},
{
"epoch": 0.6158028887000849,
"grad_norm": 0.7945600748062134,
"learning_rate": 8.297784906802462e-05,
"loss": 3.1396,
"step": 906
},
{
"epoch": 0.6171622769753611,
"grad_norm": 0.8038851022720337,
"learning_rate": 8.289330977442967e-05,
"loss": 3.1608,
"step": 908
},
{
"epoch": 0.6185216652506372,
"grad_norm": 0.6917005777359009,
"learning_rate": 8.280860437197579e-05,
"loss": 3.1391,
"step": 910
},
{
"epoch": 0.6198810535259134,
"grad_norm": 1.0512206554412842,
"learning_rate": 8.272373328841923e-05,
"loss": 3.1953,
"step": 912
},
{
"epoch": 0.6212404418011894,
"grad_norm": 0.9130175709724426,
"learning_rate": 8.263869695235285e-05,
"loss": 3.1308,
"step": 914
},
{
"epoch": 0.6225998300764656,
"grad_norm": 0.794154942035675,
"learning_rate": 8.255349579320408e-05,
"loss": 3.1505,
"step": 916
},
{
"epoch": 0.6239592183517417,
"grad_norm": 0.7815708518028259,
"learning_rate": 8.246813024123267e-05,
"loss": 3.0952,
"step": 918
},
{
"epoch": 0.6253186066270179,
"grad_norm": 0.7132216691970825,
"learning_rate": 8.238260072752855e-05,
"loss": 3.1855,
"step": 920
},
{
"epoch": 0.6266779949022939,
"grad_norm": 0.7026944756507874,
"learning_rate": 8.229690768400962e-05,
"loss": 3.078,
"step": 922
},
{
"epoch": 0.6280373831775701,
"grad_norm": 0.809191882610321,
"learning_rate": 8.221105154341964e-05,
"loss": 3.1573,
"step": 924
},
{
"epoch": 0.6293967714528462,
"grad_norm": 0.721347987651825,
"learning_rate": 8.212503273932593e-05,
"loss": 3.2011,
"step": 926
},
{
"epoch": 0.6307561597281224,
"grad_norm": 0.7457506060600281,
"learning_rate": 8.203885170611734e-05,
"loss": 3.0995,
"step": 928
},
{
"epoch": 0.6321155480033984,
"grad_norm": 0.6334044933319092,
"learning_rate": 8.195250887900188e-05,
"loss": 3.1587,
"step": 930
},
{
"epoch": 0.6334749362786746,
"grad_norm": 0.5967175960540771,
"learning_rate": 8.186600469400467e-05,
"loss": 3.1125,
"step": 932
},
{
"epoch": 0.6348343245539507,
"grad_norm": 0.6264853477478027,
"learning_rate": 8.177933958796565e-05,
"loss": 3.055,
"step": 934
},
{
"epoch": 0.6361937128292269,
"grad_norm": 0.5743052959442139,
"learning_rate": 8.169251399853737e-05,
"loss": 3.0728,
"step": 936
},
{
"epoch": 0.637553101104503,
"grad_norm": 0.5435692667961121,
"learning_rate": 8.160552836418285e-05,
"loss": 3.0973,
"step": 938
},
{
"epoch": 0.6389124893797791,
"grad_norm": 0.6908643245697021,
"learning_rate": 8.151838312417332e-05,
"loss": 3.0482,
"step": 940
},
{
"epoch": 0.6402718776550552,
"grad_norm": 0.598528265953064,
"learning_rate": 8.143107871858603e-05,
"loss": 3.0724,
"step": 942
},
{
"epoch": 0.6416312659303314,
"grad_norm": 0.669939398765564,
"learning_rate": 8.134361558830193e-05,
"loss": 3.1008,
"step": 944
},
{
"epoch": 0.6429906542056075,
"grad_norm": 0.6620224118232727,
"learning_rate": 8.125599417500359e-05,
"loss": 3.0508,
"step": 946
},
{
"epoch": 0.6443500424808836,
"grad_norm": 0.556448221206665,
"learning_rate": 8.116821492117285e-05,
"loss": 3.101,
"step": 948
},
{
"epoch": 0.6457094307561597,
"grad_norm": 0.6877358555793762,
"learning_rate": 8.108027827008871e-05,
"loss": 3.1337,
"step": 950
},
{
"epoch": 0.6470688190314359,
"grad_norm": 0.6986392140388489,
"learning_rate": 8.09921846658249e-05,
"loss": 3.0152,
"step": 952
},
{
"epoch": 0.648428207306712,
"grad_norm": 0.6411210298538208,
"learning_rate": 8.090393455324783e-05,
"loss": 3.1043,
"step": 954
},
{
"epoch": 0.6497875955819881,
"grad_norm": 0.6838498115539551,
"learning_rate": 8.081552837801427e-05,
"loss": 3.1058,
"step": 956
},
{
"epoch": 0.6511469838572642,
"grad_norm": 0.6829114556312561,
"learning_rate": 8.072696658656906e-05,
"loss": 3.1245,
"step": 958
},
{
"epoch": 0.6525063721325404,
"grad_norm": 0.6059818863868713,
"learning_rate": 8.06382496261429e-05,
"loss": 3.0519,
"step": 960
},
{
"epoch": 0.6538657604078165,
"grad_norm": 0.6728681325912476,
"learning_rate": 8.05493779447501e-05,
"loss": 3.0275,
"step": 962
},
{
"epoch": 0.6552251486830926,
"grad_norm": 0.7594171762466431,
"learning_rate": 8.046035199118626e-05,
"loss": 3.1189,
"step": 964
},
{
"epoch": 0.6565845369583687,
"grad_norm": 0.5925278067588806,
"learning_rate": 8.037117221502611e-05,
"loss": 3.0737,
"step": 966
},
{
"epoch": 0.6579439252336449,
"grad_norm": 0.7238386273384094,
"learning_rate": 8.028183906662113e-05,
"loss": 3.0778,
"step": 968
},
{
"epoch": 0.659303313508921,
"grad_norm": 0.627905011177063,
"learning_rate": 8.019235299709726e-05,
"loss": 3.0713,
"step": 970
},
{
"epoch": 0.6606627017841971,
"grad_norm": 0.6918131113052368,
"learning_rate": 8.010271445835282e-05,
"loss": 3.1039,
"step": 972
},
{
"epoch": 0.6620220900594732,
"grad_norm": 0.6929610371589661,
"learning_rate": 8.001292390305597e-05,
"loss": 2.9954,
"step": 974
},
{
"epoch": 0.6633814783347494,
"grad_norm": 0.6240336894989014,
"learning_rate": 7.992298178464261e-05,
"loss": 3.0909,
"step": 976
},
{
"epoch": 0.6647408666100255,
"grad_norm": 0.678880512714386,
"learning_rate": 7.983288855731398e-05,
"loss": 3.1088,
"step": 978
},
{
"epoch": 0.6661002548853017,
"grad_norm": 0.716385006904602,
"learning_rate": 7.974264467603442e-05,
"loss": 3.0681,
"step": 980
},
{
"epoch": 0.6674596431605777,
"grad_norm": 0.7473633289337158,
"learning_rate": 7.965225059652909e-05,
"loss": 3.0411,
"step": 982
},
{
"epoch": 0.6688190314358539,
"grad_norm": 0.6619102954864502,
"learning_rate": 7.956170677528159e-05,
"loss": 3.1295,
"step": 984
},
{
"epoch": 0.67017841971113,
"grad_norm": 0.6102796792984009,
"learning_rate": 7.947101366953177e-05,
"loss": 3.0331,
"step": 986
},
{
"epoch": 0.6715378079864062,
"grad_norm": 0.5997730493545532,
"learning_rate": 7.938017173727328e-05,
"loss": 3.1051,
"step": 988
},
{
"epoch": 0.6728971962616822,
"grad_norm": 0.7003008127212524,
"learning_rate": 7.92891814372514e-05,
"loss": 3.1023,
"step": 990
},
{
"epoch": 0.6742565845369584,
"grad_norm": 0.6912761330604553,
"learning_rate": 7.919804322896062e-05,
"loss": 3.1274,
"step": 992
},
{
"epoch": 0.6756159728122345,
"grad_norm": 0.6718102097511292,
"learning_rate": 7.910675757264238e-05,
"loss": 3.0487,
"step": 994
},
{
"epoch": 0.6769753610875107,
"grad_norm": 0.6638507843017578,
"learning_rate": 7.901532492928269e-05,
"loss": 3.0132,
"step": 996
},
{
"epoch": 0.6783347493627867,
"grad_norm": 0.6588859558105469,
"learning_rate": 7.892374576060986e-05,
"loss": 3.0839,
"step": 998
},
{
"epoch": 0.6796941376380629,
"grad_norm": 0.7421138882637024,
"learning_rate": 7.88320205290921e-05,
"loss": 3.0802,
"step": 1000
},
{
"epoch": 0.681053525913339,
"grad_norm": 0.6962835192680359,
"learning_rate": 7.874014969793533e-05,
"loss": 3.1173,
"step": 1002
},
{
"epoch": 0.6824129141886152,
"grad_norm": 0.7081164717674255,
"learning_rate": 7.864813373108061e-05,
"loss": 3.0741,
"step": 1004
},
{
"epoch": 0.6837723024638912,
"grad_norm": 0.7623039484024048,
"learning_rate": 7.8555973093202e-05,
"loss": 3.1217,
"step": 1006
},
{
"epoch": 0.6851316907391674,
"grad_norm": 0.5826273560523987,
"learning_rate": 7.846366824970413e-05,
"loss": 3.0731,
"step": 1008
},
{
"epoch": 0.6864910790144435,
"grad_norm": 0.6277479529380798,
"learning_rate": 7.837121966671986e-05,
"loss": 3.1101,
"step": 1010
},
{
"epoch": 0.6878504672897197,
"grad_norm": 0.6380975842475891,
"learning_rate": 7.827862781110788e-05,
"loss": 3.1143,
"step": 1012
},
{
"epoch": 0.6892098555649957,
"grad_norm": 0.72269207239151,
"learning_rate": 7.818589315045046e-05,
"loss": 3.0956,
"step": 1014
},
{
"epoch": 0.6905692438402719,
"grad_norm": 0.65102219581604,
"learning_rate": 7.809301615305098e-05,
"loss": 3.0571,
"step": 1016
},
{
"epoch": 0.691928632115548,
"grad_norm": 0.7135101556777954,
"learning_rate": 7.799999728793162e-05,
"loss": 3.0262,
"step": 1018
},
{
"epoch": 0.6932880203908242,
"grad_norm": 0.6412085294723511,
"learning_rate": 7.790683702483102e-05,
"loss": 3.0946,
"step": 1020
},
{
"epoch": 0.6946474086661003,
"grad_norm": 0.5655577182769775,
"learning_rate": 7.78135358342018e-05,
"loss": 3.0359,
"step": 1022
},
{
"epoch": 0.6960067969413763,
"grad_norm": 0.7037889361381531,
"learning_rate": 7.772009418720832e-05,
"loss": 3.0498,
"step": 1024
},
{
"epoch": 0.6973661852166525,
"grad_norm": 0.5886219143867493,
"learning_rate": 7.76265125557242e-05,
"loss": 3.0468,
"step": 1026
},
{
"epoch": 0.6987255734919287,
"grad_norm": 0.5971575379371643,
"learning_rate": 7.753279141232995e-05,
"loss": 3.0748,
"step": 1028
},
{
"epoch": 0.7000849617672048,
"grad_norm": 0.6181765198707581,
"learning_rate": 7.743893123031068e-05,
"loss": 3.0709,
"step": 1030
},
{
"epoch": 0.7014443500424808,
"grad_norm": 0.6223234534263611,
"learning_rate": 7.734493248365355e-05,
"loss": 3.0975,
"step": 1032
},
{
"epoch": 0.702803738317757,
"grad_norm": 0.7307218313217163,
"learning_rate": 7.72507956470455e-05,
"loss": 3.1162,
"step": 1034
},
{
"epoch": 0.7041631265930332,
"grad_norm": 0.713442325592041,
"learning_rate": 7.715652119587085e-05,
"loss": 3.1112,
"step": 1036
},
{
"epoch": 0.7055225148683093,
"grad_norm": 0.6621354818344116,
"learning_rate": 7.706210960620878e-05,
"loss": 3.1128,
"step": 1038
},
{
"epoch": 0.7068819031435853,
"grad_norm": 0.6017237901687622,
"learning_rate": 7.696756135483109e-05,
"loss": 3.0969,
"step": 1040
},
{
"epoch": 0.7082412914188615,
"grad_norm": 0.6268143057823181,
"learning_rate": 7.687287691919968e-05,
"loss": 3.0387,
"step": 1042
},
{
"epoch": 0.7096006796941376,
"grad_norm": 0.6222430467605591,
"learning_rate": 7.677805677746415e-05,
"loss": 3.0769,
"step": 1044
},
{
"epoch": 0.7109600679694138,
"grad_norm": 0.6244910359382629,
"learning_rate": 7.668310140845944e-05,
"loss": 2.9467,
"step": 1046
},
{
"epoch": 0.7123194562446898,
"grad_norm": 0.7029285430908203,
"learning_rate": 7.658801129170335e-05,
"loss": 3.0942,
"step": 1048
},
{
"epoch": 0.713678844519966,
"grad_norm": 0.7414494156837463,
"learning_rate": 7.649278690739418e-05,
"loss": 3.07,
"step": 1050
},
{
"epoch": 0.7150382327952421,
"grad_norm": 0.6464896202087402,
"learning_rate": 7.639742873640825e-05,
"loss": 3.0708,
"step": 1052
},
{
"epoch": 0.7163976210705183,
"grad_norm": 0.8883520364761353,
"learning_rate": 7.630193726029746e-05,
"loss": 3.1425,
"step": 1054
},
{
"epoch": 0.7177570093457943,
"grad_norm": 0.7830668687820435,
"learning_rate": 7.620631296128698e-05,
"loss": 3.1458,
"step": 1056
},
{
"epoch": 0.7191163976210705,
"grad_norm": 0.790330708026886,
"learning_rate": 7.611055632227262e-05,
"loss": 3.0844,
"step": 1058
},
{
"epoch": 0.7204757858963466,
"grad_norm": 0.7381483316421509,
"learning_rate": 7.601466782681859e-05,
"loss": 3.075,
"step": 1060
},
{
"epoch": 0.7218351741716228,
"grad_norm": 0.6107451319694519,
"learning_rate": 7.591864795915492e-05,
"loss": 3.0409,
"step": 1062
},
{
"epoch": 0.723194562446899,
"grad_norm": 0.627289354801178,
"learning_rate": 7.582249720417504e-05,
"loss": 2.9797,
"step": 1064
},
{
"epoch": 0.724553950722175,
"grad_norm": 0.63350909948349,
"learning_rate": 7.57262160474334e-05,
"loss": 3.083,
"step": 1066
},
{
"epoch": 0.7259133389974511,
"grad_norm": 0.6966723799705505,
"learning_rate": 7.562980497514294e-05,
"loss": 3.0733,
"step": 1068
},
{
"epoch": 0.7272727272727273,
"grad_norm": 0.6452199220657349,
"learning_rate": 7.553326447417267e-05,
"loss": 3.0801,
"step": 1070
},
{
"epoch": 0.7286321155480034,
"grad_norm": 0.6325047016143799,
"learning_rate": 7.543659503204522e-05,
"loss": 2.959,
"step": 1072
},
{
"epoch": 0.7299915038232795,
"grad_norm": 0.6129205226898193,
"learning_rate": 7.533979713693433e-05,
"loss": 3.0407,
"step": 1074
},
{
"epoch": 0.7313508920985556,
"grad_norm": 0.6302885413169861,
"learning_rate": 7.524287127766245e-05,
"loss": 3.102,
"step": 1076
},
{
"epoch": 0.7327102803738318,
"grad_norm": 0.6648517847061157,
"learning_rate": 7.514581794369822e-05,
"loss": 3.1113,
"step": 1078
},
{
"epoch": 0.7340696686491079,
"grad_norm": 0.7655189633369446,
"learning_rate": 7.504863762515404e-05,
"loss": 3.0586,
"step": 1080
},
{
"epoch": 0.735429056924384,
"grad_norm": 0.6116030216217041,
"learning_rate": 7.495133081278356e-05,
"loss": 3.0208,
"step": 1082
},
{
"epoch": 0.7367884451996601,
"grad_norm": 0.6955434679985046,
"learning_rate": 7.48538979979792e-05,
"loss": 3.1177,
"step": 1084
},
{
"epoch": 0.7381478334749363,
"grad_norm": 0.6810325384140015,
"learning_rate": 7.47563396727697e-05,
"loss": 3.0297,
"step": 1086
},
{
"epoch": 0.7395072217502124,
"grad_norm": 0.6736805438995361,
"learning_rate": 7.465865632981763e-05,
"loss": 3.0411,
"step": 1088
},
{
"epoch": 0.7408666100254885,
"grad_norm": 0.689484179019928,
"learning_rate": 7.456084846241687e-05,
"loss": 3.0751,
"step": 1090
},
{
"epoch": 0.7422259983007646,
"grad_norm": 0.621320366859436,
"learning_rate": 7.446291656449014e-05,
"loss": 3.035,
"step": 1092
},
{
"epoch": 0.7435853865760408,
"grad_norm": 0.6816211938858032,
"learning_rate": 7.436486113058651e-05,
"loss": 3.0209,
"step": 1094
},
{
"epoch": 0.7449447748513169,
"grad_norm": 0.7227087616920471,
"learning_rate": 7.426668265587892e-05,
"loss": 2.9714,
"step": 1096
},
{
"epoch": 0.746304163126593,
"grad_norm": 0.6621195673942566,
"learning_rate": 7.416838163616162e-05,
"loss": 3.0837,
"step": 1098
},
{
"epoch": 0.7476635514018691,
"grad_norm": 0.5446696877479553,
"learning_rate": 7.406995856784772e-05,
"loss": 3.0743,
"step": 1100
},
{
"epoch": 0.7490229396771453,
"grad_norm": 0.6033000349998474,
"learning_rate": 7.397141394796667e-05,
"loss": 2.9843,
"step": 1102
},
{
"epoch": 0.7503823279524214,
"grad_norm": 0.5980247259140015,
"learning_rate": 7.387274827416175e-05,
"loss": 3.066,
"step": 1104
},
{
"epoch": 0.7517417162276976,
"grad_norm": 0.6181382536888123,
"learning_rate": 7.377396204468754e-05,
"loss": 3.0066,
"step": 1106
},
{
"epoch": 0.7531011045029736,
"grad_norm": 0.7175232172012329,
"learning_rate": 7.367505575840741e-05,
"loss": 3.0747,
"step": 1108
},
{
"epoch": 0.7544604927782498,
"grad_norm": 0.6160265207290649,
"learning_rate": 7.357602991479106e-05,
"loss": 3.0461,
"step": 1110
},
{
"epoch": 0.7558198810535259,
"grad_norm": 0.6809048652648926,
"learning_rate": 7.347688501391187e-05,
"loss": 3.0333,
"step": 1112
},
{
"epoch": 0.7571792693288021,
"grad_norm": 0.6925624012947083,
"learning_rate": 7.337762155644454e-05,
"loss": 3.0453,
"step": 1114
},
{
"epoch": 0.7585386576040781,
"grad_norm": 0.6381937265396118,
"learning_rate": 7.327824004366237e-05,
"loss": 3.0466,
"step": 1116
},
{
"epoch": 0.7598980458793543,
"grad_norm": 0.6669313311576843,
"learning_rate": 7.317874097743491e-05,
"loss": 2.9596,
"step": 1118
},
{
"epoch": 0.7612574341546304,
"grad_norm": 0.7228459119796753,
"learning_rate": 7.30791248602253e-05,
"loss": 3.093,
"step": 1120
},
{
"epoch": 0.7626168224299066,
"grad_norm": 0.5597134828567505,
"learning_rate": 7.297939219508781e-05,
"loss": 3.0038,
"step": 1122
},
{
"epoch": 0.7639762107051826,
"grad_norm": 0.6500145196914673,
"learning_rate": 7.287954348566529e-05,
"loss": 2.9154,
"step": 1124
},
{
"epoch": 0.7653355989804588,
"grad_norm": 0.5991110801696777,
"learning_rate": 7.277957923618652e-05,
"loss": 3.0487,
"step": 1126
},
{
"epoch": 0.7666949872557349,
"grad_norm": 0.7363496422767639,
"learning_rate": 7.267949995146383e-05,
"loss": 3.0155,
"step": 1128
},
{
"epoch": 0.7680543755310111,
"grad_norm": 0.6778906583786011,
"learning_rate": 7.257930613689043e-05,
"loss": 3.0946,
"step": 1130
},
{
"epoch": 0.7694137638062871,
"grad_norm": 0.6843693256378174,
"learning_rate": 7.24789982984379e-05,
"loss": 2.9874,
"step": 1132
},
{
"epoch": 0.7707731520815633,
"grad_norm": 0.6377856135368347,
"learning_rate": 7.237857694265368e-05,
"loss": 3.0625,
"step": 1134
},
{
"epoch": 0.7721325403568394,
"grad_norm": 0.6182267069816589,
"learning_rate": 7.227804257665837e-05,
"loss": 3.0279,
"step": 1136
},
{
"epoch": 0.7734919286321156,
"grad_norm": 0.5409005284309387,
"learning_rate": 7.217739570814337e-05,
"loss": 3.023,
"step": 1138
},
{
"epoch": 0.7748513169073916,
"grad_norm": 0.6478193402290344,
"learning_rate": 7.207663684536814e-05,
"loss": 3.0739,
"step": 1140
},
{
"epoch": 0.7762107051826678,
"grad_norm": 0.646008312702179,
"learning_rate": 7.197576649715771e-05,
"loss": 3.0265,
"step": 1142
},
{
"epoch": 0.7775700934579439,
"grad_norm": 0.6225748658180237,
"learning_rate": 7.187478517290014e-05,
"loss": 3.0443,
"step": 1144
},
{
"epoch": 0.7789294817332201,
"grad_norm": 0.6438111066818237,
"learning_rate": 7.177369338254385e-05,
"loss": 3.0836,
"step": 1146
},
{
"epoch": 0.7802888700084962,
"grad_norm": 0.6899060010910034,
"learning_rate": 7.167249163659518e-05,
"loss": 3.0378,
"step": 1148
},
{
"epoch": 0.7816482582837723,
"grad_norm": 0.5617516040802002,
"learning_rate": 7.157118044611569e-05,
"loss": 3.0376,
"step": 1150
},
{
"epoch": 0.7830076465590484,
"grad_norm": 0.6777760982513428,
"learning_rate": 7.146976032271961e-05,
"loss": 3.0566,
"step": 1152
},
{
"epoch": 0.7843670348343246,
"grad_norm": 0.5246532559394836,
"learning_rate": 7.136823177857132e-05,
"loss": 3.0416,
"step": 1154
},
{
"epoch": 0.7857264231096007,
"grad_norm": 0.5032817125320435,
"learning_rate": 7.126659532638272e-05,
"loss": 3.0473,
"step": 1156
},
{
"epoch": 0.7870858113848768,
"grad_norm": 0.6064226031303406,
"learning_rate": 7.116485147941059e-05,
"loss": 2.974,
"step": 1158
},
{
"epoch": 0.7884451996601529,
"grad_norm": 0.6149367690086365,
"learning_rate": 7.106300075145408e-05,
"loss": 3.0362,
"step": 1160
},
{
"epoch": 0.7898045879354291,
"grad_norm": 0.5678831934928894,
"learning_rate": 7.096104365685206e-05,
"loss": 3.0259,
"step": 1162
},
{
"epoch": 0.7911639762107052,
"grad_norm": 0.560962975025177,
"learning_rate": 7.085898071048056e-05,
"loss": 3.0255,
"step": 1164
},
{
"epoch": 0.7925233644859813,
"grad_norm": 0.5633766651153564,
"learning_rate": 7.075681242775017e-05,
"loss": 2.9758,
"step": 1166
},
{
"epoch": 0.7938827527612574,
"grad_norm": 0.5855138301849365,
"learning_rate": 7.065453932460337e-05,
"loss": 3.0208,
"step": 1168
},
{
"epoch": 0.7952421410365336,
"grad_norm": 0.6067277789115906,
"learning_rate": 7.055216191751204e-05,
"loss": 3.0361,
"step": 1170
},
{
"epoch": 0.7966015293118097,
"grad_norm": 0.5953718423843384,
"learning_rate": 7.044968072347473e-05,
"loss": 3.0646,
"step": 1172
},
{
"epoch": 0.7979609175870858,
"grad_norm": 0.648472785949707,
"learning_rate": 7.034709626001416e-05,
"loss": 3.0444,
"step": 1174
},
{
"epoch": 0.7993203058623619,
"grad_norm": 0.5768356919288635,
"learning_rate": 7.024440904517448e-05,
"loss": 3.0244,
"step": 1176
},
{
"epoch": 0.8006796941376381,
"grad_norm": 0.6846725940704346,
"learning_rate": 7.014161959751882e-05,
"loss": 3.0442,
"step": 1178
},
{
"epoch": 0.8020390824129142,
"grad_norm": 0.6223293542861938,
"learning_rate": 7.00387284361265e-05,
"loss": 3.0533,
"step": 1180
},
{
"epoch": 0.8033984706881904,
"grad_norm": 0.6202958822250366,
"learning_rate": 6.993573608059052e-05,
"loss": 3.0495,
"step": 1182
},
{
"epoch": 0.8047578589634664,
"grad_norm": 0.5777451992034912,
"learning_rate": 6.983264305101491e-05,
"loss": 3.0278,
"step": 1184
},
{
"epoch": 0.8061172472387426,
"grad_norm": 0.6108320355415344,
"learning_rate": 6.972944986801209e-05,
"loss": 2.8924,
"step": 1186
},
{
"epoch": 0.8074766355140187,
"grad_norm": 0.5958619713783264,
"learning_rate": 6.962615705270023e-05,
"loss": 3.0179,
"step": 1188
},
{
"epoch": 0.8088360237892949,
"grad_norm": 0.5943612456321716,
"learning_rate": 6.952276512670065e-05,
"loss": 3.0178,
"step": 1190
},
{
"epoch": 0.8101954120645709,
"grad_norm": 0.6541878581047058,
"learning_rate": 6.941927461213518e-05,
"loss": 3.0324,
"step": 1192
},
{
"epoch": 0.8115548003398471,
"grad_norm": 0.7149216532707214,
"learning_rate": 6.931568603162351e-05,
"loss": 3.0708,
"step": 1194
},
{
"epoch": 0.8129141886151232,
"grad_norm": 0.6609304547309875,
"learning_rate": 6.921199990828055e-05,
"loss": 3.0639,
"step": 1196
},
{
"epoch": 0.8142735768903994,
"grad_norm": 0.661949872970581,
"learning_rate": 6.910821676571381e-05,
"loss": 2.9924,
"step": 1198
},
{
"epoch": 0.8156329651656754,
"grad_norm": 0.5939015746116638,
"learning_rate": 6.90043371280207e-05,
"loss": 3.0055,
"step": 1200
},
{
"epoch": 0.8169923534409516,
"grad_norm": 0.5653124451637268,
"learning_rate": 6.890036151978598e-05,
"loss": 2.9889,
"step": 1202
},
{
"epoch": 0.8183517417162277,
"grad_norm": 0.5646810531616211,
"learning_rate": 6.879629046607903e-05,
"loss": 3.0132,
"step": 1204
},
{
"epoch": 0.8197111299915039,
"grad_norm": 0.551462709903717,
"learning_rate": 6.869212449245118e-05,
"loss": 2.9411,
"step": 1206
},
{
"epoch": 0.8210705182667799,
"grad_norm": 0.6366941332817078,
"learning_rate": 6.858786412493317e-05,
"loss": 2.9909,
"step": 1208
},
{
"epoch": 0.822429906542056,
"grad_norm": 0.5554643869400024,
"learning_rate": 6.848350989003237e-05,
"loss": 2.9887,
"step": 1210
},
{
"epoch": 0.8237892948173322,
"grad_norm": 0.5416108965873718,
"learning_rate": 6.837906231473023e-05,
"loss": 2.9745,
"step": 1212
},
{
"epoch": 0.8251486830926084,
"grad_norm": 0.6187860369682312,
"learning_rate": 6.82745219264795e-05,
"loss": 2.9978,
"step": 1214
},
{
"epoch": 0.8265080713678844,
"grad_norm": 0.5307159423828125,
"learning_rate": 6.816988925320162e-05,
"loss": 3.0299,
"step": 1216
},
{
"epoch": 0.8278674596431606,
"grad_norm": 0.6538923382759094,
"learning_rate": 6.806516482328418e-05,
"loss": 3.0287,
"step": 1218
},
{
"epoch": 0.8292268479184367,
"grad_norm": 0.6692806482315063,
"learning_rate": 6.796034916557797e-05,
"loss": 3.0234,
"step": 1220
},
{
"epoch": 0.8305862361937129,
"grad_norm": 0.5698544383049011,
"learning_rate": 6.78554428093946e-05,
"loss": 3.0521,
"step": 1222
},
{
"epoch": 0.831945624468989,
"grad_norm": 0.5961766839027405,
"learning_rate": 6.775044628450366e-05,
"loss": 2.974,
"step": 1224
},
{
"epoch": 0.833305012744265,
"grad_norm": 0.8825117349624634,
"learning_rate": 6.764536012113005e-05,
"loss": 3.0019,
"step": 1226
},
{
"epoch": 0.8346644010195412,
"grad_norm": 0.6309018135070801,
"learning_rate": 6.754018484995142e-05,
"loss": 3.0101,
"step": 1228
},
{
"epoch": 0.8360237892948174,
"grad_norm": 0.6422116160392761,
"learning_rate": 6.74349210020953e-05,
"loss": 2.9641,
"step": 1230
},
{
"epoch": 0.8373831775700935,
"grad_norm": 0.7694607377052307,
"learning_rate": 6.732956910913661e-05,
"loss": 2.999,
"step": 1232
},
{
"epoch": 0.8387425658453695,
"grad_norm": 0.6966648697853088,
"learning_rate": 6.722412970309488e-05,
"loss": 3.0015,
"step": 1234
},
{
"epoch": 0.8401019541206457,
"grad_norm": 0.6441610455513,
"learning_rate": 6.711860331643154e-05,
"loss": 3.0141,
"step": 1236
},
{
"epoch": 0.8414613423959219,
"grad_norm": 0.7339099645614624,
"learning_rate": 6.70129904820473e-05,
"loss": 3.0003,
"step": 1238
},
{
"epoch": 0.842820730671198,
"grad_norm": 0.7242470383644104,
"learning_rate": 6.690729173327938e-05,
"loss": 3.0713,
"step": 1240
},
{
"epoch": 0.844180118946474,
"grad_norm": 0.6477665901184082,
"learning_rate": 6.680150760389894e-05,
"loss": 3.0893,
"step": 1242
},
{
"epoch": 0.8455395072217502,
"grad_norm": 0.5959832072257996,
"learning_rate": 6.669563862810825e-05,
"loss": 3.0074,
"step": 1244
},
{
"epoch": 0.8468988954970263,
"grad_norm": 0.6699416637420654,
"learning_rate": 6.6589685340538e-05,
"loss": 2.94,
"step": 1246
},
{
"epoch": 0.8482582837723025,
"grad_norm": 0.6104918122291565,
"learning_rate": 6.648364827624477e-05,
"loss": 2.9702,
"step": 1248
},
{
"epoch": 0.8496176720475785,
"grad_norm": 0.6065095663070679,
"learning_rate": 6.63775279707081e-05,
"loss": 3.0213,
"step": 1250
},
{
"epoch": 0.8509770603228547,
"grad_norm": 0.5962669253349304,
"learning_rate": 6.627132495982797e-05,
"loss": 2.9429,
"step": 1252
},
{
"epoch": 0.8523364485981308,
"grad_norm": 0.5762045383453369,
"learning_rate": 6.616503977992197e-05,
"loss": 2.9507,
"step": 1254
},
{
"epoch": 0.853695836873407,
"grad_norm": 0.5952661633491516,
"learning_rate": 6.605867296772261e-05,
"loss": 3.0323,
"step": 1256
},
{
"epoch": 0.855055225148683,
"grad_norm": 0.5376638174057007,
"learning_rate": 6.595222506037472e-05,
"loss": 3.0315,
"step": 1258
},
{
"epoch": 0.8564146134239592,
"grad_norm": 0.6530884504318237,
"learning_rate": 6.58456965954326e-05,
"loss": 2.9814,
"step": 1260
},
{
"epoch": 0.8577740016992353,
"grad_norm": 0.7119038701057434,
"learning_rate": 6.573908811085734e-05,
"loss": 3.0361,
"step": 1262
},
{
"epoch": 0.8591333899745115,
"grad_norm": 0.6418982744216919,
"learning_rate": 6.56324001450142e-05,
"loss": 2.9703,
"step": 1264
},
{
"epoch": 0.8604927782497876,
"grad_norm": 0.5768359303474426,
"learning_rate": 6.552563323666973e-05,
"loss": 3.0508,
"step": 1266
},
{
"epoch": 0.8618521665250637,
"grad_norm": 0.5932447910308838,
"learning_rate": 6.541878792498919e-05,
"loss": 3.0444,
"step": 1268
},
{
"epoch": 0.8632115548003398,
"grad_norm": 0.5155834555625916,
"learning_rate": 6.531186474953375e-05,
"loss": 2.9336,
"step": 1270
},
{
"epoch": 0.864570943075616,
"grad_norm": 0.4931122064590454,
"learning_rate": 6.520486425025778e-05,
"loss": 3.0018,
"step": 1272
},
{
"epoch": 0.8659303313508921,
"grad_norm": 0.629960834980011,
"learning_rate": 6.509778696750614e-05,
"loss": 3.1188,
"step": 1274
},
{
"epoch": 0.8672897196261682,
"grad_norm": 0.5573826432228088,
"learning_rate": 6.499063344201146e-05,
"loss": 2.9807,
"step": 1276
},
{
"epoch": 0.8686491079014443,
"grad_norm": 0.5982751250267029,
"learning_rate": 6.488340421489136e-05,
"loss": 3.0134,
"step": 1278
},
{
"epoch": 0.8700084961767205,
"grad_norm": 0.5728147029876709,
"learning_rate": 6.477609982764575e-05,
"loss": 3.0718,
"step": 1280
},
{
"epoch": 0.8713678844519966,
"grad_norm": 0.5682603120803833,
"learning_rate": 6.46687208221541e-05,
"loss": 3.044,
"step": 1282
},
{
"epoch": 0.8727272727272727,
"grad_norm": 0.5737953186035156,
"learning_rate": 6.45612677406727e-05,
"loss": 3.013,
"step": 1284
},
{
"epoch": 0.8740866610025488,
"grad_norm": 0.6498506665229797,
"learning_rate": 6.445374112583196e-05,
"loss": 3.0197,
"step": 1286
},
{
"epoch": 0.875446049277825,
"grad_norm": 0.6554915308952332,
"learning_rate": 6.434614152063352e-05,
"loss": 2.9238,
"step": 1288
},
{
"epoch": 0.8768054375531011,
"grad_norm": 0.6168110370635986,
"learning_rate": 6.423846946844771e-05,
"loss": 2.979,
"step": 1290
},
{
"epoch": 0.8781648258283772,
"grad_norm": 0.7791323661804199,
"learning_rate": 6.41307255130107e-05,
"loss": 2.9561,
"step": 1292
},
{
"epoch": 0.8795242141036533,
"grad_norm": 0.6808146834373474,
"learning_rate": 6.402291019842171e-05,
"loss": 3.0073,
"step": 1294
},
{
"epoch": 0.8808836023789295,
"grad_norm": 0.6812180280685425,
"learning_rate": 6.391502406914039e-05,
"loss": 2.9763,
"step": 1296
},
{
"epoch": 0.8822429906542056,
"grad_norm": 0.5727677345275879,
"learning_rate": 6.380706766998395e-05,
"loss": 2.9236,
"step": 1298
},
{
"epoch": 0.8836023789294817,
"grad_norm": 0.7085919380187988,
"learning_rate": 6.369904154612448e-05,
"loss": 3.0897,
"step": 1300
},
{
"epoch": 0.8849617672047578,
"grad_norm": 0.7252781391143799,
"learning_rate": 6.35909462430862e-05,
"loss": 2.991,
"step": 1302
},
{
"epoch": 0.886321155480034,
"grad_norm": 0.5278810858726501,
"learning_rate": 6.348278230674258e-05,
"loss": 2.9693,
"step": 1304
},
{
"epoch": 0.8876805437553101,
"grad_norm": 0.6835475564002991,
"learning_rate": 6.337455028331382e-05,
"loss": 3.0092,
"step": 1306
},
{
"epoch": 0.8890399320305863,
"grad_norm": 0.6431845426559448,
"learning_rate": 6.326625071936388e-05,
"loss": 2.9966,
"step": 1308
},
{
"epoch": 0.8903993203058623,
"grad_norm": 0.5237377882003784,
"learning_rate": 6.315788416179775e-05,
"loss": 2.8934,
"step": 1310
},
{
"epoch": 0.8917587085811385,
"grad_norm": 0.5790330171585083,
"learning_rate": 6.304945115785885e-05,
"loss": 3.0384,
"step": 1312
},
{
"epoch": 0.8931180968564146,
"grad_norm": 0.5629638433456421,
"learning_rate": 6.294095225512603e-05,
"loss": 2.9331,
"step": 1314
},
{
"epoch": 0.8944774851316908,
"grad_norm": 0.5486363768577576,
"learning_rate": 6.283238800151103e-05,
"loss": 2.9491,
"step": 1316
},
{
"epoch": 0.8958368734069668,
"grad_norm": 0.5565508008003235,
"learning_rate": 6.272375894525553e-05,
"loss": 2.951,
"step": 1318
},
{
"epoch": 0.897196261682243,
"grad_norm": 0.609849214553833,
"learning_rate": 6.261506563492848e-05,
"loss": 2.9716,
"step": 1320
},
{
"epoch": 0.8985556499575191,
"grad_norm": 0.6158019304275513,
"learning_rate": 6.250630861942333e-05,
"loss": 2.8781,
"step": 1322
},
{
"epoch": 0.8999150382327953,
"grad_norm": 0.5898751020431519,
"learning_rate": 6.239748844795521e-05,
"loss": 2.9243,
"step": 1324
},
{
"epoch": 0.9012744265080713,
"grad_norm": 0.5750930905342102,
"learning_rate": 6.228860567005819e-05,
"loss": 2.9699,
"step": 1326
},
{
"epoch": 0.9026338147833475,
"grad_norm": 0.571783185005188,
"learning_rate": 6.21796608355825e-05,
"loss": 2.9596,
"step": 1328
},
{
"epoch": 0.9039932030586236,
"grad_norm": 0.5380290746688843,
"learning_rate": 6.207065449469178e-05,
"loss": 3.0468,
"step": 1330
},
{
"epoch": 0.9053525913338998,
"grad_norm": 0.5547913908958435,
"learning_rate": 6.196158719786021e-05,
"loss": 2.9804,
"step": 1332
},
{
"epoch": 0.9067119796091758,
"grad_norm": 0.6021085381507874,
"learning_rate": 6.185245949586986e-05,
"loss": 2.9849,
"step": 1334
},
{
"epoch": 0.908071367884452,
"grad_norm": 0.5301669239997864,
"learning_rate": 6.174327193980778e-05,
"loss": 2.9676,
"step": 1336
},
{
"epoch": 0.9094307561597281,
"grad_norm": 0.5973559617996216,
"learning_rate": 6.163402508106334e-05,
"loss": 2.9417,
"step": 1338
},
{
"epoch": 0.9107901444350043,
"grad_norm": 0.6148339509963989,
"learning_rate": 6.152471947132532e-05,
"loss": 2.951,
"step": 1340
},
{
"epoch": 0.9121495327102803,
"grad_norm": 0.6172875165939331,
"learning_rate": 6.141535566257926e-05,
"loss": 2.988,
"step": 1342
},
{
"epoch": 0.9135089209855565,
"grad_norm": 0.6237571835517883,
"learning_rate": 6.130593420710452e-05,
"loss": 3.0442,
"step": 1344
},
{
"epoch": 0.9148683092608326,
"grad_norm": 0.6334381103515625,
"learning_rate": 6.119645565747165e-05,
"loss": 2.993,
"step": 1346
},
{
"epoch": 0.9162276975361088,
"grad_norm": 0.5856964588165283,
"learning_rate": 6.108692056653948e-05,
"loss": 2.9579,
"step": 1348
},
{
"epoch": 0.9175870858113849,
"grad_norm": 0.634194552898407,
"learning_rate": 6.097732948745235e-05,
"loss": 2.8742,
"step": 1350
},
{
"epoch": 0.918946474086661,
"grad_norm": 0.6679513454437256,
"learning_rate": 6.0867682973637394e-05,
"loss": 3.0189,
"step": 1352
},
{
"epoch": 0.9203058623619371,
"grad_norm": 0.6384778022766113,
"learning_rate": 6.075798157880164e-05,
"loss": 3.0204,
"step": 1354
},
{
"epoch": 0.9216652506372133,
"grad_norm": 0.532374382019043,
"learning_rate": 6.0648225856929275e-05,
"loss": 2.996,
"step": 1356
},
{
"epoch": 0.9230246389124894,
"grad_norm": 0.5407797694206238,
"learning_rate": 6.0538416362278824e-05,
"loss": 2.9277,
"step": 1358
},
{
"epoch": 0.9243840271877655,
"grad_norm": 0.6642601490020752,
"learning_rate": 6.0428553649380415e-05,
"loss": 2.989,
"step": 1360
},
{
"epoch": 0.9257434154630416,
"grad_norm": 0.6345245242118835,
"learning_rate": 6.031863827303284e-05,
"loss": 2.9936,
"step": 1362
},
{
"epoch": 0.9271028037383178,
"grad_norm": 0.5593590140342712,
"learning_rate": 6.020867078830089e-05,
"loss": 2.9882,
"step": 1364
},
{
"epoch": 0.9284621920135939,
"grad_norm": 0.5335503220558167,
"learning_rate": 6.009865175051248e-05,
"loss": 2.963,
"step": 1366
},
{
"epoch": 0.92982158028887,
"grad_norm": 0.6434462666511536,
"learning_rate": 5.9988581715255876e-05,
"loss": 2.9936,
"step": 1368
},
{
"epoch": 0.9311809685641461,
"grad_norm": 0.583660900592804,
"learning_rate": 5.9878461238376904e-05,
"loss": 2.9092,
"step": 1370
},
{
"epoch": 0.9325403568394223,
"grad_norm": 0.7153940200805664,
"learning_rate": 5.976829087597605e-05,
"loss": 2.9744,
"step": 1372
},
{
"epoch": 0.9338997451146984,
"grad_norm": 1.2551978826522827,
"learning_rate": 5.965807118440576e-05,
"loss": 2.9742,
"step": 1374
},
{
"epoch": 0.9352591333899745,
"grad_norm": 0.6354015469551086,
"learning_rate": 5.954780272026761e-05,
"loss": 2.9045,
"step": 1376
},
{
"epoch": 0.9366185216652506,
"grad_norm": 0.6488978266716003,
"learning_rate": 5.94374860404094e-05,
"loss": 2.9853,
"step": 1378
},
{
"epoch": 0.9379779099405268,
"grad_norm": 0.5952944755554199,
"learning_rate": 5.9327121701922516e-05,
"loss": 2.9655,
"step": 1380
},
{
"epoch": 0.9393372982158029,
"grad_norm": 0.6394802331924438,
"learning_rate": 5.921671026213893e-05,
"loss": 2.9707,
"step": 1382
},
{
"epoch": 0.940696686491079,
"grad_norm": 0.7093409299850464,
"learning_rate": 5.91062522786285e-05,
"loss": 2.9166,
"step": 1384
},
{
"epoch": 0.9420560747663551,
"grad_norm": 0.5998436808586121,
"learning_rate": 5.8995748309196184e-05,
"loss": 2.983,
"step": 1386
},
{
"epoch": 0.9434154630416313,
"grad_norm": 0.6887815594673157,
"learning_rate": 5.888519891187906e-05,
"loss": 2.9567,
"step": 1388
},
{
"epoch": 0.9447748513169074,
"grad_norm": 0.659731388092041,
"learning_rate": 5.877460464494369e-05,
"loss": 2.8726,
"step": 1390
},
{
"epoch": 0.9461342395921836,
"grad_norm": 0.7009007334709167,
"learning_rate": 5.8663966066883205e-05,
"loss": 2.9669,
"step": 1392
},
{
"epoch": 0.9474936278674596,
"grad_norm": 0.7139768004417419,
"learning_rate": 5.855328373641449e-05,
"loss": 2.9808,
"step": 1394
},
{
"epoch": 0.9488530161427358,
"grad_norm": 0.6618505120277405,
"learning_rate": 5.8442558212475416e-05,
"loss": 2.9656,
"step": 1396
},
{
"epoch": 0.9502124044180119,
"grad_norm": 0.5960121750831604,
"learning_rate": 5.83317900542219e-05,
"loss": 3.0878,
"step": 1398
},
{
"epoch": 0.9515717926932881,
"grad_norm": 0.6161683797836304,
"learning_rate": 5.8220979821025254e-05,
"loss": 2.9296,
"step": 1400
},
{
"epoch": 0.9529311809685641,
"grad_norm": 0.6301623582839966,
"learning_rate": 5.8110128072469206e-05,
"loss": 2.9037,
"step": 1402
},
{
"epoch": 0.9542905692438403,
"grad_norm": 0.6042844653129578,
"learning_rate": 5.799923536834715e-05,
"loss": 3.0262,
"step": 1404
},
{
"epoch": 0.9556499575191164,
"grad_norm": 0.5644305348396301,
"learning_rate": 5.7888302268659286e-05,
"loss": 2.9875,
"step": 1406
},
{
"epoch": 0.9570093457943926,
"grad_norm": 0.567065417766571,
"learning_rate": 5.7777329333609855e-05,
"loss": 3.0178,
"step": 1408
},
{
"epoch": 0.9583687340696686,
"grad_norm": 0.5456449389457703,
"learning_rate": 5.76663171236042e-05,
"loss": 2.9099,
"step": 1410
},
{
"epoch": 0.9597281223449448,
"grad_norm": 0.5395920276641846,
"learning_rate": 5.755526619924605e-05,
"loss": 2.9196,
"step": 1412
},
{
"epoch": 0.9610875106202209,
"grad_norm": 0.533607542514801,
"learning_rate": 5.744417712133462e-05,
"loss": 2.9564,
"step": 1414
},
{
"epoch": 0.9624468988954971,
"grad_norm": 0.5302989482879639,
"learning_rate": 5.733305045086179e-05,
"loss": 2.9872,
"step": 1416
},
{
"epoch": 0.9638062871707731,
"grad_norm": 0.6003543138504028,
"learning_rate": 5.722188674900929e-05,
"loss": 2.9267,
"step": 1418
},
{
"epoch": 0.9651656754460493,
"grad_norm": 0.6156508326530457,
"learning_rate": 5.7110686577145865e-05,
"loss": 2.9157,
"step": 1420
},
{
"epoch": 0.9665250637213254,
"grad_norm": 0.5998055934906006,
"learning_rate": 5.6999450496824416e-05,
"loss": 2.9314,
"step": 1422
},
{
"epoch": 0.9678844519966016,
"grad_norm": 0.6056944131851196,
"learning_rate": 5.688817906977917e-05,
"loss": 2.9823,
"step": 1424
},
{
"epoch": 0.9692438402718777,
"grad_norm": 0.5981131196022034,
"learning_rate": 5.677687285792288e-05,
"loss": 2.9677,
"step": 1426
},
{
"epoch": 0.9706032285471538,
"grad_norm": 0.5991801023483276,
"learning_rate": 5.666553242334394e-05,
"loss": 2.984,
"step": 1428
},
{
"epoch": 0.9719626168224299,
"grad_norm": 0.5065740942955017,
"learning_rate": 5.655415832830357e-05,
"loss": 2.9601,
"step": 1430
},
{
"epoch": 0.9733220050977061,
"grad_norm": 0.5771386027336121,
"learning_rate": 5.644275113523297e-05,
"loss": 2.9288,
"step": 1432
},
{
"epoch": 0.9746813933729822,
"grad_norm": 0.5725680589675903,
"learning_rate": 5.63313114067305e-05,
"loss": 2.9348,
"step": 1434
},
{
"epoch": 0.9760407816482582,
"grad_norm": 0.5777420997619629,
"learning_rate": 5.621983970555881e-05,
"loss": 2.9346,
"step": 1436
},
{
"epoch": 0.9774001699235344,
"grad_norm": 0.5346077680587769,
"learning_rate": 5.6108336594641996e-05,
"loss": 3.0165,
"step": 1438
},
{
"epoch": 0.9787595581988106,
"grad_norm": 0.6149086952209473,
"learning_rate": 5.599680263706278e-05,
"loss": 2.9962,
"step": 1440
},
{
"epoch": 0.9801189464740867,
"grad_norm": 0.5737910866737366,
"learning_rate": 5.588523839605968e-05,
"loss": 2.9621,
"step": 1442
},
{
"epoch": 0.9814783347493627,
"grad_norm": 0.5987441539764404,
"learning_rate": 5.577364443502412e-05,
"loss": 3.0176,
"step": 1444
},
{
"epoch": 0.9828377230246389,
"grad_norm": 0.5962843894958496,
"learning_rate": 5.56620213174976e-05,
"loss": 2.9302,
"step": 1446
},
{
"epoch": 0.984197111299915,
"grad_norm": 0.5651360154151917,
"learning_rate": 5.5550369607168874e-05,
"loss": 2.9685,
"step": 1448
},
{
"epoch": 0.9855564995751912,
"grad_norm": 0.5286944508552551,
"learning_rate": 5.543868986787109e-05,
"loss": 2.9918,
"step": 1450
},
{
"epoch": 0.9869158878504672,
"grad_norm": 0.5632530450820923,
"learning_rate": 5.532698266357892e-05,
"loss": 2.9934,
"step": 1452
},
{
"epoch": 0.9882752761257434,
"grad_norm": 0.6063055396080017,
"learning_rate": 5.521524855840578e-05,
"loss": 2.9871,
"step": 1454
},
{
"epoch": 0.9896346644010195,
"grad_norm": 0.5442188382148743,
"learning_rate": 5.510348811660084e-05,
"loss": 2.9771,
"step": 1456
},
{
"epoch": 0.9909940526762957,
"grad_norm": 0.5566977262496948,
"learning_rate": 5.499170190254641e-05,
"loss": 2.875,
"step": 1458
},
{
"epoch": 0.9923534409515717,
"grad_norm": 0.521381139755249,
"learning_rate": 5.4879890480754795e-05,
"loss": 2.9807,
"step": 1460
},
{
"epoch": 0.9937128292268479,
"grad_norm": 0.5293747186660767,
"learning_rate": 5.476805441586569e-05,
"loss": 2.9315,
"step": 1462
},
{
"epoch": 0.995072217502124,
"grad_norm": 0.5370301008224487,
"learning_rate": 5.465619427264323e-05,
"loss": 2.9827,
"step": 1464
},
{
"epoch": 0.9964316057774002,
"grad_norm": 0.5640749335289001,
"learning_rate": 5.454431061597311e-05,
"loss": 2.9615,
"step": 1466
},
{
"epoch": 0.9977909940526763,
"grad_norm": 0.5775633454322815,
"learning_rate": 5.4432404010859804e-05,
"loss": 2.9576,
"step": 1468
},
{
"epoch": 0.9991503823279524,
"grad_norm": 0.5625415444374084,
"learning_rate": 5.4320475022423647e-05,
"loss": 2.8864,
"step": 1470
},
{
"epoch": 1.0,
"grad_norm": 0.88974928855896,
"learning_rate": 5.4208524215897985e-05,
"loss": 3.0896,
"step": 1472
},
{
"epoch": 1.001359388275276,
"grad_norm": 0.7388569712638855,
"learning_rate": 5.409655215662642e-05,
"loss": 2.8507,
"step": 1474
},
{
"epoch": 1.0027187765505523,
"grad_norm": 0.6877476572990417,
"learning_rate": 5.3984559410059796e-05,
"loss": 2.9142,
"step": 1476
},
{
"epoch": 1.0040781648258283,
"grad_norm": 0.6670885682106018,
"learning_rate": 5.38725465417535e-05,
"loss": 2.9301,
"step": 1478
},
{
"epoch": 1.0054375531011046,
"grad_norm": 0.6211088299751282,
"learning_rate": 5.376051411736447e-05,
"loss": 2.9291,
"step": 1480
},
{
"epoch": 1.0067969413763806,
"grad_norm": 0.630263090133667,
"learning_rate": 5.364846270264842e-05,
"loss": 2.9646,
"step": 1482
},
{
"epoch": 1.0081563296516567,
"grad_norm": 0.5889977812767029,
"learning_rate": 5.353639286345699e-05,
"loss": 2.946,
"step": 1484
},
{
"epoch": 1.009515717926933,
"grad_norm": 0.5902710556983948,
"learning_rate": 5.342430516573485e-05,
"loss": 2.8681,
"step": 1486
},
{
"epoch": 1.010875106202209,
"grad_norm": 0.5864720344543457,
"learning_rate": 5.3312200175516815e-05,
"loss": 2.9029,
"step": 1488
},
{
"epoch": 1.012234494477485,
"grad_norm": 0.5937751531600952,
"learning_rate": 5.320007845892509e-05,
"loss": 2.9837,
"step": 1490
},
{
"epoch": 1.0135938827527613,
"grad_norm": 0.6514058113098145,
"learning_rate": 5.3087940582166287e-05,
"loss": 2.8821,
"step": 1492
},
{
"epoch": 1.0149532710280373,
"grad_norm": 0.5811493992805481,
"learning_rate": 5.297578711152867e-05,
"loss": 2.945,
"step": 1494
},
{
"epoch": 1.0163126593033136,
"grad_norm": 0.5168861150741577,
"learning_rate": 5.286361861337924e-05,
"loss": 2.9208,
"step": 1496
},
{
"epoch": 1.0176720475785896,
"grad_norm": 0.48943331837654114,
"learning_rate": 5.275143565416086e-05,
"loss": 2.9156,
"step": 1498
},
{
"epoch": 1.0190314358538657,
"grad_norm": 0.5392592549324036,
"learning_rate": 5.2639238800389465e-05,
"loss": 2.9327,
"step": 1500
}
],
"logging_steps": 2,
"max_steps": 2944,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 300,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.6879307804807004e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}