sla-cpt-base / q2.5-eu / checkpoint-2424 / trainer_state.json
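The JSON below is the Trainer state saved with this checkpoint; each entry in log_history records epoch, grad_norm, learning_rate, loss, and step for one logging event. As a minimal sketch (assuming the file has been downloaded locally as "trainer_state.json" and matplotlib is installed; neither is part of the checkpoint itself), the loss curve can be read and plotted like this:

# Minimal sketch: load this trainer_state.json and plot the logged training loss.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only log entries that carry a training loss value.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("sla-cpt-base / q2.5-eu / checkpoint-2424")
plt.savefig("loss_curve.png")
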
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 2424,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00041269022440030954,
"grad_norm": 5.357641696929932,
"learning_rate": 0.0,
"loss": 6.1189,
"step": 1
},
{
"epoch": 0.0008253804488006191,
"grad_norm": 5.467103004455566,
"learning_rate": 8.19672131147541e-07,
"loss": 6.1624,
"step": 2
},
{
"epoch": 0.0016507608976012382,
"grad_norm": 5.270254611968994,
"learning_rate": 2.459016393442623e-06,
"loss": 6.1301,
"step": 4
},
{
"epoch": 0.002476141346401857,
"grad_norm": 4.018209457397461,
"learning_rate": 4.098360655737704e-06,
"loss": 6.1305,
"step": 6
},
{
"epoch": 0.0033015217952024763,
"grad_norm": 3.973541259765625,
"learning_rate": 5.737704918032787e-06,
"loss": 6.0095,
"step": 8
},
{
"epoch": 0.0041269022440030955,
"grad_norm": 3.5741689205169678,
"learning_rate": 7.3770491803278695e-06,
"loss": 5.9256,
"step": 10
},
{
"epoch": 0.004952282692803714,
"grad_norm": 4.449726581573486,
"learning_rate": 9.016393442622952e-06,
"loss": 5.8059,
"step": 12
},
{
"epoch": 0.005777663141604333,
"grad_norm": 4.23346471786499,
"learning_rate": 1.0655737704918032e-05,
"loss": 5.6952,
"step": 14
},
{
"epoch": 0.006603043590404953,
"grad_norm": 2.9237682819366455,
"learning_rate": 1.2295081967213116e-05,
"loss": 5.5323,
"step": 16
},
{
"epoch": 0.007428424039205571,
"grad_norm": 2.1714608669281006,
"learning_rate": 1.3934426229508196e-05,
"loss": 5.3692,
"step": 18
},
{
"epoch": 0.008253804488006191,
"grad_norm": 1.9534616470336914,
"learning_rate": 1.557377049180328e-05,
"loss": 5.1891,
"step": 20
},
{
"epoch": 0.00907918493680681,
"grad_norm": 1.7847157716751099,
"learning_rate": 1.721311475409836e-05,
"loss": 5.0308,
"step": 22
},
{
"epoch": 0.009904565385607429,
"grad_norm": 1.329651117324829,
"learning_rate": 1.8852459016393442e-05,
"loss": 4.8984,
"step": 24
},
{
"epoch": 0.010729945834408047,
"grad_norm": 1.3483397960662842,
"learning_rate": 2.0491803278688525e-05,
"loss": 4.7523,
"step": 26
},
{
"epoch": 0.011555326283208666,
"grad_norm": 1.213932991027832,
"learning_rate": 2.2131147540983607e-05,
"loss": 4.6048,
"step": 28
},
{
"epoch": 0.012380706732009285,
"grad_norm": 1.2783870697021484,
"learning_rate": 2.377049180327869e-05,
"loss": 4.5203,
"step": 30
},
{
"epoch": 0.013206087180809905,
"grad_norm": 1.4986013174057007,
"learning_rate": 2.540983606557377e-05,
"loss": 4.3679,
"step": 32
},
{
"epoch": 0.014031467629610524,
"grad_norm": 0.9324406981468201,
"learning_rate": 2.7049180327868856e-05,
"loss": 4.2537,
"step": 34
},
{
"epoch": 0.014856848078411143,
"grad_norm": 2.258493185043335,
"learning_rate": 2.8688524590163935e-05,
"loss": 4.1994,
"step": 36
},
{
"epoch": 0.015682228527211763,
"grad_norm": 2.1433424949645996,
"learning_rate": 3.0327868852459017e-05,
"loss": 4.1349,
"step": 38
},
{
"epoch": 0.016507608976012382,
"grad_norm": 2.0129520893096924,
"learning_rate": 3.19672131147541e-05,
"loss": 4.0318,
"step": 40
},
{
"epoch": 0.017332989424813,
"grad_norm": 1.2179840803146362,
"learning_rate": 3.360655737704918e-05,
"loss": 3.9541,
"step": 42
},
{
"epoch": 0.01815836987361362,
"grad_norm": 1.133366346359253,
"learning_rate": 3.524590163934427e-05,
"loss": 3.8931,
"step": 44
},
{
"epoch": 0.01898375032241424,
"grad_norm": 1.4476598501205444,
"learning_rate": 3.6885245901639346e-05,
"loss": 3.8386,
"step": 46
},
{
"epoch": 0.019809130771214857,
"grad_norm": 1.8356342315673828,
"learning_rate": 3.8524590163934424e-05,
"loss": 3.7593,
"step": 48
},
{
"epoch": 0.020634511220015476,
"grad_norm": 1.1986051797866821,
"learning_rate": 4.016393442622951e-05,
"loss": 3.7367,
"step": 50
},
{
"epoch": 0.021459891668816095,
"grad_norm": 1.4108922481536865,
"learning_rate": 4.1803278688524595e-05,
"loss": 3.6536,
"step": 52
},
{
"epoch": 0.022285272117616713,
"grad_norm": 1.194887638092041,
"learning_rate": 4.3442622950819674e-05,
"loss": 3.6377,
"step": 54
},
{
"epoch": 0.023110652566417332,
"grad_norm": 1.5970392227172852,
"learning_rate": 4.508196721311476e-05,
"loss": 3.592,
"step": 56
},
{
"epoch": 0.02393603301521795,
"grad_norm": 1.7871198654174805,
"learning_rate": 4.672131147540984e-05,
"loss": 3.5467,
"step": 58
},
{
"epoch": 0.02476141346401857,
"grad_norm": 2.0405406951904297,
"learning_rate": 4.836065573770492e-05,
"loss": 3.5071,
"step": 60
},
{
"epoch": 0.025586793912819192,
"grad_norm": 1.6245758533477783,
"learning_rate": 5e-05,
"loss": 3.4754,
"step": 62
},
{
"epoch": 0.02641217436161981,
"grad_norm": 1.3766052722930908,
"learning_rate": 5.163934426229509e-05,
"loss": 3.4631,
"step": 64
},
{
"epoch": 0.02723755481042043,
"grad_norm": 1.3058711290359497,
"learning_rate": 5.327868852459017e-05,
"loss": 3.4197,
"step": 66
},
{
"epoch": 0.028062935259221048,
"grad_norm": 1.545015573501587,
"learning_rate": 5.491803278688525e-05,
"loss": 3.4313,
"step": 68
},
{
"epoch": 0.028888315708021667,
"grad_norm": 1.439721703529358,
"learning_rate": 5.6557377049180324e-05,
"loss": 3.3894,
"step": 70
},
{
"epoch": 0.029713696156822286,
"grad_norm": 1.6934937238693237,
"learning_rate": 5.819672131147541e-05,
"loss": 3.3193,
"step": 72
},
{
"epoch": 0.030539076605622904,
"grad_norm": 1.3454101085662842,
"learning_rate": 5.9836065573770495e-05,
"loss": 3.3252,
"step": 74
},
{
"epoch": 0.03136445705442353,
"grad_norm": 1.610787272453308,
"learning_rate": 6.147540983606557e-05,
"loss": 3.2966,
"step": 76
},
{
"epoch": 0.03218983750322414,
"grad_norm": 2.0271148681640625,
"learning_rate": 6.311475409836067e-05,
"loss": 3.2892,
"step": 78
},
{
"epoch": 0.033015217952024764,
"grad_norm": 2.165980100631714,
"learning_rate": 6.475409836065574e-05,
"loss": 3.2792,
"step": 80
},
{
"epoch": 0.03384059840082538,
"grad_norm": 1.7957913875579834,
"learning_rate": 6.639344262295082e-05,
"loss": 3.2543,
"step": 82
},
{
"epoch": 0.034665978849626,
"grad_norm": 1.3472362756729126,
"learning_rate": 6.80327868852459e-05,
"loss": 3.251,
"step": 84
},
{
"epoch": 0.03549135929842662,
"grad_norm": 1.3264447450637817,
"learning_rate": 6.967213114754098e-05,
"loss": 3.2185,
"step": 86
},
{
"epoch": 0.03631673974722724,
"grad_norm": 1.5266629457473755,
"learning_rate": 7.131147540983607e-05,
"loss": 3.2058,
"step": 88
},
{
"epoch": 0.037142120196027854,
"grad_norm": 1.36456298828125,
"learning_rate": 7.295081967213115e-05,
"loss": 3.1753,
"step": 90
},
{
"epoch": 0.03796750064482848,
"grad_norm": 1.3469734191894531,
"learning_rate": 7.459016393442624e-05,
"loss": 3.1905,
"step": 92
},
{
"epoch": 0.03879288109362909,
"grad_norm": 1.2221981287002563,
"learning_rate": 7.622950819672131e-05,
"loss": 3.1526,
"step": 94
},
{
"epoch": 0.039618261542429714,
"grad_norm": 1.3852319717407227,
"learning_rate": 7.78688524590164e-05,
"loss": 3.156,
"step": 96
},
{
"epoch": 0.04044364199123033,
"grad_norm": 1.3925163745880127,
"learning_rate": 7.950819672131148e-05,
"loss": 3.149,
"step": 98
},
{
"epoch": 0.04126902244003095,
"grad_norm": 1.4574236869812012,
"learning_rate": 8.114754098360656e-05,
"loss": 3.1252,
"step": 100
},
{
"epoch": 0.042094402888831574,
"grad_norm": 1.1478595733642578,
"learning_rate": 8.278688524590165e-05,
"loss": 3.1093,
"step": 102
},
{
"epoch": 0.04291978333763219,
"grad_norm": 1.4830694198608398,
"learning_rate": 8.442622950819673e-05,
"loss": 3.0921,
"step": 104
},
{
"epoch": 0.04374516378643281,
"grad_norm": 1.4275153875350952,
"learning_rate": 8.606557377049181e-05,
"loss": 3.0861,
"step": 106
},
{
"epoch": 0.04457054423523343,
"grad_norm": 2.0077579021453857,
"learning_rate": 8.770491803278689e-05,
"loss": 3.0694,
"step": 108
},
{
"epoch": 0.04539592468403405,
"grad_norm": 1.533471703529358,
"learning_rate": 8.934426229508197e-05,
"loss": 3.0526,
"step": 110
},
{
"epoch": 0.046221305132834664,
"grad_norm": 1.394168496131897,
"learning_rate": 9.098360655737706e-05,
"loss": 3.0655,
"step": 112
},
{
"epoch": 0.047046685581635286,
"grad_norm": 1.6954408884048462,
"learning_rate": 9.262295081967214e-05,
"loss": 3.0338,
"step": 114
},
{
"epoch": 0.0478720660304359,
"grad_norm": 1.4712835550308228,
"learning_rate": 9.426229508196722e-05,
"loss": 3.0511,
"step": 116
},
{
"epoch": 0.048697446479236524,
"grad_norm": 1.473305344581604,
"learning_rate": 9.59016393442623e-05,
"loss": 3.031,
"step": 118
},
{
"epoch": 0.04952282692803714,
"grad_norm": 1.5957138538360596,
"learning_rate": 9.754098360655737e-05,
"loss": 3.003,
"step": 120
},
{
"epoch": 0.05034820737683776,
"grad_norm": 1.7283776998519897,
"learning_rate": 9.918032786885247e-05,
"loss": 3.0025,
"step": 122
},
{
"epoch": 0.051173587825638384,
"grad_norm": 1.286211609840393,
"learning_rate": 9.999995343827644e-05,
"loss": 3.0046,
"step": 124
},
{
"epoch": 0.051998968274439,
"grad_norm": 1.612631916999817,
"learning_rate": 9.99995809450083e-05,
"loss": 2.9935,
"step": 126
},
{
"epoch": 0.05282434872323962,
"grad_norm": 1.7912741899490356,
"learning_rate": 9.9998835961247e-05,
"loss": 3.0016,
"step": 128
},
{
"epoch": 0.053649729172040236,
"grad_norm": 1.7926831245422363,
"learning_rate": 9.999771849254263e-05,
"loss": 2.9718,
"step": 130
},
{
"epoch": 0.05447510962084086,
"grad_norm": 1.4038861989974976,
"learning_rate": 9.999622854722017e-05,
"loss": 2.9792,
"step": 132
},
{
"epoch": 0.055300490069641474,
"grad_norm": 1.7067828178405762,
"learning_rate": 9.99943661363795e-05,
"loss": 2.968,
"step": 134
},
{
"epoch": 0.056125870518442096,
"grad_norm": 1.5349268913269043,
"learning_rate": 9.999213127389536e-05,
"loss": 2.9373,
"step": 136
},
{
"epoch": 0.05695125096724271,
"grad_norm": 1.2763527631759644,
"learning_rate": 9.99895239764172e-05,
"loss": 2.9384,
"step": 138
},
{
"epoch": 0.057776631416043334,
"grad_norm": 1.3789610862731934,
"learning_rate": 9.998654426336905e-05,
"loss": 2.9416,
"step": 140
},
{
"epoch": 0.05860201186484395,
"grad_norm": 1.6175661087036133,
"learning_rate": 9.998319215694936e-05,
"loss": 2.9323,
"step": 142
},
{
"epoch": 0.05942739231364457,
"grad_norm": 1.5398344993591309,
"learning_rate": 9.997946768213095e-05,
"loss": 2.9334,
"step": 144
},
{
"epoch": 0.060252772762445186,
"grad_norm": 1.6788642406463623,
"learning_rate": 9.997537086666063e-05,
"loss": 2.9218,
"step": 146
},
{
"epoch": 0.06107815321124581,
"grad_norm": 1.4843031167984009,
"learning_rate": 9.997090174105919e-05,
"loss": 2.9366,
"step": 148
},
{
"epoch": 0.06190353366004643,
"grad_norm": 1.3358060121536255,
"learning_rate": 9.996606033862102e-05,
"loss": 2.9279,
"step": 150
},
{
"epoch": 0.06272891410884705,
"grad_norm": 1.9375636577606201,
"learning_rate": 9.996084669541397e-05,
"loss": 2.9202,
"step": 152
},
{
"epoch": 0.06355429455764766,
"grad_norm": 1.3849859237670898,
"learning_rate": 9.9955260850279e-05,
"loss": 2.895,
"step": 154
},
{
"epoch": 0.06437967500644828,
"grad_norm": 1.3628286123275757,
"learning_rate": 9.994930284482993e-05,
"loss": 2.8983,
"step": 156
},
{
"epoch": 0.0652050554552489,
"grad_norm": 1.2561815977096558,
"learning_rate": 9.994297272345319e-05,
"loss": 2.9089,
"step": 158
},
{
"epoch": 0.06603043590404953,
"grad_norm": 1.3705800771713257,
"learning_rate": 9.993627053330732e-05,
"loss": 2.878,
"step": 160
},
{
"epoch": 0.06685581635285014,
"grad_norm": 1.2955900430679321,
"learning_rate": 9.99291963243228e-05,
"loss": 2.8591,
"step": 162
},
{
"epoch": 0.06768119680165076,
"grad_norm": 1.231101155281067,
"learning_rate": 9.992175014920161e-05,
"loss": 2.8616,
"step": 164
},
{
"epoch": 0.06850657725045138,
"grad_norm": 1.1412620544433594,
"learning_rate": 9.991393206341677e-05,
"loss": 2.8353,
"step": 166
},
{
"epoch": 0.069331957699252,
"grad_norm": 1.102623701095581,
"learning_rate": 9.990574212521205e-05,
"loss": 2.8262,
"step": 168
},
{
"epoch": 0.07015733814805261,
"grad_norm": 1.0235016345977783,
"learning_rate": 9.98971803956014e-05,
"loss": 2.8581,
"step": 170
},
{
"epoch": 0.07098271859685323,
"grad_norm": 1.475123643875122,
"learning_rate": 9.988824693836864e-05,
"loss": 2.8709,
"step": 172
},
{
"epoch": 0.07180809904565386,
"grad_norm": 0.8155277371406555,
"learning_rate": 9.98789418200669e-05,
"loss": 2.8426,
"step": 174
},
{
"epoch": 0.07263347949445448,
"grad_norm": 1.3113749027252197,
"learning_rate": 9.98692651100181e-05,
"loss": 2.8017,
"step": 176
},
{
"epoch": 0.0734588599432551,
"grad_norm": 1.2450861930847168,
"learning_rate": 9.985921688031252e-05,
"loss": 2.8317,
"step": 178
},
{
"epoch": 0.07428424039205571,
"grad_norm": 1.304402470588684,
"learning_rate": 9.984879720580816e-05,
"loss": 2.8157,
"step": 180
},
{
"epoch": 0.07510962084085633,
"grad_norm": 1.1851410865783691,
"learning_rate": 9.983800616413026e-05,
"loss": 2.8245,
"step": 182
},
{
"epoch": 0.07593500128965695,
"grad_norm": 1.2967396974563599,
"learning_rate": 9.982684383567071e-05,
"loss": 2.8363,
"step": 184
},
{
"epoch": 0.07676038173845758,
"grad_norm": 1.2011407613754272,
"learning_rate": 9.981531030358746e-05,
"loss": 2.8142,
"step": 186
},
{
"epoch": 0.07758576218725818,
"grad_norm": 1.0165106058120728,
"learning_rate": 9.980340565380382e-05,
"loss": 2.7913,
"step": 188
},
{
"epoch": 0.0784111426360588,
"grad_norm": 1.3044579029083252,
"learning_rate": 9.979112997500792e-05,
"loss": 2.7805,
"step": 190
},
{
"epoch": 0.07923652308485943,
"grad_norm": 1.1849685907363892,
"learning_rate": 9.9778483358652e-05,
"loss": 2.7707,
"step": 192
},
{
"epoch": 0.08006190353366005,
"grad_norm": 0.9122027158737183,
"learning_rate": 9.976546589895175e-05,
"loss": 2.7777,
"step": 194
},
{
"epoch": 0.08088728398246066,
"grad_norm": 1.0830117464065552,
"learning_rate": 9.975207769288556e-05,
"loss": 2.8048,
"step": 196
},
{
"epoch": 0.08171266443126128,
"grad_norm": 1.1544275283813477,
"learning_rate": 9.973831884019387e-05,
"loss": 2.7761,
"step": 198
},
{
"epoch": 0.0825380448800619,
"grad_norm": 0.8355935215950012,
"learning_rate": 9.972418944337835e-05,
"loss": 2.7593,
"step": 200
},
{
"epoch": 0.08336342532886253,
"grad_norm": 1.203262209892273,
"learning_rate": 9.970968960770124e-05,
"loss": 2.7695,
"step": 202
},
{
"epoch": 0.08418880577766315,
"grad_norm": 1.23800790309906,
"learning_rate": 9.969481944118443e-05,
"loss": 2.7576,
"step": 204
},
{
"epoch": 0.08501418622646376,
"grad_norm": 0.8839966058731079,
"learning_rate": 9.96795790546088e-05,
"loss": 2.7442,
"step": 206
},
{
"epoch": 0.08583956667526438,
"grad_norm": 0.9399611949920654,
"learning_rate": 9.966396856151326e-05,
"loss": 2.7402,
"step": 208
},
{
"epoch": 0.086664947124065,
"grad_norm": 1.1721992492675781,
"learning_rate": 9.964798807819397e-05,
"loss": 2.7378,
"step": 210
},
{
"epoch": 0.08749032757286562,
"grad_norm": 0.9647835493087769,
"learning_rate": 9.963163772370352e-05,
"loss": 2.7256,
"step": 212
},
{
"epoch": 0.08831570802166623,
"grad_norm": 0.9155466556549072,
"learning_rate": 9.961491761984996e-05,
"loss": 2.7255,
"step": 214
},
{
"epoch": 0.08914108847046685,
"grad_norm": 0.9373721480369568,
"learning_rate": 9.959782789119592e-05,
"loss": 2.7544,
"step": 216
},
{
"epoch": 0.08996646891926748,
"grad_norm": 0.9547314643859863,
"learning_rate": 9.958036866505772e-05,
"loss": 2.7333,
"step": 218
},
{
"epoch": 0.0907918493680681,
"grad_norm": 1.0028138160705566,
"learning_rate": 9.956254007150432e-05,
"loss": 2.7232,
"step": 220
},
{
"epoch": 0.0916172298168687,
"grad_norm": 1.2652791738510132,
"learning_rate": 9.954434224335649e-05,
"loss": 2.7268,
"step": 222
},
{
"epoch": 0.09244261026566933,
"grad_norm": 1.1313235759735107,
"learning_rate": 9.952577531618574e-05,
"loss": 2.7417,
"step": 224
},
{
"epoch": 0.09326799071446995,
"grad_norm": 0.7514833211898804,
"learning_rate": 9.950683942831328e-05,
"loss": 2.6898,
"step": 226
},
{
"epoch": 0.09409337116327057,
"grad_norm": 0.9731917381286621,
"learning_rate": 9.948753472080907e-05,
"loss": 2.686,
"step": 228
},
{
"epoch": 0.0949187516120712,
"grad_norm": 0.8640966415405273,
"learning_rate": 9.946786133749071e-05,
"loss": 2.7168,
"step": 230
},
{
"epoch": 0.0957441320608718,
"grad_norm": 0.9116567969322205,
"learning_rate": 9.944781942492242e-05,
"loss": 2.7123,
"step": 232
},
{
"epoch": 0.09656951250967243,
"grad_norm": 1.0034291744232178,
"learning_rate": 9.942740913241386e-05,
"loss": 2.7146,
"step": 234
},
{
"epoch": 0.09739489295847305,
"grad_norm": 0.8208848237991333,
"learning_rate": 9.94066306120191e-05,
"loss": 2.6773,
"step": 236
},
{
"epoch": 0.09822027340727367,
"grad_norm": 0.8781367540359497,
"learning_rate": 9.938548401853547e-05,
"loss": 2.719,
"step": 238
},
{
"epoch": 0.09904565385607428,
"grad_norm": 0.7302896976470947,
"learning_rate": 9.93639695095024e-05,
"loss": 2.7011,
"step": 240
},
{
"epoch": 0.0998710343048749,
"grad_norm": 0.705086350440979,
"learning_rate": 9.934208724520024e-05,
"loss": 2.6648,
"step": 242
},
{
"epoch": 0.10069641475367552,
"grad_norm": 0.8350553512573242,
"learning_rate": 9.931983738864904e-05,
"loss": 2.687,
"step": 244
},
{
"epoch": 0.10152179520247614,
"grad_norm": 0.6524394154548645,
"learning_rate": 9.92972201056074e-05,
"loss": 2.7015,
"step": 246
},
{
"epoch": 0.10234717565127677,
"grad_norm": 0.6503209471702576,
"learning_rate": 9.927423556457121e-05,
"loss": 2.6148,
"step": 248
},
{
"epoch": 0.10317255610007738,
"grad_norm": 0.7506954073905945,
"learning_rate": 9.925088393677236e-05,
"loss": 2.6914,
"step": 250
},
{
"epoch": 0.103997936548878,
"grad_norm": 1.1561987400054932,
"learning_rate": 9.922716539617746e-05,
"loss": 2.6659,
"step": 252
},
{
"epoch": 0.10482331699767862,
"grad_norm": 1.0000964403152466,
"learning_rate": 9.920308011948665e-05,
"loss": 2.6626,
"step": 254
},
{
"epoch": 0.10564869744647924,
"grad_norm": 0.8899397850036621,
"learning_rate": 9.917862828613214e-05,
"loss": 2.6666,
"step": 256
},
{
"epoch": 0.10647407789527985,
"grad_norm": 1.1503660678863525,
"learning_rate": 9.915381007827698e-05,
"loss": 2.6395,
"step": 258
},
{
"epoch": 0.10729945834408047,
"grad_norm": 0.8070819973945618,
"learning_rate": 9.912862568081364e-05,
"loss": 2.6531,
"step": 260
},
{
"epoch": 0.1081248387928811,
"grad_norm": 0.8623407483100891,
"learning_rate": 9.910307528136266e-05,
"loss": 2.6588,
"step": 262
},
{
"epoch": 0.10895021924168172,
"grad_norm": 0.9573660492897034,
"learning_rate": 9.907715907027129e-05,
"loss": 2.6823,
"step": 264
},
{
"epoch": 0.10977559969048233,
"grad_norm": 1.0500940084457397,
"learning_rate": 9.905087724061195e-05,
"loss": 2.6545,
"step": 266
},
{
"epoch": 0.11060098013928295,
"grad_norm": 1.0520515441894531,
"learning_rate": 9.902422998818094e-05,
"loss": 2.6371,
"step": 268
},
{
"epoch": 0.11142636058808357,
"grad_norm": 0.9879215955734253,
"learning_rate": 9.899721751149688e-05,
"loss": 2.6474,
"step": 270
},
{
"epoch": 0.11225174103688419,
"grad_norm": 0.8972532749176025,
"learning_rate": 9.896984001179925e-05,
"loss": 2.6271,
"step": 272
},
{
"epoch": 0.11307712148568481,
"grad_norm": 0.6369883418083191,
"learning_rate": 9.894209769304696e-05,
"loss": 2.6054,
"step": 274
},
{
"epoch": 0.11390250193448542,
"grad_norm": 0.6478956937789917,
"learning_rate": 9.891399076191674e-05,
"loss": 2.6168,
"step": 276
},
{
"epoch": 0.11472788238328605,
"grad_norm": 0.8620642423629761,
"learning_rate": 9.888551942780162e-05,
"loss": 2.6313,
"step": 278
},
{
"epoch": 0.11555326283208667,
"grad_norm": 0.740717887878418,
"learning_rate": 9.885668390280941e-05,
"loss": 2.6307,
"step": 280
},
{
"epoch": 0.11637864328088729,
"grad_norm": 0.7513862252235413,
"learning_rate": 9.882748440176109e-05,
"loss": 2.625,
"step": 282
},
{
"epoch": 0.1172040237296879,
"grad_norm": 0.8409993052482605,
"learning_rate": 9.879792114218921e-05,
"loss": 2.6034,
"step": 284
},
{
"epoch": 0.11802940417848852,
"grad_norm": 0.8200739622116089,
"learning_rate": 9.876799434433628e-05,
"loss": 2.599,
"step": 286
},
{
"epoch": 0.11885478462728914,
"grad_norm": 0.9191763401031494,
"learning_rate": 9.873770423115314e-05,
"loss": 2.6168,
"step": 288
},
{
"epoch": 0.11968016507608976,
"grad_norm": 0.7739763855934143,
"learning_rate": 9.870705102829723e-05,
"loss": 2.6279,
"step": 290
},
{
"epoch": 0.12050554552489037,
"grad_norm": 0.6580247282981873,
"learning_rate": 9.867603496413103e-05,
"loss": 2.599,
"step": 292
},
{
"epoch": 0.121330925973691,
"grad_norm": 0.7197789549827576,
"learning_rate": 9.864465626972023e-05,
"loss": 2.5948,
"step": 294
},
{
"epoch": 0.12215630642249162,
"grad_norm": 0.9027787446975708,
"learning_rate": 9.861291517883213e-05,
"loss": 2.6058,
"step": 296
},
{
"epoch": 0.12298168687129224,
"grad_norm": 1.048640489578247,
"learning_rate": 9.858081192793378e-05,
"loss": 2.6128,
"step": 298
},
{
"epoch": 0.12380706732009286,
"grad_norm": 0.827551543712616,
"learning_rate": 9.85483467561903e-05,
"loss": 2.6058,
"step": 300
},
{
"epoch": 0.12463244776889347,
"grad_norm": 0.9751214981079102,
"learning_rate": 9.851551990546306e-05,
"loss": 2.57,
"step": 302
},
{
"epoch": 0.1254578282176941,
"grad_norm": 1.0783475637435913,
"learning_rate": 9.848233162030794e-05,
"loss": 2.6116,
"step": 304
},
{
"epoch": 0.1262832086664947,
"grad_norm": 0.8441977500915527,
"learning_rate": 9.84487821479734e-05,
"loss": 2.59,
"step": 306
},
{
"epoch": 0.12710858911529532,
"grad_norm": 1.0184507369995117,
"learning_rate": 9.841487173839873e-05,
"loss": 2.579,
"step": 308
},
{
"epoch": 0.12793396956409595,
"grad_norm": 0.9782202243804932,
"learning_rate": 9.838060064421217e-05,
"loss": 2.5996,
"step": 310
},
{
"epoch": 0.12875935001289657,
"grad_norm": 0.8052064180374146,
"learning_rate": 9.834596912072897e-05,
"loss": 2.594,
"step": 312
},
{
"epoch": 0.1295847304616972,
"grad_norm": 0.765544056892395,
"learning_rate": 9.831097742594958e-05,
"loss": 2.581,
"step": 314
},
{
"epoch": 0.1304101109104978,
"grad_norm": 0.8481123447418213,
"learning_rate": 9.827562582055765e-05,
"loss": 2.6006,
"step": 316
},
{
"epoch": 0.13123549135929843,
"grad_norm": 0.8604638576507568,
"learning_rate": 9.823991456791811e-05,
"loss": 2.5875,
"step": 318
},
{
"epoch": 0.13206087180809906,
"grad_norm": 0.6848525404930115,
"learning_rate": 9.820384393407525e-05,
"loss": 2.5788,
"step": 320
},
{
"epoch": 0.13288625225689968,
"grad_norm": 0.7860177159309387,
"learning_rate": 9.816741418775066e-05,
"loss": 2.5961,
"step": 322
},
{
"epoch": 0.13371163270570027,
"grad_norm": 0.7415710091590881,
"learning_rate": 9.813062560034134e-05,
"loss": 2.5805,
"step": 324
},
{
"epoch": 0.1345370131545009,
"grad_norm": 0.8323041200637817,
"learning_rate": 9.809347844591753e-05,
"loss": 2.5799,
"step": 326
},
{
"epoch": 0.13536239360330152,
"grad_norm": 0.9540163278579712,
"learning_rate": 9.805597300122081e-05,
"loss": 2.5704,
"step": 328
},
{
"epoch": 0.13618777405210214,
"grad_norm": 0.7344382405281067,
"learning_rate": 9.801810954566195e-05,
"loss": 2.5649,
"step": 330
},
{
"epoch": 0.13701315450090276,
"grad_norm": 0.7706190347671509,
"learning_rate": 9.797988836131884e-05,
"loss": 2.5583,
"step": 332
},
{
"epoch": 0.13783853494970338,
"grad_norm": 0.7073199152946472,
"learning_rate": 9.794130973293445e-05,
"loss": 2.5523,
"step": 334
},
{
"epoch": 0.138663915398504,
"grad_norm": 0.6726153492927551,
"learning_rate": 9.790237394791461e-05,
"loss": 2.5673,
"step": 336
},
{
"epoch": 0.13948929584730463,
"grad_norm": 0.6806092262268066,
"learning_rate": 9.786308129632598e-05,
"loss": 2.5748,
"step": 338
},
{
"epoch": 0.14031467629610522,
"grad_norm": 0.7212201952934265,
"learning_rate": 9.782343207089377e-05,
"loss": 2.5615,
"step": 340
},
{
"epoch": 0.14114005674490585,
"grad_norm": 0.7233206629753113,
"learning_rate": 9.778342656699964e-05,
"loss": 2.5509,
"step": 342
},
{
"epoch": 0.14196543719370647,
"grad_norm": 0.6308603882789612,
"learning_rate": 9.77430650826795e-05,
"loss": 2.5133,
"step": 344
},
{
"epoch": 0.1427908176425071,
"grad_norm": 0.676368772983551,
"learning_rate": 9.770234791862125e-05,
"loss": 2.5293,
"step": 346
},
{
"epoch": 0.1436161980913077,
"grad_norm": 0.687326192855835,
"learning_rate": 9.766127537816256e-05,
"loss": 2.531,
"step": 348
},
{
"epoch": 0.14444157854010833,
"grad_norm": 0.7347912788391113,
"learning_rate": 9.761984776728864e-05,
"loss": 2.5468,
"step": 350
},
{
"epoch": 0.14526695898890896,
"grad_norm": 0.5843812227249146,
"learning_rate": 9.757806539462985e-05,
"loss": 2.539,
"step": 352
},
{
"epoch": 0.14609233943770958,
"grad_norm": 0.7662450671195984,
"learning_rate": 9.753592857145957e-05,
"loss": 2.5297,
"step": 354
},
{
"epoch": 0.1469177198865102,
"grad_norm": 0.6813721656799316,
"learning_rate": 9.749343761169171e-05,
"loss": 2.5519,
"step": 356
},
{
"epoch": 0.1477431003353108,
"grad_norm": 0.7090803384780884,
"learning_rate": 9.745059283187857e-05,
"loss": 2.515,
"step": 358
},
{
"epoch": 0.14856848078411142,
"grad_norm": 0.5888863801956177,
"learning_rate": 9.74073945512082e-05,
"loss": 2.5622,
"step": 360
},
{
"epoch": 0.14939386123291204,
"grad_norm": 0.7369230389595032,
"learning_rate": 9.736384309150233e-05,
"loss": 2.5482,
"step": 362
},
{
"epoch": 0.15021924168171266,
"grad_norm": 0.6921555995941162,
"learning_rate": 9.731993877721377e-05,
"loss": 2.5304,
"step": 364
},
{
"epoch": 0.15104462213051328,
"grad_norm": 0.755409300327301,
"learning_rate": 9.727568193542403e-05,
"loss": 2.5214,
"step": 366
},
{
"epoch": 0.1518700025793139,
"grad_norm": 0.9340344071388245,
"learning_rate": 9.723107289584095e-05,
"loss": 2.5248,
"step": 368
},
{
"epoch": 0.15269538302811453,
"grad_norm": 0.9866952300071716,
"learning_rate": 9.718611199079617e-05,
"loss": 2.5281,
"step": 370
},
{
"epoch": 0.15352076347691515,
"grad_norm": 0.8538560271263123,
"learning_rate": 9.714079955524269e-05,
"loss": 2.5436,
"step": 372
},
{
"epoch": 0.15434614392571577,
"grad_norm": 0.6923696398735046,
"learning_rate": 9.709513592675236e-05,
"loss": 2.5431,
"step": 374
},
{
"epoch": 0.15517152437451637,
"grad_norm": 0.6311334371566772,
"learning_rate": 9.704912144551341e-05,
"loss": 2.5473,
"step": 376
},
{
"epoch": 0.155996904823317,
"grad_norm": 0.7558380961418152,
"learning_rate": 9.700275645432784e-05,
"loss": 2.4998,
"step": 378
},
{
"epoch": 0.1568222852721176,
"grad_norm": 0.8375957608222961,
"learning_rate": 9.695604129860889e-05,
"loss": 2.524,
"step": 380
},
{
"epoch": 0.15764766572091823,
"grad_norm": 0.9554900527000427,
"learning_rate": 9.690897632637852e-05,
"loss": 2.5184,
"step": 382
},
{
"epoch": 0.15847304616971886,
"grad_norm": 0.7582331299781799,
"learning_rate": 9.686156188826478e-05,
"loss": 2.5177,
"step": 384
},
{
"epoch": 0.15929842661851948,
"grad_norm": 0.8506788611412048,
"learning_rate": 9.681379833749915e-05,
"loss": 2.4866,
"step": 386
},
{
"epoch": 0.1601238070673201,
"grad_norm": 0.738979697227478,
"learning_rate": 9.676568602991399e-05,
"loss": 2.5286,
"step": 388
},
{
"epoch": 0.16094918751612072,
"grad_norm": 0.6935485005378723,
"learning_rate": 9.671722532393985e-05,
"loss": 2.532,
"step": 390
},
{
"epoch": 0.16177456796492132,
"grad_norm": 0.7109572887420654,
"learning_rate": 9.666841658060282e-05,
"loss": 2.5126,
"step": 392
},
{
"epoch": 0.16259994841372194,
"grad_norm": 0.8224465847015381,
"learning_rate": 9.661926016352178e-05,
"loss": 2.5062,
"step": 394
},
{
"epoch": 0.16342532886252256,
"grad_norm": 0.7705041766166687,
"learning_rate": 9.656975643890578e-05,
"loss": 2.5173,
"step": 396
},
{
"epoch": 0.16425070931132318,
"grad_norm": 0.6699206829071045,
"learning_rate": 9.651990577555122e-05,
"loss": 2.5047,
"step": 398
},
{
"epoch": 0.1650760897601238,
"grad_norm": 0.6773229241371155,
"learning_rate": 9.64697085448392e-05,
"loss": 2.5437,
"step": 400
},
{
"epoch": 0.16590147020892443,
"grad_norm": 0.6482560634613037,
"learning_rate": 9.641916512073268e-05,
"loss": 2.4866,
"step": 402
},
{
"epoch": 0.16672685065772505,
"grad_norm": 0.5687413811683655,
"learning_rate": 9.636827587977368e-05,
"loss": 2.5004,
"step": 404
},
{
"epoch": 0.16755223110652567,
"grad_norm": 0.591502845287323,
"learning_rate": 9.63170412010806e-05,
"loss": 2.4925,
"step": 406
},
{
"epoch": 0.1683776115553263,
"grad_norm": 0.7202515602111816,
"learning_rate": 9.626546146634523e-05,
"loss": 2.4834,
"step": 408
},
{
"epoch": 0.1692029920041269,
"grad_norm": 0.5923997163772583,
"learning_rate": 9.621353705982998e-05,
"loss": 2.4832,
"step": 410
},
{
"epoch": 0.1700283724529275,
"grad_norm": 0.519095242023468,
"learning_rate": 9.616126836836508e-05,
"loss": 2.4909,
"step": 412
},
{
"epoch": 0.17085375290172813,
"grad_norm": 0.6338533759117126,
"learning_rate": 9.61086557813456e-05,
"loss": 2.5027,
"step": 414
},
{
"epoch": 0.17167913335052876,
"grad_norm": 0.5483947992324829,
"learning_rate": 9.60556996907286e-05,
"loss": 2.4864,
"step": 416
},
{
"epoch": 0.17250451379932938,
"grad_norm": 0.6210249662399292,
"learning_rate": 9.600240049103017e-05,
"loss": 2.4987,
"step": 418
},
{
"epoch": 0.17332989424813,
"grad_norm": 0.6927972435951233,
"learning_rate": 9.594875857932258e-05,
"loss": 2.4987,
"step": 420
},
{
"epoch": 0.17415527469693062,
"grad_norm": 0.6685944199562073,
"learning_rate": 9.589477435523118e-05,
"loss": 2.4794,
"step": 422
},
{
"epoch": 0.17498065514573125,
"grad_norm": 0.89150470495224,
"learning_rate": 9.584044822093157e-05,
"loss": 2.467,
"step": 424
},
{
"epoch": 0.17580603559453187,
"grad_norm": 0.8726872205734253,
"learning_rate": 9.57857805811465e-05,
"loss": 2.4917,
"step": 426
},
{
"epoch": 0.17663141604333246,
"grad_norm": 0.8355669379234314,
"learning_rate": 9.573077184314294e-05,
"loss": 2.5095,
"step": 428
},
{
"epoch": 0.17745679649213308,
"grad_norm": 0.7523061037063599,
"learning_rate": 9.567542241672891e-05,
"loss": 2.4695,
"step": 430
},
{
"epoch": 0.1782821769409337,
"grad_norm": 0.5890743732452393,
"learning_rate": 9.561973271425061e-05,
"loss": 2.4748,
"step": 432
},
{
"epoch": 0.17910755738973433,
"grad_norm": 0.6020349860191345,
"learning_rate": 9.55637031505892e-05,
"loss": 2.4746,
"step": 434
},
{
"epoch": 0.17993293783853495,
"grad_norm": 0.6291228532791138,
"learning_rate": 9.550733414315776e-05,
"loss": 2.4537,
"step": 436
},
{
"epoch": 0.18075831828733557,
"grad_norm": 0.6267942786216736,
"learning_rate": 9.545062611189821e-05,
"loss": 2.4663,
"step": 438
},
{
"epoch": 0.1815836987361362,
"grad_norm": 0.7870015501976013,
"learning_rate": 9.539357947927815e-05,
"loss": 2.4794,
"step": 440
},
{
"epoch": 0.18240907918493682,
"grad_norm": 0.818481981754303,
"learning_rate": 9.53361946702877e-05,
"loss": 2.4538,
"step": 442
},
{
"epoch": 0.1832344596337374,
"grad_norm": 0.6101433634757996,
"learning_rate": 9.527847211243635e-05,
"loss": 2.5041,
"step": 444
},
{
"epoch": 0.18405984008253803,
"grad_norm": 0.7772427201271057,
"learning_rate": 9.52204122357498e-05,
"loss": 2.4497,
"step": 446
},
{
"epoch": 0.18488522053133866,
"grad_norm": 0.6459339261054993,
"learning_rate": 9.516201547276668e-05,
"loss": 2.4636,
"step": 448
},
{
"epoch": 0.18571060098013928,
"grad_norm": 0.6417956948280334,
"learning_rate": 9.510328225853549e-05,
"loss": 2.4489,
"step": 450
},
{
"epoch": 0.1865359814289399,
"grad_norm": 0.5950794219970703,
"learning_rate": 9.50442130306111e-05,
"loss": 2.4418,
"step": 452
},
{
"epoch": 0.18736136187774052,
"grad_norm": 0.4874417185783386,
"learning_rate": 9.498480822905176e-05,
"loss": 2.4458,
"step": 454
},
{
"epoch": 0.18818674232654115,
"grad_norm": 0.6025642156600952,
"learning_rate": 9.492506829641566e-05,
"loss": 2.4582,
"step": 456
},
{
"epoch": 0.18901212277534177,
"grad_norm": 0.5527782440185547,
"learning_rate": 9.486499367775764e-05,
"loss": 2.4732,
"step": 458
},
{
"epoch": 0.1898375032241424,
"grad_norm": 0.5585253238677979,
"learning_rate": 9.480458482062594e-05,
"loss": 2.4494,
"step": 460
},
{
"epoch": 0.19066288367294298,
"grad_norm": 0.7675066590309143,
"learning_rate": 9.474384217505883e-05,
"loss": 2.4404,
"step": 462
},
{
"epoch": 0.1914882641217436,
"grad_norm": 0.6983161568641663,
"learning_rate": 9.468276619358129e-05,
"loss": 2.4409,
"step": 464
},
{
"epoch": 0.19231364457054423,
"grad_norm": 0.6638076305389404,
"learning_rate": 9.462135733120156e-05,
"loss": 2.4746,
"step": 466
},
{
"epoch": 0.19313902501934485,
"grad_norm": 0.7150386571884155,
"learning_rate": 9.455961604540784e-05,
"loss": 2.4841,
"step": 468
},
{
"epoch": 0.19396440546814547,
"grad_norm": 0.6076739430427551,
"learning_rate": 9.449754279616481e-05,
"loss": 2.4154,
"step": 470
},
{
"epoch": 0.1947897859169461,
"grad_norm": 0.6565660238265991,
"learning_rate": 9.443513804591026e-05,
"loss": 2.4033,
"step": 472
},
{
"epoch": 0.19561516636574672,
"grad_norm": 0.5531965494155884,
"learning_rate": 9.43724022595516e-05,
"loss": 2.4344,
"step": 474
},
{
"epoch": 0.19644054681454734,
"grad_norm": 0.6353370547294617,
"learning_rate": 9.430933590446244e-05,
"loss": 2.4631,
"step": 476
},
{
"epoch": 0.19726592726334796,
"grad_norm": 0.6833090782165527,
"learning_rate": 9.424593945047906e-05,
"loss": 2.4402,
"step": 478
},
{
"epoch": 0.19809130771214856,
"grad_norm": 0.5862318277359009,
"learning_rate": 9.418221336989695e-05,
"loss": 2.4308,
"step": 480
},
{
"epoch": 0.19891668816094918,
"grad_norm": 0.6512525081634521,
"learning_rate": 9.411815813746726e-05,
"loss": 2.4239,
"step": 482
},
{
"epoch": 0.1997420686097498,
"grad_norm": 0.6480604410171509,
"learning_rate": 9.405377423039331e-05,
"loss": 2.4211,
"step": 484
},
{
"epoch": 0.20056744905855042,
"grad_norm": 0.5605840682983398,
"learning_rate": 9.398906212832699e-05,
"loss": 2.4346,
"step": 486
},
{
"epoch": 0.20139282950735105,
"grad_norm": 0.6704816818237305,
"learning_rate": 9.392402231336518e-05,
"loss": 2.4309,
"step": 488
},
{
"epoch": 0.20221820995615167,
"grad_norm": 1.019185185432434,
"learning_rate": 9.38586552700462e-05,
"loss": 2.4341,
"step": 490
},
{
"epoch": 0.2030435904049523,
"grad_norm": 0.816562294960022,
"learning_rate": 9.379296148534619e-05,
"loss": 2.4389,
"step": 492
},
{
"epoch": 0.2038689708537529,
"grad_norm": 0.8160488605499268,
"learning_rate": 9.372694144867544e-05,
"loss": 2.4259,
"step": 494
},
{
"epoch": 0.20469435130255353,
"grad_norm": 0.6071799993515015,
"learning_rate": 9.36605956518748e-05,
"loss": 2.4479,
"step": 496
},
{
"epoch": 0.20551973175135413,
"grad_norm": 0.5928800106048584,
"learning_rate": 9.359392458921198e-05,
"loss": 2.4467,
"step": 498
},
{
"epoch": 0.20634511220015475,
"grad_norm": 0.6091005802154541,
"learning_rate": 9.352692875737787e-05,
"loss": 2.4204,
"step": 500
},
{
"epoch": 0.20717049264895537,
"grad_norm": 0.6261982917785645,
"learning_rate": 9.34596086554829e-05,
"loss": 2.3813,
"step": 502
},
{
"epoch": 0.207995873097756,
"grad_norm": 0.597626805305481,
"learning_rate": 9.339196478505321e-05,
"loss": 2.4288,
"step": 504
},
{
"epoch": 0.20882125354655662,
"grad_norm": 0.6542629599571228,
"learning_rate": 9.332399765002698e-05,
"loss": 2.4192,
"step": 506
},
{
"epoch": 0.20964663399535724,
"grad_norm": 0.5791048407554626,
"learning_rate": 9.32557077567507e-05,
"loss": 2.3969,
"step": 508
},
{
"epoch": 0.21047201444415786,
"grad_norm": 0.7887916564941406,
"learning_rate": 9.318709561397537e-05,
"loss": 2.4048,
"step": 510
},
{
"epoch": 0.21129739489295848,
"grad_norm": 0.5982603430747986,
"learning_rate": 9.311816173285268e-05,
"loss": 2.4113,
"step": 512
},
{
"epoch": 0.21212277534175908,
"grad_norm": 0.6093983054161072,
"learning_rate": 9.304890662693123e-05,
"loss": 2.4111,
"step": 514
},
{
"epoch": 0.2129481557905597,
"grad_norm": 0.6429126858711243,
"learning_rate": 9.297933081215273e-05,
"loss": 2.4279,
"step": 516
},
{
"epoch": 0.21377353623936032,
"grad_norm": 0.5441975593566895,
"learning_rate": 9.29094348068481e-05,
"loss": 2.4287,
"step": 518
},
{
"epoch": 0.21459891668816095,
"grad_norm": 0.635387659072876,
"learning_rate": 9.283921913173368e-05,
"loss": 2.43,
"step": 520
},
{
"epoch": 0.21542429713696157,
"grad_norm": 0.5462561845779419,
"learning_rate": 9.276868430990726e-05,
"loss": 2.4366,
"step": 522
},
{
"epoch": 0.2162496775857622,
"grad_norm": 0.53558748960495,
"learning_rate": 9.269783086684428e-05,
"loss": 2.4123,
"step": 524
},
{
"epoch": 0.2170750580345628,
"grad_norm": 0.6640864014625549,
"learning_rate": 9.262665933039381e-05,
"loss": 2.4034,
"step": 526
},
{
"epoch": 0.21790043848336343,
"grad_norm": 0.6774006485939026,
"learning_rate": 9.255517023077472e-05,
"loss": 2.4487,
"step": 528
},
{
"epoch": 0.21872581893216406,
"grad_norm": 0.7681392431259155,
"learning_rate": 9.248336410057168e-05,
"loss": 2.4377,
"step": 530
},
{
"epoch": 0.21955119938096465,
"grad_norm": 0.6729565262794495,
"learning_rate": 9.24112414747312e-05,
"loss": 2.4305,
"step": 532
},
{
"epoch": 0.22037657982976527,
"grad_norm": 0.5404065847396851,
"learning_rate": 9.233880289055761e-05,
"loss": 2.3878,
"step": 534
},
{
"epoch": 0.2212019602785659,
"grad_norm": 0.5672969818115234,
"learning_rate": 9.226604888770911e-05,
"loss": 2.4138,
"step": 536
},
{
"epoch": 0.22202734072736652,
"grad_norm": 0.5919613838195801,
"learning_rate": 9.219298000819376e-05,
"loss": 2.4219,
"step": 538
},
{
"epoch": 0.22285272117616714,
"grad_norm": 0.6129051446914673,
"learning_rate": 9.211959679636535e-05,
"loss": 2.3922,
"step": 540
},
{
"epoch": 0.22367810162496776,
"grad_norm": 0.5665661096572876,
"learning_rate": 9.204589979891946e-05,
"loss": 2.4229,
"step": 542
},
{
"epoch": 0.22450348207376838,
"grad_norm": 0.6748535633087158,
"learning_rate": 9.197188956488931e-05,
"loss": 2.3801,
"step": 544
},
{
"epoch": 0.225328862522569,
"grad_norm": 0.666181743144989,
"learning_rate": 9.189756664564167e-05,
"loss": 2.3888,
"step": 546
},
{
"epoch": 0.22615424297136963,
"grad_norm": 0.6297335028648376,
"learning_rate": 9.182293159487281e-05,
"loss": 2.3812,
"step": 548
},
{
"epoch": 0.22697962342017022,
"grad_norm": 0.6825816631317139,
"learning_rate": 9.174798496860433e-05,
"loss": 2.3837,
"step": 550
},
{
"epoch": 0.22780500386897085,
"grad_norm": 0.7759227156639099,
"learning_rate": 9.167272732517903e-05,
"loss": 2.3769,
"step": 552
},
{
"epoch": 0.22863038431777147,
"grad_norm": 0.6477057933807373,
"learning_rate": 9.159715922525673e-05,
"loss": 2.3852,
"step": 554
},
{
"epoch": 0.2294557647665721,
"grad_norm": 0.8262442946434021,
"learning_rate": 9.152128123181013e-05,
"loss": 2.3868,
"step": 556
},
{
"epoch": 0.2302811452153727,
"grad_norm": 0.6992378830909729,
"learning_rate": 9.14450939101206e-05,
"loss": 2.3896,
"step": 558
},
{
"epoch": 0.23110652566417333,
"grad_norm": 0.6080542206764221,
"learning_rate": 9.136859782777394e-05,
"loss": 2.3806,
"step": 560
},
{
"epoch": 0.23193190611297396,
"grad_norm": 0.7257338166236877,
"learning_rate": 9.129179355465621e-05,
"loss": 2.4114,
"step": 562
},
{
"epoch": 0.23275728656177458,
"grad_norm": 0.6741151213645935,
"learning_rate": 9.121468166294945e-05,
"loss": 2.3753,
"step": 564
},
{
"epoch": 0.23358266701057517,
"grad_norm": 0.6519246101379395,
"learning_rate": 9.113726272712734e-05,
"loss": 2.3937,
"step": 566
},
{
"epoch": 0.2344080474593758,
"grad_norm": 0.7125470042228699,
"learning_rate": 9.105953732395116e-05,
"loss": 2.4047,
"step": 568
},
{
"epoch": 0.23523342790817642,
"grad_norm": 0.5559272170066833,
"learning_rate": 9.098150603246517e-05,
"loss": 2.3927,
"step": 570
},
{
"epoch": 0.23605880835697704,
"grad_norm": 0.5721775889396667,
"learning_rate": 9.090316943399255e-05,
"loss": 2.3992,
"step": 572
},
{
"epoch": 0.23688418880577766,
"grad_norm": 0.5262630581855774,
"learning_rate": 9.082452811213095e-05,
"loss": 2.3898,
"step": 574
},
{
"epoch": 0.23770956925457828,
"grad_norm": 0.5832549333572388,
"learning_rate": 9.074558265274819e-05,
"loss": 2.3943,
"step": 576
},
{
"epoch": 0.2385349497033789,
"grad_norm": 0.5760109424591064,
"learning_rate": 9.066633364397786e-05,
"loss": 2.4223,
"step": 578
},
{
"epoch": 0.23936033015217953,
"grad_norm": 0.5974195003509521,
"learning_rate": 9.058678167621493e-05,
"loss": 2.402,
"step": 580
},
{
"epoch": 0.24018571060098015,
"grad_norm": 0.5613354444503784,
"learning_rate": 9.050692734211142e-05,
"loss": 2.3602,
"step": 582
},
{
"epoch": 0.24101109104978075,
"grad_norm": 0.6307066082954407,
"learning_rate": 9.042677123657191e-05,
"loss": 2.4034,
"step": 584
},
{
"epoch": 0.24183647149858137,
"grad_norm": 0.7120583057403564,
"learning_rate": 9.034631395674917e-05,
"loss": 2.3909,
"step": 586
},
{
"epoch": 0.242661851947382,
"grad_norm": 0.7483564615249634,
"learning_rate": 9.026555610203964e-05,
"loss": 2.4026,
"step": 588
},
{
"epoch": 0.2434872323961826,
"grad_norm": 0.5825770497322083,
"learning_rate": 9.018449827407905e-05,
"loss": 2.3736,
"step": 590
},
{
"epoch": 0.24431261284498323,
"grad_norm": 0.5669400095939636,
"learning_rate": 9.010314107673783e-05,
"loss": 2.376,
"step": 592
},
{
"epoch": 0.24513799329378386,
"grad_norm": 0.6108945608139038,
"learning_rate": 9.002148511611675e-05,
"loss": 2.3765,
"step": 594
},
{
"epoch": 0.24596337374258448,
"grad_norm": 0.499024361371994,
"learning_rate": 8.993953100054224e-05,
"loss": 2.4033,
"step": 596
},
{
"epoch": 0.2467887541913851,
"grad_norm": 0.4951154291629791,
"learning_rate": 8.985727934056207e-05,
"loss": 2.3808,
"step": 598
},
{
"epoch": 0.24761413464018572,
"grad_norm": 0.4967551529407501,
"learning_rate": 8.977473074894052e-05,
"loss": 2.3749,
"step": 600
},
{
"epoch": 0.24843951508898632,
"grad_norm": 0.6024683117866516,
"learning_rate": 8.969188584065412e-05,
"loss": 2.3745,
"step": 602
},
{
"epoch": 0.24926489553778694,
"grad_norm": 0.5306781530380249,
"learning_rate": 8.960874523288683e-05,
"loss": 2.3824,
"step": 604
},
{
"epoch": 0.25009027598658756,
"grad_norm": 0.5470788478851318,
"learning_rate": 8.952530954502557e-05,
"loss": 2.3828,
"step": 606
},
{
"epoch": 0.2509156564353882,
"grad_norm": 0.5227565765380859,
"learning_rate": 8.944157939865556e-05,
"loss": 2.3512,
"step": 608
},
{
"epoch": 0.2517410368841888,
"grad_norm": 0.5495042204856873,
"learning_rate": 8.935755541755569e-05,
"loss": 2.3553,
"step": 610
},
{
"epoch": 0.2525664173329894,
"grad_norm": 0.554063081741333,
"learning_rate": 8.927323822769386e-05,
"loss": 2.3492,
"step": 612
},
{
"epoch": 0.25339179778179005,
"grad_norm": 0.596449077129364,
"learning_rate": 8.918862845722243e-05,
"loss": 2.3708,
"step": 614
},
{
"epoch": 0.25421717823059065,
"grad_norm": 0.5647444128990173,
"learning_rate": 8.910372673647336e-05,
"loss": 2.3716,
"step": 616
},
{
"epoch": 0.2550425586793913,
"grad_norm": 0.6036911606788635,
"learning_rate": 8.901853369795361e-05,
"loss": 2.3728,
"step": 618
},
{
"epoch": 0.2558679391281919,
"grad_norm": 0.6191892027854919,
"learning_rate": 8.893304997634045e-05,
"loss": 2.36,
"step": 620
},
{
"epoch": 0.25669331957699254,
"grad_norm": 0.5799134373664856,
"learning_rate": 8.884727620847669e-05,
"loss": 2.3686,
"step": 622
},
{
"epoch": 0.25751870002579313,
"grad_norm": 0.6006870865821838,
"learning_rate": 8.876121303336596e-05,
"loss": 2.3705,
"step": 624
},
{
"epoch": 0.2583440804745938,
"grad_norm": 0.6628080606460571,
"learning_rate": 8.867486109216789e-05,
"loss": 2.3723,
"step": 626
},
{
"epoch": 0.2591694609233944,
"grad_norm": 0.6496407389640808,
"learning_rate": 8.858822102819347e-05,
"loss": 2.3438,
"step": 628
},
{
"epoch": 0.259994841372195,
"grad_norm": 0.5622187256813049,
"learning_rate": 8.850129348690004e-05,
"loss": 2.3729,
"step": 630
},
{
"epoch": 0.2608202218209956,
"grad_norm": 0.6995213031768799,
"learning_rate": 8.841407911588675e-05,
"loss": 2.3889,
"step": 632
},
{
"epoch": 0.2616456022697962,
"grad_norm": 0.6655398607254028,
"learning_rate": 8.832657856488949e-05,
"loss": 2.3803,
"step": 634
},
{
"epoch": 0.26247098271859687,
"grad_norm": 0.7044237852096558,
"learning_rate": 8.82387924857762e-05,
"loss": 2.3709,
"step": 636
},
{
"epoch": 0.26329636316739746,
"grad_norm": 0.6213721036911011,
"learning_rate": 8.815072153254195e-05,
"loss": 2.3698,
"step": 638
},
{
"epoch": 0.2641217436161981,
"grad_norm": 0.6705930233001709,
"learning_rate": 8.806236636130411e-05,
"loss": 2.329,
"step": 640
},
{
"epoch": 0.2649471240649987,
"grad_norm": 0.4926806688308716,
"learning_rate": 8.797372763029742e-05,
"loss": 2.3431,
"step": 642
},
{
"epoch": 0.26577250451379936,
"grad_norm": 0.5596938729286194,
"learning_rate": 8.78848059998691e-05,
"loss": 2.3888,
"step": 644
},
{
"epoch": 0.26659788496259995,
"grad_norm": 0.4732595980167389,
"learning_rate": 8.779560213247395e-05,
"loss": 2.3267,
"step": 646
},
{
"epoch": 0.26742326541140055,
"grad_norm": 0.5391475558280945,
"learning_rate": 8.770611669266938e-05,
"loss": 2.3869,
"step": 648
},
{
"epoch": 0.2682486458602012,
"grad_norm": 0.5030598640441895,
"learning_rate": 8.76163503471105e-05,
"loss": 2.3535,
"step": 650
},
{
"epoch": 0.2690740263090018,
"grad_norm": 0.6067845225334167,
"learning_rate": 8.752630376454511e-05,
"loss": 2.3711,
"step": 652
},
{
"epoch": 0.26989940675780244,
"grad_norm": 0.5053196549415588,
"learning_rate": 8.743597761580877e-05,
"loss": 2.3643,
"step": 654
},
{
"epoch": 0.27072478720660303,
"grad_norm": 0.5403187274932861,
"learning_rate": 8.734537257381973e-05,
"loss": 2.3531,
"step": 656
},
{
"epoch": 0.2715501676554037,
"grad_norm": 0.498923659324646,
"learning_rate": 8.7254489313574e-05,
"loss": 2.3423,
"step": 658
},
{
"epoch": 0.2723755481042043,
"grad_norm": 0.5593265295028687,
"learning_rate": 8.716332851214024e-05,
"loss": 2.3548,
"step": 660
},
{
"epoch": 0.2732009285530049,
"grad_norm": 0.5866305232048035,
"learning_rate": 8.707189084865481e-05,
"loss": 2.3677,
"step": 662
},
{
"epoch": 0.2740263090018055,
"grad_norm": 0.5718086957931519,
"learning_rate": 8.698017700431662e-05,
"loss": 2.3416,
"step": 664
},
{
"epoch": 0.2748516894506061,
"grad_norm": 0.6683026552200317,
"learning_rate": 8.688818766238208e-05,
"loss": 2.3658,
"step": 666
},
{
"epoch": 0.27567706989940677,
"grad_norm": 0.682115912437439,
"learning_rate": 8.679592350816007e-05,
"loss": 2.3287,
"step": 668
},
{
"epoch": 0.27650245034820736,
"grad_norm": 0.6516245007514954,
"learning_rate": 8.670338522900672e-05,
"loss": 2.3095,
"step": 670
},
{
"epoch": 0.277327830797008,
"grad_norm": 0.5818968415260315,
"learning_rate": 8.66105735143204e-05,
"loss": 2.3354,
"step": 672
},
{
"epoch": 0.2781532112458086,
"grad_norm": 0.5822069644927979,
"learning_rate": 8.651748905553656e-05,
"loss": 2.3402,
"step": 674
},
{
"epoch": 0.27897859169460926,
"grad_norm": 0.5988590717315674,
"learning_rate": 8.642413254612251e-05,
"loss": 2.3488,
"step": 676
},
{
"epoch": 0.27980397214340985,
"grad_norm": 0.5264620184898376,
"learning_rate": 8.633050468157234e-05,
"loss": 2.3336,
"step": 678
},
{
"epoch": 0.28062935259221045,
"grad_norm": 0.5173693895339966,
"learning_rate": 8.623660615940165e-05,
"loss": 2.3201,
"step": 680
},
{
"epoch": 0.2814547330410111,
"grad_norm": 0.5370919108390808,
"learning_rate": 8.61424376791425e-05,
"loss": 2.353,
"step": 682
},
{
"epoch": 0.2822801134898117,
"grad_norm": 0.5648570656776428,
"learning_rate": 8.604799994233798e-05,
"loss": 2.3517,
"step": 684
},
{
"epoch": 0.28310549393861234,
"grad_norm": 0.5215715169906616,
"learning_rate": 8.595329365253719e-05,
"loss": 2.3333,
"step": 686
},
{
"epoch": 0.28393087438741293,
"grad_norm": 0.6153488755226135,
"learning_rate": 8.585831951528991e-05,
"loss": 2.3617,
"step": 688
},
{
"epoch": 0.2847562548362136,
"grad_norm": 0.6054225564002991,
"learning_rate": 8.576307823814132e-05,
"loss": 2.3149,
"step": 690
},
{
"epoch": 0.2855816352850142,
"grad_norm": 0.5253807902336121,
"learning_rate": 8.566757053062678e-05,
"loss": 2.3114,
"step": 692
},
{
"epoch": 0.28640701573381483,
"grad_norm": 0.5196321606636047,
"learning_rate": 8.55717971042665e-05,
"loss": 2.3275,
"step": 694
},
{
"epoch": 0.2872323961826154,
"grad_norm": 0.5359232425689697,
"learning_rate": 8.54757586725603e-05,
"loss": 2.3108,
"step": 696
},
{
"epoch": 0.288057776631416,
"grad_norm": 0.5835343599319458,
"learning_rate": 8.537945595098222e-05,
"loss": 2.3423,
"step": 698
},
{
"epoch": 0.28888315708021667,
"grad_norm": 0.49723029136657715,
"learning_rate": 8.528288965697523e-05,
"loss": 2.2839,
"step": 700
},
{
"epoch": 0.28970853752901726,
"grad_norm": 0.5888681411743164,
"learning_rate": 8.518606050994591e-05,
"loss": 2.3485,
"step": 702
},
{
"epoch": 0.2905339179778179,
"grad_norm": 0.5658313632011414,
"learning_rate": 8.508896923125901e-05,
"loss": 2.359,
"step": 704
},
{
"epoch": 0.2913592984266185,
"grad_norm": 0.5612741112709045,
"learning_rate": 8.499161654423219e-05,
"loss": 2.3482,
"step": 706
},
{
"epoch": 0.29218467887541916,
"grad_norm": 0.51905757188797,
"learning_rate": 8.48940031741305e-05,
"loss": 2.3349,
"step": 708
},
{
"epoch": 0.29301005932421975,
"grad_norm": 0.5525624752044678,
"learning_rate": 8.479612984816112e-05,
"loss": 2.3318,
"step": 710
},
{
"epoch": 0.2938354397730204,
"grad_norm": 0.6043487191200256,
"learning_rate": 8.469799729546781e-05,
"loss": 2.3399,
"step": 712
},
{
"epoch": 0.294660820221821,
"grad_norm": 0.5291466116905212,
"learning_rate": 8.459960624712556e-05,
"loss": 2.3175,
"step": 714
},
{
"epoch": 0.2954862006706216,
"grad_norm": 0.5733122229576111,
"learning_rate": 8.450095743613512e-05,
"loss": 2.3366,
"step": 716
},
{
"epoch": 0.29631158111942224,
"grad_norm": 0.5417082905769348,
"learning_rate": 8.440205159741752e-05,
"loss": 2.3335,
"step": 718
},
{
"epoch": 0.29713696156822283,
"grad_norm": 0.565000593662262,
"learning_rate": 8.430288946780865e-05,
"loss": 2.3384,
"step": 720
},
{
"epoch": 0.2979623420170235,
"grad_norm": 0.6104756593704224,
"learning_rate": 8.420347178605367e-05,
"loss": 2.3587,
"step": 722
},
{
"epoch": 0.2987877224658241,
"grad_norm": 0.5492483973503113,
"learning_rate": 8.410379929280168e-05,
"loss": 2.3169,
"step": 724
},
{
"epoch": 0.29961310291462473,
"grad_norm": 0.5351945161819458,
"learning_rate": 8.400387273059998e-05,
"loss": 2.3195,
"step": 726
},
{
"epoch": 0.3004384833634253,
"grad_norm": 0.5342651605606079,
"learning_rate": 8.39036928438887e-05,
"loss": 2.2912,
"step": 728
},
{
"epoch": 0.301263863812226,
"grad_norm": 0.5106682181358337,
"learning_rate": 8.380326037899522e-05,
"loss": 2.3283,
"step": 730
},
{
"epoch": 0.30208924426102657,
"grad_norm": 0.5767691731452942,
"learning_rate": 8.370257608412857e-05,
"loss": 2.3286,
"step": 732
},
{
"epoch": 0.30291462470982716,
"grad_norm": 0.54031902551651,
"learning_rate": 8.360164070937389e-05,
"loss": 2.3243,
"step": 734
},
{
"epoch": 0.3037400051586278,
"grad_norm": 0.6625233888626099,
"learning_rate": 8.350045500668681e-05,
"loss": 2.3232,
"step": 736
},
{
"epoch": 0.3045653856074284,
"grad_norm": 0.6196743845939636,
"learning_rate": 8.339901972988795e-05,
"loss": 2.3216,
"step": 738
},
{
"epoch": 0.30539076605622906,
"grad_norm": 0.5990370512008667,
"learning_rate": 8.329733563465711e-05,
"loss": 2.3401,
"step": 740
},
{
"epoch": 0.30621614650502965,
"grad_norm": 0.5713345408439636,
"learning_rate": 8.319540347852786e-05,
"loss": 2.3416,
"step": 742
},
{
"epoch": 0.3070415269538303,
"grad_norm": 0.5813875198364258,
"learning_rate": 8.30932240208817e-05,
"loss": 2.2979,
"step": 744
},
{
"epoch": 0.3078669074026309,
"grad_norm": 0.6877428889274597,
"learning_rate": 8.299079802294258e-05,
"loss": 2.317,
"step": 746
},
{
"epoch": 0.30869228785143155,
"grad_norm": 0.5318434834480286,
"learning_rate": 8.288812624777109e-05,
"loss": 2.2947,
"step": 748
},
{
"epoch": 0.30951766830023214,
"grad_norm": 0.5925495624542236,
"learning_rate": 8.278520946025884e-05,
"loss": 2.3114,
"step": 750
},
{
"epoch": 0.31034304874903273,
"grad_norm": 0.5941009521484375,
"learning_rate": 8.268204842712278e-05,
"loss": 2.3463,
"step": 752
},
{
"epoch": 0.3111684291978334,
"grad_norm": 0.5976232886314392,
"learning_rate": 8.25786439168994e-05,
"loss": 2.2996,
"step": 754
},
{
"epoch": 0.311993809646634,
"grad_norm": 0.5421935319900513,
"learning_rate": 8.24749966999391e-05,
"loss": 2.3143,
"step": 756
},
{
"epoch": 0.31281919009543463,
"grad_norm": 0.5300918817520142,
"learning_rate": 8.237110754840043e-05,
"loss": 2.2946,
"step": 758
},
{
"epoch": 0.3136445705442352,
"grad_norm": 0.5926389098167419,
"learning_rate": 8.22669772362443e-05,
"loss": 2.3312,
"step": 760
},
{
"epoch": 0.3144699509930359,
"grad_norm": 0.5365331768989563,
"learning_rate": 8.216260653922823e-05,
"loss": 2.3165,
"step": 762
},
{
"epoch": 0.31529533144183647,
"grad_norm": 0.5278505086898804,
"learning_rate": 8.205799623490055e-05,
"loss": 2.2811,
"step": 764
},
{
"epoch": 0.31612071189063706,
"grad_norm": 0.5568265318870544,
"learning_rate": 8.195314710259475e-05,
"loss": 2.2899,
"step": 766
},
{
"epoch": 0.3169460923394377,
"grad_norm": 0.6386959552764893,
"learning_rate": 8.184805992342342e-05,
"loss": 2.3193,
"step": 768
},
{
"epoch": 0.3177714727882383,
"grad_norm": 0.5363957285881042,
"learning_rate": 8.174273548027262e-05,
"loss": 2.3255,
"step": 770
},
{
"epoch": 0.31859685323703896,
"grad_norm": 0.6581987142562866,
"learning_rate": 8.163717455779602e-05,
"loss": 2.3159,
"step": 772
},
{
"epoch": 0.31942223368583955,
"grad_norm": 0.7844798564910889,
"learning_rate": 8.153137794240903e-05,
"loss": 2.3241,
"step": 774
},
{
"epoch": 0.3202476141346402,
"grad_norm": 0.5486951470375061,
"learning_rate": 8.142534642228288e-05,
"loss": 2.2834,
"step": 776
},
{
"epoch": 0.3210729945834408,
"grad_norm": 0.6187033653259277,
"learning_rate": 8.13190807873389e-05,
"loss": 2.31,
"step": 778
},
{
"epoch": 0.32189837503224145,
"grad_norm": 0.6599840521812439,
"learning_rate": 8.121258182924247e-05,
"loss": 2.3075,
"step": 780
},
{
"epoch": 0.32272375548104204,
"grad_norm": 0.6156419515609741,
"learning_rate": 8.110585034139723e-05,
"loss": 2.3171,
"step": 782
},
{
"epoch": 0.32354913592984264,
"grad_norm": 0.652897834777832,
"learning_rate": 8.099888711893917e-05,
"loss": 2.2963,
"step": 784
},
{
"epoch": 0.3243745163786433,
"grad_norm": 0.5405826568603516,
"learning_rate": 8.089169295873058e-05,
"loss": 2.299,
"step": 786
},
{
"epoch": 0.3251998968274439,
"grad_norm": 0.5489581823348999,
"learning_rate": 8.078426865935432e-05,
"loss": 2.3051,
"step": 788
},
{
"epoch": 0.32602527727624453,
"grad_norm": 0.5330743789672852,
"learning_rate": 8.067661502110768e-05,
"loss": 2.306,
"step": 790
},
{
"epoch": 0.3268506577250451,
"grad_norm": 0.5833327174186707,
"learning_rate": 8.056873284599648e-05,
"loss": 2.3197,
"step": 792
},
{
"epoch": 0.3276760381738458,
"grad_norm": 0.638880729675293,
"learning_rate": 8.046062293772922e-05,
"loss": 2.309,
"step": 794
},
{
"epoch": 0.32850141862264637,
"grad_norm": 0.6887226104736328,
"learning_rate": 8.035228610171085e-05,
"loss": 2.3192,
"step": 796
},
{
"epoch": 0.329326799071447,
"grad_norm": 0.5981518030166626,
"learning_rate": 8.024372314503701e-05,
"loss": 2.2958,
"step": 798
},
{
"epoch": 0.3301521795202476,
"grad_norm": 0.5953544974327087,
"learning_rate": 8.013493487648782e-05,
"loss": 2.3161,
"step": 800
},
{
"epoch": 0.3309775599690482,
"grad_norm": 0.576503574848175,
"learning_rate": 8.002592210652202e-05,
"loss": 2.2901,
"step": 802
},
{
"epoch": 0.33180294041784886,
"grad_norm": 0.6154365539550781,
"learning_rate": 7.991668564727082e-05,
"loss": 2.3093,
"step": 804
},
{
"epoch": 0.33262832086664945,
"grad_norm": 0.5416200757026672,
"learning_rate": 7.98072263125319e-05,
"loss": 2.3004,
"step": 806
},
{
"epoch": 0.3334537013154501,
"grad_norm": 0.5973731875419617,
"learning_rate": 7.969754491776329e-05,
"loss": 2.3224,
"step": 808
},
{
"epoch": 0.3342790817642507,
"grad_norm": 0.5144022107124329,
"learning_rate": 7.958764228007741e-05,
"loss": 2.2899,
"step": 810
},
{
"epoch": 0.33510446221305135,
"grad_norm": 0.627142608165741,
"learning_rate": 7.947751921823488e-05,
"loss": 2.3196,
"step": 812
},
{
"epoch": 0.33592984266185194,
"grad_norm": 0.5556638240814209,
"learning_rate": 7.936717655263841e-05,
"loss": 2.2991,
"step": 814
},
{
"epoch": 0.3367552231106526,
"grad_norm": 0.5637221336364746,
"learning_rate": 7.925661510532681e-05,
"loss": 2.3093,
"step": 816
},
{
"epoch": 0.3375806035594532,
"grad_norm": 0.6272327899932861,
"learning_rate": 7.91458356999687e-05,
"loss": 2.3286,
"step": 818
},
{
"epoch": 0.3384059840082538,
"grad_norm": 0.5124315023422241,
"learning_rate": 7.903483916185654e-05,
"loss": 2.2879,
"step": 820
},
{
"epoch": 0.33923136445705443,
"grad_norm": 0.5467550158500671,
"learning_rate": 7.892362631790035e-05,
"loss": 2.3107,
"step": 822
},
{
"epoch": 0.340056744905855,
"grad_norm": 0.5180369019508362,
"learning_rate": 7.881219799662164e-05,
"loss": 2.2784,
"step": 824
},
{
"epoch": 0.3408821253546557,
"grad_norm": 0.5402101874351501,
"learning_rate": 7.870055502814714e-05,
"loss": 2.2704,
"step": 826
},
{
"epoch": 0.34170750580345627,
"grad_norm": 0.4879278242588043,
"learning_rate": 7.858869824420272e-05,
"loss": 2.2913,
"step": 828
},
{
"epoch": 0.3425328862522569,
"grad_norm": 0.5525892972946167,
"learning_rate": 7.847662847810713e-05,
"loss": 2.3071,
"step": 830
},
{
"epoch": 0.3433582667010575,
"grad_norm": 0.517867922782898,
"learning_rate": 7.836434656476583e-05,
"loss": 2.2748,
"step": 832
},
{
"epoch": 0.34418364714985816,
"grad_norm": 0.5947239398956299,
"learning_rate": 7.825185334066475e-05,
"loss": 2.2687,
"step": 834
},
{
"epoch": 0.34500902759865876,
"grad_norm": 0.6157646775245667,
"learning_rate": 7.813914964386401e-05,
"loss": 2.2769,
"step": 836
},
{
"epoch": 0.34583440804745935,
"grad_norm": 0.5485296845436096,
"learning_rate": 7.802623631399176e-05,
"loss": 2.2663,
"step": 838
},
{
"epoch": 0.34665978849626,
"grad_norm": 0.4869697093963623,
"learning_rate": 7.791311419223791e-05,
"loss": 2.2908,
"step": 840
},
{
"epoch": 0.3474851689450606,
"grad_norm": 0.5534776449203491,
"learning_rate": 7.779978412134783e-05,
"loss": 2.2998,
"step": 842
},
{
"epoch": 0.34831054939386125,
"grad_norm": 0.5947521328926086,
"learning_rate": 7.768624694561604e-05,
"loss": 2.2907,
"step": 844
},
{
"epoch": 0.34913592984266184,
"grad_norm": 0.5616730451583862,
"learning_rate": 7.757250351088004e-05,
"loss": 2.3003,
"step": 846
},
{
"epoch": 0.3499613102914625,
"grad_norm": 0.5840707421302795,
"learning_rate": 7.745855466451385e-05,
"loss": 2.2683,
"step": 848
},
{
"epoch": 0.3507866907402631,
"grad_norm": 0.5235263705253601,
"learning_rate": 7.734440125542186e-05,
"loss": 2.2976,
"step": 850
},
{
"epoch": 0.35161207118906374,
"grad_norm": 0.5673782229423523,
"learning_rate": 7.723004413403238e-05,
"loss": 2.2936,
"step": 852
},
{
"epoch": 0.35243745163786433,
"grad_norm": 0.5472978353500366,
"learning_rate": 7.711548415229135e-05,
"loss": 2.2896,
"step": 854
},
{
"epoch": 0.3532628320866649,
"grad_norm": 0.5697288513183594,
"learning_rate": 7.700072216365602e-05,
"loss": 2.3239,
"step": 856
},
{
"epoch": 0.3540882125354656,
"grad_norm": 0.6162300705909729,
"learning_rate": 7.688575902308854e-05,
"loss": 2.2973,
"step": 858
},
{
"epoch": 0.35491359298426617,
"grad_norm": 0.4970763027667999,
"learning_rate": 7.677059558704965e-05,
"loss": 2.3018,
"step": 860
},
{
"epoch": 0.3557389734330668,
"grad_norm": 0.486848920583725,
"learning_rate": 7.665523271349221e-05,
"loss": 2.3116,
"step": 862
},
{
"epoch": 0.3565643538818674,
"grad_norm": 0.5172179341316223,
"learning_rate": 7.65396712618549e-05,
"loss": 2.2898,
"step": 864
},
{
"epoch": 0.35738973433066806,
"grad_norm": 0.5573694705963135,
"learning_rate": 7.642391209305581e-05,
"loss": 2.2866,
"step": 866
},
{
"epoch": 0.35821511477946866,
"grad_norm": 0.540338397026062,
"learning_rate": 7.630795606948592e-05,
"loss": 2.3066,
"step": 868
},
{
"epoch": 0.3590404952282693,
"grad_norm": 0.5216573476791382,
"learning_rate": 7.619180405500284e-05,
"loss": 2.2489,
"step": 870
},
{
"epoch": 0.3598658756770699,
"grad_norm": 0.5518911480903625,
"learning_rate": 7.607545691492421e-05,
"loss": 2.2621,
"step": 872
},
{
"epoch": 0.3606912561258705,
"grad_norm": 0.5395997166633606,
"learning_rate": 7.595891551602139e-05,
"loss": 2.2828,
"step": 874
},
{
"epoch": 0.36151663657467115,
"grad_norm": 0.5570027828216553,
"learning_rate": 7.584218072651291e-05,
"loss": 2.2721,
"step": 876
},
{
"epoch": 0.36234201702347174,
"grad_norm": 0.530053436756134,
"learning_rate": 7.572525341605805e-05,
"loss": 2.3146,
"step": 878
},
{
"epoch": 0.3631673974722724,
"grad_norm": 0.5206712484359741,
"learning_rate": 7.560813445575032e-05,
"loss": 2.2633,
"step": 880
},
{
"epoch": 0.363992777921073,
"grad_norm": 0.5709933638572693,
"learning_rate": 7.549082471811105e-05,
"loss": 2.2524,
"step": 882
},
{
"epoch": 0.36481815836987364,
"grad_norm": 0.5382503271102905,
"learning_rate": 7.53733250770828e-05,
"loss": 2.2871,
"step": 884
},
{
"epoch": 0.36564353881867423,
"grad_norm": 0.49123620986938477,
"learning_rate": 7.525563640802286e-05,
"loss": 2.2906,
"step": 886
},
{
"epoch": 0.3664689192674748,
"grad_norm": 0.48953843116760254,
"learning_rate": 7.513775958769683e-05,
"loss": 2.2912,
"step": 888
},
{
"epoch": 0.3672942997162755,
"grad_norm": 0.5467056632041931,
"learning_rate": 7.501969549427195e-05,
"loss": 2.2961,
"step": 890
},
{
"epoch": 0.36811968016507607,
"grad_norm": 0.5301052331924438,
"learning_rate": 7.49014450073106e-05,
"loss": 2.2588,
"step": 892
},
{
"epoch": 0.3689450606138767,
"grad_norm": 0.5778504014015198,
"learning_rate": 7.478300900776387e-05,
"loss": 2.2627,
"step": 894
},
{
"epoch": 0.3697704410626773,
"grad_norm": 0.5600552558898926,
"learning_rate": 7.46643883779648e-05,
"loss": 2.2874,
"step": 896
},
{
"epoch": 0.37059582151147796,
"grad_norm": 0.5173321962356567,
"learning_rate": 7.454558400162195e-05,
"loss": 2.2787,
"step": 898
},
{
"epoch": 0.37142120196027856,
"grad_norm": 0.534444272518158,
"learning_rate": 7.442659676381275e-05,
"loss": 2.2883,
"step": 900
},
{
"epoch": 0.3722465824090792,
"grad_norm": 0.5139839053153992,
"learning_rate": 7.430742755097689e-05,
"loss": 2.2999,
"step": 902
},
{
"epoch": 0.3730719628578798,
"grad_norm": 0.4908299744129181,
"learning_rate": 7.418807725090983e-05,
"loss": 2.285,
"step": 904
},
{
"epoch": 0.3738973433066804,
"grad_norm": 0.49594688415527344,
"learning_rate": 7.406854675275605e-05,
"loss": 2.2884,
"step": 906
},
{
"epoch": 0.37472272375548105,
"grad_norm": 0.49902480840682983,
"learning_rate": 7.39488369470025e-05,
"loss": 2.2778,
"step": 908
},
{
"epoch": 0.37554810420428164,
"grad_norm": 0.46000784635543823,
"learning_rate": 7.382894872547195e-05,
"loss": 2.2797,
"step": 910
},
{
"epoch": 0.3763734846530823,
"grad_norm": 0.4911547899246216,
"learning_rate": 7.370888298131633e-05,
"loss": 2.2611,
"step": 912
},
{
"epoch": 0.3771988651018829,
"grad_norm": 0.5099422335624695,
"learning_rate": 7.35886406090101e-05,
"loss": 2.2536,
"step": 914
},
{
"epoch": 0.37802424555068354,
"grad_norm": 0.42195364832878113,
"learning_rate": 7.34682225043436e-05,
"loss": 2.2597,
"step": 916
},
{
"epoch": 0.37884962599948413,
"grad_norm": 0.5352451205253601,
"learning_rate": 7.334762956441632e-05,
"loss": 2.2866,
"step": 918
},
{
"epoch": 0.3796750064482848,
"grad_norm": 0.4699161946773529,
"learning_rate": 7.322686268763026e-05,
"loss": 2.2801,
"step": 920
},
{
"epoch": 0.3805003868970854,
"grad_norm": 0.5075897574424744,
"learning_rate": 7.310592277368322e-05,
"loss": 2.2814,
"step": 922
},
{
"epoch": 0.38132576734588597,
"grad_norm": 0.4679708480834961,
"learning_rate": 7.298481072356214e-05,
"loss": 2.2524,
"step": 924
},
{
"epoch": 0.3821511477946866,
"grad_norm": 0.5117970705032349,
"learning_rate": 7.28635274395363e-05,
"loss": 2.2605,
"step": 926
},
{
"epoch": 0.3829765282434872,
"grad_norm": 0.518598735332489,
"learning_rate": 7.274207382515071e-05,
"loss": 2.2685,
"step": 928
},
{
"epoch": 0.38380190869228786,
"grad_norm": 0.5765763521194458,
"learning_rate": 7.262045078521924e-05,
"loss": 2.2894,
"step": 930
},
{
"epoch": 0.38462728914108846,
"grad_norm": 0.48513707518577576,
"learning_rate": 7.249865922581807e-05,
"loss": 2.2658,
"step": 932
},
{
"epoch": 0.3854526695898891,
"grad_norm": 0.5026444792747498,
"learning_rate": 7.237670005427872e-05,
"loss": 2.283,
"step": 934
},
{
"epoch": 0.3862780500386897,
"grad_norm": 0.5490180253982544,
"learning_rate": 7.225457417918144e-05,
"loss": 2.241,
"step": 936
},
{
"epoch": 0.38710343048749035,
"grad_norm": 0.54227215051651,
"learning_rate": 7.213228251034844e-05,
"loss": 2.2338,
"step": 938
},
{
"epoch": 0.38792881093629095,
"grad_norm": 0.5452665686607361,
"learning_rate": 7.2009825958837e-05,
"loss": 2.2478,
"step": 940
},
{
"epoch": 0.38875419138509154,
"grad_norm": 0.5434293150901794,
"learning_rate": 7.188720543693283e-05,
"loss": 2.2489,
"step": 942
},
{
"epoch": 0.3895795718338922,
"grad_norm": 0.5174756050109863,
"learning_rate": 7.176442185814312e-05,
"loss": 2.2562,
"step": 944
},
{
"epoch": 0.3904049522826928,
"grad_norm": 0.5360869765281677,
"learning_rate": 7.164147613718986e-05,
"loss": 2.254,
"step": 946
},
{
"epoch": 0.39123033273149344,
"grad_norm": 0.5211062431335449,
"learning_rate": 7.151836919000299e-05,
"loss": 2.2501,
"step": 948
},
{
"epoch": 0.39205571318029403,
"grad_norm": 0.5354079604148865,
"learning_rate": 7.139510193371352e-05,
"loss": 2.2585,
"step": 950
},
{
"epoch": 0.3928810936290947,
"grad_norm": 0.5471782684326172,
"learning_rate": 7.127167528664682e-05,
"loss": 2.2502,
"step": 952
},
{
"epoch": 0.3937064740778953,
"grad_norm": 0.5891269445419312,
"learning_rate": 7.114809016831558e-05,
"loss": 2.2916,
"step": 954
},
{
"epoch": 0.3945318545266959,
"grad_norm": 0.6164124608039856,
"learning_rate": 7.10243474994132e-05,
"loss": 2.2526,
"step": 956
},
{
"epoch": 0.3953572349754965,
"grad_norm": 0.5750834345817566,
"learning_rate": 7.090044820180673e-05,
"loss": 2.25,
"step": 958
},
{
"epoch": 0.3961826154242971,
"grad_norm": 0.5516604781150818,
"learning_rate": 7.077639319853013e-05,
"loss": 2.2624,
"step": 960
},
{
"epoch": 0.39700799587309776,
"grad_norm": 0.5377829670906067,
"learning_rate": 7.065218341377734e-05,
"loss": 2.2542,
"step": 962
},
{
"epoch": 0.39783337632189836,
"grad_norm": 0.5297653079032898,
"learning_rate": 7.05278197728954e-05,
"loss": 2.2784,
"step": 964
},
{
"epoch": 0.398658756770699,
"grad_norm": 0.5507510304450989,
"learning_rate": 7.040330320237752e-05,
"loss": 2.2848,
"step": 966
},
{
"epoch": 0.3994841372194996,
"grad_norm": 0.571143388748169,
"learning_rate": 7.027863462985628e-05,
"loss": 2.2697,
"step": 968
},
{
"epoch": 0.40030951766830025,
"grad_norm": 0.538505494594574,
"learning_rate": 7.015381498409661e-05,
"loss": 2.2532,
"step": 970
},
{
"epoch": 0.40113489811710085,
"grad_norm": 0.53383868932724,
"learning_rate": 7.002884519498895e-05,
"loss": 2.2399,
"step": 972
},
{
"epoch": 0.4019602785659015,
"grad_norm": 0.46435126662254333,
"learning_rate": 6.990372619354224e-05,
"loss": 2.2689,
"step": 974
},
{
"epoch": 0.4027856590147021,
"grad_norm": 0.49260959029197693,
"learning_rate": 6.977845891187708e-05,
"loss": 2.2653,
"step": 976
},
{
"epoch": 0.4036110394635027,
"grad_norm": 0.4841521084308624,
"learning_rate": 6.965304428321874e-05,
"loss": 2.2743,
"step": 978
},
{
"epoch": 0.40443641991230334,
"grad_norm": 0.45740070939064026,
"learning_rate": 6.952748324189016e-05,
"loss": 2.2406,
"step": 980
},
{
"epoch": 0.40526180036110393,
"grad_norm": 0.4746413826942444,
"learning_rate": 6.940177672330508e-05,
"loss": 2.2461,
"step": 982
},
{
"epoch": 0.4060871808099046,
"grad_norm": 0.4509133994579315,
"learning_rate": 6.9275925663961e-05,
"loss": 2.2287,
"step": 984
},
{
"epoch": 0.4069125612587052,
"grad_norm": 0.44701477885246277,
"learning_rate": 6.914993100143224e-05,
"loss": 2.2771,
"step": 986
},
{
"epoch": 0.4077379417075058,
"grad_norm": 0.49677759408950806,
"learning_rate": 6.902379367436296e-05,
"loss": 2.2434,
"step": 988
},
{
"epoch": 0.4085633221563064,
"grad_norm": 0.5722448229789734,
"learning_rate": 6.889751462246013e-05,
"loss": 2.228,
"step": 990
},
{
"epoch": 0.40938870260510707,
"grad_norm": 0.4709642827510834,
"learning_rate": 6.877109478648656e-05,
"loss": 2.2444,
"step": 992
},
{
"epoch": 0.41021408305390766,
"grad_norm": 0.6394116282463074,
"learning_rate": 6.864453510825388e-05,
"loss": 2.2317,
"step": 994
},
{
"epoch": 0.41103946350270826,
"grad_norm": 0.5443106889724731,
"learning_rate": 6.851783653061555e-05,
"loss": 2.2135,
"step": 996
},
{
"epoch": 0.4118648439515089,
"grad_norm": 0.6894340515136719,
"learning_rate": 6.83909999974598e-05,
"loss": 2.2545,
"step": 998
},
{
"epoch": 0.4126902244003095,
"grad_norm": 0.5471647381782532,
"learning_rate": 6.826402645370256e-05,
"loss": 2.2036,
"step": 1000
},
{
"epoch": 0.41351560484911015,
"grad_norm": 0.5389876365661621,
"learning_rate": 6.813691684528054e-05,
"loss": 2.2575,
"step": 1002
},
{
"epoch": 0.41434098529791075,
"grad_norm": 0.5603637099266052,
"learning_rate": 6.800967211914409e-05,
"loss": 2.2577,
"step": 1004
},
{
"epoch": 0.4151663657467114,
"grad_norm": 0.5469692349433899,
"learning_rate": 6.788229322325022e-05,
"loss": 2.2472,
"step": 1006
},
{
"epoch": 0.415991746195512,
"grad_norm": 0.5489314794540405,
"learning_rate": 6.775478110655535e-05,
"loss": 2.2818,
"step": 1008
},
{
"epoch": 0.4168171266443126,
"grad_norm": 0.4870460331439972,
"learning_rate": 6.762713671900853e-05,
"loss": 2.2747,
"step": 1010
},
{
"epoch": 0.41764250709311324,
"grad_norm": 0.5328664183616638,
"learning_rate": 6.74993610115441e-05,
"loss": 2.2826,
"step": 1012
},
{
"epoch": 0.41846788754191383,
"grad_norm": 0.5465196371078491,
"learning_rate": 6.737145493607482e-05,
"loss": 2.2562,
"step": 1014
},
{
"epoch": 0.4192932679907145,
"grad_norm": 0.4999396502971649,
"learning_rate": 6.724341944548459e-05,
"loss": 2.2545,
"step": 1016
},
{
"epoch": 0.4201186484395151,
"grad_norm": 0.49227967858314514,
"learning_rate": 6.711525549362144e-05,
"loss": 2.2322,
"step": 1018
},
{
"epoch": 0.4209440288883157,
"grad_norm": 0.4593503773212433,
"learning_rate": 6.698696403529049e-05,
"loss": 2.2817,
"step": 1020
},
{
"epoch": 0.4217694093371163,
"grad_norm": 0.477332204580307,
"learning_rate": 6.685854602624668e-05,
"loss": 2.25,
"step": 1022
},
{
"epoch": 0.42259478978591697,
"grad_norm": 0.4970756471157074,
"learning_rate": 6.673000242318782e-05,
"loss": 2.2937,
"step": 1024
},
{
"epoch": 0.42342017023471756,
"grad_norm": 0.4330197274684906,
"learning_rate": 6.660133418374732e-05,
"loss": 2.2231,
"step": 1026
},
{
"epoch": 0.42424555068351816,
"grad_norm": 0.45938703417778015,
"learning_rate": 6.647254226648711e-05,
"loss": 2.2166,
"step": 1028
},
{
"epoch": 0.4250709311323188,
"grad_norm": 0.45608118176460266,
"learning_rate": 6.634362763089056e-05,
"loss": 2.2346,
"step": 1030
},
{
"epoch": 0.4258963115811194,
"grad_norm": 0.5052918195724487,
"learning_rate": 6.621459123735522e-05,
"loss": 2.2608,
"step": 1032
},
{
"epoch": 0.42672169202992005,
"grad_norm": 0.4969039559364319,
"learning_rate": 6.608543404718578e-05,
"loss": 2.2349,
"step": 1034
},
{
"epoch": 0.42754707247872065,
"grad_norm": 0.5392360091209412,
"learning_rate": 6.595615702258676e-05,
"loss": 2.2463,
"step": 1036
},
{
"epoch": 0.4283724529275213,
"grad_norm": 0.5267171859741211,
"learning_rate": 6.58267611266555e-05,
"loss": 2.2157,
"step": 1038
},
{
"epoch": 0.4291978333763219,
"grad_norm": 0.4976198673248291,
"learning_rate": 6.569724732337495e-05,
"loss": 2.214,
"step": 1040
},
{
"epoch": 0.43002321382512254,
"grad_norm": 0.46527931094169617,
"learning_rate": 6.556761657760635e-05,
"loss": 2.2162,
"step": 1042
},
{
"epoch": 0.43084859427392314,
"grad_norm": 0.4234091341495514,
"learning_rate": 6.543786985508223e-05,
"loss": 2.2026,
"step": 1044
},
{
"epoch": 0.43167397472272373,
"grad_norm": 0.41595569252967834,
"learning_rate": 6.53080081223991e-05,
"loss": 2.2413,
"step": 1046
},
{
"epoch": 0.4324993551715244,
"grad_norm": 0.43087056279182434,
"learning_rate": 6.517803234701025e-05,
"loss": 2.2465,
"step": 1048
},
{
"epoch": 0.433324735620325,
"grad_norm": 0.4584639072418213,
"learning_rate": 6.504794349721866e-05,
"loss": 2.2353,
"step": 1050
},
{
"epoch": 0.4341501160691256,
"grad_norm": 0.49019527435302734,
"learning_rate": 6.491774254216963e-05,
"loss": 2.218,
"step": 1052
},
{
"epoch": 0.4349754965179262,
"grad_norm": 0.46099725365638733,
"learning_rate": 6.478743045184365e-05,
"loss": 2.2402,
"step": 1054
},
{
"epoch": 0.43580087696672687,
"grad_norm": 0.48430031538009644,
"learning_rate": 6.465700819704913e-05,
"loss": 2.2382,
"step": 1056
},
{
"epoch": 0.43662625741552746,
"grad_norm": 0.48902517557144165,
"learning_rate": 6.452647674941524e-05,
"loss": 2.2328,
"step": 1058
},
{
"epoch": 0.4374516378643281,
"grad_norm": 0.46378427743911743,
"learning_rate": 6.439583708138459e-05,
"loss": 2.2587,
"step": 1060
},
{
"epoch": 0.4382770183131287,
"grad_norm": 0.4577491879463196,
"learning_rate": 6.426509016620603e-05,
"loss": 2.2379,
"step": 1062
},
{
"epoch": 0.4391023987619293,
"grad_norm": 0.44876828789711,
"learning_rate": 6.413423697792737e-05,
"loss": 2.2537,
"step": 1064
},
{
"epoch": 0.43992777921072995,
"grad_norm": 0.501110851764679,
"learning_rate": 6.400327849138814e-05,
"loss": 2.2101,
"step": 1066
},
{
"epoch": 0.44075315965953055,
"grad_norm": 0.43247148394584656,
"learning_rate": 6.387221568221238e-05,
"loss": 2.2493,
"step": 1068
},
{
"epoch": 0.4415785401083312,
"grad_norm": 0.5204429030418396,
"learning_rate": 6.374104952680125e-05,
"loss": 2.2396,
"step": 1070
},
{
"epoch": 0.4424039205571318,
"grad_norm": 0.4856724143028259,
"learning_rate": 6.360978100232587e-05,
"loss": 2.2443,
"step": 1072
},
{
"epoch": 0.44322930100593244,
"grad_norm": 0.5140888690948486,
"learning_rate": 6.347841108672e-05,
"loss": 2.2539,
"step": 1074
},
{
"epoch": 0.44405468145473304,
"grad_norm": 0.4858645796775818,
"learning_rate": 6.334694075867269e-05,
"loss": 2.2525,
"step": 1076
},
{
"epoch": 0.4448800619035337,
"grad_norm": 0.4742701053619385,
"learning_rate": 6.321537099762114e-05,
"loss": 2.244,
"step": 1078
},
{
"epoch": 0.4457054423523343,
"grad_norm": 0.4515312910079956,
"learning_rate": 6.308370278374325e-05,
"loss": 2.225,
"step": 1080
},
{
"epoch": 0.4465308228011349,
"grad_norm": 0.46387842297554016,
"learning_rate": 6.295193709795037e-05,
"loss": 2.2135,
"step": 1082
},
{
"epoch": 0.4473562032499355,
"grad_norm": 0.44970181584358215,
"learning_rate": 6.282007492188011e-05,
"loss": 2.248,
"step": 1084
},
{
"epoch": 0.4481815836987361,
"grad_norm": 0.46752220392227173,
"learning_rate": 6.268811723788877e-05,
"loss": 2.2795,
"step": 1086
},
{
"epoch": 0.44900696414753677,
"grad_norm": 0.48435142636299133,
"learning_rate": 6.255606502904429e-05,
"loss": 2.2396,
"step": 1088
},
{
"epoch": 0.44983234459633736,
"grad_norm": 0.4091418385505676,
"learning_rate": 6.242391927911872e-05,
"loss": 2.2252,
"step": 1090
},
{
"epoch": 0.450657725045138,
"grad_norm": 0.4950377345085144,
"learning_rate": 6.229168097258106e-05,
"loss": 2.2474,
"step": 1092
},
{
"epoch": 0.4514831054939386,
"grad_norm": 0.4584418535232544,
"learning_rate": 6.21593510945898e-05,
"loss": 2.245,
"step": 1094
},
{
"epoch": 0.45230848594273926,
"grad_norm": 0.4853164851665497,
"learning_rate": 6.202693063098561e-05,
"loss": 2.2587,
"step": 1096
},
{
"epoch": 0.45313386639153985,
"grad_norm": 0.48432743549346924,
"learning_rate": 6.189442056828407e-05,
"loss": 2.2225,
"step": 1098
},
{
"epoch": 0.45395924684034045,
"grad_norm": 0.4741179943084717,
"learning_rate": 6.176182189366819e-05,
"loss": 2.2461,
"step": 1100
},
{
"epoch": 0.4547846272891411,
"grad_norm": 0.48972228169441223,
"learning_rate": 6.16291355949812e-05,
"loss": 2.2534,
"step": 1102
},
{
"epoch": 0.4556100077379417,
"grad_norm": 0.501619279384613,
"learning_rate": 6.149636266071904e-05,
"loss": 2.2196,
"step": 1104
},
{
"epoch": 0.45643538818674234,
"grad_norm": 0.4932022988796234,
"learning_rate": 6.136350408002314e-05,
"loss": 2.1943,
"step": 1106
},
{
"epoch": 0.45726076863554294,
"grad_norm": 0.4711919128894806,
"learning_rate": 6.123056084267296e-05,
"loss": 2.2274,
"step": 1108
},
{
"epoch": 0.4580861490843436,
"grad_norm": 0.4498264491558075,
"learning_rate": 6.109753393907862e-05,
"loss": 2.2332,
"step": 1110
},
{
"epoch": 0.4589115295331442,
"grad_norm": 0.4392760694026947,
"learning_rate": 6.09644243602736e-05,
"loss": 2.2566,
"step": 1112
},
{
"epoch": 0.45973690998194483,
"grad_norm": 0.48753243684768677,
"learning_rate": 6.0831233097907236e-05,
"loss": 2.2242,
"step": 1114
},
{
"epoch": 0.4605622904307454,
"grad_norm": 0.4300667643547058,
"learning_rate": 6.069796114423743e-05,
"loss": 2.2305,
"step": 1116
},
{
"epoch": 0.461387670879546,
"grad_norm": 0.4927161633968353,
"learning_rate": 6.056460949212324e-05,
"loss": 2.2336,
"step": 1118
},
{
"epoch": 0.46221305132834667,
"grad_norm": 0.4289657175540924,
"learning_rate": 6.043117913501741e-05,
"loss": 2.2242,
"step": 1120
},
{
"epoch": 0.46303843177714726,
"grad_norm": 0.44931545853614807,
"learning_rate": 6.029767106695909e-05,
"loss": 2.222,
"step": 1122
},
{
"epoch": 0.4638638122259479,
"grad_norm": 0.5036013126373291,
"learning_rate": 6.0164086282566326e-05,
"loss": 2.2366,
"step": 1124
},
{
"epoch": 0.4646891926747485,
"grad_norm": 0.536802351474762,
"learning_rate": 6.0030425777028685e-05,
"loss": 2.2069,
"step": 1126
},
{
"epoch": 0.46551457312354916,
"grad_norm": 0.46372362971305847,
"learning_rate": 5.9896690546099906e-05,
"loss": 2.2497,
"step": 1128
},
{
"epoch": 0.46633995357234975,
"grad_norm": 0.48737701773643494,
"learning_rate": 5.9762881586090344e-05,
"loss": 2.2389,
"step": 1130
},
{
"epoch": 0.46716533402115035,
"grad_norm": 0.4532935321331024,
"learning_rate": 5.962899989385969e-05,
"loss": 2.1895,
"step": 1132
},
{
"epoch": 0.467990714469951,
"grad_norm": 0.45759317278862,
"learning_rate": 5.9495046466809444e-05,
"loss": 2.2228,
"step": 1134
},
{
"epoch": 0.4688160949187516,
"grad_norm": 0.41080954670906067,
"learning_rate": 5.936102230287553e-05,
"loss": 2.2269,
"step": 1136
},
{
"epoch": 0.46964147536755224,
"grad_norm": 0.42970025539398193,
"learning_rate": 5.9226928400520854e-05,
"loss": 2.2291,
"step": 1138
},
{
"epoch": 0.47046685581635284,
"grad_norm": 0.4297996759414673,
"learning_rate": 5.9092765758727854e-05,
"loss": 2.2319,
"step": 1140
},
{
"epoch": 0.4712922362651535,
"grad_norm": 0.42044776678085327,
"learning_rate": 5.8958535376991106e-05,
"loss": 2.1991,
"step": 1142
},
{
"epoch": 0.4721176167139541,
"grad_norm": 0.4343169927597046,
"learning_rate": 5.882423825530981e-05,
"loss": 2.224,
"step": 1144
},
{
"epoch": 0.47294299716275473,
"grad_norm": 0.41891202330589294,
"learning_rate": 5.8689875394180335e-05,
"loss": 2.2232,
"step": 1146
},
{
"epoch": 0.4737683776115553,
"grad_norm": 0.4097048342227936,
"learning_rate": 5.855544779458887e-05,
"loss": 2.2341,
"step": 1148
},
{
"epoch": 0.4745937580603559,
"grad_norm": 0.5282487273216248,
"learning_rate": 5.8420956458003876e-05,
"loss": 2.2616,
"step": 1150
},
{
"epoch": 0.47541913850915657,
"grad_norm": 0.441388875246048,
"learning_rate": 5.828640238636861e-05,
"loss": 2.1994,
"step": 1152
},
{
"epoch": 0.47624451895795716,
"grad_norm": 0.4527244567871094,
"learning_rate": 5.815178658209372e-05,
"loss": 2.2287,
"step": 1154
},
{
"epoch": 0.4770698994067578,
"grad_norm": 0.4659084379673004,
"learning_rate": 5.801711004804979e-05,
"loss": 2.2059,
"step": 1156
},
{
"epoch": 0.4778952798555584,
"grad_norm": 0.46441391110420227,
"learning_rate": 5.7882373787559775e-05,
"loss": 2.2184,
"step": 1158
},
{
"epoch": 0.47872066030435906,
"grad_norm": 0.47255754470825195,
"learning_rate": 5.7747578804391624e-05,
"loss": 2.2101,
"step": 1160
},
{
"epoch": 0.47954604075315965,
"grad_norm": 0.4485050439834595,
"learning_rate": 5.761272610275074e-05,
"loss": 2.2034,
"step": 1162
},
{
"epoch": 0.4803714212019603,
"grad_norm": 0.402884840965271,
"learning_rate": 5.747781668727251e-05,
"loss": 2.2278,
"step": 1164
},
{
"epoch": 0.4811968016507609,
"grad_norm": 0.45074397325515747,
"learning_rate": 5.73428515630149e-05,
"loss": 2.2272,
"step": 1166
},
{
"epoch": 0.4820221820995615,
"grad_norm": 0.48101773858070374,
"learning_rate": 5.72078317354508e-05,
"loss": 2.2207,
"step": 1168
},
{
"epoch": 0.48284756254836214,
"grad_norm": 0.5213938355445862,
"learning_rate": 5.7072758210460716e-05,
"loss": 2.2025,
"step": 1170
},
{
"epoch": 0.48367294299716274,
"grad_norm": 0.5374065041542053,
"learning_rate": 5.693763199432516e-05,
"loss": 2.2319,
"step": 1172
},
{
"epoch": 0.4844983234459634,
"grad_norm": 0.5042341351509094,
"learning_rate": 5.680245409371716e-05,
"loss": 2.2216,
"step": 1174
},
{
"epoch": 0.485323703894764,
"grad_norm": 0.4710129499435425,
"learning_rate": 5.666722551569484e-05,
"loss": 2.2126,
"step": 1176
},
{
"epoch": 0.48614908434356463,
"grad_norm": 0.46355435252189636,
"learning_rate": 5.653194726769382e-05,
"loss": 2.2465,
"step": 1178
},
{
"epoch": 0.4869744647923652,
"grad_norm": 0.5684232711791992,
"learning_rate": 5.63966203575198e-05,
"loss": 2.2307,
"step": 1180
},
{
"epoch": 0.4877998452411659,
"grad_norm": 0.5232070684432983,
"learning_rate": 5.6261245793340944e-05,
"loss": 2.204,
"step": 1182
},
{
"epoch": 0.48862522568996647,
"grad_norm": 0.47950464487075806,
"learning_rate": 5.612582458368047e-05,
"loss": 2.2277,
"step": 1184
},
{
"epoch": 0.48945060613876706,
"grad_norm": 0.551558792591095,
"learning_rate": 5.599035773740915e-05,
"loss": 2.2111,
"step": 1186
},
{
"epoch": 0.4902759865875677,
"grad_norm": 0.5343023538589478,
"learning_rate": 5.5854846263737625e-05,
"loss": 2.2322,
"step": 1188
},
{
"epoch": 0.4911013670363683,
"grad_norm": 0.4963313639163971,
"learning_rate": 5.571929117220911e-05,
"loss": 2.2048,
"step": 1190
},
{
"epoch": 0.49192674748516896,
"grad_norm": 0.564353883266449,
"learning_rate": 5.558369347269169e-05,
"loss": 2.2381,
"step": 1192
},
{
"epoch": 0.49275212793396955,
"grad_norm": 0.5161218047142029,
"learning_rate": 5.544805417537096e-05,
"loss": 2.2253,
"step": 1194
},
{
"epoch": 0.4935775083827702,
"grad_norm": 0.5393477082252502,
"learning_rate": 5.531237429074231e-05,
"loss": 2.2268,
"step": 1196
},
{
"epoch": 0.4944028888315708,
"grad_norm": 0.5184178948402405,
"learning_rate": 5.517665482960359e-05,
"loss": 2.2219,
"step": 1198
},
{
"epoch": 0.49522826928037145,
"grad_norm": 0.45648929476737976,
"learning_rate": 5.504089680304745e-05,
"loss": 2.207,
"step": 1200
},
{
"epoch": 0.49605364972917204,
"grad_norm": 0.4045144021511078,
"learning_rate": 5.490510122245384e-05,
"loss": 2.2117,
"step": 1202
},
{
"epoch": 0.49687903017797264,
"grad_norm": 0.4706050157546997,
"learning_rate": 5.47692690994825e-05,
"loss": 2.1831,
"step": 1204
},
{
"epoch": 0.4977044106267733,
"grad_norm": 0.428549587726593,
"learning_rate": 5.463340144606541e-05,
"loss": 2.207,
"step": 1206
},
{
"epoch": 0.4985297910755739,
"grad_norm": 0.41387081146240234,
"learning_rate": 5.449749927439922e-05,
"loss": 2.1875,
"step": 1208
},
{
"epoch": 0.49935517152437453,
"grad_norm": 0.4002302587032318,
"learning_rate": 5.436156359693777e-05,
"loss": 2.2019,
"step": 1210
},
{
"epoch": 0.5001805519731751,
"grad_norm": 0.4339626133441925,
"learning_rate": 5.422559542638448e-05,
"loss": 2.1963,
"step": 1212
},
{
"epoch": 0.5010059324219758,
"grad_norm": 0.47335943579673767,
"learning_rate": 5.4089595775684886e-05,
"loss": 2.2179,
"step": 1214
},
{
"epoch": 0.5018313128707764,
"grad_norm": 0.5027151107788086,
"learning_rate": 5.395356565801899e-05,
"loss": 2.1787,
"step": 1216
},
{
"epoch": 0.502656693319577,
"grad_norm": 0.6695459485054016,
"learning_rate": 5.3817506086793813e-05,
"loss": 2.1898,
"step": 1218
},
{
"epoch": 0.5034820737683776,
"grad_norm": 0.46689918637275696,
"learning_rate": 5.368141807563578e-05,
"loss": 2.198,
"step": 1220
},
{
"epoch": 0.5043074542171783,
"grad_norm": 0.42445775866508484,
"learning_rate": 5.35453026383832e-05,
"loss": 2.1951,
"step": 1222
},
{
"epoch": 0.5051328346659788,
"grad_norm": 0.4324966371059418,
"learning_rate": 5.34091607890787e-05,
"loss": 2.2097,
"step": 1224
},
{
"epoch": 0.5059582151147795,
"grad_norm": 0.5108333826065063,
"learning_rate": 5.327299354196167e-05,
"loss": 2.219,
"step": 1226
},
{
"epoch": 0.5067835955635801,
"grad_norm": 0.4502308964729309,
"learning_rate": 5.313680191146071e-05,
"loss": 2.1852,
"step": 1228
},
{
"epoch": 0.5076089760123808,
"grad_norm": 0.4594053328037262,
"learning_rate": 5.300058691218607e-05,
"loss": 2.2387,
"step": 1230
},
{
"epoch": 0.5084343564611813,
"grad_norm": 0.4119742512702942,
"learning_rate": 5.28643495589221e-05,
"loss": 2.1933,
"step": 1232
},
{
"epoch": 0.5092597369099819,
"grad_norm": 0.48443832993507385,
"learning_rate": 5.272809086661972e-05,
"loss": 2.1906,
"step": 1234
},
{
"epoch": 0.5100851173587826,
"grad_norm": 0.5221107602119446,
"learning_rate": 5.2591811850388726e-05,
"loss": 2.2195,
"step": 1236
},
{
"epoch": 0.5109104978075831,
"grad_norm": 0.47215166687965393,
"learning_rate": 5.245551352549044e-05,
"loss": 2.192,
"step": 1238
},
{
"epoch": 0.5117358782563838,
"grad_norm": 0.49213218688964844,
"learning_rate": 5.231919690732995e-05,
"loss": 2.2189,
"step": 1240
},
{
"epoch": 0.5125612587051844,
"grad_norm": 0.4713018536567688,
"learning_rate": 5.218286301144867e-05,
"loss": 2.1888,
"step": 1242
},
{
"epoch": 0.5133866391539851,
"grad_norm": 0.47867047786712646,
"learning_rate": 5.2046512853516696e-05,
"loss": 2.2104,
"step": 1244
},
{
"epoch": 0.5142120196027856,
"grad_norm": 0.4218139946460724,
"learning_rate": 5.1910147449325295e-05,
"loss": 2.1981,
"step": 1246
},
{
"epoch": 0.5150374000515863,
"grad_norm": 0.3904942572116852,
"learning_rate": 5.177376781477933e-05,
"loss": 2.1932,
"step": 1248
},
{
"epoch": 0.5158627805003869,
"grad_norm": 0.4622825086116791,
"learning_rate": 5.163737496588964e-05,
"loss": 2.2195,
"step": 1250
},
{
"epoch": 0.5166881609491876,
"grad_norm": 0.49373888969421387,
"learning_rate": 5.150096991876556e-05,
"loss": 2.2207,
"step": 1252
},
{
"epoch": 0.5175135413979881,
"grad_norm": 0.4939506947994232,
"learning_rate": 5.1364553689607264e-05,
"loss": 2.214,
"step": 1254
},
{
"epoch": 0.5183389218467888,
"grad_norm": 0.4570372402667999,
"learning_rate": 5.1228127294698225e-05,
"loss": 2.173,
"step": 1256
},
{
"epoch": 0.5191643022955894,
"grad_norm": 0.4791816174983978,
"learning_rate": 5.1091691750397675e-05,
"loss": 2.2091,
"step": 1258
},
{
"epoch": 0.51998968274439,
"grad_norm": 0.4587259292602539,
"learning_rate": 5.0955248073132975e-05,
"loss": 2.2092,
"step": 1260
},
{
"epoch": 0.5208150631931906,
"grad_norm": 0.4694560170173645,
"learning_rate": 5.081879727939214e-05,
"loss": 2.188,
"step": 1262
},
{
"epoch": 0.5216404436419912,
"grad_norm": 0.4412589371204376,
"learning_rate": 5.068234038571612e-05,
"loss": 2.2085,
"step": 1264
},
{
"epoch": 0.5224658240907919,
"grad_norm": 0.4659062325954437,
"learning_rate": 5.054587840869136e-05,
"loss": 2.206,
"step": 1266
},
{
"epoch": 0.5232912045395924,
"grad_norm": 0.45120978355407715,
"learning_rate": 5.0409412364942165e-05,
"loss": 2.2031,
"step": 1268
},
{
"epoch": 0.5241165849883931,
"grad_norm": 0.5263309478759766,
"learning_rate": 5.027294327112314e-05,
"loss": 2.2222,
"step": 1270
},
{
"epoch": 0.5249419654371937,
"grad_norm": 0.452806681394577,
"learning_rate": 5.01364721439116e-05,
"loss": 2.2038,
"step": 1272
},
{
"epoch": 0.5257673458859943,
"grad_norm": 0.4813989996910095,
"learning_rate": 5e-05,
"loss": 2.2207,
"step": 1274
},
{
"epoch": 0.5265927263347949,
"grad_norm": 0.5097128748893738,
"learning_rate": 4.986352785608842e-05,
"loss": 2.1874,
"step": 1276
},
{
"epoch": 0.5274181067835956,
"grad_norm": 0.4897962510585785,
"learning_rate": 4.9727056728876865e-05,
"loss": 2.2289,
"step": 1278
},
{
"epoch": 0.5282434872323962,
"grad_norm": 0.4591223895549774,
"learning_rate": 4.959058763505784e-05,
"loss": 2.2128,
"step": 1280
},
{
"epoch": 0.5290688676811968,
"grad_norm": 0.4303410053253174,
"learning_rate": 4.945412159130864e-05,
"loss": 2.2377,
"step": 1282
},
{
"epoch": 0.5298942481299974,
"grad_norm": 0.4496016800403595,
"learning_rate": 4.931765961428389e-05,
"loss": 2.2125,
"step": 1284
},
{
"epoch": 0.5307196285787981,
"grad_norm": 0.49691838026046753,
"learning_rate": 4.9181202720607874e-05,
"loss": 2.1943,
"step": 1286
},
{
"epoch": 0.5315450090275987,
"grad_norm": 0.4995412230491638,
"learning_rate": 4.904475192686702e-05,
"loss": 2.1847,
"step": 1288
},
{
"epoch": 0.5323703894763993,
"grad_norm": 0.47865188121795654,
"learning_rate": 4.890830824960234e-05,
"loss": 2.2069,
"step": 1290
},
{
"epoch": 0.5331957699251999,
"grad_norm": 0.40386030077934265,
"learning_rate": 4.877187270530178e-05,
"loss": 2.1561,
"step": 1292
},
{
"epoch": 0.5340211503740006,
"grad_norm": 0.42778220772743225,
"learning_rate": 4.863544631039275e-05,
"loss": 2.2129,
"step": 1294
},
{
"epoch": 0.5348465308228011,
"grad_norm": 0.4645983874797821,
"learning_rate": 4.8499030081234444e-05,
"loss": 2.2059,
"step": 1296
},
{
"epoch": 0.5356719112716017,
"grad_norm": 0.4080425202846527,
"learning_rate": 4.8362625034110354e-05,
"loss": 2.1921,
"step": 1298
},
{
"epoch": 0.5364972917204024,
"grad_norm": 0.49392467737197876,
"learning_rate": 4.8226232185220684e-05,
"loss": 2.2011,
"step": 1300
},
{
"epoch": 0.537322672169203,
"grad_norm": 0.46411821246147156,
"learning_rate": 4.808985255067473e-05,
"loss": 2.1962,
"step": 1302
},
{
"epoch": 0.5381480526180036,
"grad_norm": 0.4228093922138214,
"learning_rate": 4.7953487146483315e-05,
"loss": 2.1824,
"step": 1304
},
{
"epoch": 0.5389734330668042,
"grad_norm": 0.44242537021636963,
"learning_rate": 4.781713698855135e-05,
"loss": 2.1818,
"step": 1306
},
{
"epoch": 0.5397988135156049,
"grad_norm": 0.4092284142971039,
"learning_rate": 4.7680803092670054e-05,
"loss": 2.2122,
"step": 1308
},
{
"epoch": 0.5406241939644054,
"grad_norm": 0.4461856484413147,
"learning_rate": 4.754448647450957e-05,
"loss": 2.2202,
"step": 1310
},
{
"epoch": 0.5414495744132061,
"grad_norm": 0.42323821783065796,
"learning_rate": 4.740818814961129e-05,
"loss": 2.1846,
"step": 1312
},
{
"epoch": 0.5422749548620067,
"grad_norm": 0.4296724498271942,
"learning_rate": 4.72719091333803e-05,
"loss": 2.1705,
"step": 1314
},
{
"epoch": 0.5431003353108074,
"grad_norm": 0.44609442353248596,
"learning_rate": 4.713565044107792e-05,
"loss": 2.2015,
"step": 1316
},
{
"epoch": 0.5439257157596079,
"grad_norm": 0.44201433658599854,
"learning_rate": 4.699941308781394e-05,
"loss": 2.1932,
"step": 1318
},
{
"epoch": 0.5447510962084086,
"grad_norm": 0.4727790653705597,
"learning_rate": 4.686319808853931e-05,
"loss": 2.1913,
"step": 1320
},
{
"epoch": 0.5455764766572092,
"grad_norm": 0.4268248677253723,
"learning_rate": 4.6727006458038357e-05,
"loss": 2.1954,
"step": 1322
},
{
"epoch": 0.5464018571060097,
"grad_norm": 0.4677101969718933,
"learning_rate": 4.659083921092131e-05,
"loss": 2.2065,
"step": 1324
},
{
"epoch": 0.5472272375548104,
"grad_norm": 0.44317469000816345,
"learning_rate": 4.6454697361616816e-05,
"loss": 2.203,
"step": 1326
},
{
"epoch": 0.548052618003611,
"grad_norm": 0.40845441818237305,
"learning_rate": 4.631858192436422e-05,
"loss": 2.1871,
"step": 1328
},
{
"epoch": 0.5488779984524117,
"grad_norm": 0.42979684472084045,
"learning_rate": 4.6182493913206205e-05,
"loss": 2.1843,
"step": 1330
},
{
"epoch": 0.5497033789012122,
"grad_norm": 0.43987905979156494,
"learning_rate": 4.6046434341981034e-05,
"loss": 2.1977,
"step": 1332
},
{
"epoch": 0.5505287593500129,
"grad_norm": 0.4020345211029053,
"learning_rate": 4.591040422431512e-05,
"loss": 2.1815,
"step": 1334
},
{
"epoch": 0.5513541397988135,
"grad_norm": 0.42069509625434875,
"learning_rate": 4.5774404573615534e-05,
"loss": 2.1759,
"step": 1336
},
{
"epoch": 0.5521795202476142,
"grad_norm": 0.45546865463256836,
"learning_rate": 4.563843640306225e-05,
"loss": 2.2009,
"step": 1338
},
{
"epoch": 0.5530049006964147,
"grad_norm": 0.42900213599205017,
"learning_rate": 4.550250072560079e-05,
"loss": 2.224,
"step": 1340
},
{
"epoch": 0.5538302811452154,
"grad_norm": 0.47856220602989197,
"learning_rate": 4.5366598553934606e-05,
"loss": 2.1765,
"step": 1342
},
{
"epoch": 0.554655661594016,
"grad_norm": 0.44292357563972473,
"learning_rate": 4.523073090051751e-05,
"loss": 2.2077,
"step": 1344
},
{
"epoch": 0.5554810420428166,
"grad_norm": 0.4365878403186798,
"learning_rate": 4.5094898777546175e-05,
"loss": 2.1926,
"step": 1346
},
{
"epoch": 0.5563064224916172,
"grad_norm": 0.46365126967430115,
"learning_rate": 4.495910319695257e-05,
"loss": 2.1937,
"step": 1348
},
{
"epoch": 0.5571318029404179,
"grad_norm": 0.444984495639801,
"learning_rate": 4.4823345170396417e-05,
"loss": 2.1887,
"step": 1350
},
{
"epoch": 0.5579571833892185,
"grad_norm": 0.45117077231407166,
"learning_rate": 4.468762570925771e-05,
"loss": 2.173,
"step": 1352
},
{
"epoch": 0.558782563838019,
"grad_norm": 0.42752307653427124,
"learning_rate": 4.455194582462906e-05,
"loss": 2.1704,
"step": 1354
},
{
"epoch": 0.5596079442868197,
"grad_norm": 0.42912495136260986,
"learning_rate": 4.441630652730831e-05,
"loss": 2.1733,
"step": 1356
},
{
"epoch": 0.5604333247356204,
"grad_norm": 0.43053561449050903,
"learning_rate": 4.428070882779091e-05,
"loss": 2.1811,
"step": 1358
},
{
"epoch": 0.5612587051844209,
"grad_norm": 0.43450236320495605,
"learning_rate": 4.4145153736262387e-05,
"loss": 2.1957,
"step": 1360
},
{
"epoch": 0.5620840856332215,
"grad_norm": 0.4460732936859131,
"learning_rate": 4.400964226259087e-05,
"loss": 2.2151,
"step": 1362
},
{
"epoch": 0.5629094660820222,
"grad_norm": 0.4485209584236145,
"learning_rate": 4.387417541631952e-05,
"loss": 2.208,
"step": 1364
},
{
"epoch": 0.5637348465308228,
"grad_norm": 0.431249737739563,
"learning_rate": 4.373875420665907e-05,
"loss": 2.1702,
"step": 1366
},
{
"epoch": 0.5645602269796234,
"grad_norm": 0.42122384905815125,
"learning_rate": 4.3603379642480216e-05,
"loss": 2.1714,
"step": 1368
},
{
"epoch": 0.565385607428424,
"grad_norm": 0.4479855000972748,
"learning_rate": 4.3468052732306184e-05,
"loss": 2.1934,
"step": 1370
},
{
"epoch": 0.5662109878772247,
"grad_norm": 0.438849538564682,
"learning_rate": 4.333277448430517e-05,
"loss": 2.2139,
"step": 1372
},
{
"epoch": 0.5670363683260253,
"grad_norm": 0.4208733141422272,
"learning_rate": 4.3197545906282845e-05,
"loss": 2.2021,
"step": 1374
},
{
"epoch": 0.5678617487748259,
"grad_norm": 0.45249640941619873,
"learning_rate": 4.306236800567485e-05,
"loss": 2.2137,
"step": 1376
},
{
"epoch": 0.5686871292236265,
"grad_norm": 0.3885250985622406,
"learning_rate": 4.292724178953929e-05,
"loss": 2.1872,
"step": 1378
},
{
"epoch": 0.5695125096724272,
"grad_norm": 0.45552772283554077,
"learning_rate": 4.27921682645492e-05,
"loss": 2.1896,
"step": 1380
},
{
"epoch": 0.5703378901212277,
"grad_norm": 0.4536789357662201,
"learning_rate": 4.265714843698511e-05,
"loss": 2.1669,
"step": 1382
},
{
"epoch": 0.5711632705700284,
"grad_norm": 0.4505348801612854,
"learning_rate": 4.2522183312727496e-05,
"loss": 2.1951,
"step": 1384
},
{
"epoch": 0.571988651018829,
"grad_norm": 0.4032357931137085,
"learning_rate": 4.238727389724927e-05,
"loss": 2.1917,
"step": 1386
},
{
"epoch": 0.5728140314676297,
"grad_norm": 0.4129430651664734,
"learning_rate": 4.225242119560839e-05,
"loss": 2.2047,
"step": 1388
},
{
"epoch": 0.5736394119164302,
"grad_norm": 0.41739246249198914,
"learning_rate": 4.211762621244022e-05,
"loss": 2.1966,
"step": 1390
},
{
"epoch": 0.5744647923652308,
"grad_norm": 0.4234218895435333,
"learning_rate": 4.198288995195021e-05,
"loss": 2.1865,
"step": 1392
},
{
"epoch": 0.5752901728140315,
"grad_norm": 0.4507383704185486,
"learning_rate": 4.184821341790629e-05,
"loss": 2.1878,
"step": 1394
},
{
"epoch": 0.576115553262832,
"grad_norm": 0.4859677851200104,
"learning_rate": 4.17135976136314e-05,
"loss": 2.1886,
"step": 1396
},
{
"epoch": 0.5769409337116327,
"grad_norm": 0.48982110619544983,
"learning_rate": 4.1579043541996136e-05,
"loss": 2.1965,
"step": 1398
},
{
"epoch": 0.5777663141604333,
"grad_norm": 0.4520018696784973,
"learning_rate": 4.144455220541112e-05,
"loss": 2.1914,
"step": 1400
},
{
"epoch": 0.578591694609234,
"grad_norm": 0.46317774057388306,
"learning_rate": 4.131012460581967e-05,
"loss": 2.1601,
"step": 1402
},
{
"epoch": 0.5794170750580345,
"grad_norm": 0.4065730571746826,
"learning_rate": 4.1175761744690225e-05,
"loss": 2.1799,
"step": 1404
},
{
"epoch": 0.5802424555068352,
"grad_norm": 0.42802825570106506,
"learning_rate": 4.10414646230089e-05,
"loss": 2.1827,
"step": 1406
},
{
"epoch": 0.5810678359556358,
"grad_norm": 0.39025866985321045,
"learning_rate": 4.090723424127216e-05,
"loss": 2.1761,
"step": 1408
},
{
"epoch": 0.5818932164044365,
"grad_norm": 0.410810649394989,
"learning_rate": 4.077307159947915e-05,
"loss": 2.1966,
"step": 1410
},
{
"epoch": 0.582718596853237,
"grad_norm": 0.42078617215156555,
"learning_rate": 4.063897769712448e-05,
"loss": 2.1785,
"step": 1412
},
{
"epoch": 0.5835439773020377,
"grad_norm": 0.3961106538772583,
"learning_rate": 4.0504953533190575e-05,
"loss": 2.199,
"step": 1414
},
{
"epoch": 0.5843693577508383,
"grad_norm": 0.4196956753730774,
"learning_rate": 4.037100010614031e-05,
"loss": 2.1855,
"step": 1416
},
{
"epoch": 0.5851947381996389,
"grad_norm": 0.4017656445503235,
"learning_rate": 4.023711841390966e-05,
"loss": 2.1688,
"step": 1418
},
{
"epoch": 0.5860201186484395,
"grad_norm": 0.39641791582107544,
"learning_rate": 4.010330945390012e-05,
"loss": 2.1595,
"step": 1420
},
{
"epoch": 0.5868454990972402,
"grad_norm": 0.4096054434776306,
"learning_rate": 3.996957422297132e-05,
"loss": 2.1538,
"step": 1422
},
{
"epoch": 0.5876708795460408,
"grad_norm": 0.4470682740211487,
"learning_rate": 3.98359137174337e-05,
"loss": 2.1905,
"step": 1424
},
{
"epoch": 0.5884962599948413,
"grad_norm": 0.45044365525245667,
"learning_rate": 3.9702328933040914e-05,
"loss": 2.1748,
"step": 1426
},
{
"epoch": 0.589321640443642,
"grad_norm": 0.44126656651496887,
"learning_rate": 3.9568820864982606e-05,
"loss": 2.1731,
"step": 1428
},
{
"epoch": 0.5901470208924426,
"grad_norm": 0.4331135153770447,
"learning_rate": 3.943539050787678e-05,
"loss": 2.1852,
"step": 1430
},
{
"epoch": 0.5909724013412432,
"grad_norm": 0.44461002945899963,
"learning_rate": 3.930203885576257e-05,
"loss": 2.1626,
"step": 1432
},
{
"epoch": 0.5917977817900438,
"grad_norm": 0.3985869288444519,
"learning_rate": 3.9168766902092776e-05,
"loss": 2.1839,
"step": 1434
},
{
"epoch": 0.5926231622388445,
"grad_norm": 0.42710578441619873,
"learning_rate": 3.903557563972641e-05,
"loss": 2.1772,
"step": 1436
},
{
"epoch": 0.5934485426876451,
"grad_norm": 0.4027608036994934,
"learning_rate": 3.890246606092139e-05,
"loss": 2.1583,
"step": 1438
},
{
"epoch": 0.5942739231364457,
"grad_norm": 0.4465287923812866,
"learning_rate": 3.876943915732706e-05,
"loss": 2.1724,
"step": 1440
},
{
"epoch": 0.5950993035852463,
"grad_norm": 0.4476611912250519,
"learning_rate": 3.863649591997688e-05,
"loss": 2.1655,
"step": 1442
},
{
"epoch": 0.595924684034047,
"grad_norm": 0.4644310474395752,
"learning_rate": 3.850363733928098e-05,
"loss": 2.1907,
"step": 1444
},
{
"epoch": 0.5967500644828475,
"grad_norm": 0.4769267141819,
"learning_rate": 3.8370864405018816e-05,
"loss": 2.156,
"step": 1446
},
{
"epoch": 0.5975754449316482,
"grad_norm": 0.4150826632976532,
"learning_rate": 3.823817810633181e-05,
"loss": 2.1603,
"step": 1448
},
{
"epoch": 0.5984008253804488,
"grad_norm": 0.4119608998298645,
"learning_rate": 3.810557943171594e-05,
"loss": 2.1631,
"step": 1450
},
{
"epoch": 0.5992262058292495,
"grad_norm": 0.415431946516037,
"learning_rate": 3.797306936901439e-05,
"loss": 2.1926,
"step": 1452
},
{
"epoch": 0.60005158627805,
"grad_norm": 0.38659125566482544,
"learning_rate": 3.784064890541022e-05,
"loss": 2.1918,
"step": 1454
},
{
"epoch": 0.6008769667268506,
"grad_norm": 0.37595850229263306,
"learning_rate": 3.770831902741895e-05,
"loss": 2.1718,
"step": 1456
},
{
"epoch": 0.6017023471756513,
"grad_norm": 0.39984071254730225,
"learning_rate": 3.757608072088129e-05,
"loss": 2.1446,
"step": 1458
},
{
"epoch": 0.602527727624452,
"grad_norm": 0.4029097557067871,
"learning_rate": 3.744393497095573e-05,
"loss": 2.1848,
"step": 1460
},
{
"epoch": 0.6033531080732525,
"grad_norm": 0.42148053646087646,
"learning_rate": 3.7311882762111235e-05,
"loss": 2.1741,
"step": 1462
},
{
"epoch": 0.6041784885220531,
"grad_norm": 0.45567959547042847,
"learning_rate": 3.7179925078119905e-05,
"loss": 2.1867,
"step": 1464
},
{
"epoch": 0.6050038689708538,
"grad_norm": 0.4234828054904938,
"learning_rate": 3.704806290204963e-05,
"loss": 2.1619,
"step": 1466
},
{
"epoch": 0.6058292494196543,
"grad_norm": 0.4149649143218994,
"learning_rate": 3.691629721625677e-05,
"loss": 2.1913,
"step": 1468
},
{
"epoch": 0.606654629868455,
"grad_norm": 0.4161490797996521,
"learning_rate": 3.678462900237888e-05,
"loss": 2.1616,
"step": 1470
},
{
"epoch": 0.6074800103172556,
"grad_norm": 0.394819974899292,
"learning_rate": 3.6653059241327314e-05,
"loss": 2.1712,
"step": 1472
},
{
"epoch": 0.6083053907660563,
"grad_norm": 0.37029707431793213,
"learning_rate": 3.6521588913280024e-05,
"loss": 2.1795,
"step": 1474
},
{
"epoch": 0.6091307712148568,
"grad_norm": 0.3810674846172333,
"learning_rate": 3.6390218997674144e-05,
"loss": 2.1665,
"step": 1476
},
{
"epoch": 0.6099561516636575,
"grad_norm": 0.38873404264450073,
"learning_rate": 3.625895047319875e-05,
"loss": 2.1714,
"step": 1478
},
{
"epoch": 0.6107815321124581,
"grad_norm": 0.37483614683151245,
"learning_rate": 3.6127784317787625e-05,
"loss": 2.1974,
"step": 1480
},
{
"epoch": 0.6116069125612587,
"grad_norm": 0.3878437876701355,
"learning_rate": 3.5996721508611854e-05,
"loss": 2.1547,
"step": 1482
},
{
"epoch": 0.6124322930100593,
"grad_norm": 0.4019594192504883,
"learning_rate": 3.586576302207264e-05,
"loss": 2.185,
"step": 1484
},
{
"epoch": 0.61325767345886,
"grad_norm": 0.39813095331192017,
"learning_rate": 3.573490983379399e-05,
"loss": 2.1578,
"step": 1486
},
{
"epoch": 0.6140830539076606,
"grad_norm": 0.3950514495372772,
"learning_rate": 3.5604162918615413e-05,
"loss": 2.2005,
"step": 1488
},
{
"epoch": 0.6149084343564611,
"grad_norm": 0.4188961088657379,
"learning_rate": 3.5473523250584765e-05,
"loss": 2.1929,
"step": 1490
},
{
"epoch": 0.6157338148052618,
"grad_norm": 0.3965320885181427,
"learning_rate": 3.534299180295087e-05,
"loss": 2.1793,
"step": 1492
},
{
"epoch": 0.6165591952540624,
"grad_norm": 0.40151247382164,
"learning_rate": 3.5212569548156366e-05,
"loss": 2.1846,
"step": 1494
},
{
"epoch": 0.6173845757028631,
"grad_norm": 0.38915884494781494,
"learning_rate": 3.508225745783039e-05,
"loss": 2.162,
"step": 1496
},
{
"epoch": 0.6182099561516636,
"grad_norm": 0.39938920736312866,
"learning_rate": 3.4952056502781336e-05,
"loss": 2.193,
"step": 1498
},
{
"epoch": 0.6190353366004643,
"grad_norm": 0.40345609188079834,
"learning_rate": 3.4821967652989764e-05,
"loss": 2.1492,
"step": 1500
},
{
"epoch": 0.6198607170492649,
"grad_norm": 0.4229806363582611,
"learning_rate": 3.469199187760094e-05,
"loss": 2.1865,
"step": 1502
},
{
"epoch": 0.6206860974980655,
"grad_norm": 0.39548221230506897,
"learning_rate": 3.456213014491778e-05,
"loss": 2.1927,
"step": 1504
},
{
"epoch": 0.6215114779468661,
"grad_norm": 0.38838130235671997,
"learning_rate": 3.4432383422393666e-05,
"loss": 2.1613,
"step": 1506
},
{
"epoch": 0.6223368583956668,
"grad_norm": 0.371660441160202,
"learning_rate": 3.430275267662505e-05,
"loss": 2.1561,
"step": 1508
},
{
"epoch": 0.6231622388444674,
"grad_norm": 0.4182002544403076,
"learning_rate": 3.41732388733445e-05,
"loss": 2.1722,
"step": 1510
},
{
"epoch": 0.623987619293268,
"grad_norm": 0.402644544839859,
"learning_rate": 3.404384297741326e-05,
"loss": 2.1692,
"step": 1512
},
{
"epoch": 0.6248129997420686,
"grad_norm": 0.4064319133758545,
"learning_rate": 3.3914565952814237e-05,
"loss": 2.1838,
"step": 1514
},
{
"epoch": 0.6256383801908693,
"grad_norm": 0.4280416667461395,
"learning_rate": 3.378540876264479e-05,
"loss": 2.2136,
"step": 1516
},
{
"epoch": 0.6264637606396698,
"grad_norm": 0.4381312429904938,
"learning_rate": 3.365637236910944e-05,
"loss": 2.1611,
"step": 1518
},
{
"epoch": 0.6272891410884704,
"grad_norm": 0.43255355954170227,
"learning_rate": 3.35274577335129e-05,
"loss": 2.1719,
"step": 1520
},
{
"epoch": 0.6281145215372711,
"grad_norm": 0.42183390259742737,
"learning_rate": 3.3398665816252705e-05,
"loss": 2.1929,
"step": 1522
},
{
"epoch": 0.6289399019860717,
"grad_norm": 0.39848631620407104,
"learning_rate": 3.326999757681218e-05,
"loss": 2.1474,
"step": 1524
},
{
"epoch": 0.6297652824348723,
"grad_norm": 0.4457964599132538,
"learning_rate": 3.3141453973753324e-05,
"loss": 2.1594,
"step": 1526
},
{
"epoch": 0.6305906628836729,
"grad_norm": 0.4077092409133911,
"learning_rate": 3.301303596470951e-05,
"loss": 2.1596,
"step": 1528
},
{
"epoch": 0.6314160433324736,
"grad_norm": 0.3941916227340698,
"learning_rate": 3.288474450637857e-05,
"loss": 2.1848,
"step": 1530
},
{
"epoch": 0.6322414237812741,
"grad_norm": 0.402087539434433,
"learning_rate": 3.2756580554515434e-05,
"loss": 2.1598,
"step": 1532
},
{
"epoch": 0.6330668042300748,
"grad_norm": 0.4281771183013916,
"learning_rate": 3.262854506392519e-05,
"loss": 2.1842,
"step": 1534
},
{
"epoch": 0.6338921846788754,
"grad_norm": 0.40748128294944763,
"learning_rate": 3.25006389884559e-05,
"loss": 2.1477,
"step": 1536
},
{
"epoch": 0.6347175651276761,
"grad_norm": 0.4175991415977478,
"learning_rate": 3.237286328099149e-05,
"loss": 2.1535,
"step": 1538
},
{
"epoch": 0.6355429455764766,
"grad_norm": 0.4628128111362457,
"learning_rate": 3.2245218893444664e-05,
"loss": 2.1645,
"step": 1540
},
{
"epoch": 0.6363683260252773,
"grad_norm": 0.45409590005874634,
"learning_rate": 3.2117706776749815e-05,
"loss": 2.1625,
"step": 1542
},
{
"epoch": 0.6371937064740779,
"grad_norm": 0.39506542682647705,
"learning_rate": 3.19903278808559e-05,
"loss": 2.1765,
"step": 1544
},
{
"epoch": 0.6380190869228786,
"grad_norm": 0.3923264145851135,
"learning_rate": 3.186308315471947e-05,
"loss": 2.16,
"step": 1546
},
{
"epoch": 0.6388444673716791,
"grad_norm": 0.35540321469306946,
"learning_rate": 3.173597354629746e-05,
"loss": 2.1815,
"step": 1548
},
{
"epoch": 0.6396698478204798,
"grad_norm": 0.37577253580093384,
"learning_rate": 3.1609000002540226e-05,
"loss": 2.1604,
"step": 1550
},
{
"epoch": 0.6404952282692804,
"grad_norm": 0.38430941104888916,
"learning_rate": 3.148216346938446e-05,
"loss": 2.1789,
"step": 1552
},
{
"epoch": 0.6413206087180809,
"grad_norm": 0.3682323694229126,
"learning_rate": 3.135546489174612e-05,
"loss": 2.1718,
"step": 1554
},
{
"epoch": 0.6421459891668816,
"grad_norm": 0.38881126046180725,
"learning_rate": 3.122890521351345e-05,
"loss": 2.1798,
"step": 1556
},
{
"epoch": 0.6429713696156822,
"grad_norm": 0.41004738211631775,
"learning_rate": 3.1102485377539886e-05,
"loss": 2.1297,
"step": 1558
},
{
"epoch": 0.6437967500644829,
"grad_norm": 0.39379170536994934,
"learning_rate": 3.0976206325637046e-05,
"loss": 2.1507,
"step": 1560
},
{
"epoch": 0.6446221305132834,
"grad_norm": 0.40458956360816956,
"learning_rate": 3.085006899856777e-05,
"loss": 2.1678,
"step": 1562
},
{
"epoch": 0.6454475109620841,
"grad_norm": 0.3773816227912903,
"learning_rate": 3.072407433603901e-05,
"loss": 2.1558,
"step": 1564
},
{
"epoch": 0.6462728914108847,
"grad_norm": 0.3630047142505646,
"learning_rate": 3.059822327669494e-05,
"loss": 2.165,
"step": 1566
},
{
"epoch": 0.6470982718596853,
"grad_norm": 0.3747231066226959,
"learning_rate": 3.0472516758109847e-05,
"loss": 2.1576,
"step": 1568
},
{
"epoch": 0.6479236523084859,
"grad_norm": 0.41273626685142517,
"learning_rate": 3.034695571678127e-05,
"loss": 2.1395,
"step": 1570
},
{
"epoch": 0.6487490327572866,
"grad_norm": 0.4430343508720398,
"learning_rate": 3.0221541088122917e-05,
"loss": 2.1734,
"step": 1572
},
{
"epoch": 0.6495744132060872,
"grad_norm": 0.40412119030952454,
"learning_rate": 3.0096273806457763e-05,
"loss": 2.1532,
"step": 1574
},
{
"epoch": 0.6503997936548878,
"grad_norm": 0.37069565057754517,
"learning_rate": 2.997115480501106e-05,
"loss": 2.1957,
"step": 1576
},
{
"epoch": 0.6512251741036884,
"grad_norm": 0.38193821907043457,
"learning_rate": 2.9846185015903394e-05,
"loss": 2.1536,
"step": 1578
},
{
"epoch": 0.6520505545524891,
"grad_norm": 0.3995210826396942,
"learning_rate": 2.9721365370143722e-05,
"loss": 2.181,
"step": 1580
},
{
"epoch": 0.6528759350012897,
"grad_norm": 0.3900876045227051,
"learning_rate": 2.959669679762248e-05,
"loss": 2.1457,
"step": 1582
},
{
"epoch": 0.6537013154500902,
"grad_norm": 0.3811575174331665,
"learning_rate": 2.9472180227104628e-05,
"loss": 2.1654,
"step": 1584
},
{
"epoch": 0.6545266958988909,
"grad_norm": 0.420245885848999,
"learning_rate": 2.9347816586222654e-05,
"loss": 2.1728,
"step": 1586
},
{
"epoch": 0.6553520763476915,
"grad_norm": 0.40868568420410156,
"learning_rate": 2.9223606801469882e-05,
"loss": 2.1794,
"step": 1588
},
{
"epoch": 0.6561774567964921,
"grad_norm": 0.42787596583366394,
"learning_rate": 2.909955179819328e-05,
"loss": 2.1563,
"step": 1590
},
{
"epoch": 0.6570028372452927,
"grad_norm": 0.4084904193878174,
"learning_rate": 2.8975652500586826e-05,
"loss": 2.1449,
"step": 1592
},
{
"epoch": 0.6578282176940934,
"grad_norm": 0.4251170754432678,
"learning_rate": 2.885190983168444e-05,
"loss": 2.1528,
"step": 1594
},
{
"epoch": 0.658653598142894,
"grad_norm": 0.3818851709365845,
"learning_rate": 2.8728324713353194e-05,
"loss": 2.1351,
"step": 1596
},
{
"epoch": 0.6594789785916946,
"grad_norm": 0.4211563467979431,
"learning_rate": 2.860489806628648e-05,
"loss": 2.1689,
"step": 1598
},
{
"epoch": 0.6603043590404952,
"grad_norm": 0.3683875799179077,
"learning_rate": 2.8481630809997022e-05,
"loss": 2.1598,
"step": 1600
},
{
"epoch": 0.6611297394892959,
"grad_norm": 0.375102698802948,
"learning_rate": 2.8358523862810155e-05,
"loss": 2.1573,
"step": 1602
},
{
"epoch": 0.6619551199380964,
"grad_norm": 0.3758949637413025,
"learning_rate": 2.823557814185691e-05,
"loss": 2.1531,
"step": 1604
},
{
"epoch": 0.6627805003868971,
"grad_norm": 0.43883299827575684,
"learning_rate": 2.811279456306718e-05,
"loss": 2.1656,
"step": 1606
},
{
"epoch": 0.6636058808356977,
"grad_norm": 0.45055341720581055,
"learning_rate": 2.7990174041162997e-05,
"loss": 2.1664,
"step": 1608
},
{
"epoch": 0.6644312612844984,
"grad_norm": 0.3908529281616211,
"learning_rate": 2.7867717489651567e-05,
"loss": 2.1549,
"step": 1610
},
{
"epoch": 0.6652566417332989,
"grad_norm": 0.3923724889755249,
"learning_rate": 2.7745425820818567e-05,
"loss": 2.1702,
"step": 1612
},
{
"epoch": 0.6660820221820996,
"grad_norm": 0.39021268486976624,
"learning_rate": 2.7623299945721302e-05,
"loss": 2.1695,
"step": 1614
},
{
"epoch": 0.6669074026309002,
"grad_norm": 0.3864549398422241,
"learning_rate": 2.7501340774181934e-05,
"loss": 2.1723,
"step": 1616
},
{
"epoch": 0.6677327830797009,
"grad_norm": 0.39826998114585876,
"learning_rate": 2.7379549214780754e-05,
"loss": 2.1585,
"step": 1618
},
{
"epoch": 0.6685581635285014,
"grad_norm": 0.41055572032928467,
"learning_rate": 2.7257926174849328e-05,
"loss": 2.1842,
"step": 1620
},
{
"epoch": 0.669383543977302,
"grad_norm": 0.39198827743530273,
"learning_rate": 2.713647256046371e-05,
"loss": 2.1697,
"step": 1622
},
{
"epoch": 0.6702089244261027,
"grad_norm": 0.3906678855419159,
"learning_rate": 2.7015189276437884e-05,
"loss": 2.1513,
"step": 1624
},
{
"epoch": 0.6710343048749032,
"grad_norm": 0.39475318789482117,
"learning_rate": 2.6894077226316783e-05,
"loss": 2.1347,
"step": 1626
},
{
"epoch": 0.6718596853237039,
"grad_norm": 0.3569572865962982,
"learning_rate": 2.6773137312369756e-05,
"loss": 2.1333,
"step": 1628
},
{
"epoch": 0.6726850657725045,
"grad_norm": 0.390018105506897,
"learning_rate": 2.6652370435583712e-05,
"loss": 2.1843,
"step": 1630
},
{
"epoch": 0.6735104462213052,
"grad_norm": 0.36904361844062805,
"learning_rate": 2.6531777495656406e-05,
"loss": 2.1879,
"step": 1632
},
{
"epoch": 0.6743358266701057,
"grad_norm": 0.4072750210762024,
"learning_rate": 2.641135939098991e-05,
"loss": 2.1533,
"step": 1634
},
{
"epoch": 0.6751612071189064,
"grad_norm": 0.40311920642852783,
"learning_rate": 2.6291117018683676e-05,
"loss": 2.1724,
"step": 1636
},
{
"epoch": 0.675986587567707,
"grad_norm": 0.39063480496406555,
"learning_rate": 2.6171051274528057e-05,
"loss": 2.1462,
"step": 1638
},
{
"epoch": 0.6768119680165076,
"grad_norm": 0.38110098242759705,
"learning_rate": 2.6051163052997528e-05,
"loss": 2.1527,
"step": 1640
},
{
"epoch": 0.6776373484653082,
"grad_norm": 0.37648850679397583,
"learning_rate": 2.5931453247243963e-05,
"loss": 2.148,
"step": 1642
},
{
"epoch": 0.6784627289141089,
"grad_norm": 0.3929273188114166,
"learning_rate": 2.5811922749090188e-05,
"loss": 2.1445,
"step": 1644
},
{
"epoch": 0.6792881093629095,
"grad_norm": 0.3670939803123474,
"learning_rate": 2.569257244902311e-05,
"loss": 2.1263,
"step": 1646
},
{
"epoch": 0.68011348981171,
"grad_norm": 0.36248132586479187,
"learning_rate": 2.5573403236187287e-05,
"loss": 2.1398,
"step": 1648
},
{
"epoch": 0.6809388702605107,
"grad_norm": 0.37559229135513306,
"learning_rate": 2.5454415998378073e-05,
"loss": 2.1421,
"step": 1650
},
{
"epoch": 0.6817642507093113,
"grad_norm": 0.3896510899066925,
"learning_rate": 2.5335611622035198e-05,
"loss": 2.1469,
"step": 1652
},
{
"epoch": 0.6825896311581119,
"grad_norm": 0.3731015920639038,
"learning_rate": 2.5216990992236135e-05,
"loss": 2.1438,
"step": 1654
},
{
"epoch": 0.6834150116069125,
"grad_norm": 0.40263113379478455,
"learning_rate": 2.509855499268938e-05,
"loss": 2.1549,
"step": 1656
},
{
"epoch": 0.6842403920557132,
"grad_norm": 0.3805602192878723,
"learning_rate": 2.498030450572808e-05,
"loss": 2.1888,
"step": 1658
},
{
"epoch": 0.6850657725045138,
"grad_norm": 0.3967129588127136,
"learning_rate": 2.486224041230319e-05,
"loss": 2.1246,
"step": 1660
},
{
"epoch": 0.6858911529533144,
"grad_norm": 0.39894452691078186,
"learning_rate": 2.474436359197714e-05,
"loss": 2.1717,
"step": 1662
},
{
"epoch": 0.686716533402115,
"grad_norm": 0.38463294506073,
"learning_rate": 2.4626674922917207e-05,
"loss": 2.1304,
"step": 1664
},
{
"epoch": 0.6875419138509157,
"grad_norm": 0.38346001505851746,
"learning_rate": 2.4509175281888957e-05,
"loss": 2.1603,
"step": 1666
},
{
"epoch": 0.6883672942997163,
"grad_norm": 0.3817712664604187,
"learning_rate": 2.4391865544249687e-05,
"loss": 2.1568,
"step": 1668
},
{
"epoch": 0.6891926747485169,
"grad_norm": 0.41481316089630127,
"learning_rate": 2.4274746583941975e-05,
"loss": 2.1913,
"step": 1670
},
{
"epoch": 0.6900180551973175,
"grad_norm": 0.3687182366847992,
"learning_rate": 2.415781927348709e-05,
"loss": 2.1775,
"step": 1672
},
{
"epoch": 0.6908434356461182,
"grad_norm": 0.39612531661987305,
"learning_rate": 2.4041084483978616e-05,
"loss": 2.1445,
"step": 1674
},
{
"epoch": 0.6916688160949187,
"grad_norm": 0.405380517244339,
"learning_rate": 2.39245430850758e-05,
"loss": 2.1408,
"step": 1676
},
{
"epoch": 0.6924941965437194,
"grad_norm": 0.40155723690986633,
"learning_rate": 2.380819594499718e-05,
"loss": 2.1568,
"step": 1678
},
{
"epoch": 0.69331957699252,
"grad_norm": 0.3637157082557678,
"learning_rate": 2.369204393051409e-05,
"loss": 2.1374,
"step": 1680
},
{
"epoch": 0.6941449574413207,
"grad_norm": 0.3784123659133911,
"learning_rate": 2.3576087906944195e-05,
"loss": 2.1413,
"step": 1682
},
{
"epoch": 0.6949703378901212,
"grad_norm": 0.3910234570503235,
"learning_rate": 2.34603287381451e-05,
"loss": 2.1441,
"step": 1684
},
{
"epoch": 0.6957957183389218,
"grad_norm": 0.38652199506759644,
"learning_rate": 2.33447672865078e-05,
"loss": 2.1682,
"step": 1686
},
{
"epoch": 0.6966210987877225,
"grad_norm": 0.3730972409248352,
"learning_rate": 2.322940441295036e-05,
"loss": 2.0956,
"step": 1688
},
{
"epoch": 0.697446479236523,
"grad_norm": 0.38551416993141174,
"learning_rate": 2.3114240976911466e-05,
"loss": 2.1744,
"step": 1690
},
{
"epoch": 0.6982718596853237,
"grad_norm": 0.38724467158317566,
"learning_rate": 2.2999277836343973e-05,
"loss": 2.1704,
"step": 1692
},
{
"epoch": 0.6990972401341243,
"grad_norm": 0.4469600319862366,
"learning_rate": 2.2884515847708648e-05,
"loss": 2.1334,
"step": 1694
},
{
"epoch": 0.699922620582925,
"grad_norm": 0.38431516289711,
"learning_rate": 2.276995586596763e-05,
"loss": 2.1727,
"step": 1696
},
{
"epoch": 0.7007480010317255,
"grad_norm": 0.39912179112434387,
"learning_rate": 2.265559874457815e-05,
"loss": 2.1739,
"step": 1698
},
{
"epoch": 0.7015733814805262,
"grad_norm": 0.3802652955055237,
"learning_rate": 2.254144533548616e-05,
"loss": 2.1575,
"step": 1700
},
{
"epoch": 0.7023987619293268,
"grad_norm": 0.40614211559295654,
"learning_rate": 2.2427496489119986e-05,
"loss": 2.1458,
"step": 1702
},
{
"epoch": 0.7032241423781275,
"grad_norm": 0.39186087250709534,
"learning_rate": 2.2313753054383958e-05,
"loss": 2.1891,
"step": 1704
},
{
"epoch": 0.704049522826928,
"grad_norm": 0.4107969105243683,
"learning_rate": 2.220021587865218e-05,
"loss": 2.134,
"step": 1706
},
{
"epoch": 0.7048749032757287,
"grad_norm": 0.4244343936443329,
"learning_rate": 2.2086885807762093e-05,
"loss": 2.1632,
"step": 1708
},
{
"epoch": 0.7057002837245293,
"grad_norm": 0.3787813186645508,
"learning_rate": 2.197376368600825e-05,
"loss": 2.1424,
"step": 1710
},
{
"epoch": 0.7065256641733298,
"grad_norm": 0.41439372301101685,
"learning_rate": 2.1860850356136015e-05,
"loss": 2.1364,
"step": 1712
},
{
"epoch": 0.7073510446221305,
"grad_norm": 0.4174649715423584,
"learning_rate": 2.1748146659335256e-05,
"loss": 2.1588,
"step": 1714
},
{
"epoch": 0.7081764250709311,
"grad_norm": 0.4233049750328064,
"learning_rate": 2.163565343523416e-05,
"loss": 2.1649,
"step": 1716
},
{
"epoch": 0.7090018055197318,
"grad_norm": 0.4185529947280884,
"learning_rate": 2.152337152189287e-05,
"loss": 2.1661,
"step": 1718
},
{
"epoch": 0.7098271859685323,
"grad_norm": 0.374424010515213,
"learning_rate": 2.1411301755797293e-05,
"loss": 2.1505,
"step": 1720
},
{
"epoch": 0.710652566417333,
"grad_norm": 0.3833918869495392,
"learning_rate": 2.1299444971852876e-05,
"loss": 2.1284,
"step": 1722
},
{
"epoch": 0.7114779468661336,
"grad_norm": 0.3803061544895172,
"learning_rate": 2.118780200337836e-05,
"loss": 2.1704,
"step": 1724
},
{
"epoch": 0.7123033273149342,
"grad_norm": 0.36754024028778076,
"learning_rate": 2.107637368209966e-05,
"loss": 2.1817,
"step": 1726
},
{
"epoch": 0.7131287077637348,
"grad_norm": 0.38716909289360046,
"learning_rate": 2.096516083814346e-05,
"loss": 2.1664,
"step": 1728
},
{
"epoch": 0.7139540882125355,
"grad_norm": 0.373674601316452,
"learning_rate": 2.085416430003131e-05,
"loss": 2.141,
"step": 1730
},
{
"epoch": 0.7147794686613361,
"grad_norm": 0.40231001377105713,
"learning_rate": 2.074338489467322e-05,
"loss": 2.1501,
"step": 1732
},
{
"epoch": 0.7156048491101367,
"grad_norm": 0.3659866452217102,
"learning_rate": 2.0632823447361593e-05,
"loss": 2.15,
"step": 1734
},
{
"epoch": 0.7164302295589373,
"grad_norm": 0.3718184232711792,
"learning_rate": 2.0522480781765153e-05,
"loss": 2.1515,
"step": 1736
},
{
"epoch": 0.717255610007738,
"grad_norm": 0.3444000780582428,
"learning_rate": 2.0412357719922593e-05,
"loss": 2.1162,
"step": 1738
},
{
"epoch": 0.7180809904565386,
"grad_norm": 0.37671560049057007,
"learning_rate": 2.0302455082236716e-05,
"loss": 2.1391,
"step": 1740
},
{
"epoch": 0.7189063709053392,
"grad_norm": 0.3651193380355835,
"learning_rate": 2.019277368746812e-05,
"loss": 2.1568,
"step": 1742
},
{
"epoch": 0.7197317513541398,
"grad_norm": 0.3553023934364319,
"learning_rate": 2.008331435272917e-05,
"loss": 2.1679,
"step": 1744
},
{
"epoch": 0.7205571318029405,
"grad_norm": 0.36929070949554443,
"learning_rate": 1.997407789347799e-05,
"loss": 2.1376,
"step": 1746
},
{
"epoch": 0.721382512251741,
"grad_norm": 0.355954110622406,
"learning_rate": 1.9865065123512194e-05,
"loss": 2.1242,
"step": 1748
},
{
"epoch": 0.7222078927005416,
"grad_norm": 0.3636607527732849,
"learning_rate": 1.9756276854963002e-05,
"loss": 2.1559,
"step": 1750
},
{
"epoch": 0.7230332731493423,
"grad_norm": 0.37725040316581726,
"learning_rate": 1.9647713898289154e-05,
"loss": 2.131,
"step": 1752
},
{
"epoch": 0.7238586535981429,
"grad_norm": 0.3439856171607971,
"learning_rate": 1.953937706227078e-05,
"loss": 2.1499,
"step": 1754
},
{
"epoch": 0.7246840340469435,
"grad_norm": 0.3753752112388611,
"learning_rate": 1.943126715400353e-05,
"loss": 2.1567,
"step": 1756
},
{
"epoch": 0.7255094144957441,
"grad_norm": 0.3847252428531647,
"learning_rate": 1.9323384978892357e-05,
"loss": 2.149,
"step": 1758
},
{
"epoch": 0.7263347949445448,
"grad_norm": 0.37161755561828613,
"learning_rate": 1.921573134064569e-05,
"loss": 2.1394,
"step": 1760
},
{
"epoch": 0.7271601753933453,
"grad_norm": 0.3596508204936981,
"learning_rate": 1.9108307041269418e-05,
"loss": 2.1586,
"step": 1762
},
{
"epoch": 0.727985555842146,
"grad_norm": 0.37054118514060974,
"learning_rate": 1.9001112881060845e-05,
"loss": 2.1511,
"step": 1764
},
{
"epoch": 0.7288109362909466,
"grad_norm": 0.3763185739517212,
"learning_rate": 1.8894149658602767e-05,
"loss": 2.1515,
"step": 1766
},
{
"epoch": 0.7296363167397473,
"grad_norm": 0.3773542642593384,
"learning_rate": 1.878741817075754e-05,
"loss": 2.1281,
"step": 1768
},
{
"epoch": 0.7304616971885478,
"grad_norm": 0.35355421900749207,
"learning_rate": 1.8680919212661097e-05,
"loss": 2.1556,
"step": 1770
},
{
"epoch": 0.7312870776373485,
"grad_norm": 0.36710578203201294,
"learning_rate": 1.8574653577717116e-05,
"loss": 2.1278,
"step": 1772
},
{
"epoch": 0.7321124580861491,
"grad_norm": 0.36785537004470825,
"learning_rate": 1.8468622057590978e-05,
"loss": 2.1674,
"step": 1774
},
{
"epoch": 0.7329378385349496,
"grad_norm": 0.3685971796512604,
"learning_rate": 1.836282544220398e-05,
"loss": 2.1231,
"step": 1776
},
{
"epoch": 0.7337632189837503,
"grad_norm": 0.3786368668079376,
"learning_rate": 1.825726451972739e-05,
"loss": 2.1318,
"step": 1778
},
{
"epoch": 0.734588599432551,
"grad_norm": 0.3615210950374603,
"learning_rate": 1.815194007657659e-05,
"loss": 2.1545,
"step": 1780
},
{
"epoch": 0.7354139798813516,
"grad_norm": 0.3552665412425995,
"learning_rate": 1.804685289740526e-05,
"loss": 2.1302,
"step": 1782
},
{
"epoch": 0.7362393603301521,
"grad_norm": 0.348206490278244,
"learning_rate": 1.794200376509944e-05,
"loss": 2.1812,
"step": 1784
},
{
"epoch": 0.7370647407789528,
"grad_norm": 0.3670431971549988,
"learning_rate": 1.7837393460771795e-05,
"loss": 2.149,
"step": 1786
},
{
"epoch": 0.7378901212277534,
"grad_norm": 0.33708223700523376,
"learning_rate": 1.7733022763755725e-05,
"loss": 2.1302,
"step": 1788
},
{
"epoch": 0.7387155016765541,
"grad_norm": 0.3709441125392914,
"learning_rate": 1.762889245159957e-05,
"loss": 2.1327,
"step": 1790
},
{
"epoch": 0.7395408821253546,
"grad_norm": 0.3586266338825226,
"learning_rate": 1.7525003300060904e-05,
"loss": 2.179,
"step": 1792
},
{
"epoch": 0.7403662625741553,
"grad_norm": 0.3644587993621826,
"learning_rate": 1.7421356083100615e-05,
"loss": 2.1404,
"step": 1794
},
{
"epoch": 0.7411916430229559,
"grad_norm": 0.35930705070495605,
"learning_rate": 1.7317951572877237e-05,
"loss": 2.1395,
"step": 1796
},
{
"epoch": 0.7420170234717565,
"grad_norm": 0.34341469407081604,
"learning_rate": 1.7214790539741167e-05,
"loss": 2.1475,
"step": 1798
},
{
"epoch": 0.7428424039205571,
"grad_norm": 0.3495519757270813,
"learning_rate": 1.7111873752228907e-05,
"loss": 2.182,
"step": 1800
},
{
"epoch": 0.7436677843693578,
"grad_norm": 0.37678736448287964,
"learning_rate": 1.700920197705742e-05,
"loss": 2.1497,
"step": 1802
},
{
"epoch": 0.7444931648181584,
"grad_norm": 0.34368839859962463,
"learning_rate": 1.6906775979118307e-05,
"loss": 2.1353,
"step": 1804
},
{
"epoch": 0.745318545266959,
"grad_norm": 0.354757696390152,
"learning_rate": 1.680459652147216e-05,
"loss": 2.1045,
"step": 1806
},
{
"epoch": 0.7461439257157596,
"grad_norm": 0.3509856164455414,
"learning_rate": 1.67026643653429e-05,
"loss": 2.1332,
"step": 1808
},
{
"epoch": 0.7469693061645603,
"grad_norm": 0.3583715856075287,
"learning_rate": 1.6600980270112055e-05,
"loss": 2.1543,
"step": 1810
},
{
"epoch": 0.7477946866133608,
"grad_norm": 0.3545357882976532,
"learning_rate": 1.6499544993313183e-05,
"loss": 2.1412,
"step": 1812
},
{
"epoch": 0.7486200670621614,
"grad_norm": 0.35939571261405945,
"learning_rate": 1.6398359290626135e-05,
"loss": 2.1579,
"step": 1814
},
{
"epoch": 0.7494454475109621,
"grad_norm": 0.3477668762207031,
"learning_rate": 1.629742391587144e-05,
"loss": 2.1298,
"step": 1816
},
{
"epoch": 0.7502708279597627,
"grad_norm": 0.36954382061958313,
"learning_rate": 1.619673962100479e-05,
"loss": 2.1591,
"step": 1818
},
{
"epoch": 0.7510962084085633,
"grad_norm": 0.37065422534942627,
"learning_rate": 1.6096307156111312e-05,
"loss": 2.136,
"step": 1820
},
{
"epoch": 0.7519215888573639,
"grad_norm": 0.37768426537513733,
"learning_rate": 1.5996127269400023e-05,
"loss": 2.1142,
"step": 1822
},
{
"epoch": 0.7527469693061646,
"grad_norm": 0.36868610978126526,
"learning_rate": 1.589620070719834e-05,
"loss": 2.1538,
"step": 1824
},
{
"epoch": 0.7535723497549652,
"grad_norm": 0.3411722183227539,
"learning_rate": 1.579652821394632e-05,
"loss": 2.1304,
"step": 1826
},
{
"epoch": 0.7543977302037658,
"grad_norm": 0.33972230553627014,
"learning_rate": 1.5697110532191366e-05,
"loss": 2.1262,
"step": 1828
},
{
"epoch": 0.7552231106525664,
"grad_norm": 0.3463905453681946,
"learning_rate": 1.559794840258249e-05,
"loss": 2.1427,
"step": 1830
},
{
"epoch": 0.7560484911013671,
"grad_norm": 0.40490660071372986,
"learning_rate": 1.549904256386488e-05,
"loss": 2.1464,
"step": 1832
},
{
"epoch": 0.7568738715501676,
"grad_norm": 0.3470916450023651,
"learning_rate": 1.5400393752874454e-05,
"loss": 2.1483,
"step": 1834
},
{
"epoch": 0.7576992519989683,
"grad_norm": 0.36317574977874756,
"learning_rate": 1.5302002704532192e-05,
"loss": 2.1393,
"step": 1836
},
{
"epoch": 0.7585246324477689,
"grad_norm": 0.34546831250190735,
"learning_rate": 1.5203870151838884e-05,
"loss": 2.1396,
"step": 1838
},
{
"epoch": 0.7593500128965696,
"grad_norm": 0.3429735600948334,
"learning_rate": 1.51059968258695e-05,
"loss": 2.168,
"step": 1840
},
{
"epoch": 0.7601753933453701,
"grad_norm": 0.3513084053993225,
"learning_rate": 1.5008383455767828e-05,
"loss": 2.1301,
"step": 1842
},
{
"epoch": 0.7610007737941707,
"grad_norm": 0.37402522563934326,
"learning_rate": 1.4911030768741003e-05,
"loss": 2.1552,
"step": 1844
},
{
"epoch": 0.7618261542429714,
"grad_norm": 0.34295645356178284,
"learning_rate": 1.4813939490054095e-05,
"loss": 2.1551,
"step": 1846
},
{
"epoch": 0.7626515346917719,
"grad_norm": 0.3719686269760132,
"learning_rate": 1.471711034302477e-05,
"loss": 2.1525,
"step": 1848
},
{
"epoch": 0.7634769151405726,
"grad_norm": 0.3539029657840729,
"learning_rate": 1.4620544049017787e-05,
"loss": 2.1572,
"step": 1850
},
{
"epoch": 0.7643022955893732,
"grad_norm": 0.35728850960731506,
"learning_rate": 1.4524241327439708e-05,
"loss": 2.1202,
"step": 1852
},
{
"epoch": 0.7651276760381739,
"grad_norm": 0.36121803522109985,
"learning_rate": 1.4428202895733505e-05,
"loss": 2.1547,
"step": 1854
},
{
"epoch": 0.7659530564869744,
"grad_norm": 0.32610949873924255,
"learning_rate": 1.4332429469373226e-05,
"loss": 2.1201,
"step": 1856
},
{
"epoch": 0.7667784369357751,
"grad_norm": 0.34636059403419495,
"learning_rate": 1.4236921761858685e-05,
"loss": 2.1117,
"step": 1858
},
{
"epoch": 0.7676038173845757,
"grad_norm": 0.34160706400871277,
"learning_rate": 1.4141680484710095e-05,
"loss": 2.1156,
"step": 1860
},
{
"epoch": 0.7684291978333764,
"grad_norm": 0.343398779630661,
"learning_rate": 1.4046706347462819e-05,
"loss": 2.1488,
"step": 1862
},
{
"epoch": 0.7692545782821769,
"grad_norm": 0.3486791253089905,
"learning_rate": 1.395200005766204e-05,
"loss": 2.1227,
"step": 1864
},
{
"epoch": 0.7700799587309776,
"grad_norm": 0.3483271598815918,
"learning_rate": 1.3857562320857526e-05,
"loss": 2.1152,
"step": 1866
},
{
"epoch": 0.7709053391797782,
"grad_norm": 0.341259241104126,
"learning_rate": 1.3763393840598338e-05,
"loss": 2.1577,
"step": 1868
},
{
"epoch": 0.7717307196285788,
"grad_norm": 0.3476760983467102,
"learning_rate": 1.3669495318427666e-05,
"loss": 2.1201,
"step": 1870
},
{
"epoch": 0.7725561000773794,
"grad_norm": 0.34470468759536743,
"learning_rate": 1.3575867453877488e-05,
"loss": 2.1348,
"step": 1872
},
{
"epoch": 0.77338148052618,
"grad_norm": 0.3454931974411011,
"learning_rate": 1.3482510944463445e-05,
"loss": 2.1165,
"step": 1874
},
{
"epoch": 0.7742068609749807,
"grad_norm": 0.3486715257167816,
"learning_rate": 1.3389426485679607e-05,
"loss": 2.1365,
"step": 1876
},
{
"epoch": 0.7750322414237812,
"grad_norm": 0.35253047943115234,
"learning_rate": 1.3296614770993293e-05,
"loss": 2.1237,
"step": 1878
},
{
"epoch": 0.7758576218725819,
"grad_norm": 0.33677321672439575,
"learning_rate": 1.320407649183995e-05,
"loss": 2.1594,
"step": 1880
},
{
"epoch": 0.7766830023213825,
"grad_norm": 0.34186917543411255,
"learning_rate": 1.3111812337617924e-05,
"loss": 2.1212,
"step": 1882
},
{
"epoch": 0.7775083827701831,
"grad_norm": 0.33094942569732666,
"learning_rate": 1.3019822995683395e-05,
"loss": 2.1489,
"step": 1884
},
{
"epoch": 0.7783337632189837,
"grad_norm": 0.3544219434261322,
"learning_rate": 1.2928109151345196e-05,
"loss": 2.1515,
"step": 1886
},
{
"epoch": 0.7791591436677844,
"grad_norm": 0.34201499819755554,
"learning_rate": 1.2836671487859754e-05,
"loss": 2.1533,
"step": 1888
},
{
"epoch": 0.779984524116585,
"grad_norm": 0.34465253353118896,
"learning_rate": 1.274551068642601e-05,
"loss": 2.1618,
"step": 1890
},
{
"epoch": 0.7808099045653856,
"grad_norm": 0.3615592420101166,
"learning_rate": 1.2654627426180277e-05,
"loss": 2.1255,
"step": 1892
},
{
"epoch": 0.7816352850141862,
"grad_norm": 0.3284577429294586,
"learning_rate": 1.2564022384191243e-05,
"loss": 2.1225,
"step": 1894
},
{
"epoch": 0.7824606654629869,
"grad_norm": 0.34497541189193726,
"learning_rate": 1.2473696235454896e-05,
"loss": 2.138,
"step": 1896
},
{
"epoch": 0.7832860459117874,
"grad_norm": 0.32846176624298096,
"learning_rate": 1.2383649652889501e-05,
"loss": 2.1531,
"step": 1898
},
{
"epoch": 0.7841114263605881,
"grad_norm": 0.32436785101890564,
"learning_rate": 1.2293883307330622e-05,
"loss": 2.1174,
"step": 1900
},
{
"epoch": 0.7849368068093887,
"grad_norm": 0.358806848526001,
"learning_rate": 1.2204397867526069e-05,
"loss": 2.1205,
"step": 1902
},
{
"epoch": 0.7857621872581894,
"grad_norm": 0.3195858597755432,
"learning_rate": 1.2115194000130903e-05,
"loss": 2.1283,
"step": 1904
},
{
"epoch": 0.7865875677069899,
"grad_norm": 0.33586594462394714,
"learning_rate": 1.202627236970259e-05,
"loss": 2.1421,
"step": 1906
},
{
"epoch": 0.7874129481557905,
"grad_norm": 0.3359866440296173,
"learning_rate": 1.1937633638695883e-05,
"loss": 2.1415,
"step": 1908
},
{
"epoch": 0.7882383286045912,
"grad_norm": 0.33562368154525757,
"learning_rate": 1.1849278467458048e-05,
"loss": 2.1375,
"step": 1910
},
{
"epoch": 0.7890637090533918,
"grad_norm": 0.32866016030311584,
"learning_rate": 1.1761207514223822e-05,
"loss": 2.1432,
"step": 1912
},
{
"epoch": 0.7898890895021924,
"grad_norm": 0.3484470248222351,
"learning_rate": 1.1673421435110522e-05,
"loss": 2.1184,
"step": 1914
},
{
"epoch": 0.790714469950993,
"grad_norm": 0.31653323769569397,
"learning_rate": 1.1585920884113261e-05,
"loss": 2.1209,
"step": 1916
},
{
"epoch": 0.7915398503997937,
"grad_norm": 0.32875025272369385,
"learning_rate": 1.1498706513099949e-05,
"loss": 2.1288,
"step": 1918
},
{
"epoch": 0.7923652308485942,
"grad_norm": 0.341203510761261,
"learning_rate": 1.1411778971806558e-05,
"loss": 2.1301,
"step": 1920
},
{
"epoch": 0.7931906112973949,
"grad_norm": 0.32854121923446655,
"learning_rate": 1.1325138907832122e-05,
"loss": 2.1586,
"step": 1922
},
{
"epoch": 0.7940159917461955,
"grad_norm": 0.33962199091911316,
"learning_rate": 1.1238786966634052e-05,
"loss": 2.1464,
"step": 1924
},
{
"epoch": 0.7948413721949962,
"grad_norm": 0.3306322991847992,
"learning_rate": 1.1152723791523318e-05,
"loss": 2.155,
"step": 1926
},
{
"epoch": 0.7956667526437967,
"grad_norm": 0.3397471606731415,
"learning_rate": 1.1066950023659545e-05,
"loss": 2.1381,
"step": 1928
},
{
"epoch": 0.7964921330925974,
"grad_norm": 0.3407101035118103,
"learning_rate": 1.0981466302046406e-05,
"loss": 2.1408,
"step": 1930
},
{
"epoch": 0.797317513541398,
"grad_norm": 0.34207984805107117,
"learning_rate": 1.0896273263526663e-05,
"loss": 2.1525,
"step": 1932
},
{
"epoch": 0.7981428939901986,
"grad_norm": 0.34368953108787537,
"learning_rate": 1.0811371542777571e-05,
"loss": 2.1234,
"step": 1934
},
{
"epoch": 0.7989682744389992,
"grad_norm": 0.3221174478530884,
"learning_rate": 1.0726761772306137e-05,
"loss": 2.1171,
"step": 1936
},
{
"epoch": 0.7997936548877999,
"grad_norm": 0.3296349048614502,
"learning_rate": 1.0642444582444322e-05,
"loss": 2.1252,
"step": 1938
},
{
"epoch": 0.8006190353366005,
"grad_norm": 0.3344162404537201,
"learning_rate": 1.055842060134446e-05,
"loss": 2.1235,
"step": 1940
},
{
"epoch": 0.801444415785401,
"grad_norm": 0.33650079369544983,
"learning_rate": 1.0474690454974445e-05,
"loss": 2.1216,
"step": 1942
},
{
"epoch": 0.8022697962342017,
"grad_norm": 0.34013551473617554,
"learning_rate": 1.0391254767113169e-05,
"loss": 2.1177,
"step": 1944
},
{
"epoch": 0.8030951766830023,
"grad_norm": 0.3282875716686249,
"learning_rate": 1.0308114159345883e-05,
"loss": 2.1248,
"step": 1946
},
{
"epoch": 0.803920557131803,
"grad_norm": 0.34421661496162415,
"learning_rate": 1.0225269251059483e-05,
"loss": 2.138,
"step": 1948
},
{
"epoch": 0.8047459375806035,
"grad_norm": 0.339918851852417,
"learning_rate": 1.0142720659437955e-05,
"loss": 2.1582,
"step": 1950
},
{
"epoch": 0.8055713180294042,
"grad_norm": 0.3220682144165039,
"learning_rate": 1.0060468999457767e-05,
"loss": 2.124,
"step": 1952
},
{
"epoch": 0.8063966984782048,
"grad_norm": 0.3417309522628784,
"learning_rate": 9.978514883883266e-06,
"loss": 2.1242,
"step": 1954
},
{
"epoch": 0.8072220789270054,
"grad_norm": 0.3351197838783264,
"learning_rate": 9.89685892326218e-06,
"loss": 2.1391,
"step": 1956
},
{
"epoch": 0.808047459375806,
"grad_norm": 0.3231659233570099,
"learning_rate": 9.815501725920972e-06,
"loss": 2.1391,
"step": 1958
},
{
"epoch": 0.8088728398246067,
"grad_norm": 0.32000261545181274,
"learning_rate": 9.734443897960372e-06,
"loss": 2.1292,
"step": 1960
},
{
"epoch": 0.8096982202734073,
"grad_norm": 0.34334588050842285,
"learning_rate": 9.653686043250848e-06,
"loss": 2.128,
"step": 1962
},
{
"epoch": 0.8105236007222079,
"grad_norm": 0.3440937101840973,
"learning_rate": 9.573228763428093e-06,
"loss": 2.1214,
"step": 1964
},
{
"epoch": 0.8113489811710085,
"grad_norm": 0.33723366260528564,
"learning_rate": 9.493072657888597e-06,
"loss": 2.143,
"step": 1966
},
{
"epoch": 0.8121743616198092,
"grad_norm": 0.34954842925071716,
"learning_rate": 9.413218323785084e-06,
"loss": 2.1214,
"step": 1968
},
{
"epoch": 0.8129997420686097,
"grad_norm": 0.3392115533351898,
"learning_rate": 9.333666356022158e-06,
"loss": 2.1349,
"step": 1970
},
{
"epoch": 0.8138251225174103,
"grad_norm": 0.32914984226226807,
"learning_rate": 9.254417347251815e-06,
"loss": 2.1342,
"step": 1972
},
{
"epoch": 0.814650502966211,
"grad_norm": 0.3356204628944397,
"learning_rate": 9.175471887869042e-06,
"loss": 2.1477,
"step": 1974
},
{
"epoch": 0.8154758834150116,
"grad_norm": 0.3248778283596039,
"learning_rate": 9.096830566007452e-06,
"loss": 2.1274,
"step": 1976
},
{
"epoch": 0.8163012638638122,
"grad_norm": 0.3299380838871002,
"learning_rate": 9.018493967534835e-06,
"loss": 2.1791,
"step": 1978
},
{
"epoch": 0.8171266443126128,
"grad_norm": 0.3330373466014862,
"learning_rate": 8.940462676048855e-06,
"loss": 2.1213,
"step": 1980
},
{
"epoch": 0.8179520247614135,
"grad_norm": 0.35274261236190796,
"learning_rate": 8.862737272872657e-06,
"loss": 2.1165,
"step": 1982
},
{
"epoch": 0.8187774052102141,
"grad_norm": 0.32712242007255554,
"learning_rate": 8.78531833705058e-06,
"loss": 2.1038,
"step": 1984
},
{
"epoch": 0.8196027856590147,
"grad_norm": 0.332295298576355,
"learning_rate": 8.708206445343791e-06,
"loss": 2.1441,
"step": 1986
},
{
"epoch": 0.8204281661078153,
"grad_norm": 0.33298367261886597,
"learning_rate": 8.631402172226061e-06,
"loss": 2.1153,
"step": 1988
},
{
"epoch": 0.821253546556616,
"grad_norm": 0.33189794421195984,
"learning_rate": 8.554906089879411e-06,
"loss": 2.1432,
"step": 1990
},
{
"epoch": 0.8220789270054165,
"grad_norm": 0.3431938886642456,
"learning_rate": 8.478718768189875e-06,
"loss": 2.1066,
"step": 1992
},
{
"epoch": 0.8229043074542172,
"grad_norm": 0.33813905715942383,
"learning_rate": 8.402840774743281e-06,
"loss": 2.1481,
"step": 1994
},
{
"epoch": 0.8237296879030178,
"grad_norm": 0.3725796341896057,
"learning_rate": 8.327272674820974e-06,
"loss": 2.1325,
"step": 1996
},
{
"epoch": 0.8245550683518185,
"grad_norm": 0.3199642300605774,
"learning_rate": 8.252015031395672e-06,
"loss": 2.1408,
"step": 1998
},
{
"epoch": 0.825380448800619,
"grad_norm": 0.32736214995384216,
"learning_rate": 8.177068405127198e-06,
"loss": 2.1613,
"step": 2000
},
{
"epoch": 0.8262058292494197,
"grad_norm": 0.3452676236629486,
"learning_rate": 8.10243335435834e-06,
"loss": 2.1339,
"step": 2002
},
{
"epoch": 0.8270312096982203,
"grad_norm": 0.3281574547290802,
"learning_rate": 8.028110435110709e-06,
"loss": 2.1142,
"step": 2004
},
{
"epoch": 0.8278565901470208,
"grad_norm": 0.32193323969841003,
"learning_rate": 7.954100201080538e-06,
"loss": 2.132,
"step": 2006
},
{
"epoch": 0.8286819705958215,
"grad_norm": 0.3320666253566742,
"learning_rate": 7.880403203634657e-06,
"loss": 2.1461,
"step": 2008
},
{
"epoch": 0.8295073510446221,
"grad_norm": 0.3370856046676636,
"learning_rate": 7.807019991806247e-06,
"loss": 2.1272,
"step": 2010
},
{
"epoch": 0.8303327314934228,
"grad_norm": 0.320910781621933,
"learning_rate": 7.733951112290894e-06,
"loss": 2.0938,
"step": 2012
},
{
"epoch": 0.8311581119422233,
"grad_norm": 0.3256378471851349,
"learning_rate": 7.661197109442409e-06,
"loss": 2.1145,
"step": 2014
},
{
"epoch": 0.831983492391024,
"grad_norm": 0.3386472761631012,
"learning_rate": 7.588758525268808e-06,
"loss": 2.1497,
"step": 2016
},
{
"epoch": 0.8328088728398246,
"grad_norm": 0.3254534900188446,
"learning_rate": 7.516635899428331e-06,
"loss": 2.1381,
"step": 2018
},
{
"epoch": 0.8336342532886252,
"grad_norm": 0.33104851841926575,
"learning_rate": 7.444829769225286e-06,
"loss": 2.1403,
"step": 2020
},
{
"epoch": 0.8344596337374258,
"grad_norm": 0.3282712399959564,
"learning_rate": 7.373340669606205e-06,
"loss": 2.1488,
"step": 2022
},
{
"epoch": 0.8352850141862265,
"grad_norm": 0.3317227363586426,
"learning_rate": 7.30216913315574e-06,
"loss": 2.0801,
"step": 2024
},
{
"epoch": 0.8361103946350271,
"grad_norm": 0.3185071051120758,
"learning_rate": 7.231315690092733e-06,
"loss": 2.1338,
"step": 2026
},
{
"epoch": 0.8369357750838277,
"grad_norm": 0.3287782073020935,
"learning_rate": 7.1607808682663315e-06,
"loss": 2.1071,
"step": 2028
},
{
"epoch": 0.8377611555326283,
"grad_norm": 0.33722466230392456,
"learning_rate": 7.090565193151905e-06,
"loss": 2.1764,
"step": 2030
},
{
"epoch": 0.838586535981429,
"grad_norm": 0.32282426953315735,
"learning_rate": 7.020669187847278e-06,
"loss": 2.1369,
"step": 2032
},
{
"epoch": 0.8394119164302296,
"grad_norm": 0.3378765881061554,
"learning_rate": 6.951093373068779e-06,
"loss": 2.144,
"step": 2034
},
{
"epoch": 0.8402372968790301,
"grad_norm": 0.32491791248321533,
"learning_rate": 6.881838267147334e-06,
"loss": 2.1442,
"step": 2036
},
{
"epoch": 0.8410626773278308,
"grad_norm": 0.34893786907196045,
"learning_rate": 6.812904386024644e-06,
"loss": 2.1357,
"step": 2038
},
{
"epoch": 0.8418880577766314,
"grad_norm": 0.3235718011856079,
"learning_rate": 6.744292243249306e-06,
"loss": 2.099,
"step": 2040
},
{
"epoch": 0.842713438225432,
"grad_norm": 0.31949764490127563,
"learning_rate": 6.676002349973027e-06,
"loss": 2.1416,
"step": 2042
},
{
"epoch": 0.8435388186742326,
"grad_norm": 0.3352109491825104,
"learning_rate": 6.608035214946806e-06,
"loss": 2.1137,
"step": 2044
},
{
"epoch": 0.8443641991230333,
"grad_norm": 0.32356804609298706,
"learning_rate": 6.540391344517105e-06,
"loss": 2.0998,
"step": 2046
},
{
"epoch": 0.8451895795718339,
"grad_norm": 0.33448925614356995,
"learning_rate": 6.473071242622131e-06,
"loss": 2.129,
"step": 2048
},
{
"epoch": 0.8460149600206345,
"grad_norm": 0.32092127203941345,
"learning_rate": 6.406075410788037e-06,
"loss": 2.1348,
"step": 2050
},
{
"epoch": 0.8468403404694351,
"grad_norm": 0.32737207412719727,
"learning_rate": 6.339404348125205e-06,
"loss": 2.1259,
"step": 2052
},
{
"epoch": 0.8476657209182358,
"grad_norm": 0.32666078209877014,
"learning_rate": 6.273058551324568e-06,
"loss": 2.1058,
"step": 2054
},
{
"epoch": 0.8484911013670363,
"grad_norm": 0.323102205991745,
"learning_rate": 6.207038514653818e-06,
"loss": 2.1286,
"step": 2056
},
{
"epoch": 0.849316481815837,
"grad_norm": 0.32299962639808655,
"learning_rate": 6.141344729953802e-06,
"loss": 2.1255,
"step": 2058
},
{
"epoch": 0.8501418622646376,
"grad_norm": 0.3298660218715668,
"learning_rate": 6.075977686634831e-06,
"loss": 2.1577,
"step": 2060
},
{
"epoch": 0.8509672427134383,
"grad_norm": 0.32427558302879333,
"learning_rate": 6.010937871673017e-06,
"loss": 2.1234,
"step": 2062
},
{
"epoch": 0.8517926231622388,
"grad_norm": 0.33669888973236084,
"learning_rate": 5.94622576960669e-06,
"loss": 2.1322,
"step": 2064
},
{
"epoch": 0.8526180036110395,
"grad_norm": 0.3118768036365509,
"learning_rate": 5.88184186253275e-06,
"loss": 2.1201,
"step": 2066
},
{
"epoch": 0.8534433840598401,
"grad_norm": 0.3149246871471405,
"learning_rate": 5.817786630103067e-06,
"loss": 2.1169,
"step": 2068
},
{
"epoch": 0.8542687645086408,
"grad_norm": 0.310986191034317,
"learning_rate": 5.754060549520956e-06,
"loss": 2.1262,
"step": 2070
},
{
"epoch": 0.8550941449574413,
"grad_norm": 0.315181702375412,
"learning_rate": 5.690664095537568e-06,
"loss": 2.1019,
"step": 2072
},
{
"epoch": 0.8559195254062419,
"grad_norm": 0.30971235036849976,
"learning_rate": 5.627597740448398e-06,
"loss": 2.1539,
"step": 2074
},
{
"epoch": 0.8567449058550426,
"grad_norm": 0.33076462149620056,
"learning_rate": 5.5648619540897395e-06,
"loss": 2.1236,
"step": 2076
},
{
"epoch": 0.8575702863038431,
"grad_norm": 0.3266281485557556,
"learning_rate": 5.502457203835187e-06,
"loss": 2.1454,
"step": 2078
},
{
"epoch": 0.8583956667526438,
"grad_norm": 0.329086571931839,
"learning_rate": 5.4403839545921595e-06,
"loss": 2.1057,
"step": 2080
},
{
"epoch": 0.8592210472014444,
"grad_norm": 0.315960168838501,
"learning_rate": 5.378642668798428e-06,
"loss": 2.135,
"step": 2082
},
{
"epoch": 0.8600464276502451,
"grad_norm": 0.31883350014686584,
"learning_rate": 5.317233806418708e-06,
"loss": 2.1407,
"step": 2084
},
{
"epoch": 0.8608718080990456,
"grad_norm": 0.3129589259624481,
"learning_rate": 5.2561578249411824e-06,
"loss": 2.107,
"step": 2086
},
{
"epoch": 0.8616971885478463,
"grad_norm": 0.3180556297302246,
"learning_rate": 5.19541517937408e-06,
"loss": 2.1278,
"step": 2088
},
{
"epoch": 0.8625225689966469,
"grad_norm": 0.30829039216041565,
"learning_rate": 5.135006322242386e-06,
"loss": 2.1136,
"step": 2090
},
{
"epoch": 0.8633479494454475,
"grad_norm": 0.3156387209892273,
"learning_rate": 5.074931703584352e-06,
"loss": 2.1406,
"step": 2092
},
{
"epoch": 0.8641733298942481,
"grad_norm": 0.31208303570747375,
"learning_rate": 5.015191770948241e-06,
"loss": 2.1258,
"step": 2094
},
{
"epoch": 0.8649987103430488,
"grad_norm": 0.31232208013534546,
"learning_rate": 4.955786969388909e-06,
"loss": 2.1439,
"step": 2096
},
{
"epoch": 0.8658240907918494,
"grad_norm": 0.3171435594558716,
"learning_rate": 4.896717741464524e-06,
"loss": 2.1412,
"step": 2098
},
{
"epoch": 0.86664947124065,
"grad_norm": 0.30812162160873413,
"learning_rate": 4.837984527233314e-06,
"loss": 2.138,
"step": 2100
},
{
"epoch": 0.8674748516894506,
"grad_norm": 0.307925820350647,
"learning_rate": 4.77958776425022e-06,
"loss": 2.1345,
"step": 2102
},
{
"epoch": 0.8683002321382512,
"grad_norm": 0.309372216463089,
"learning_rate": 4.721527887563659e-06,
"loss": 2.1448,
"step": 2104
},
{
"epoch": 0.8691256125870519,
"grad_norm": 0.31039103865623474,
"learning_rate": 4.663805329712318e-06,
"loss": 2.1158,
"step": 2106
},
{
"epoch": 0.8699509930358524,
"grad_norm": 0.3017139136791229,
"learning_rate": 4.6064205207218546e-06,
"loss": 2.1628,
"step": 2108
},
{
"epoch": 0.8707763734846531,
"grad_norm": 0.30297982692718506,
"learning_rate": 4.549373888101793e-06,
"loss": 2.1455,
"step": 2110
},
{
"epoch": 0.8716017539334537,
"grad_norm": 0.31339317560195923,
"learning_rate": 4.492665856842249e-06,
"loss": 2.1317,
"step": 2112
},
{
"epoch": 0.8724271343822543,
"grad_norm": 0.30679646134376526,
"learning_rate": 4.4362968494108145e-06,
"loss": 2.1319,
"step": 2114
},
{
"epoch": 0.8732525148310549,
"grad_norm": 0.31017109751701355,
"learning_rate": 4.3802672857494006e-06,
"loss": 2.1495,
"step": 2116
},
{
"epoch": 0.8740778952798556,
"grad_norm": 0.31378743052482605,
"learning_rate": 4.324577583271089e-06,
"loss": 2.1022,
"step": 2118
},
{
"epoch": 0.8749032757286562,
"grad_norm": 0.30802688002586365,
"learning_rate": 4.269228156857069e-06,
"loss": 2.1201,
"step": 2120
},
{
"epoch": 0.8757286561774568,
"grad_norm": 0.3153627812862396,
"learning_rate": 4.2142194188534934e-06,
"loss": 2.1629,
"step": 2122
},
{
"epoch": 0.8765540366262574,
"grad_norm": 0.3161097466945648,
"learning_rate": 4.159551779068438e-06,
"loss": 2.1497,
"step": 2124
},
{
"epoch": 0.8773794170750581,
"grad_norm": 0.310384064912796,
"learning_rate": 4.1052256447688285e-06,
"loss": 2.135,
"step": 2126
},
{
"epoch": 0.8782047975238586,
"grad_norm": 0.332768976688385,
"learning_rate": 4.051241420677427e-06,
"loss": 2.1394,
"step": 2128
},
{
"epoch": 0.8790301779726593,
"grad_norm": 0.3080803155899048,
"learning_rate": 3.997599508969829e-06,
"loss": 2.1377,
"step": 2130
},
{
"epoch": 0.8798555584214599,
"grad_norm": 0.314807653427124,
"learning_rate": 3.9443003092714095e-06,
"loss": 2.1421,
"step": 2132
},
{
"epoch": 0.8806809388702606,
"grad_norm": 0.31590813398361206,
"learning_rate": 3.891344218654403e-06,
"loss": 2.1268,
"step": 2134
},
{
"epoch": 0.8815063193190611,
"grad_norm": 0.3047875463962555,
"learning_rate": 3.8387316316349285e-06,
"loss": 2.1248,
"step": 2136
},
{
"epoch": 0.8823316997678617,
"grad_norm": 0.3278840482234955,
"learning_rate": 3.7864629401700214e-06,
"loss": 2.1316,
"step": 2138
},
{
"epoch": 0.8831570802166624,
"grad_norm": 0.3061862587928772,
"learning_rate": 3.7345385336547856e-06,
"loss": 2.1468,
"step": 2140
},
{
"epoch": 0.8839824606654629,
"grad_norm": 0.31418314576148987,
"learning_rate": 3.682958798919406e-06,
"loss": 2.1235,
"step": 2142
},
{
"epoch": 0.8848078411142636,
"grad_norm": 0.31270700693130493,
"learning_rate": 3.631724120226321e-06,
"loss": 2.1345,
"step": 2144
},
{
"epoch": 0.8856332215630642,
"grad_norm": 0.3076173961162567,
"learning_rate": 3.5808348792673364e-06,
"loss": 2.1069,
"step": 2146
},
{
"epoch": 0.8864586020118649,
"grad_norm": 0.3199022114276886,
"learning_rate": 3.5302914551608112e-06,
"loss": 2.1474,
"step": 2148
},
{
"epoch": 0.8872839824606654,
"grad_norm": 0.30249086022377014,
"learning_rate": 3.480094224448788e-06,
"loss": 2.1373,
"step": 2150
},
{
"epoch": 0.8881093629094661,
"grad_norm": 0.3049810528755188,
"learning_rate": 3.4302435610942372e-06,
"loss": 2.1069,
"step": 2152
},
{
"epoch": 0.8889347433582667,
"grad_norm": 0.310183048248291,
"learning_rate": 3.3807398364782307e-06,
"loss": 2.154,
"step": 2154
},
{
"epoch": 0.8897601238070674,
"grad_norm": 0.30666887760162354,
"learning_rate": 3.331583419397194e-06,
"loss": 2.101,
"step": 2156
},
{
"epoch": 0.8905855042558679,
"grad_norm": 0.30306148529052734,
"learning_rate": 3.2827746760601573e-06,
"loss": 2.1107,
"step": 2158
},
{
"epoch": 0.8914108847046686,
"grad_norm": 0.30855709314346313,
"learning_rate": 3.2343139700860168e-06,
"loss": 2.1451,
"step": 2160
},
{
"epoch": 0.8922362651534692,
"grad_norm": 0.31602942943573,
"learning_rate": 3.186201662500865e-06,
"loss": 2.1116,
"step": 2162
},
{
"epoch": 0.8930616456022697,
"grad_norm": 0.3141464293003082,
"learning_rate": 3.138438111735231e-06,
"loss": 2.1339,
"step": 2164
},
{
"epoch": 0.8938870260510704,
"grad_norm": 0.3070449233055115,
"learning_rate": 3.0910236736214794e-06,
"loss": 2.1485,
"step": 2166
},
{
"epoch": 0.894712406499871,
"grad_norm": 0.29712599515914917,
"learning_rate": 3.043958701391114e-06,
"loss": 2.1151,
"step": 2168
},
{
"epoch": 0.8955377869486717,
"grad_norm": 0.30952054262161255,
"learning_rate": 2.9972435456721627e-06,
"loss": 2.1224,
"step": 2170
},
{
"epoch": 0.8963631673974722,
"grad_norm": 0.3056076169013977,
"learning_rate": 2.9508785544865856e-06,
"loss": 2.1457,
"step": 2172
},
{
"epoch": 0.8971885478462729,
"grad_norm": 0.2979845702648163,
"learning_rate": 2.9048640732476317e-06,
"loss": 2.1149,
"step": 2174
},
{
"epoch": 0.8980139282950735,
"grad_norm": 0.30862563848495483,
"learning_rate": 2.8592004447573207e-06,
"loss": 2.1382,
"step": 2176
},
{
"epoch": 0.8988393087438741,
"grad_norm": 0.3080708682537079,
"learning_rate": 2.813888009203841e-06,
"loss": 2.1036,
"step": 2178
},
{
"epoch": 0.8996646891926747,
"grad_norm": 0.3009863793849945,
"learning_rate": 2.768927104159058e-06,
"loss": 2.1386,
"step": 2180
},
{
"epoch": 0.9004900696414754,
"grad_norm": 0.3038215637207031,
"learning_rate": 2.724318064575976e-06,
"loss": 2.1344,
"step": 2182
},
{
"epoch": 0.901315450090276,
"grad_norm": 0.3082687556743622,
"learning_rate": 2.6800612227862453e-06,
"loss": 2.1287,
"step": 2184
},
{
"epoch": 0.9021408305390766,
"grad_norm": 0.29829367995262146,
"learning_rate": 2.6361569084976723e-06,
"loss": 2.1251,
"step": 2186
},
{
"epoch": 0.9029662109878772,
"grad_norm": 0.30370181798934937,
"learning_rate": 2.592605448791807e-06,
"loss": 2.1409,
"step": 2188
},
{
"epoch": 0.9037915914366779,
"grad_norm": 0.30194738507270813,
"learning_rate": 2.5494071681214483e-06,
"loss": 2.0957,
"step": 2190
},
{
"epoch": 0.9046169718854785,
"grad_norm": 0.3110347092151642,
"learning_rate": 2.5065623883082867e-06,
"loss": 2.1373,
"step": 2192
},
{
"epoch": 0.905442352334279,
"grad_norm": 0.3016178607940674,
"learning_rate": 2.4640714285404544e-06,
"loss": 2.1553,
"step": 2194
},
{
"epoch": 0.9062677327830797,
"grad_norm": 0.30545875430107117,
"learning_rate": 2.421934605370163e-06,
"loss": 2.1334,
"step": 2196
},
{
"epoch": 0.9070931132318804,
"grad_norm": 0.29015982151031494,
"learning_rate": 2.3801522327113802e-06,
"loss": 2.1405,
"step": 2198
},
{
"epoch": 0.9079184936806809,
"grad_norm": 0.29557955265045166,
"learning_rate": 2.338724621837435e-06,
"loss": 2.1282,
"step": 2200
},
{
"epoch": 0.9087438741294815,
"grad_norm": 0.3093981444835663,
"learning_rate": 2.2976520813787594e-06,
"loss": 2.1162,
"step": 2202
},
{
"epoch": 0.9095692545782822,
"grad_norm": 0.3128198981285095,
"learning_rate": 2.2569349173205133e-06,
"loss": 2.1056,
"step": 2204
},
{
"epoch": 0.9103946350270828,
"grad_norm": 0.2990127205848694,
"learning_rate": 2.2165734330003686e-06,
"loss": 2.1289,
"step": 2206
},
{
"epoch": 0.9112200154758834,
"grad_norm": 0.3216819167137146,
"learning_rate": 2.176567929106249e-06,
"loss": 2.1068,
"step": 2208
},
{
"epoch": 0.912045395924684,
"grad_norm": 0.3107984960079193,
"learning_rate": 2.1369187036740235e-06,
"loss": 2.1332,
"step": 2210
},
{
"epoch": 0.9128707763734847,
"grad_norm": 0.2950679063796997,
"learning_rate": 2.0976260520853886e-06,
"loss": 2.1573,
"step": 2212
},
{
"epoch": 0.9136961568222852,
"grad_norm": 0.3093186914920807,
"learning_rate": 2.0586902670655606e-06,
"loss": 2.0812,
"step": 2214
},
{
"epoch": 0.9145215372710859,
"grad_norm": 0.2955740690231323,
"learning_rate": 2.02011163868116e-06,
"loss": 2.1069,
"step": 2216
},
{
"epoch": 0.9153469177198865,
"grad_norm": 0.3023936450481415,
"learning_rate": 1.9818904543380643e-06,
"loss": 2.1372,
"step": 2218
},
{
"epoch": 0.9161722981686872,
"grad_norm": 0.3137916922569275,
"learning_rate": 1.9440269987791914e-06,
"loss": 2.1333,
"step": 2220
},
{
"epoch": 0.9169976786174877,
"grad_norm": 0.3078385591506958,
"learning_rate": 1.90652155408248e-06,
"loss": 2.1218,
"step": 2222
},
{
"epoch": 0.9178230590662884,
"grad_norm": 0.29822996258735657,
"learning_rate": 1.8693743996586742e-06,
"loss": 2.148,
"step": 2224
},
{
"epoch": 0.918648439515089,
"grad_norm": 0.30666473507881165,
"learning_rate": 1.8325858122493432e-06,
"loss": 2.1107,
"step": 2226
},
{
"epoch": 0.9194738199638897,
"grad_norm": 0.3042726516723633,
"learning_rate": 1.7961560659247646e-06,
"loss": 2.1354,
"step": 2228
},
{
"epoch": 0.9202992004126902,
"grad_norm": 0.306774765253067,
"learning_rate": 1.7600854320819038e-06,
"loss": 2.1636,
"step": 2230
},
{
"epoch": 0.9211245808614908,
"grad_norm": 0.29949676990509033,
"learning_rate": 1.7243741794423619e-06,
"loss": 2.1297,
"step": 2232
},
{
"epoch": 0.9219499613102915,
"grad_norm": 0.3098452687263489,
"learning_rate": 1.6890225740504251e-06,
"loss": 2.1298,
"step": 2234
},
{
"epoch": 0.922775341759092,
"grad_norm": 0.3078421354293823,
"learning_rate": 1.6540308792710235e-06,
"loss": 2.1078,
"step": 2236
},
{
"epoch": 0.9236007222078927,
"grad_norm": 0.31042400002479553,
"learning_rate": 1.6193993557878317e-06,
"loss": 2.1276,
"step": 2238
},
{
"epoch": 0.9244261026566933,
"grad_norm": 0.30934637784957886,
"learning_rate": 1.5851282616012653e-06,
"loss": 2.151,
"step": 2240
},
{
"epoch": 0.925251483105494,
"grad_norm": 0.3095110356807709,
"learning_rate": 1.5512178520266096e-06,
"loss": 2.1373,
"step": 2242
},
{
"epoch": 0.9260768635542945,
"grad_norm": 0.2958972454071045,
"learning_rate": 1.5176683796920721e-06,
"loss": 2.1429,
"step": 2244
},
{
"epoch": 0.9269022440030952,
"grad_norm": 0.29824674129486084,
"learning_rate": 1.4844800945369498e-06,
"loss": 2.1453,
"step": 2246
},
{
"epoch": 0.9277276244518958,
"grad_norm": 0.29917067289352417,
"learning_rate": 1.4516532438097196e-06,
"loss": 2.1218,
"step": 2248
},
{
"epoch": 0.9285530049006964,
"grad_norm": 0.2987802028656006,
"learning_rate": 1.419188072066241e-06,
"loss": 2.1268,
"step": 2250
},
{
"epoch": 0.929378385349497,
"grad_norm": 0.29865482449531555,
"learning_rate": 1.3870848211678833e-06,
"loss": 2.1087,
"step": 2252
},
{
"epoch": 0.9302037657982977,
"grad_norm": 0.28756940364837646,
"learning_rate": 1.3553437302797733e-06,
"loss": 2.1003,
"step": 2254
},
{
"epoch": 0.9310291462470983,
"grad_norm": 0.307478129863739,
"learning_rate": 1.3239650358689737e-06,
"loss": 2.1079,
"step": 2256
},
{
"epoch": 0.9318545266958989,
"grad_norm": 0.298652708530426,
"learning_rate": 1.292948971702773e-06,
"loss": 2.1487,
"step": 2258
},
{
"epoch": 0.9326799071446995,
"grad_norm": 0.2891993224620819,
"learning_rate": 1.2622957688468717e-06,
"loss": 2.1359,
"step": 2260
},
{
"epoch": 0.9335052875935002,
"grad_norm": 0.30007168650627136,
"learning_rate": 1.2320056556637205e-06,
"loss": 2.1583,
"step": 2262
},
{
"epoch": 0.9343306680423007,
"grad_norm": 0.3034486472606659,
"learning_rate": 1.2020788578107956e-06,
"loss": 2.1427,
"step": 2264
},
{
"epoch": 0.9351560484911013,
"grad_norm": 0.30218076705932617,
"learning_rate": 1.1725155982389158e-06,
"loss": 2.1206,
"step": 2266
},
{
"epoch": 0.935981428939902,
"grad_norm": 0.305792897939682,
"learning_rate": 1.1433160971905942e-06,
"loss": 2.1063,
"step": 2268
},
{
"epoch": 0.9368068093887026,
"grad_norm": 0.2913326919078827,
"learning_rate": 1.1144805721983841e-06,
"loss": 2.1368,
"step": 2270
},
{
"epoch": 0.9376321898375032,
"grad_norm": 0.2973470985889435,
"learning_rate": 1.0860092380832632e-06,
"loss": 2.106,
"step": 2272
},
{
"epoch": 0.9384575702863038,
"grad_norm": 0.29396718740463257,
"learning_rate": 1.0579023069530346e-06,
"loss": 2.1007,
"step": 2274
},
{
"epoch": 0.9392829507351045,
"grad_norm": 0.29689350724220276,
"learning_rate": 1.0301599882007462e-06,
"loss": 2.1312,
"step": 2276
},
{
"epoch": 0.9401083311839051,
"grad_norm": 0.2933015823364258,
"learning_rate": 1.0027824885031288e-06,
"loss": 2.1144,
"step": 2278
},
{
"epoch": 0.9409337116327057,
"grad_norm": 0.30149662494659424,
"learning_rate": 9.75770011819066e-07,
"loss": 2.1526,
"step": 2280
},
{
"epoch": 0.9417590920815063,
"grad_norm": 0.3010120689868927,
"learning_rate": 9.491227593880492e-07,
"loss": 2.1455,
"step": 2282
},
{
"epoch": 0.942584472530307,
"grad_norm": 0.3034345209598541,
"learning_rate": 9.228409297287132e-07,
"loss": 2.1181,
"step": 2284
},
{
"epoch": 0.9434098529791075,
"grad_norm": 0.3068033754825592,
"learning_rate": 8.96924718637332e-07,
"loss": 2.1583,
"step": 2286
},
{
"epoch": 0.9442352334279082,
"grad_norm": 0.29393401741981506,
"learning_rate": 8.713743191863633e-07,
"loss": 2.157,
"step": 2288
},
{
"epoch": 0.9450606138767088,
"grad_norm": 0.2999117970466614,
"learning_rate": 8.46189921723034e-07,
"loss": 2.1656,
"step": 2290
},
{
"epoch": 0.9458859943255095,
"grad_norm": 0.2952830195426941,
"learning_rate": 8.21371713867869e-07,
"loss": 2.1328,
"step": 2292
},
{
"epoch": 0.94671137477431,
"grad_norm": 0.29716044664382935,
"learning_rate": 7.969198805133638e-07,
"loss": 2.1334,
"step": 2294
},
{
"epoch": 0.9475367552231106,
"grad_norm": 0.298360675573349,
"learning_rate": 7.728346038225475e-07,
"loss": 2.127,
"step": 2296
},
{
"epoch": 0.9483621356719113,
"grad_norm": 0.3036721348762512,
"learning_rate": 7.491160632276562e-07,
"loss": 2.1199,
"step": 2298
},
{
"epoch": 0.9491875161207118,
"grad_norm": 0.2898666262626648,
"learning_rate": 7.25764435428794e-07,
"loss": 2.0981,
"step": 2300
},
{
"epoch": 0.9500128965695125,
"grad_norm": 0.29367583990097046,
"learning_rate": 7.027798943925967e-07,
"loss": 2.1074,
"step": 2302
},
{
"epoch": 0.9508382770183131,
"grad_norm": 0.2900161147117615,
"learning_rate": 6.801626113509651e-07,
"loss": 2.1371,
"step": 2304
},
{
"epoch": 0.9516636574671138,
"grad_norm": 0.31588950753211975,
"learning_rate": 6.579127547997721e-07,
"loss": 2.1346,
"step": 2306
},
{
"epoch": 0.9524890379159143,
"grad_norm": 0.30221107602119446,
"learning_rate": 6.360304904976022e-07,
"loss": 2.1732,
"step": 2308
},
{
"epoch": 0.953314418364715,
"grad_norm": 0.2927485406398773,
"learning_rate": 6.145159814645362e-07,
"loss": 2.0866,
"step": 2310
},
{
"epoch": 0.9541397988135156,
"grad_norm": 0.29365116357803345,
"learning_rate": 5.933693879809132e-07,
"loss": 2.1252,
"step": 2312
},
{
"epoch": 0.9549651792623163,
"grad_norm": 0.29774293303489685,
"learning_rate": 5.725908675861535e-07,
"loss": 2.134,
"step": 2314
},
{
"epoch": 0.9557905597111168,
"grad_norm": 0.30118051171302795,
"learning_rate": 5.521805750775877e-07,
"loss": 2.1385,
"step": 2316
},
{
"epoch": 0.9566159401599175,
"grad_norm": 0.2982546389102936,
"learning_rate": 5.32138662509285e-07,
"loss": 2.1316,
"step": 2318
},
{
"epoch": 0.9574413206087181,
"grad_norm": 0.29799342155456543,
"learning_rate": 5.124652791909324e-07,
"loss": 2.14,
"step": 2320
},
{
"epoch": 0.9582667010575187,
"grad_norm": 0.29669389128685,
"learning_rate": 4.931605716867293e-07,
"loss": 2.1077,
"step": 2322
},
{
"epoch": 0.9590920815063193,
"grad_norm": 0.29356813430786133,
"learning_rate": 4.742246838142672e-07,
"loss": 2.123,
"step": 2324
},
{
"epoch": 0.95991746195512,
"grad_norm": 0.3008041977882385,
"learning_rate": 4.5565775664351275e-07,
"loss": 2.1124,
"step": 2326
},
{
"epoch": 0.9607428424039206,
"grad_norm": 0.3006245195865631,
"learning_rate": 4.3745992849568707e-07,
"loss": 2.1207,
"step": 2328
},
{
"epoch": 0.9615682228527211,
"grad_norm": 0.28674232959747314,
"learning_rate": 4.196313349422942e-07,
"loss": 2.1394,
"step": 2330
},
{
"epoch": 0.9623936033015218,
"grad_norm": 0.301516056060791,
"learning_rate": 4.021721088040775e-07,
"loss": 2.1235,
"step": 2332
},
{
"epoch": 0.9632189837503224,
"grad_norm": 0.30167555809020996,
"learning_rate": 3.8508238015003697e-07,
"loss": 2.1171,
"step": 2334
},
{
"epoch": 0.964044364199123,
"grad_norm": 0.2917619049549103,
"learning_rate": 3.6836227629648e-07,
"loss": 2.1139,
"step": 2336
},
{
"epoch": 0.9648697446479236,
"grad_norm": 0.28927844762802124,
"learning_rate": 3.520119218060336e-07,
"loss": 2.1377,
"step": 2338
},
{
"epoch": 0.9656951250967243,
"grad_norm": 0.3015216290950775,
"learning_rate": 3.360314384867558e-07,
"loss": 2.1335,
"step": 2340
},
{
"epoch": 0.9665205055455249,
"grad_norm": 0.2863229215145111,
"learning_rate": 3.2042094539120883e-07,
"loss": 2.1282,
"step": 2342
},
{
"epoch": 0.9673458859943255,
"grad_norm": 0.28870850801467896,
"learning_rate": 3.051805588155654e-07,
"loss": 2.133,
"step": 2344
},
{
"epoch": 0.9681712664431261,
"grad_norm": 0.29704856872558594,
"learning_rate": 2.903103922987649e-07,
"loss": 2.1267,
"step": 2346
},
{
"epoch": 0.9689966468919268,
"grad_norm": 0.2941645681858063,
"learning_rate": 2.7581055662164736e-07,
"loss": 2.1464,
"step": 2348
},
{
"epoch": 0.9698220273407274,
"grad_norm": 0.30156826972961426,
"learning_rate": 2.6168115980614303e-07,
"loss": 2.1208,
"step": 2350
},
{
"epoch": 0.970647407789528,
"grad_norm": 0.2918868958950043,
"learning_rate": 2.4792230711444544e-07,
"loss": 2.1258,
"step": 2352
},
{
"epoch": 0.9714727882383286,
"grad_norm": 0.2962958514690399,
"learning_rate": 2.3453410104825046e-07,
"loss": 2.1517,
"step": 2354
},
{
"epoch": 0.9722981686871293,
"grad_norm": 0.3073914051055908,
"learning_rate": 2.2151664134799076e-07,
"loss": 2.0907,
"step": 2356
},
{
"epoch": 0.9731235491359298,
"grad_norm": 0.292449414730072,
"learning_rate": 2.0887002499207498e-07,
"loss": 2.1287,
"step": 2358
},
{
"epoch": 0.9739489295847304,
"grad_norm": 0.2905406355857849,
"learning_rate": 1.9659434619617723e-07,
"loss": 2.0952,
"step": 2360
},
{
"epoch": 0.9747743100335311,
"grad_norm": 0.2932960093021393,
"learning_rate": 1.846896964125433e-07,
"loss": 2.1425,
"step": 2362
},
{
"epoch": 0.9755996904823317,
"grad_norm": 0.30320852994918823,
"learning_rate": 1.731561643292856e-07,
"loss": 2.1331,
"step": 2364
},
{
"epoch": 0.9764250709311323,
"grad_norm": 0.28756237030029297,
"learning_rate": 1.6199383586975037e-07,
"loss": 2.1249,
"step": 2366
},
{
"epoch": 0.9772504513799329,
"grad_norm": 0.2928750514984131,
"learning_rate": 1.5120279419185701e-07,
"loss": 2.1222,
"step": 2368
},
{
"epoch": 0.9780758318287336,
"grad_norm": 0.2939901649951935,
"learning_rate": 1.4078311968749313e-07,
"loss": 2.1391,
"step": 2370
},
{
"epoch": 0.9789012122775341,
"grad_norm": 0.2927398681640625,
"learning_rate": 1.307348899818983e-07,
"loss": 2.1125,
"step": 2372
},
{
"epoch": 0.9797265927263348,
"grad_norm": 0.294331431388855,
"learning_rate": 1.2105817993309786e-07,
"loss": 2.0947,
"step": 2374
},
{
"epoch": 0.9805519731751354,
"grad_norm": 0.29420092701911926,
"learning_rate": 1.1175306163135335e-07,
"loss": 2.1467,
"step": 2376
},
{
"epoch": 0.9813773536239361,
"grad_norm": 0.29944416880607605,
"learning_rate": 1.0281960439860739e-07,
"loss": 2.146,
"step": 2378
},
{
"epoch": 0.9822027340727366,
"grad_norm": 0.2895830273628235,
"learning_rate": 9.425787478796744e-08,
"loss": 2.1096,
"step": 2380
},
{
"epoch": 0.9830281145215373,
"grad_norm": 0.3013817071914673,
"learning_rate": 8.606793658323398e-08,
"loss": 2.1204,
"step": 2382
},
{
"epoch": 0.9838534949703379,
"grad_norm": 0.2919696569442749,
"learning_rate": 7.824985079839531e-08,
"loss": 2.137,
"step": 2384
},
{
"epoch": 0.9846788754191385,
"grad_norm": 0.2889643907546997,
"learning_rate": 7.08036756771946e-08,
"loss": 2.0994,
"step": 2386
},
{
"epoch": 0.9855042558679391,
"grad_norm": 0.29254478216171265,
"learning_rate": 6.372946669269131e-08,
"loss": 2.1083,
"step": 2388
},
{
"epoch": 0.9863296363167398,
"grad_norm": 0.3087567389011383,
"learning_rate": 5.702727654682272e-08,
"loss": 2.1284,
"step": 2390
},
{
"epoch": 0.9871550167655404,
"grad_norm": 0.29949310421943665,
"learning_rate": 5.069715517007079e-08,
"loss": 2.1422,
"step": 2392
},
{
"epoch": 0.9879803972143409,
"grad_norm": 0.3776766061782837,
"learning_rate": 4.473914972101256e-08,
"loss": 2.1214,
"step": 2394
},
{
"epoch": 0.9888057776631416,
"grad_norm": 0.29243144392967224,
"learning_rate": 3.9153304586042605e-08,
"loss": 2.1213,
"step": 2396
},
{
"epoch": 0.9896311581119422,
"grad_norm": 0.2868538498878479,
"learning_rate": 3.393966137898441e-08,
"loss": 2.1366,
"step": 2398
},
{
"epoch": 0.9904565385607429,
"grad_norm": 0.3015158176422119,
"learning_rate": 2.9098258940818414e-08,
"loss": 2.1389,
"step": 2400
},
{
"epoch": 0.9912819190095434,
"grad_norm": 0.28750908374786377,
"learning_rate": 2.4629133339371113e-08,
"loss": 2.1257,
"step": 2402
},
{
"epoch": 0.9921072994583441,
"grad_norm": 0.29580312967300415,
"learning_rate": 2.0532317869059735e-08,
"loss": 2.1176,
"step": 2404
},
{
"epoch": 0.9929326799071447,
"grad_norm": 0.29322829842567444,
"learning_rate": 1.6807843050636874e-08,
"loss": 2.1088,
"step": 2406
},
{
"epoch": 0.9937580603559453,
"grad_norm": 0.2918194830417633,
"learning_rate": 1.345573663096289e-08,
"loss": 2.1349,
"step": 2408
},
{
"epoch": 0.9945834408047459,
"grad_norm": 0.28616565465927124,
"learning_rate": 1.0476023582806083e-08,
"loss": 2.098,
"step": 2410
},
{
"epoch": 0.9954088212535466,
"grad_norm": 0.29948458075523376,
"learning_rate": 7.868726104642844e-09,
"loss": 2.1149,
"step": 2412
},
{
"epoch": 0.9962342017023472,
"grad_norm": 0.29504823684692383,
"learning_rate": 5.633863620507773e-09,
"loss": 2.122,
"step": 2414
},
{
"epoch": 0.9970595821511478,
"grad_norm": 0.29400694370269775,
"learning_rate": 3.7714527798438095e-09,
"loss": 2.1245,
"step": 2416
},
{
"epoch": 0.9978849625999484,
"grad_norm": 0.2938084602355957,
"learning_rate": 2.2815074573745434e-09,
"loss": 2.1174,
"step": 2418
},
{
"epoch": 0.9987103430487491,
"grad_norm": 0.2996973991394043,
"learning_rate": 1.164038752998753e-09,
"loss": 2.1225,
"step": 2420
},
{
"epoch": 0.9995357234975496,
"grad_norm": 0.3032398223876953,
"learning_rate": 4.1905499171268626e-10,
"loss": 2.1085,
"step": 2422
},
{
"epoch": 1.0,
"grad_norm": 0.8356114625930786,
"learning_rate": 4.6561723560101245e-11,
"loss": 2.1522,
"step": 2424
}
],
"logging_steps": 2,
"max_steps": 2424,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 300,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.7280755528491336e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}