{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 48.60254041857085,
      "learning_rate": 5.263157894736843e-07,
      "loss": 1.3605,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 35.89314672664364,
      "learning_rate": 1.0526315789473685e-06,
      "loss": 1.2401,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 60.7990081247422,
      "learning_rate": 1.5789473684210526e-06,
      "loss": 1.1649,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 36.51376184380329,
      "learning_rate": 2.105263157894737e-06,
      "loss": 1.2448,
      "step": 4
    },
    {
      "epoch": 0.0,
      "grad_norm": 36.98096547145381,
      "learning_rate": 2.631578947368421e-06,
      "loss": 1.3259,
      "step": 5
    },
    {
      "epoch": 0.0,
      "grad_norm": 19.840368403961094,
      "learning_rate": 3.157894736842105e-06,
      "loss": 1.1235,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 18.991812240245903,
      "learning_rate": 3.6842105263157896e-06,
      "loss": 1.0488,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.112864562404495,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.0145,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 11.905199352452714,
      "learning_rate": 4.736842105263158e-06,
      "loss": 1.0528,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 7.258681115510464,
      "learning_rate": 5.263157894736842e-06,
      "loss": 0.8344,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 6.468068271446631,
      "learning_rate": 5.789473684210527e-06,
      "loss": 0.8258,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 7.2073891356799695,
      "learning_rate": 6.31578947368421e-06,
      "loss": 0.7777,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.42052537085184,
      "learning_rate": 6.842105263157896e-06,
      "loss": 0.7737,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 6.207179625141898,
      "learning_rate": 7.368421052631579e-06,
      "loss": 0.7968,
      "step": 14
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.840526941421514,
      "learning_rate": 7.894736842105265e-06,
      "loss": 0.6777,
      "step": 15
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.6686511595911,
      "learning_rate": 8.421052631578948e-06,
      "loss": 0.8208,
      "step": 16
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.46146169529698,
      "learning_rate": 8.947368421052632e-06,
      "loss": 0.7916,
      "step": 17
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.3633571859308296,
      "learning_rate": 9.473684210526315e-06,
      "loss": 0.5994,
      "step": 18
    },
    {
      "epoch": 0.02,
      "grad_norm": 5.7975096177082355,
      "learning_rate": 1e-05,
      "loss": 0.7091,
      "step": 19
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.848702644768635,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.6414,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.520282197928678,
      "learning_rate": 1.105263157894737e-05,
      "loss": 0.6885,
      "step": 21
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.912832317546539,
      "learning_rate": 1.1578947368421053e-05,
      "loss": 0.5096,
      "step": 22
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.3336358480278765,
      "learning_rate": 1.2105263157894737e-05,
      "loss": 0.4512,
      "step": 23
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.847456020229534,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.4546,
      "step": 24
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.912624406249825,
      "learning_rate": 1.3157894736842108e-05,
      "loss": 0.5482,
      "step": 25
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.5759205643404113,
      "learning_rate": 1.3684210526315791e-05,
      "loss": 0.4693,
      "step": 26
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.3883094275895367,
      "learning_rate": 1.4210526315789475e-05,
      "loss": 0.51,
      "step": 27
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.7248048455794103,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 0.5592,
      "step": 28
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.8493552123337693,
      "learning_rate": 1.5263157894736846e-05,
      "loss": 0.4027,
      "step": 29
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.8508740199643166,
      "learning_rate": 1.578947368421053e-05,
      "loss": 0.4436,
      "step": 30
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.45543671036673,
      "learning_rate": 1.6315789473684213e-05,
      "loss": 0.464,
      "step": 31
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.294884635896111,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.3796,
      "step": 32
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.623917186038261,
      "learning_rate": 1.736842105263158e-05,
      "loss": 0.572,
      "step": 33
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.0555190621524035,
      "learning_rate": 1.7894736842105264e-05,
      "loss": 0.463,
      "step": 34
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.134418141871718,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 0.4648,
      "step": 35
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.0300812406530055,
      "learning_rate": 1.894736842105263e-05,
      "loss": 0.4268,
      "step": 36
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.9656208592680042,
      "learning_rate": 1.9473684210526318e-05,
      "loss": 0.4076,
      "step": 37
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.297106308343148,
      "learning_rate": 2e-05,
      "loss": 0.392,
      "step": 38
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.278775729812893,
      "learning_rate": 1.9999966405802828e-05,
      "loss": 0.4489,
      "step": 39
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.6821653254673077,
      "learning_rate": 1.9999865623437014e-05,
      "loss": 0.4527,
      "step": 40
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.1520894418798817,
      "learning_rate": 1.9999697653579705e-05,
      "loss": 0.3535,
      "step": 41
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.7413611366019266,
      "learning_rate": 1.9999462497359468e-05,
      "loss": 0.4927,
      "step": 42
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.9573068516195606,
      "learning_rate": 1.999916015635627e-05,
      "loss": 0.3845,
      "step": 43
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.135750361580708,
      "learning_rate": 1.9998790632601496e-05,
      "loss": 0.4078,
      "step": 44
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.042182784623944,
      "learning_rate": 1.9998353928577917e-05,
      "loss": 0.3984,
      "step": 45
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.8618916695510674,
      "learning_rate": 1.999785004721968e-05,
      "loss": 0.4136,
      "step": 46
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.6163481326606473,
      "learning_rate": 1.999727899191228e-05,
      "loss": 0.4976,
      "step": 47
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.466867974348983,
      "learning_rate": 1.9996640766492542e-05,
      "loss": 0.3409,
      "step": 48
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.1921413852007063,
      "learning_rate": 1.9995935375248608e-05,
      "loss": 0.4116,
      "step": 49
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.6860666687778885,
      "learning_rate": 1.999516282291988e-05,
      "loss": 0.3526,
      "step": 50
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.970761788613036,
      "learning_rate": 1.999432311469702e-05,
      "loss": 0.39,
      "step": 51
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.148790374736204,
      "learning_rate": 1.9993416256221894e-05,
      "loss": 0.477,
      "step": 52
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.8495854280786928,
      "learning_rate": 1.9992442253587533e-05,
      "loss": 0.4467,
      "step": 53
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.6437700924653527,
      "learning_rate": 1.9991401113338103e-05,
      "loss": 0.3822,
      "step": 54
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.13624492498233,
      "learning_rate": 1.9990292842468868e-05,
      "loss": 0.4819,
      "step": 55
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.6929943628367687,
      "learning_rate": 1.998911744842611e-05,
      "loss": 0.3502,
      "step": 56
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.7976347652268854,
      "learning_rate": 1.998787493910712e-05,
      "loss": 0.4113,
      "step": 57
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.585758230683453,
      "learning_rate": 1.9986565322860117e-05,
      "loss": 0.4017,
      "step": 58
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.2917334411264303,
      "learning_rate": 1.99851886084842e-05,
      "loss": 0.3634,
      "step": 59
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.6520998947655685,
      "learning_rate": 1.9983744805229296e-05,
      "loss": 0.3026,
      "step": 60
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.468979558179385,
      "learning_rate": 1.9982233922796087e-05,
      "loss": 0.3603,
      "step": 61
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.8251450554893736,
      "learning_rate": 1.9980655971335944e-05,
      "loss": 0.2928,
      "step": 62
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.4576190213905567,
      "learning_rate": 1.9979010961450876e-05,
      "loss": 0.3272,
      "step": 63
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.449635716809827,
      "learning_rate": 1.9977298904193438e-05,
      "loss": 0.415,
      "step": 64
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.2168527165775895,
      "learning_rate": 1.9975519811066665e-05,
      "loss": 0.5168,
      "step": 65
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.010946265742081,
      "learning_rate": 1.9973673694024002e-05,
      "loss": 0.3194,
      "step": 66
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.3855860681847103,
      "learning_rate": 1.997176056546921e-05,
      "loss": 0.4533,
      "step": 67
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.472854386346785,
      "learning_rate": 1.9969780438256295e-05,
      "loss": 0.3352,
      "step": 68
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.2017570680574043,
      "learning_rate": 1.9967733325689412e-05,
      "loss": 0.3116,
      "step": 69
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.801958352314478,
      "learning_rate": 1.996561924152278e-05,
      "loss": 0.3785,
      "step": 70
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.7194369108730396,
      "learning_rate": 1.99634381999606e-05,
      "loss": 0.4828,
      "step": 71
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.394819488279037,
      "learning_rate": 1.996119021565693e-05,
      "loss": 0.3358,
      "step": 72
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.3093567895664115,
      "learning_rate": 1.9958875303715618e-05,
      "loss": 0.2748,
      "step": 73
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.259362170148189,
      "learning_rate": 1.995649347969019e-05,
      "loss": 0.3039,
      "step": 74
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.975563279323876,
      "learning_rate": 1.995404475958373e-05,
      "loss": 0.3976,
      "step": 75
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.419784039357126,
      "learning_rate": 1.9951529159848805e-05,
      "loss": 0.3348,
      "step": 76
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.281341928512009,
      "learning_rate": 1.9948946697387322e-05,
      "loss": 0.3401,
      "step": 77
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.1087881889228988,
      "learning_rate": 1.9946297389550433e-05,
      "loss": 0.263,
      "step": 78
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.639198057741474,
      "learning_rate": 1.994358125413841e-05,
      "loss": 0.3027,
      "step": 79
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.6518187164037905,
      "learning_rate": 1.9940798309400527e-05,
      "loss": 0.3789,
      "step": 80
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.3877370659912067,
      "learning_rate": 1.993794857403495e-05,
      "loss": 0.4408,
      "step": 81
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.995385567063843,
      "learning_rate": 1.993503206718859e-05,
      "loss": 0.3925,
      "step": 82
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2232618125681203,
      "learning_rate": 1.993204880845699e-05,
      "loss": 0.282,
      "step": 83
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.8970214260593643,
      "learning_rate": 1.9928998817884185e-05,
      "loss": 0.3268,
      "step": 84
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.3977831596079295,
      "learning_rate": 1.992588211596257e-05,
      "loss": 0.3398,
      "step": 85
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.6505205476488403,
      "learning_rate": 1.992269872363277e-05,
      "loss": 0.2896,
      "step": 86
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.673536154000272,
      "learning_rate": 1.991944866228348e-05,
      "loss": 0.3762,
      "step": 87
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.5495620413841635,
      "learning_rate": 1.9916131953751342e-05,
      "loss": 0.2878,
      "step": 88
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.702863582845579,
      "learning_rate": 1.9912748620320796e-05,
      "loss": 0.3463,
      "step": 89
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.8321910963387813,
      "learning_rate": 1.9909298684723905e-05,
      "loss": 0.3361,
      "step": 90
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.884365131994556,
      "learning_rate": 1.990578217014024e-05,
      "loss": 0.3617,
      "step": 91
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.4865597940498807,
      "learning_rate": 1.9902199100196697e-05,
      "loss": 0.3244,
      "step": 92
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.458146087533274,
      "learning_rate": 1.9898549498967343e-05,
      "loss": 0.3171,
      "step": 93
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.9371733606323587,
      "learning_rate": 1.9894833390973266e-05,
      "loss": 0.3643,
      "step": 94
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.8304095813590995,
      "learning_rate": 1.98910508011824e-05,
      "loss": 0.3632,
      "step": 95
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.218718793020854,
      "learning_rate": 1.9887201755009358e-05,
      "loss": 0.3553,
      "step": 96
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.020368297448905,
      "learning_rate": 1.9883286278315262e-05,
      "loss": 0.3934,
      "step": 97
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.39142603903749,
      "learning_rate": 1.987930439740757e-05,
      "loss": 0.2706,
      "step": 98
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.174184873263371,
      "learning_rate": 1.98752561390399e-05,
      "loss": 0.2752,
      "step": 99
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.0120477247756527,
      "learning_rate": 1.9871141530411854e-05,
      "loss": 0.3533,
      "step": 100
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.580695227129875,
      "learning_rate": 1.9866960599168825e-05,
      "loss": 0.3744,
      "step": 101
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.8478732301676497,
      "learning_rate": 1.986271337340182e-05,
      "loss": 0.4049,
      "step": 102
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.9142355852738673,
      "learning_rate": 1.985839988164726e-05,
      "loss": 0.2745,
      "step": 103
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.278887974213085,
      "learning_rate": 1.9854020152886816e-05,
      "loss": 0.2881,
      "step": 104
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.7272146416314187,
      "learning_rate": 1.984957421654717e-05,
      "loss": 0.3944,
      "step": 105
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.3803352703736316,
      "learning_rate": 1.984506210249986e-05,
      "loss": 0.262,
      "step": 106
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.035594057707899,
      "learning_rate": 1.9840483841061058e-05,
      "loss": 0.3811,
      "step": 107
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0457289224654094,
      "learning_rate": 1.983583946299136e-05,
      "loss": 0.3255,
      "step": 108
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.3985562690903723,
      "learning_rate": 1.9831128999495605e-05,
      "loss": 0.2482,
      "step": 109
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0610268888156695,
      "learning_rate": 1.982635248222264e-05,
      "loss": 0.3167,
      "step": 110
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.4914973894626784,
      "learning_rate": 1.982150994326511e-05,
      "loss": 0.3469,
      "step": 111
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.049519586667049,
      "learning_rate": 1.9816601415159266e-05,
      "loss": 0.3055,
      "step": 112
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.8711780595627188,
      "learning_rate": 1.981162693088471e-05,
      "loss": 0.4214,
      "step": 113
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.6864434110364426,
      "learning_rate": 1.9806586523864212e-05,
      "loss": 0.2661,
      "step": 114
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.9783615289759517,
      "learning_rate": 1.9801480227963453e-05,
      "loss": 0.3394,
      "step": 115
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.033682841325606,
      "learning_rate": 1.9796308077490817e-05,
      "loss": 0.2969,
      "step": 116
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.54876929709659,
      "learning_rate": 1.9791070107197155e-05,
      "loss": 0.367,
      "step": 117
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.656084912397775,
      "learning_rate": 1.978576635227554e-05,
      "loss": 0.4312,
      "step": 118
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.393147407538132,
      "learning_rate": 1.978039684836106e-05,
      "loss": 0.3516,
      "step": 119
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.2110241591122026,
      "learning_rate": 1.9774961631530543e-05,
      "loss": 0.3519,
      "step": 120
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.5954241037962897,
      "learning_rate": 1.976946073830234e-05,
      "loss": 0.4076,
      "step": 121
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.5229827155020326,
      "learning_rate": 1.976389420563607e-05,
      "loss": 0.3995,
      "step": 122
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.514615433700628,
      "learning_rate": 1.9758262070932375e-05,
      "loss": 0.3715,
      "step": 123
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.06027848990462,
      "learning_rate": 1.9752564372032655e-05,
      "loss": 0.3074,
      "step": 124
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.0739204538312466,
      "learning_rate": 1.9746801147218844e-05,
      "loss": 0.3097,
      "step": 125
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.97836933818748,
      "learning_rate": 1.9740972435213114e-05,
      "loss": 0.4227,
      "step": 126
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.9460161454046876,
      "learning_rate": 1.9735078275177654e-05,
      "loss": 0.2968,
      "step": 127
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.1620498463762603,
      "learning_rate": 1.9729118706714377e-05,
      "loss": 0.3533,
      "step": 128
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.1949576922394582,
      "learning_rate": 1.9723093769864663e-05,
      "loss": 0.3313,
      "step": 129
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.3009910262849913,
      "learning_rate": 1.9717003505109097e-05,
      "loss": 0.2273,
      "step": 130
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.405991987612652,
      "learning_rate": 1.9710847953367193e-05,
      "loss": 0.2974,
      "step": 131
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.000338024899964,
      "learning_rate": 1.970462715599711e-05,
      "loss": 0.3373,
      "step": 132
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.8176073475207875,
      "learning_rate": 1.969834115479539e-05,
      "loss": 0.3848,
      "step": 133
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.784763316952781,
      "learning_rate": 1.9691989991996663e-05,
      "loss": 0.3022,
      "step": 134
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.1290677125865116,
      "learning_rate": 1.9685573710273375e-05,
      "loss": 0.2818,
      "step": 135
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.7967830185590423,
      "learning_rate": 1.967909235273549e-05,
      "loss": 0.3402,
      "step": 136
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.5376148858644916,
      "learning_rate": 1.9672545962930214e-05,
      "loss": 0.4607,
      "step": 137
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.4447490847016016,
      "learning_rate": 1.966593458484168e-05,
      "loss": 0.3072,
      "step": 138
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.510203272599188,
      "learning_rate": 1.9659258262890683e-05,
      "loss": 0.3246,
      "step": 139
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.6601770534439706,
      "learning_rate": 1.9652517041934357e-05,
      "loss": 0.3447,
      "step": 140
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.4313564923861795,
      "learning_rate": 1.9645710967265884e-05,
      "loss": 0.2969,
      "step": 141
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.301204187778797,
      "learning_rate": 1.9638840084614182e-05,
      "loss": 0.3449,
      "step": 142
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.4324891519231553,
      "learning_rate": 1.9631904440143614e-05,
      "loss": 0.3649,
      "step": 143
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.375771375825056,
      "learning_rate": 1.9624904080453656e-05,
      "loss": 0.2945,
      "step": 144
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.2051469351019053,
      "learning_rate": 1.9617839052578605e-05,
      "loss": 0.2423,
      "step": 145
    },
    {
      "epoch": 0.12,
      "grad_norm": 3.2775254838339505,
      "learning_rate": 1.9610709403987248e-05,
      "loss": 0.3367,
      "step": 146
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.5865833120563506,
      "learning_rate": 1.960351518258255e-05,
      "loss": 0.2663,
      "step": 147
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.7811326564649548,
      "learning_rate": 1.9596256436701324e-05,
      "loss": 0.4021,
      "step": 148
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9647404834701583,
      "learning_rate": 1.9588933215113926e-05,
      "loss": 0.2258,
      "step": 149
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.6195686997710723,
      "learning_rate": 1.95815455670239e-05,
      "loss": 0.3468,
      "step": 150
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.7408300850319733,
      "learning_rate": 1.9574093542067673e-05,
      "loss": 0.2905,
      "step": 151
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.4846198337244676,
      "learning_rate": 1.95665771903142e-05,
      "loss": 0.3065,
      "step": 152
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.201402370978399,
      "learning_rate": 1.955899656226464e-05,
      "loss": 0.2765,
      "step": 153
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.2048417537082274,
      "learning_rate": 1.955135170885202e-05,
      "loss": 0.3261,
      "step": 154
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8685661741745674,
      "learning_rate": 1.954364268144088e-05,
      "loss": 0.2897,
      "step": 155
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.038261520725671,
      "learning_rate": 1.9535869531826938e-05,
      "loss": 0.2876,
      "step": 156
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.179433620693176,
      "learning_rate": 1.952803231223674e-05,
      "loss": 0.2753,
      "step": 157
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.046890347395123,
      "learning_rate": 1.95201310753273e-05,
      "loss": 0.3993,
      "step": 158
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.8111544068550876,
      "learning_rate": 1.9512165874185768e-05,
      "loss": 0.284,
      "step": 159
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.4995520607151525,
      "learning_rate": 1.9504136762329046e-05,
      "loss": 0.3303,
      "step": 160
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.228827499108435,
      "learning_rate": 1.949604379370345e-05,
      "loss": 0.3232,
      "step": 161
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.31390553334697,
      "learning_rate": 1.9487887022684336e-05,
      "loss": 0.3011,
      "step": 162
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.0420619190076525,
      "learning_rate": 1.9479666504075737e-05,
      "loss": 0.3563,
      "step": 163
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.9863270736505325,
      "learning_rate": 1.9471382293110004e-05,
      "loss": 0.3037,
      "step": 164
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.090603011792817,
      "learning_rate": 1.946303444544741e-05,
      "loss": 0.3123,
      "step": 165
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.1867676870087167,
      "learning_rate": 1.9454623017175814e-05,
      "loss": 0.2848,
      "step": 166
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.2375862840491663,
      "learning_rate": 1.9446148064810243e-05,
      "loss": 0.2584,
      "step": 167
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.073521758948637,
      "learning_rate": 1.943760964529255e-05,
      "loss": 0.3982,
      "step": 168
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.277857419952404,
      "learning_rate": 1.9429007815990995e-05,
      "loss": 0.2817,
      "step": 169
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.860132680882802,
      "learning_rate": 1.9420342634699893e-05,
      "loss": 0.3789,
      "step": 170
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.738288137502009,
      "learning_rate": 1.9411614159639206e-05,
      "loss": 0.235,
      "step": 171
    },
    {
      "epoch": 0.14,
      "grad_norm": 3.0847987605412643,
      "learning_rate": 1.9402822449454154e-05,
      "loss": 0.3787,
      "step": 172
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.678241605511051,
      "learning_rate": 1.9393967563214833e-05,
      "loss": 0.3565,
      "step": 173
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.3794557534236187,
      "learning_rate": 1.9385049560415794e-05,
      "loss": 0.3767,
      "step": 174
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.18026602926335,
      "learning_rate": 1.937606850097567e-05,
      "loss": 0.3241,
      "step": 175
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.839031218674835,
      "learning_rate": 1.936702444523675e-05,
      "loss": 0.3692,
      "step": 176
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.0423888923774403,
      "learning_rate": 1.9357917453964604e-05,
      "loss": 0.2939,
      "step": 177
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.0630953241704497,
      "learning_rate": 1.9348747588347637e-05,
      "loss": 0.3033,
      "step": 178
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.003742056937922,
      "learning_rate": 1.9339514909996706e-05,
      "loss": 0.3023,
      "step": 179
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.183058419141199,
      "learning_rate": 1.9330219480944693e-05,
      "loss": 0.3104,
      "step": 180
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.0962519403436466,
      "learning_rate": 1.9320861363646094e-05,
      "loss": 0.3549,
      "step": 181
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.5773134400966646,
      "learning_rate": 1.9311440620976597e-05,
      "loss": 0.4265,
      "step": 182
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.7220486958043284,
      "learning_rate": 1.9301957316232658e-05,
      "loss": 0.2717,
      "step": 183
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.9094558569410807,
      "learning_rate": 1.929241151313108e-05,
      "loss": 0.3104,
      "step": 184
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.9500751357137525,
      "learning_rate": 1.928280327580858e-05,
      "loss": 0.3202,
      "step": 185
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.4975322789932215,
      "learning_rate": 1.9273132668821363e-05,
      "loss": 0.3126,
      "step": 186
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.917476755564393,
      "learning_rate": 1.9263399757144686e-05,
      "loss": 0.2528,
      "step": 187
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.143478191577973,
      "learning_rate": 1.925360460617242e-05,
      "loss": 0.28,
      "step": 188
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.5405298702531005,
      "learning_rate": 1.9243747281716604e-05,
      "loss": 0.378,
      "step": 189
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.0155678579375906,
      "learning_rate": 1.9233827850007028e-05,
      "loss": 0.2901,
      "step": 190
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.3060470653500555,
      "learning_rate": 1.9223846377690754e-05,
      "loss": 0.3193,
      "step": 191
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.8900227849807494,
      "learning_rate": 1.9213802931831697e-05,
      "loss": 0.3628,
      "step": 192
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.9354932437719492,
      "learning_rate": 1.9203697579910156e-05,
      "loss": 0.2377,
      "step": 193
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.395154833334844,
      "learning_rate": 1.9193530389822364e-05,
      "loss": 0.2803,
      "step": 194
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.1297129261093817,
      "learning_rate": 1.9183301429880045e-05,
      "loss": 0.3457,
      "step": 195
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.9248541780760582,
      "learning_rate": 1.9173010768809934e-05,
      "loss": 0.283,
      "step": 196
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.039424995239351,
      "learning_rate": 1.9162658475753328e-05,
      "loss": 0.2603,
      "step": 197
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.4853584640009045,
      "learning_rate": 1.915224462026563e-05,
      "loss": 0.394,
      "step": 198
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.046950886966381,
      "learning_rate": 1.9141769272315857e-05,
      "loss": 0.3757,
      "step": 199
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.036507295142277,
      "learning_rate": 1.913123250228619e-05,
      "loss": 0.2404,
      "step": 200
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.367967992606462,
      "learning_rate": 1.9120634380971497e-05,
      "loss": 0.399,
      "step": 201
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.3778783287603904,
      "learning_rate": 1.9109974979578852e-05,
      "loss": 0.2611,
      "step": 202
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.7971696611984327,
      "learning_rate": 1.9099254369727062e-05,
      "loss": 0.415,
      "step": 203
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.3780933781343143,
      "learning_rate": 1.9088472623446182e-05,
      "loss": 0.3033,
      "step": 204
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.213758758560456,
      "learning_rate": 1.9077629813177038e-05,
      "loss": 0.3257,
      "step": 205
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.7096356571332894,
      "learning_rate": 1.9066726011770725e-05,
      "loss": 0.2746,
      "step": 206
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.143024684014943,
      "learning_rate": 1.9055761292488142e-05,
      "loss": 0.2936,
      "step": 207
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.174410493141588,
      "learning_rate": 1.9044735728999472e-05,
      "loss": 0.3359,
      "step": 208
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.8253319924788767,
      "learning_rate": 1.90336493953837e-05,
      "loss": 0.3337,
      "step": 209
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.7564814607045327,
      "learning_rate": 1.9022502366128136e-05,
      "loss": 0.2585,
      "step": 210
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.8003063075201506,
      "learning_rate": 1.9011294716127867e-05,
      "loss": 0.2335,
      "step": 211
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.0676081597671923,
      "learning_rate": 1.90000265206853e-05,
      "loss": 0.3342,
      "step": 212
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.9186653826713473,
      "learning_rate": 1.898869785550963e-05,
      "loss": 0.2994,
      "step": 213
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.8847621771847654,
      "learning_rate": 1.897730879671634e-05,
      "loss": 0.2403,
      "step": 214
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.6722290718517414,
      "learning_rate": 1.8965859420826685e-05,
      "loss": 0.2601,
      "step": 215
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.7386298568668535,
      "learning_rate": 1.8954349804767185e-05,
      "loss": 0.3763,
      "step": 216
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.5273232386683633,
      "learning_rate": 1.8942780025869097e-05,
      "loss": 0.3627,
      "step": 217
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.0986675503249828,
      "learning_rate": 1.8931150161867917e-05,
      "loss": 0.2802,
      "step": 218
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.9864578701687525,
      "learning_rate": 1.891946029090283e-05,
      "loss": 0.3077,
      "step": 219
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.3538318431562364,
      "learning_rate": 1.8907710491516197e-05,
      "loss": 0.3164,
      "step": 220
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.3833491446751607,
      "learning_rate": 1.889590084265304e-05,
      "loss": 0.309,
      "step": 221
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.1589341969698133,
      "learning_rate": 1.8884031423660492e-05,
      "loss": 0.2376,
      "step": 222
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.710090625928746,
      "learning_rate": 1.887210231428727e-05,
      "loss": 0.3396,
      "step": 223
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.7501332775825373,
      "learning_rate": 1.8860113594683148e-05,
      "loss": 0.2592,
      "step": 224
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.6999761997206453,
      "learning_rate": 1.884806534539841e-05,
      "loss": 0.3225,
      "step": 225
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.3665867811833197,
      "learning_rate": 1.8835957647383304e-05,
      "loss": 0.3459,
      "step": 226
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.398153025260128,
      "learning_rate": 1.882379058198751e-05,
      "loss": 0.2798,
      "step": 227
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.401401228305016,
      "learning_rate": 1.8811564230959585e-05,
      "loss": 0.4071,
      "step": 228
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.5093960148155054,
      "learning_rate": 1.8799278676446425e-05,
      "loss": 0.3185,
      "step": 229
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.6890640048458447,
      "learning_rate": 1.878693400099269e-05,
      "loss": 0.321,
      "step": 230
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.0892133868732587,
      "learning_rate": 1.877453028754028e-05,
      "loss": 0.3648,
      "step": 231
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.365123613242651,
      "learning_rate": 1.8762067619427745e-05,
      "loss": 0.3318,
      "step": 232
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.2363400437558583,
      "learning_rate": 1.874954608038976e-05,
      "loss": 0.3309,
      "step": 233
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.1067878905016864,
      "learning_rate": 1.8736965754556527e-05,
      "loss": 0.3518,
      "step": 234
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.939495251491112,
      "learning_rate": 1.8724326726453244e-05,
      "loss": 0.3071,
      "step": 235
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.428885090431243,
      "learning_rate": 1.8711629080999506e-05,
      "loss": 0.3104,
      "step": 236
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.711241537545367,
      "learning_rate": 1.8698872903508755e-05,
      "loss": 0.3579,
      "step": 237
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.120518574195615,
      "learning_rate": 1.86860582796877e-05,
      "loss": 0.2646,
      "step": 238
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.228265996988712,
      "learning_rate": 1.867318529563574e-05,
      "loss": 0.3185,
      "step": 239
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.2181290188623852,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.312,
      "step": 240
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.19939228581308,
      "learning_rate": 1.8647264593196687e-05,
      "loss": 0.2936,
      "step": 241
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.450620349638576,
      "learning_rate": 1.8634217048966638e-05,
      "loss": 0.3779,
      "step": 242
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.2394391810144008,
      "learning_rate": 1.8621111492818585e-05,
      "loss": 0.3442,
      "step": 243
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.9643676315139884,
      "learning_rate": 1.8607948012806664e-05,
      "loss": 0.2764,
      "step": 244
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.636425096704688,
      "learning_rate": 1.8594726697374173e-05,
      "loss": 0.2618,
      "step": 245
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.9293405761057236,
      "learning_rate": 1.858144763535302e-05,
      "loss": 0.2979,
      "step": 246
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.1636254484392103,
      "learning_rate": 1.8568110915963082e-05,
      "loss": 0.3102,
      "step": 247
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.877917319125591,
      "learning_rate": 1.855471662881164e-05,
      "loss": 0.3791,
      "step": 248
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.5670099189407107,
      "learning_rate": 1.8541264863892755e-05,
      "loss": 0.1924,
      "step": 249
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.037307667342044,
      "learning_rate": 1.852775571158668e-05,
      "loss": 0.2786,
      "step": 250
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.7947661624098736,
      "learning_rate": 1.8514189262659235e-05,
      "loss": 0.2738,
      "step": 251
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.3960834409580545,
      "learning_rate": 1.8500565608261215e-05,
      "loss": 0.2794,
      "step": 252
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.405818356051298,
      "learning_rate": 1.8486884839927768e-05,
      "loss": 0.3228,
      "step": 253
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.407679300709315,
      "learning_rate": 1.8473147049577777e-05,
      "loss": 0.3923,
      "step": 254
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.7377355703462052,
      "learning_rate": 1.845935232951325e-05,
      "loss": 0.2699,
      "step": 255
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.2265681252326313,
      "learning_rate": 1.8445500772418697e-05,
      "loss": 0.2771,
      "step": 256
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.771204006061874,
      "learning_rate": 1.8431592471360506e-05,
      "loss": 0.314,
      "step": 257
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.800034549232218,
      "learning_rate": 1.8417627519786317e-05,
      "loss": 0.2969,
      "step": 258
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.119584706856121,
      "learning_rate": 1.84036060115244e-05,
      "loss": 0.3011,
      "step": 259
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.5245990027253953,
      "learning_rate": 1.8389528040783014e-05,
      "loss": 0.2768,
      "step": 260
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.5058661829859656,
      "learning_rate": 1.837539370214979e-05,
      "loss": 0.3769,
      "step": 261
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.0381135747014842,
      "learning_rate": 1.836120309059107e-05,
      "loss": 0.272,
      "step": 262
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.477627565212375,
      "learning_rate": 1.8346956301451303e-05,
      "loss": 0.351,
      "step": 263
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.2918977462261503,
      "learning_rate": 1.8332653430452375e-05,
      "loss": 0.3292,
      "step": 264
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.9162246172960327,
      "learning_rate": 1.8318294573692987e-05,
      "loss": 0.3139,
      "step": 265
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.008395659255134,
      "learning_rate": 1.8303879827647977e-05,
      "loss": 0.3573,
      "step": 266
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.4838485512972532,
      "learning_rate": 1.828940928916772e-05,
      "loss": 0.2945,
      "step": 267
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.640319813204331,
      "learning_rate": 1.8274883055477436e-05,
      "loss": 0.4545,
      "step": 268
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.768495647429907,
      "learning_rate": 1.826030122417656e-05,
      "loss": 0.3195,
      "step": 269
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.0356888861889466,
      "learning_rate": 1.8245663893238075e-05,
      "loss": 0.3294,
      "step": 270
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.227539950418251,
      "learning_rate": 1.8230971161007854e-05,
      "loss": 0.3633,
      "step": 271
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.8036437828957987,
      "learning_rate": 1.821622312620401e-05,
      "loss": 0.2425,
      "step": 272
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.39188092072465,
      "learning_rate": 1.8201419887916216e-05,
      "loss": 0.3828,
      "step": 273
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.0995389648627083,
      "learning_rate": 1.8186561545605055e-05,
      "loss": 0.2721,
      "step": 274
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.8474331750365451,
      "learning_rate": 1.8171648199101347e-05,
      "loss": 0.3207,
      "step": 275
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.981814231675974,
      "learning_rate": 1.815667994860547e-05,
      "loss": 0.2448,
      "step": 276
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.9132982718549951,
      "learning_rate": 1.814165689468669e-05,
      "loss": 0.29,
      "step": 277
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.1800413072668343,
      "learning_rate": 1.8126579138282502e-05,
      "loss": 0.3054,
      "step": 278
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.561366521621535,
      "learning_rate": 1.811144678069793e-05,
      "loss": 0.3533,
      "step": 279
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.754016144834769,
      "learning_rate": 1.809625992360485e-05,
      "loss": 0.2538,
      "step": 280
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.162127975656259,
      "learning_rate": 1.8081018669041324e-05,
      "loss": 0.293,
      "step": 281
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.678553745454881,
      "learning_rate": 1.8065723119410885e-05,
      "loss": 0.2674,
      "step": 282
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.9784584585424474,
      "learning_rate": 1.8050373377481878e-05,
      "loss": 0.2634,
      "step": 283
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.289698822025373,
      "learning_rate": 1.803496954638676e-05,
      "loss": 0.3278,
      "step": 284
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.0546256448848754,
      "learning_rate": 1.801951172962139e-05,
      "loss": 0.3314,
      "step": 285
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.0189650291686867,
      "learning_rate": 1.8004000031044363e-05,
      "loss": 0.3137,
      "step": 286
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.2793742115240057,
      "learning_rate": 1.798843455487629e-05,
      "loss": 0.2916,
      "step": 287
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.978610211105925,
      "learning_rate": 1.7972815405699105e-05,
      "loss": 0.312,
      "step": 288
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.2311767161123073,
      "learning_rate": 1.7957142688455362e-05,
      "loss": 0.3389,
      "step": 289
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.05360168559885,
      "learning_rate": 1.7941416508447537e-05,
      "loss": 0.2494,
      "step": 290
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.80172085924464,
      "learning_rate": 1.7925636971337304e-05,
      "loss": 0.3029,
      "step": 291
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.327817348612163,
      "learning_rate": 1.7909804183144837e-05,
      "loss": 0.2801,
      "step": 292
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.1848347825813543,
      "learning_rate": 1.7893918250248106e-05,
      "loss": 0.2862,
      "step": 293
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.1760889724855557,
      "learning_rate": 1.7877979279382135e-05,
      "loss": 0.2603,
      "step": 294
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.131768994375875,
      "learning_rate": 1.7861987377638314e-05,
      "loss": 0.2858,
      "step": 295
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.229823163798362,
      "learning_rate": 1.784594265246366e-05,
      "loss": 0.3121,
      "step": 296
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.299352725908318,
      "learning_rate": 1.782984521166011e-05,
      "loss": 0.2889,
      "step": 297
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.5621113668957247,
      "learning_rate": 1.781369516338378e-05,
      "loss": 0.4031,
      "step": 298
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.7787192026448813,
      "learning_rate": 1.7797492616144256e-05,
      "loss": 0.3196,
      "step": 299
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.825677662281392,
      "learning_rate": 1.7781237678803845e-05,
      "loss": 0.2472,
      "step": 300
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.9138575999411622,
      "learning_rate": 1.7764930460576867e-05,
      "loss": 0.3133,
      "step": 301
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.424454787520677,
      "learning_rate": 1.77485710710289e-05,
      "loss": 0.4237,
      "step": 302
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.6596834658332436,
      "learning_rate": 1.7732159620076053e-05,
      "loss": 0.2615,
      "step": 303
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.915662757060755,
      "learning_rate": 1.7715696217984233e-05,
      "loss": 0.3257,
      "step": 304
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.8157063327790481,
      "learning_rate": 1.7699180975368397e-05,
      "loss": 0.3,
      "step": 305
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.1772564616176617,
      "learning_rate": 1.7682614003191807e-05,
      "loss": 0.3613,
      "step": 306
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.6345766099529573,
      "learning_rate": 1.7665995412765287e-05,
      "loss": 0.2321,
      "step": 307
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.6761780023043686,
      "learning_rate": 1.764932531574648e-05,
      "loss": 0.2916,
      "step": 308
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.477586537493892,
      "learning_rate": 1.7632603824139086e-05,
      "loss": 0.2836,
      "step": 309
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.2248085696051487,
      "learning_rate": 1.761583105029213e-05,
      "loss": 0.3312,
      "step": 310
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.873254693799373,
      "learning_rate": 1.759900710689918e-05,
      "loss": 0.2996,
      "step": 311
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.6963403979706424,
      "learning_rate": 1.7582132106997615e-05,
      "loss": 0.3688,
      "step": 312
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.9275761025445475,
      "learning_rate": 1.7565206163967847e-05,
      "loss": 0.3042,
      "step": 313
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.0767014249772777,
      "learning_rate": 1.7548229391532572e-05,
      "loss": 0.3169,
      "step": 314
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.7919429635231559,
      "learning_rate": 1.7531201903755994e-05,
      "loss": 0.2714,
      "step": 315
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.2183254478234824,
      "learning_rate": 1.7514123815043073e-05,
      "loss": 0.3313,
      "step": 316
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.869049065360244,
      "learning_rate": 1.7496995240138745e-05,
      "loss": 0.2469,
      "step": 317
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.1285116739745717,
      "learning_rate": 1.747981629412715e-05,
      "loss": 0.2989,
      "step": 318
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.332735444074386,
      "learning_rate": 1.7462587092430877e-05,
      "loss": 0.291,
      "step": 319
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.007902555326692,
      "learning_rate": 1.7445307750810153e-05,
      "loss": 0.2338,
      "step": 320
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.074668198872314,
      "learning_rate": 1.742797838536211e-05,
      "loss": 0.3027,
      "step": 321
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.8245178053269033,
      "learning_rate": 1.741059911251997e-05,
      "loss": 0.2782,
      "step": 322
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.8670263646678231,
      "learning_rate": 1.7393170049052274e-05,
      "loss": 0.3038,
      "step": 323
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.4400014121299844,
      "learning_rate": 1.7375691312062102e-05,
      "loss": 0.3773,
      "step": 324
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.7893699834417385,
      "learning_rate": 1.7358163018986282e-05,
      "loss": 0.236,
      "step": 325
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.356871958108051,
      "learning_rate": 1.7340585287594605e-05,
      "loss": 0.4084,
      "step": 326
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.119181522421401,
      "learning_rate": 1.7322958235989015e-05,
      "loss": 0.3592,
      "step": 327
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.7688080795720216,
      "learning_rate": 1.730528198260285e-05,
      "loss": 0.2515,
      "step": 328
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.0648152043802397,
      "learning_rate": 1.728755664620002e-05,
      "loss": 0.3064,
      "step": 329
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.9486429621552654,
      "learning_rate": 1.7269782345874204e-05,
      "loss": 0.2793,
      "step": 330
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.770410943564192,
      "learning_rate": 1.7251959201048083e-05,
      "loss": 0.4832,
      "step": 331
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.416701783014655,
      "learning_rate": 1.72340873314725e-05,
      "loss": 0.4427,
      "step": 332
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.216087293349358,
      "learning_rate": 1.7216166857225674e-05,
      "loss": 0.2783,
      "step": 333
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.3919230833571863,
      "learning_rate": 1.7198197898712402e-05,
      "loss": 0.3604,
      "step": 334
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.854273317784981,
      "learning_rate": 1.718018057666323e-05,
      "loss": 0.2478,
      "step": 335
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.9702887673647873,
      "learning_rate": 1.7162115012133643e-05,
      "loss": 0.2942,
      "step": 336
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.9030191103194438,
      "learning_rate": 1.7144001326503276e-05,
      "loss": 0.2196,
      "step": 337
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.9079753005811861,
      "learning_rate": 1.7125839641475074e-05,
      "loss": 0.2952,
      "step": 338
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.918128245238814,
      "learning_rate": 1.7107630079074477e-05,
      "loss": 0.2631,
      "step": 339
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.5192828679265746,
      "learning_rate": 1.7089372761648617e-05,
      "loss": 0.3316,
      "step": 340
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.522800037570299,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.3521,
      "step": 341
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.038572595618781,
      "learning_rate": 1.7052715352713076e-05,
      "loss": 0.3539,
      "step": 342
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.622293826263427,
      "learning_rate": 1.7034315507498637e-05,
      "loss": 0.448,
      "step": 343
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.090457624775084,
      "learning_rate": 1.7015868399847768e-05,
      "loss": 0.305,
      "step": 344
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.5963733633562818,
      "learning_rate": 1.6997374153703625e-05,
      "loss": 0.3379,
      "step": 345
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.917460984311743,
      "learning_rate": 1.6978832893326074e-05,
      "loss": 0.2737,
      "step": 346
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.9740793094943232,
      "learning_rate": 1.6960244743290867e-05,
      "loss": 0.3296,
      "step": 347
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.569033540648746,
      "learning_rate": 1.6941609828488806e-05,
      "loss": 0.3888,
      "step": 348
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.4877471722417306,
      "learning_rate": 1.6922928274124887e-05,
      "loss": 0.2176,
      "step": 349
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.313163996098816,
      "learning_rate": 1.690420020571747e-05,
      "loss": 0.3185,
      "step": 350
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.1610104071558283,
      "learning_rate": 1.6885425749097444e-05,
      "loss": 0.347,
      "step": 351
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.926355155438669,
      "learning_rate": 1.686660503040737e-05,
      "loss": 0.3048,
      "step": 352
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.550898935585205,
      "learning_rate": 1.6847738176100632e-05,
      "loss": 0.2524,
      "step": 353
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.8727473492933593,
      "learning_rate": 1.6828825312940594e-05,
      "loss": 0.279,
      "step": 354
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.3169524927684164,
      "learning_rate": 1.6809866567999752e-05,
      "loss": 0.2292,
      "step": 355
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.9614958078211489,
      "learning_rate": 1.6790862068658863e-05,
      "loss": 0.3098,
      "step": 356
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.1570857664260474,
      "learning_rate": 1.677181194260611e-05,
      "loss": 0.2656,
      "step": 357
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.2008427558851613,
      "learning_rate": 1.675271631783623e-05,
      "loss": 0.3315,
      "step": 358
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.7402168646872844,
      "learning_rate": 1.673357532264966e-05,
      "loss": 0.255,
      "step": 359
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.052681701712784,
      "learning_rate": 1.671438908565167e-05,
      "loss": 0.3002,
      "step": 360
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.0854049905321985,
      "learning_rate": 1.6695157735751512e-05,
      "loss": 0.3146,
      "step": 361
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.556001474745676,
      "learning_rate": 1.667588140216154e-05,
      "loss": 0.281,
      "step": 362
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.328001289077992,
      "learning_rate": 1.665656021439633e-05,
      "loss": 0.3679,
      "step": 363
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.246735349527516,
      "learning_rate": 1.663719430227186e-05,
      "loss": 0.3611,
      "step": 364
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.302536600638992,
      "learning_rate": 1.6617783795904564e-05,
      "loss": 0.2772,
      "step": 365
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.7827203381412275,
      "learning_rate": 1.6598328825710536e-05,
      "loss": 0.2599,
      "step": 366
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.0327821167883195,
      "learning_rate": 1.6578829522404586e-05,
      "loss": 0.3104,
      "step": 367
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.3139615273673986,
      "learning_rate": 1.65592860169994e-05,
      "loss": 0.3757,
      "step": 368
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.8404784917427333,
      "learning_rate": 1.653969844080466e-05,
      "loss": 0.2065,
      "step": 369
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.9029373978368715,
      "learning_rate": 1.6520066925426146e-05,
      "loss": 0.2541,
      "step": 370
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.420515783221278,
| "learning_rate": 1.650039160276485e-05, | |
| "loss": 0.362, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.7841307975964125, | |
| "learning_rate": 1.648067260501611e-05, | |
| "loss": 0.3155, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.5202485924429843, | |
| "learning_rate": 1.646091006466871e-05, | |
| "loss": 0.1921, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.7645307356443054, | |
| "learning_rate": 1.644110411450398e-05, | |
| "loss": 0.2744, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.1778579528461544, | |
| "learning_rate": 1.6421254887594918e-05, | |
| "loss": 0.3207, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.1144744784839755, | |
| "learning_rate": 1.6401362517305296e-05, | |
| "loss": 0.3438, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.4036210464297634, | |
| "learning_rate": 1.6381427137288756e-05, | |
| "loss": 0.2557, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.281318088880689, | |
| "learning_rate": 1.6361448881487913e-05, | |
| "loss": 0.3522, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.442640792379695, | |
| "learning_rate": 1.634142788413346e-05, | |
| "loss": 0.3291, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 3.778962002997131, | |
| "learning_rate": 1.6321364279743267e-05, | |
| "loss": 0.3112, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.482779982218463, | |
| "learning_rate": 1.6301258203121463e-05, | |
| "loss": 0.3753, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.255116541310975, | |
| "learning_rate": 1.628110978935756e-05, | |
| "loss": 0.438, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.4043721570913874, | |
| "learning_rate": 1.6260919173825507e-05, | |
| "loss": 0.2462, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.955749694714099, | |
| "learning_rate": 1.6240686492182806e-05, | |
| "loss": 0.2731, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.5061172100523035, | |
| "learning_rate": 1.62204118803696e-05, | |
| "loss": 0.2472, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.3645366514423483, | |
| "learning_rate": 1.6200095474607753e-05, | |
| "loss": 0.4294, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.7062872716901005, | |
| "learning_rate": 1.6179737411399926e-05, | |
| "loss": 0.2888, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.7788526606346524, | |
| "learning_rate": 1.6159337827528686e-05, | |
| "loss": 0.297, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.062084476736332, | |
| "learning_rate": 1.6138896860055555e-05, | |
| "loss": 0.3007, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.4604320059002305, | |
| "learning_rate": 1.6118414646320115e-05, | |
| "loss": 0.2174, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.346784589374752, | |
| "learning_rate": 1.6097891323939063e-05, | |
| "loss": 0.2916, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.89550070481857, | |
| "learning_rate": 1.6077327030805318e-05, | |
| "loss": 0.31, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.9449292302402192, | |
| "learning_rate": 1.6056721905087056e-05, | |
| "loss": 0.2325, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.6599175197516962, | |
| "learning_rate": 1.6036076085226813e-05, | |
| "loss": 0.2587, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.221872605847609, | |
| "learning_rate": 1.601538970994054e-05, | |
| "loss": 0.2651, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.369743617712892, | |
| "learning_rate": 1.599466291821666e-05, | |
| "loss": 0.3272, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.152582977278543, | |
| "learning_rate": 1.597389584931517e-05, | |
| "loss": 0.3633, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.149297985911286, | |
| "learning_rate": 1.595308864276666e-05, | |
| "loss": 0.3414, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.1819931174923166, | |
| "learning_rate": 1.593224143837142e-05, | |
| "loss": 0.3172, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.165318987684981, | |
| "learning_rate": 1.5911354376198468e-05, | |
| "loss": 0.3352, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.5544032333205273, | |
| "learning_rate": 1.589042759658462e-05, | |
| "loss": 0.188, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.1268000986916364, | |
| "learning_rate": 1.586946124013354e-05, | |
| "loss": 0.234, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.6649749080409588, | |
| "learning_rate": 1.5848455447714822e-05, | |
| "loss": 0.2497, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.5600823534050168, | |
| "learning_rate": 1.582741036046301e-05, | |
| "loss": 0.3956, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.4435713327056052, | |
| "learning_rate": 1.580632611977666e-05, | |
| "loss": 0.3828, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.6251851298524764, | |
| "learning_rate": 1.578520286731741e-05, | |
| "loss": 0.289, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.7105944882630801, | |
| "learning_rate": 1.5764040745008987e-05, | |
| "loss": 0.2551, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.9471548033131576, | |
| "learning_rate": 1.5742839895036305e-05, | |
| "loss": 0.3029, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.2009574984159666, | |
| "learning_rate": 1.572160045984447e-05, | |
| "loss": 0.3057, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.4997777963014745, | |
| "learning_rate": 1.570032258213783e-05, | |
| "loss": 0.279, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 3.2091983353551163, | |
| "learning_rate": 1.5679006404879035e-05, | |
| "loss": 0.1804, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.2147374257119607, | |
| "learning_rate": 1.565765207128805e-05, | |
| "loss": 0.3216, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.6847496660853745, | |
| "learning_rate": 1.5636259724841224e-05, | |
| "loss": 0.2444, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.442032050244461, | |
| "learning_rate": 1.561482950927029e-05, | |
| "loss": 0.2093, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.7144419107938096, | |
| "learning_rate": 1.559336156856143e-05, | |
| "loss": 0.2823, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.8994572657987778, | |
| "learning_rate": 1.5571856046954284e-05, | |
| "loss": 0.2811, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.4556233383553328, | |
| "learning_rate": 1.5550313088941012e-05, | |
| "loss": 0.2636, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.9438588716317593, | |
| "learning_rate": 1.5528732839265272e-05, | |
| "loss": 0.237, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.9487422925171727, | |
| "learning_rate": 1.550711544292131e-05, | |
| "loss": 0.2695, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.8474563686885068, | |
| "learning_rate": 1.5485461045152937e-05, | |
| "loss": 0.332, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.213733875483992, | |
| "learning_rate": 1.5463769791452574e-05, | |
| "loss": 0.3657, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.7730710691561862, | |
| "learning_rate": 1.5442041827560274e-05, | |
| "loss": 0.2537, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 3.246874442788659, | |
| "learning_rate": 1.5420277299462734e-05, | |
| "loss": 0.5124, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.05906376120636, | |
| "learning_rate": 1.5398476353392323e-05, | |
| "loss": 0.239, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.5526941505478917, | |
| "learning_rate": 1.537663913582611e-05, | |
| "loss": 0.232, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.8580732186198845, | |
| "learning_rate": 1.5354765793484834e-05, | |
| "loss": 0.2396, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.5006582448794108, | |
| "learning_rate": 1.533285647333198e-05, | |
| "loss": 0.2019, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.127615426349164, | |
| "learning_rate": 1.531091132257275e-05, | |
| "loss": 0.3526, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.041420611859174, | |
| "learning_rate": 1.5288930488653094e-05, | |
| "loss": 0.2818, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.0158826239887415, | |
| "learning_rate": 1.52669141192587e-05, | |
| "loss": 0.2433, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.35504555694053, | |
| "learning_rate": 1.5244862362314021e-05, | |
| "loss": 0.4146, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.321139724637557, | |
| "learning_rate": 1.5222775365981272e-05, | |
| "loss": 0.3337, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.0652742393182297, | |
| "learning_rate": 1.5200653278659431e-05, | |
| "loss": 0.2628, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.1040864390034226, | |
| "learning_rate": 1.5178496248983254e-05, | |
| "loss": 0.3277, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.0478273126880238, | |
| "learning_rate": 1.5156304425822269e-05, | |
| "loss": 0.3138, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.3950221395018922, | |
| "learning_rate": 1.5134077958279764e-05, | |
| "loss": 0.1842, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.656819521166795, | |
| "learning_rate": 1.511181699569181e-05, | |
| "loss": 0.2883, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.0042138431638254, | |
| "learning_rate": 1.5089521687626243e-05, | |
| "loss": 0.2961, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.7959393612551642, | |
| "learning_rate": 1.5067192183881658e-05, | |
| "loss": 0.2915, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.5773497666048848, | |
| "learning_rate": 1.50448286344864e-05, | |
| "loss": 0.2384, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.8319515701265676, | |
| "learning_rate": 1.5022431189697569e-05, | |
| "loss": 0.2124, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.352102926044369, | |
| "learning_rate": 1.5000000000000002e-05, | |
| "loss": 0.2956, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.6574809214182507, | |
| "learning_rate": 1.4977535216105258e-05, | |
| "loss": 0.3113, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.022537878921683, | |
| "learning_rate": 1.4955036988950617e-05, | |
| "loss": 0.3537, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.7901743008827358, | |
| "learning_rate": 1.4932505469698053e-05, | |
| "loss": 0.2454, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.5189391342729044, | |
| "learning_rate": 1.4909940809733223e-05, | |
| "loss": 0.3997, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.7952766424539717, | |
| "learning_rate": 1.488734316066446e-05, | |
| "loss": 0.3098, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.6459613118945837, | |
| "learning_rate": 1.4864712674321733e-05, | |
| "loss": 0.4042, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.8809594247765489, | |
| "learning_rate": 1.484204950275565e-05, | |
| "loss": 0.2572, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.7345503741548023, | |
| "learning_rate": 1.4819353798236427e-05, | |
| "loss": 0.2967, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.480786580014742, | |
| "learning_rate": 1.4796625713252848e-05, | |
| "loss": 0.3428, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.9650622625519851, | |
| "learning_rate": 1.477386540051127e-05, | |
| "loss": 0.3437, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.6937032220638994, | |
| "learning_rate": 1.4751073012934587e-05, | |
| "loss": 0.253, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.7558596774994375, | |
| "learning_rate": 1.4728248703661183e-05, | |
| "loss": 0.2542, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.9929358246953341, | |
| "learning_rate": 1.4705392626043931e-05, | |
| "loss": 0.3633, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.810310474439796, | |
| "learning_rate": 1.4682504933649144e-05, | |
| "loss": 0.283, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.152036182046435, | |
| "learning_rate": 1.4659585780255556e-05, | |
| "loss": 0.3102, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.689009597364608, | |
| "learning_rate": 1.4636635319853274e-05, | |
| "loss": 0.2497, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.7511702540468523, | |
| "learning_rate": 1.461365370664276e-05, | |
| "loss": 0.2242, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.8016493801156939, | |
| "learning_rate": 1.4590641095033786e-05, | |
| "loss": 0.2562, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.8330222184027876, | |
| "learning_rate": 1.4567597639644387e-05, | |
| "loss": 0.348, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.8884072768255293, | |
| "learning_rate": 1.4544523495299843e-05, | |
| "loss": 0.3019, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.125164852414625, | |
| "learning_rate": 1.4521418817031627e-05, | |
| "loss": 0.27, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.1351697700961187, | |
| "learning_rate": 1.4498283760076362e-05, | |
| "loss": 0.3036, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.7649150773280204, | |
| "learning_rate": 1.4475118479874775e-05, | |
| "loss": 0.2595, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.561271491793302, | |
| "learning_rate": 1.445192313207067e-05, | |
| "loss": 0.4268, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.0380333698817577, | |
| "learning_rate": 1.4428697872509868e-05, | |
| "loss": 0.2958, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.8504896113827187, | |
| "learning_rate": 1.4405442857239151e-05, | |
| "loss": 0.2613, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.8094111096940708, | |
| "learning_rate": 1.4382158242505236e-05, | |
| "loss": 0.2721, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.8451009864315955, | |
| "learning_rate": 1.4358844184753713e-05, | |
| "loss": 0.2508, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.4184369060807616, | |
| "learning_rate": 1.4335500840627988e-05, | |
| "loss": 0.208, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.1158834118020655, | |
| "learning_rate": 1.4312128366968244e-05, | |
| "loss": 0.2629, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.3693919275045663, | |
| "learning_rate": 1.4288726920810381e-05, | |
| "loss": 0.3818, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.5636740891713368, | |
| "learning_rate": 1.4265296659384956e-05, | |
| "loss": 0.2369, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.200704759747656, | |
| "learning_rate": 1.4241837740116133e-05, | |
| "loss": 0.3506, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.1643259001356636, | |
| "learning_rate": 1.4218350320620625e-05, | |
| "loss": 0.3591, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.1092608570864133, | |
| "learning_rate": 1.4194834558706632e-05, | |
| "loss": 0.3503, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.06478314792136, | |
| "learning_rate": 1.4171290612372781e-05, | |
| "loss": 0.2355, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.6958980700470296, | |
| "learning_rate": 1.4147718639807071e-05, | |
| "loss": 0.2471, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.8849495582079205, | |
| "learning_rate": 1.4124118799385797e-05, | |
| "loss": 0.2712, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.8906644949931855, | |
| "learning_rate": 1.4100491249672499e-05, | |
| "loss": 0.293, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.931395556520391, | |
| "learning_rate": 1.4076836149416889e-05, | |
| "loss": 0.2185, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.7527868454175564, | |
| "learning_rate": 1.405315365755379e-05, | |
| "loss": 0.3091, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.900785095851623, | |
| "learning_rate": 1.4029443933202059e-05, | |
| "loss": 0.2492, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.233763201074291, | |
| "learning_rate": 1.4005707135663529e-05, | |
| "loss": 0.292, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.9402674198597119, | |
| "learning_rate": 1.3981943424421932e-05, | |
| "loss": 0.2757, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.05490281674057, | |
| "learning_rate": 1.3958152959141824e-05, | |
| "loss": 0.3103, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.689652030822919, | |
| "learning_rate": 1.3934335899667526e-05, | |
| "loss": 0.28, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.5225074972151993, | |
| "learning_rate": 1.3910492406022033e-05, | |
| "loss": 0.2385, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.8009132690551426, | |
| "learning_rate": 1.3886622638405953e-05, | |
| "loss": 0.2212, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.7608649807194425, | |
| "learning_rate": 1.386272675719642e-05, | |
| "loss": 0.252, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.5938821438110429, | |
| "learning_rate": 1.3838804922946027e-05, | |
| "loss": 0.2367, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.095727220463024, | |
| "learning_rate": 1.381485729638173e-05, | |
| "loss": 0.313, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7733127567846445, | |
| "learning_rate": 1.3790884038403796e-05, | |
| "loss": 0.2192, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.8184052409785323, | |
| "learning_rate": 1.3766885310084687e-05, | |
| "loss": 0.2479, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.2549269220342394, | |
| "learning_rate": 1.374286127266801e-05, | |
| "loss": 0.3441, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7087572677999134, | |
| "learning_rate": 1.3718812087567414e-05, | |
| "loss": 0.2949, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.614404012946222, | |
| "learning_rate": 1.3694737916365517e-05, | |
| "loss": 0.2777, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.0160740135079345, | |
| "learning_rate": 1.36706389208128e-05, | |
| "loss": 0.295, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7024108871165236, | |
| "learning_rate": 1.3646515262826551e-05, | |
| "loss": 0.2313, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.8676689841332443, | |
| "learning_rate": 1.3622367104489757e-05, | |
| "loss": 0.3114, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.5476973040713458, | |
| "learning_rate": 1.3598194608050011e-05, | |
| "loss": 0.2199, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.9531175723466303, | |
| "learning_rate": 1.357399793591844e-05, | |
| "loss": 0.2401, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.836145460893975, | |
| "learning_rate": 1.354977725066859e-05, | |
| "loss": 0.2551, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.5413490229034283, | |
| "learning_rate": 1.3525532715035365e-05, | |
| "loss": 0.3665, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7210272719851107, | |
| "learning_rate": 1.3501264491913909e-05, | |
| "loss": 0.2342, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.152971499183195, | |
| "learning_rate": 1.3476972744358507e-05, | |
| "loss": 0.277, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.963619315388926, | |
| "learning_rate": 1.3452657635581521e-05, | |
| "loss": 0.2778, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.261926107950733, | |
| "learning_rate": 1.3428319328952254e-05, | |
| "loss": 0.3748, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.002171823722819, | |
| "learning_rate": 1.3403957987995884e-05, | |
| "loss": 0.2425, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.273686726573573, | |
| "learning_rate": 1.337957377639235e-05, | |
| "loss": 0.3396, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.3312186957241137, | |
| "learning_rate": 1.335516685797525e-05, | |
| "loss": 0.3483, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.9943390495589555, | |
| "learning_rate": 1.333073739673076e-05, | |
| "loss": 0.3134, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.9690670721926964, | |
| "learning_rate": 1.3306285556796494e-05, | |
| "loss": 0.2985, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.00802440137394, | |
| "learning_rate": 1.3281811502460448e-05, | |
| "loss": 0.2536, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.301585415154793, | |
| "learning_rate": 1.3257315398159865e-05, | |
| "loss": 0.2793, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.7858563085952386, | |
| "learning_rate": 1.3232797408480128e-05, | |
| "loss": 0.2701, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.017873008101678, | |
| "learning_rate": 1.3208257698153677e-05, | |
| "loss": 0.3334, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.8364335369046931, | |
| "learning_rate": 1.3183696432058889e-05, | |
| "loss": 0.2536, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.7056348936779424, | |
| "learning_rate": 1.3159113775218963e-05, | |
| "loss": 0.3034, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.227118298645537, | |
| "learning_rate": 1.3134509892800821e-05, | |
| "loss": 0.3322, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.117256632364151, | |
| "learning_rate": 1.3109884950114007e-05, | |
| "loss": 0.2889, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.4273413888554127, | |
| "learning_rate": 1.3085239112609549e-05, | |
| "loss": 0.2379, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.9575603339371808, | |
| "learning_rate": 1.3060572545878875e-05, | |
| "loss": 0.2725, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.8170930579170255, | |
| "learning_rate": 1.3035885415652685e-05, | |
| "loss": 0.3094, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.8284584543080296, | |
| "learning_rate": 1.3011177887799846e-05, | |
| "loss": 0.2835, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.569945283347339, | |
| "learning_rate": 1.2986450128326267e-05, | |
| "loss": 0.2072, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.8533615577812252, | |
| "learning_rate": 1.2961702303373795e-05, | |
| "loss": 0.2079, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.9399595281432296, | |
| "learning_rate": 1.2936934579219094e-05, | |
| "loss": 0.2467, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.213415657483411, | |
| "learning_rate": 1.2912147122272523e-05, | |
| "loss": 0.246, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.4888356873561297, | |
| "learning_rate": 1.2887340099077024e-05, | |
| "loss": 0.3111, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.9573742795078264, | |
| "learning_rate": 1.2862513676307009e-05, | |
| "loss": 0.2122, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.7014691865915594, | |
| "learning_rate": 1.283766802076722e-05, | |
| "loss": 0.2022, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.190373861234172, | |
| "learning_rate": 1.2812803299391629e-05, | |
| "loss": 0.2752, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.6302470512377956, | |
| "learning_rate": 1.2787919679242307e-05, | |
| "loss": 0.3832, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.6985835144579573, | |
| "learning_rate": 1.2763017327508304e-05, | |
| "loss": 0.3706, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.1919668554044596, | |
| "learning_rate": 1.2738096411504521e-05, | |
| "loss": 0.3172, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.965514372155199, | |
| "learning_rate": 1.271315709867059e-05, | |
| "loss": 0.2695, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.9788197014312077, | |
| "learning_rate": 1.2688199556569753e-05, | |
| "loss": 0.3215, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.8117898000767236, | |
| "learning_rate": 1.2663223952887724e-05, | |
| "loss": 0.2785, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.758141248372573, | |
| "learning_rate": 1.263823045543158e-05, | |
| "loss": 0.4488, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.444676019103691, | |
| "learning_rate": 1.2613219232128608e-05, | |
| "loss": 0.2588, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.7579524367776718, | |
| "learning_rate": 1.2588190451025209e-05, | |
| "loss": 0.2807, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.0539384138121735, | |
| "learning_rate": 1.2563144280285742e-05, | |
| "loss": 0.3425, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.123962428661618, | |
| "learning_rate": 1.2538080888191408e-05, | |
| "loss": 0.3437, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.848843665190907, | |
| "learning_rate": 1.2513000443139112e-05, | |
| "loss": 0.2328, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.9068583881882215, | |
| "learning_rate": 1.2487903113640338e-05, | |
| "loss": 0.255, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.5499884922847627, | |
| "learning_rate": 1.2462789068320016e-05, | |
| "loss": 0.2282, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.7518125236459046, | |
| "learning_rate": 1.2437658475915378e-05, | |
| "loss": 0.2218, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.8114924338777028, | |
| "learning_rate": 1.2412511505274845e-05, | |
| "loss": 0.2805, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.9244797085738425, | |
| "learning_rate": 1.2387348325356873e-05, | |
| "loss": 0.3397, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.5720493713091632, | |
| "learning_rate": 1.2362169105228828e-05, | |
| "loss": 0.2528, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.8428242120952283, | |
| "learning_rate": 1.2336974014065844e-05, | |
| "loss": 0.2768, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.835184040116667, | |
| "learning_rate": 1.23117632211497e-05, | |
| "loss": 0.2833, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.214221991382723, | |
| "learning_rate": 1.2286536895867656e-05, | |
| "loss": 0.3365, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.235743345209664, | |
| "learning_rate": 1.2261295207711347e-05, | |
| "loss": 0.3073, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.628463064921636, | |
| "learning_rate": 1.2236038326275628e-05, | |
| "loss": 0.2442, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.6633400921663062, | |
| "learning_rate": 1.221076642125742e-05, | |
| "loss": 0.2829, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9337054901311905, | |
| "learning_rate": 1.2185479662454596e-05, | |
| "loss": 0.2395, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.906099833447381, | |
| "learning_rate": 1.2160178219764838e-05, | |
| "loss": 0.2846, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9714262464040193, | |
| "learning_rate": 1.2134862263184467e-05, | |
| "loss": 0.2152, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.057145897252437, | |
| "learning_rate": 1.2109531962807333e-05, | |
| "loss": 0.2386, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9480146225635264, | |
| "learning_rate": 1.2084187488823657e-05, | |
| "loss": 0.3043, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9995365117357957, | |
| "learning_rate": 1.2058829011518896e-05, | |
| "loss": 0.2283, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9136198948337104, | |
| "learning_rate": 1.2033456701272577e-05, | |
| "loss": 0.3082, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.307133524485423, | |
| "learning_rate": 1.2008070728557186e-05, | |
| "loss": 0.2434, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.1483944519592906, | |
| "learning_rate": 1.1982671263936996e-05, | |
| "loss": 0.3163, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.0153764114524515, | |
| "learning_rate": 1.195725847806693e-05, | |
| "loss": 0.2928, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.7916502641369683, | |
| "learning_rate": 1.193183254169142e-05, | |
| "loss": 0.2365, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.91929055961395, | |
| "learning_rate": 1.1906393625643244e-05, | |
| "loss": 0.267, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.725549936564891, | |
| "learning_rate": 1.1880941900842398e-05, | |
| "loss": 0.294, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.867199788127604, | |
| "learning_rate": 1.1855477538294934e-05, | |
| "loss": 0.2054, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.1796669209124824, | |
| "learning_rate": 1.1830000709091816e-05, | |
| "loss": 0.3281, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.6222853120409504, | |
| "learning_rate": 1.1804511584407763e-05, | |
| "loss": 0.2956, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.04945883602689, | |
| "learning_rate": 1.177901033550012e-05, | |
| "loss": 0.3231, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.826195057643915, | |
| "learning_rate": 1.1753497133707678e-05, | |
| "loss": 0.3016, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 8.971990719339775, | |
| "learning_rate": 1.1727972150449545e-05, | |
| "loss": 0.2818, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.6422662404105821, | |
| "learning_rate": 1.1702435557223988e-05, | |
| "loss": 0.2689, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.762833226202305, | |
| "learning_rate": 1.1676887525607272e-05, | |
| "loss": 0.2159, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.2910649198888533, | |
| "learning_rate": 1.1651328227252516e-05, | |
| "loss": 0.2244, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.094888043379137, | |
| "learning_rate": 1.1625757833888552e-05, | |
| "loss": 0.3594, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.2689593600580564, | |
| "learning_rate": 1.1600176517318742e-05, | |
| "loss": 0.3697, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.036837172227918, | |
| "learning_rate": 1.1574584449419841e-05, | |
| "loss": 0.3415, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.8312182661707044, | |
| "learning_rate": 1.1548981802140849e-05, | |
| "loss": 0.2814, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.7233203263190564, | |
| "learning_rate": 1.1523368747501839e-05, | |
| "loss": 0.2371, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.354957898292099, | |
| "learning_rate": 1.1497745457592817e-05, | |
| "loss": 0.2179, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.852344400666275, | |
| "learning_rate": 1.1472112104572547e-05, | |
| "loss": 0.2883, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.2117818179444004, | |
| "learning_rate": 1.1446468860667422e-05, | |
| "loss": 0.3362, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.6374093138437715, | |
| "learning_rate": 1.142081589817027e-05, | |
| "loss": 0.229, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.44747695223584, | |
| "learning_rate": 1.1395153389439232e-05, | |
| "loss": 0.3219, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.7629632446504988, | |
| "learning_rate": 1.1369481506896582e-05, | |
| "loss": 0.2567, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.1288861116559445, | |
| "learning_rate": 1.1343800423027583e-05, | |
| "loss": 0.3142, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.4937719998915224, | |
| "learning_rate": 1.1318110310379303e-05, | |
| "loss": 0.2684, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.9641163799417516, | |
| "learning_rate": 1.129241134155949e-05, | |
| "loss": 0.3032, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.320726435802104, | |
| "learning_rate": 1.1266703689235395e-05, | |
| "loss": 0.3314, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.600756878312125, | |
| "learning_rate": 1.1240987526132595e-05, | |
| "loss": 0.2048, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.5669187735152597, | |
| "learning_rate": 1.1215263025033869e-05, | |
| "loss": 0.2504, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.673604639988355, | |
| "learning_rate": 1.1189530358778005e-05, | |
| "loss": 0.267, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.6903134828244797, | |
| "learning_rate": 1.1163789700258656e-05, | |
| "loss": 0.2873, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.8345305254478188, | |
| "learning_rate": 1.1138041222423177e-05, | |
| "loss": 0.2501, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.7035373535636231, | |
| "learning_rate": 1.111228509827145e-05, | |
| "loss": 0.2597, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.0008621254077523, | |
| "learning_rate": 1.1086521500854746e-05, | |
| "loss": 0.299, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.098817320177265, | |
| "learning_rate": 1.1060750603274535e-05, | |
| "loss": 0.3879, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.0169678737020864, | |
| "learning_rate": 1.1034972578681338e-05, | |
| "loss": 0.2962, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.7427358350656112, | |
| "learning_rate": 1.1009187600273565e-05, | |
| "loss": 0.233, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.7268783633290732, | |
| "learning_rate": 1.0983395841296349e-05, | |
| "loss": 0.3154, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.1796515190823067, | |
| "learning_rate": 1.0957597475040373e-05, | |
| "loss": 0.3123, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.0775099568321305, | |
| "learning_rate": 1.0931792674840718e-05, | |
| "loss": 0.3513, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.9943573933644843, | |
| "learning_rate": 1.0905981614075693e-05, | |
| "loss": 0.2742, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.9559678557187534, | |
| "learning_rate": 1.0880164466165675e-05, | |
| "loss": 0.261, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.5524280949298477, | |
| "learning_rate": 1.0854341404571929e-05, | |
| "loss": 0.2293, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.8032833160238992, | |
| "learning_rate": 1.0828512602795462e-05, | |
| "loss": 0.2201, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.991664339586316, | |
| "learning_rate": 1.0802678234375852e-05, | |
| "loss": 0.2977, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.2614790312717767, | |
| "learning_rate": 1.0776838472890065e-05, | |
| "loss": 0.3178, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.6688609853378997, | |
| "learning_rate": 1.075099349195131e-05, | |
| "loss": 0.3176, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.9523644646192964, | |
| "learning_rate": 1.0725143465207868e-05, | |
| "loss": 0.3127, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.0367494023975357, | |
| "learning_rate": 1.0699288566341914e-05, | |
| "loss": 0.2484, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.70860182909142, | |
| "learning_rate": 1.0673428969068365e-05, | |
| "loss": 0.2455, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.0921836159299128, | |
| "learning_rate": 1.06475648471337e-05, | |
| "loss": 0.2931, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.7894757861594108, | |
| "learning_rate": 1.0621696374314807e-05, | |
| "loss": 0.2389, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.9138153204173056, | |
| "learning_rate": 1.0595823724417795e-05, | |
| "loss": 0.2499, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.732159740978979, | |
| "learning_rate": 1.0569947071276847e-05, | |
| "loss": 0.2528, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.604322234601798, | |
| "learning_rate": 1.0544066588753044e-05, | |
| "loss": 0.3118, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.857528267039127, | |
| "learning_rate": 1.0518182450733185e-05, | |
| "loss": 0.3384, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.8319294463625793, | |
| "learning_rate": 1.0492294831128641e-05, | |
| "loss": 0.2126, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.7949572184557832, | |
| "learning_rate": 1.0466403903874176e-05, | |
| "loss": 0.2624, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.3350124712513427, | |
| "learning_rate": 1.0440509842926768e-05, | |
| "loss": 0.2573, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.8899114484260906, | |
| "learning_rate": 1.0414612822264457e-05, | |
| "loss": 0.2336, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.08203660477542, | |
| "learning_rate": 1.0388713015885161e-05, | |
| "loss": 0.355, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.5453599564780907, | |
| "learning_rate": 1.0362810597805526e-05, | |
| "loss": 0.2005, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.4880436797334258, | |
| "learning_rate": 1.0336905742059742e-05, | |
| "loss": 0.2083, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.2129712910857706, | |
| "learning_rate": 1.031099862269837e-05, | |
| "loss": 0.2938, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.007015377715633, | |
| "learning_rate": 1.028508941378719e-05, | |
| "loss": 0.326, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.430415446584424, | |
| "learning_rate": 1.0259178289406011e-05, | |
| "loss": 0.3133, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.7430708368390706, | |
| "learning_rate": 1.0233265423647523e-05, | |
| "loss": 0.2283, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.475276647543626, | |
| "learning_rate": 1.0207350990616107e-05, | |
| "loss": 0.2085, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.5906030539698848, | |
| "learning_rate": 1.0181435164426676e-05, | |
| "loss": 0.2401, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.260933647724671, | |
| "learning_rate": 1.0155518119203511e-05, | |
| "loss": 0.2861, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.8853987068919458, | |
| "learning_rate": 1.0129600029079072e-05, | |
| "loss": 0.2366, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.0976061677630002, | |
| "learning_rate": 1.0103681068192845e-05, | |
| "loss": 0.2928, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.2519798986265807, | |
| "learning_rate": 1.0077761410690172e-05, | |
| "loss": 0.3527, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.9980492224694486, | |
| "learning_rate": 1.0051841230721065e-05, | |
| "loss": 0.3277, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.1108410240606466, | |
| "learning_rate": 1.0025920702439051e-05, | |
| "loss": 0.2718, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.499661079196125, | |
| "learning_rate": 1e-05, | |
| "loss": 0.3087, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.795724082595488, | |
| "learning_rate": 9.97407929756095e-06, | |
| "loss": 0.3023, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.467066192741996, | |
| "learning_rate": 9.948158769278939e-06, | |
| "loss": 0.2076, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.050471519058259, | |
| "learning_rate": 9.92223858930983e-06, | |
| "loss": 0.3034, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.220856023054313, | |
| "learning_rate": 9.896318931807155e-06, | |
| "loss": 0.3391, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.863571935358034, | |
| "learning_rate": 9.870399970920932e-06, | |
| "loss": 0.2584, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.8386913522932375, | |
| "learning_rate": 9.844481880796492e-06, | |
| "loss": 0.2296, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.159763851238493, | |
| "learning_rate": 9.818564835573324e-06, | |
| "loss": 0.3015, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.1871325388107845, | |
| "learning_rate": 9.7926490093839e-06, | |
| "loss": 0.3556, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.452578997980729, | |
| "learning_rate": 9.766734576352478e-06, | |
| "loss": 0.3296, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.3715404202620423, | |
| "learning_rate": 9.740821710593989e-06, | |
| "loss": 0.1949, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.6850708561729166, | |
| "learning_rate": 9.714910586212815e-06, | |
| "loss": 0.2519, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.9975915546839027, | |
| "learning_rate": 9.689001377301634e-06, | |
| "loss": 0.3526, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.6032989326955913, | |
| "learning_rate": 9.663094257940258e-06, | |
| "loss": 0.258, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.5020075466048406, | |
| "learning_rate": 9.637189402194477e-06, | |
| "loss": 0.2157, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.5360060738402725, | |
| "learning_rate": 9.61128698411484e-06, | |
| "loss": 0.2529, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.1523288036462875, | |
| "learning_rate": 9.585387177735548e-06, | |
| "loss": 0.3479, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.526681384178872, | |
| "learning_rate": 9.559490157073236e-06, | |
| "loss": 0.2498, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.9706114035878841, | |
| "learning_rate": 9.533596096125826e-06, | |
| "loss": 0.2578, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.0875827900456034, | |
| "learning_rate": 9.507705168871359e-06, | |
| "loss": 0.2958, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.3150336652807166, | |
| "learning_rate": 9.481817549266817e-06, | |
| "loss": 0.2239, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.755272176537075, | |
| "learning_rate": 9.45593341124696e-06, | |
| "loss": 0.2453, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.2853026912980152, | |
| "learning_rate": 9.430052928723153e-06, | |
| "loss": 0.1516, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.0816660750021545, | |
| "learning_rate": 9.404176275582208e-06, | |
| "loss": 0.3357, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.9012164667801466, | |
| "learning_rate": 9.378303625685196e-06, | |
| "loss": 0.2896, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.038751603631483, | |
| "learning_rate": 9.352435152866299e-06, | |
| "loss": 0.2958, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.7628021191667054, | |
| "learning_rate": 9.326571030931636e-06, | |
| "loss": 0.254, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.140386509095083, | |
| "learning_rate": 9.300711433658088e-06, | |
| "loss": 0.3249, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.0219557296847683, | |
| "learning_rate": 9.274856534792138e-06, | |
| "loss": 0.3125, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.7783897851664183, | |
| "learning_rate": 9.249006508048695e-06, | |
| "loss": 0.3127, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.3950371883220853, | |
| "learning_rate": 9.223161527109938e-06, | |
| "loss": 0.3485, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.685073081651275, | |
| "learning_rate": 9.197321765624153e-06, | |
| "loss": 0.2779, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.846355813733675, | |
| "learning_rate": 9.17148739720454e-06, | |
| "loss": 0.292, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.1325807316923835, | |
| "learning_rate": 9.145658595428075e-06, | |
| "loss": 0.3394, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.1928723467900304, | |
| "learning_rate": 9.119835533834332e-06, | |
| "loss": 0.3057, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.8305417609648569, | |
| "learning_rate": 9.09401838592431e-06, | |
| "loss": 0.2657, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.6794809573883291, | |
| "learning_rate": 9.068207325159285e-06, | |
| "loss": 0.2233, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.9638987984684106, | |
| "learning_rate": 9.042402524959632e-06, | |
| "loss": 0.284, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.767069934592504, | |
| "learning_rate": 9.016604158703654e-06, | |
| "loss": 0.2898, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9395833869891466, | |
| "learning_rate": 8.990812399726435e-06, | |
| "loss": 0.2359, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.6200623938126053, | |
| "learning_rate": 8.965027421318666e-06, | |
| "loss": 0.2537, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9061001116103824, | |
| "learning_rate": 8.939249396725468e-06, | |
| "loss": 0.249, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.7573100057844808, | |
| "learning_rate": 8.913478499145255e-06, | |
| "loss": 0.2884, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9056792765093784, | |
| "learning_rate": 8.887714901728551e-06, | |
| "loss": 0.2455, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9591625204073388, | |
| "learning_rate": 8.861958777576826e-06, | |
| "loss": 0.2489, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.389773070992383, | |
| "learning_rate": 8.836210299741346e-06, | |
| "loss": 0.1992, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9368736859469793, | |
| "learning_rate": 8.810469641222001e-06, | |
| "loss": 0.2221, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 2.0589002820916567, | |
| "learning_rate": 8.784736974966135e-06, | |
| "loss": 0.2342, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.4905403992972885, | |
| "learning_rate": 8.759012473867407e-06, | |
| "loss": 0.2684, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.7473535053835778, | |
| "learning_rate": 8.73329631076461e-06, | |
| "loss": 0.243, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.6813683752097253, | |
| "learning_rate": 8.707588658440511e-06, | |
| "loss": 0.2492, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.408970858642131, | |
| "learning_rate": 8.681889689620699e-06, | |
| "loss": 0.2003, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.716823703272264, | |
| "learning_rate": 8.656199576972424e-06, | |
| "loss": 0.2784, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.411386721721133, | |
| "learning_rate": 8.630518493103421e-06, | |
| "loss": 0.3147, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.770920978175344, | |
| "learning_rate": 8.604846610560771e-06, | |
| "loss": 0.279, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.3271735591024725, | |
| "learning_rate": 8.579184101829734e-06, | |
| "loss": 0.3377, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.3419344175534484, | |
| "learning_rate": 8.553531139332583e-06, | |
| "loss": 0.3811, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.5626317676383743, | |
| "learning_rate": 8.527887895427454e-06, | |
| "loss": 0.2962, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.9362226850688755, | |
| "learning_rate": 8.502254542407186e-06, | |
| "loss": 0.3329, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.7470223843486916, | |
| "learning_rate": 8.476631252498163e-06, | |
| "loss": 0.252, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.8326252457768055, | |
| "learning_rate": 8.451018197859153e-06, | |
| "loss": 0.305, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.914344829480951, | |
| "learning_rate": 8.425415550580162e-06, | |
| "loss": 0.2827, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.7401454344432838, | |
| "learning_rate": 8.399823482681263e-06, | |
| "loss": 0.2237, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.915407039290035, | |
| "learning_rate": 8.374242166111448e-06, | |
| "loss": 0.2602, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.7662030359060474, | |
| "learning_rate": 8.348671772747488e-06, | |
| "loss": 0.2613, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.8378807564270505, | |
| "learning_rate": 8.323112474392731e-06, | |
| "loss": 0.2295, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 2.1373704073838935, | |
| "learning_rate": 8.297564442776014e-06, | |
| "loss": 0.2583, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.9134412975123845, | |
| "learning_rate": 8.272027849550457e-06, | |
| "loss": 0.251, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.7321493305498081, | |
| "learning_rate": 8.246502866292324e-06, | |
| "loss": 0.2462, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.7422502864431673, | |
| "learning_rate": 8.22098966449988e-06, | |
| "loss": 0.3001, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.8695974387785772, | |
| "learning_rate": 8.195488415592238e-06, | |
| "loss": 0.2629, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 2.0689409685964963, | |
| "learning_rate": 8.169999290908189e-06, | |
| "loss": 0.2823, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 2.1939503731878625, | |
| "learning_rate": 8.144522461705067e-06, | |
| "loss": 0.2832, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.7859543663180524, | |
| "learning_rate": 8.119058099157605e-06, | |
| "loss": 0.2787, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 2.2440954170050214, | |
| "learning_rate": 8.09360637435676e-06, | |
| "loss": 0.2857, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.8539424786835583, | |
| "learning_rate": 8.068167458308582e-06, | |
| "loss": 0.2208, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.9465753497269007, | |
| "learning_rate": 8.042741521933071e-06, | |
| "loss": 0.2725, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.0697271722885526, | |
| "learning_rate": 8.017328736063005e-06, | |
| "loss": 0.2857, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.8427390960314034, | |
| "learning_rate": 7.991929271442817e-06, | |
| "loss": 0.2416, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.3575832964819068, | |
| "learning_rate": 7.966543298727426e-06, | |
| "loss": 0.2328, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.23600118477056, | |
| "learning_rate": 7.941170988481108e-06, | |
| "loss": 0.3954, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.4238788399103068, | |
| "learning_rate": 7.915812511176348e-06, | |
| "loss": 0.2308, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.5635993113710267, | |
| "learning_rate": 7.89046803719267e-06, | |
| "loss": 0.2139, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.8370804480852818, | |
| "learning_rate": 7.865137736815536e-06, | |
| "loss": 0.2773, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.1761902290910675, | |
| "learning_rate": 7.839821780235168e-06, | |
| "loss": 0.2539, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.740237960651226, | |
| "learning_rate": 7.814520337545405e-06, | |
| "loss": 0.2879, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.564863716974201, | |
| "learning_rate": 7.789233578742583e-06, | |
| "loss": 0.2321, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.8466822510532337, | |
| "learning_rate": 7.763961673724379e-06, | |
| "loss": 0.3398, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.9795137872908464, | |
| "learning_rate": 7.738704792288654e-06, | |
| "loss": 0.2737, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.6011860774130708, | |
| "learning_rate": 7.713463104132345e-06, | |
| "loss": 0.2026, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.6552476365616033, | |
| "learning_rate": 7.688236778850307e-06, | |
| "loss": 0.2569, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.0217820875771344, | |
| "learning_rate": 7.663025985934158e-06, | |
| "loss": 0.304, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.5150619815753843, | |
| "learning_rate": 7.637830894771176e-06, | |
| "loss": 0.2189, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.186718062247446, | |
| "learning_rate": 7.61265167464313e-06, | |
| "loss": 0.3316, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.107306318442609, | |
| "learning_rate": 7.587488494725157e-06, | |
| "loss": 0.2399, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.4123410080473793, | |
| "learning_rate": 7.5623415240846235e-06, | |
| "loss": 0.2493, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.1775732705159556, | |
| "learning_rate": 7.537210931679988e-06, | |
| "loss": 0.3429, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.0867680464812977, | |
| "learning_rate": 7.512096886359663e-06, | |
| "loss": 0.2466, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.369684623629669, | |
| "learning_rate": 7.48699955686089e-06, | |
| "loss": 0.21, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.061299527975879, | |
| "learning_rate": 7.4619191118085955e-06, | |
| "loss": 0.3019, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.2068568998917577, | |
| "learning_rate": 7.4368557197142596e-06, | |
| "loss": 0.3616, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.211709484730348, | |
| "learning_rate": 7.411809548974792e-06, | |
| "loss": 0.2979, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.6484394682313261, | |
| "learning_rate": 7.3867807678713965e-06, | |
| "loss": 0.2686, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.819593888775896, | |
| "learning_rate": 7.361769544568424e-06, | |
| "loss": 0.2769, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.0270942833528975, | |
| "learning_rate": 7.336776047112277e-06, | |
| "loss": 0.3073, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.8410653613922618, | |
| "learning_rate": 7.311800443430251e-06, | |
| "loss": 0.2065, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 4.11937231287671, | |
| "learning_rate": 7.286842901329413e-06, | |
| "loss": 0.2401, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.6752790498990626, | |
| "learning_rate": 7.26190358849548e-06, | |
| "loss": 0.2336, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 4.585871964147853, | |
| "learning_rate": 7.236982672491699e-06, | |
| "loss": 0.2328, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.4850708268042807, | |
| "learning_rate": 7.212080320757695e-06, | |
| "loss": 0.2361, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.810180128194031, | |
| "learning_rate": 7.187196700608373e-06, | |
| "loss": 0.2118, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.9604044922011872, | |
| "learning_rate": 7.162331979232784e-06, | |
| "loss": 0.2935, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.3782851976916053, | |
| "learning_rate": 7.137486323692994e-06, | |
| "loss": 0.3103, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.1641128770713647, | |
| "learning_rate": 7.1126599009229766e-06, | |
| "loss": 0.3441, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.8820104796769928, | |
| "learning_rate": 7.0878528777274814e-06, | |
| "loss": 0.2598, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.8562917314614056, | |
| "learning_rate": 7.063065420780909e-06, | |
| "loss": 0.2472, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.121603337381749, | |
| "learning_rate": 7.0382976966262065e-06, | |
| "loss": 0.3149, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.3963555305399518, | |
| "learning_rate": 7.013549871673736e-06, | |
| "loss": 0.2907, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.916425141616387, | |
| "learning_rate": 6.988822112200157e-06, | |
| "loss": 0.245, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.0267284952286784, | |
| "learning_rate": 6.964114584347316e-06, | |
| "loss": 0.2846, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.0835299646218517, | |
| "learning_rate": 6.939427454121128e-06, | |
| "loss": 0.3285, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.110661975800217, | |
| "learning_rate": 6.914760887390453e-06, | |
| "loss": 0.2585, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.8797700943372209, | |
| "learning_rate": 6.890115049885995e-06, | |
| "loss": 0.2546, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.12025172688243, | |
| "learning_rate": 6.865490107199182e-06, | |
| "loss": 0.3522, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.4608708843959843, | |
| "learning_rate": 6.840886224781039e-06, | |
| "loss": 0.4119, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.8579288038132629, | |
| "learning_rate": 6.816303567941111e-06, | |
| "loss": 0.2418, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.7794135410103595, | |
| "learning_rate": 6.791742301846325e-06, | |
| "loss": 0.3241, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.937994466079078, | |
| "learning_rate": 6.767202591519876e-06, | |
| "loss": 0.2847, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.6099233206079304, | |
| "learning_rate": 6.742684601840142e-06, | |
| "loss": 0.2654, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.7824149383848715, | |
| "learning_rate": 6.718188497539554e-06, | |
| "loss": 0.2328, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.1762391597184894, | |
| "learning_rate": 6.693714443203507e-06, | |
| "loss": 0.3638, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.7104692317924, | |
| "learning_rate": 6.669262603269246e-06, | |
| "loss": 0.2263, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.7275338948194552, | |
| "learning_rate": 6.644833142024752e-06, | |
| "loss": 0.2567, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.166136590966985, | |
| "learning_rate": 6.620426223607655e-06, | |
| "loss": 0.3237, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.9817672311971697, | |
| "learning_rate": 6.59604201200412e-06, | |
| "loss": 0.1942, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.039979494344524, | |
| "learning_rate": 6.571680671047749e-06, | |
| "loss": 0.244, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.9571588104884596, | |
| "learning_rate": 6.547342364418482e-06, | |
| "loss": 0.2502, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.680652340274426, | |
| "learning_rate": 6.523027255641494e-06, | |
| "loss": 0.2289, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.8417236190828588, | |
| "learning_rate": 6.498735508086094e-06, | |
| "loss": 0.2579, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.7507950484803283, | |
| "learning_rate": 6.474467284964634e-06, | |
| "loss": 0.2502, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.8084082798639443, | |
| "learning_rate": 6.450222749331414e-06, | |
| "loss": 0.32, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.2526702250171216, | |
| "learning_rate": 6.426002064081565e-06, | |
| "loss": 0.3481, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.909785646107146, | |
| "learning_rate": 6.40180539194999e-06, | |
| "loss": 0.2803, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.8134151111142809, | |
| "learning_rate": 6.377632895510248e-06, | |
| "loss": 0.2465, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.7155913087330097, | |
| "learning_rate": 6.35348473717345e-06, | |
| "loss": 0.4239, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.577301053913633, | |
| "learning_rate": 6.329361079187199e-06, | |
| "loss": 0.2506, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.7753632554801293, | |
| "learning_rate": 6.305262083634488e-06, | |
| "loss": 0.2695, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.19246127069483, | |
| "learning_rate": 6.281187912432587e-06, | |
| "loss": 0.3557, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.5659295166802625, | |
| "learning_rate": 6.2571387273319905e-06, | |
| "loss": 0.2695, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.8487247045088673, | |
| "learning_rate": 6.233114689915316e-06, | |
| "loss": 0.2652, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.5346195654255266, | |
| "learning_rate": 6.209115961596208e-06, | |
| "loss": 0.2475, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.8728887617878336, | |
| "learning_rate": 6.1851427036182696e-06, | |
| "loss": 0.2652, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.9044082710275902, | |
| "learning_rate": 6.1611950770539766e-06, | |
| "loss": 0.2661, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.6455901933838546, | |
| "learning_rate": 6.137273242803581e-06, | |
| "loss": 0.3343, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.0561627754494536, | |
| "learning_rate": 6.113377361594048e-06, | |
| "loss": 0.3318, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.710547264376754, | |
| "learning_rate": 6.0895075939779705e-06, | |
| "loss": 0.2586, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.5825166136418742, | |
| "learning_rate": 6.065664100332478e-06, | |
| "loss": 0.3625, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.590240850136802, | |
| "learning_rate": 6.041847040858177e-06, | |
| "loss": 0.1974, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.4426197798860692, | |
| "learning_rate": 6.018056575578075e-06, | |
| "loss": 0.2023, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.0249441359081226, | |
| "learning_rate": 5.994292864336473e-06, | |
| "loss": 0.3006, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.9763595272017949, | |
| "learning_rate": 5.970556066797941e-06, | |
| "loss": 0.2527, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.1274698904290483, | |
| "learning_rate": 5.9468463424462146e-06, | |
| "loss": 0.2836, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.6396568935664884, | |
| "learning_rate": 5.923163850583114e-06, | |
| "loss": 0.2601, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.4905209493046985, | |
| "learning_rate": 5.899508750327502e-06, | |
| "loss": 0.3347, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.86505404652595, | |
| "learning_rate": 5.875881200614208e-06, | |
| "loss": 0.2689, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.8960019373545756, | |
| "learning_rate": 5.852281360192933e-06, | |
| "loss": 0.2615, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.36025005092698, | |
| "learning_rate": 5.828709387627219e-06, | |
| "loss": 0.2768, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.3701135734425034, | |
| "learning_rate": 5.80516544129337e-06, | |
| "loss": 0.1988, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.7516812972892462, | |
| "learning_rate": 5.781649679379379e-06, | |
| "loss": 0.2604, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.4459045171656397, | |
| "learning_rate": 5.758162259883867e-06, | |
| "loss": 0.1918, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.7513968129817934, | |
| "learning_rate": 5.7347033406150494e-06, | |
| "loss": 0.2546, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.7415168967269428, | |
| "learning_rate": 5.711273079189621e-06, | |
| "loss": 0.2248, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.5123709515298638, | |
| "learning_rate": 5.687871633031754e-06, | |
| "loss": 0.2031, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.598182895373857, | |
| "learning_rate": 5.664499159372017e-06, | |
| "loss": 0.3691, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.4193538794405987, | |
| "learning_rate": 5.64115581524629e-06, | |
| "loss": 0.2177, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.905161913821604, | |
| "learning_rate": 5.617841757494762e-06, | |
| "loss": 0.2835, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.102083052982321, | |
| "learning_rate": 5.594557142760853e-06, | |
| "loss": 0.3016, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.2761940983796167, | |
| "learning_rate": 5.571302127490133e-06, | |
| "loss": 0.3509, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.277744764697184, | |
| "learning_rate": 5.548076867929331e-06, | |
| "loss": 0.2858, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.7070140403796947, | |
| "learning_rate": 5.524881520125229e-06, | |
| "loss": 0.2232, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.915971628592982, | |
| "learning_rate": 5.501716239923642e-06, | |
| "loss": 0.2862, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.9129074042445322, | |
| "learning_rate": 5.4785811829683764e-06, | |
| "loss": 0.2869, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.6475461446423851, | |
| "learning_rate": 5.455476504700161e-06, | |
| "loss": 0.2089, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.1519296552582547, | |
| "learning_rate": 5.432402360355616e-06, | |
| "loss": 0.2914, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.219671875855405, | |
| "learning_rate": 5.4093589049662175e-06, | |
| "loss": 0.3412, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.714197759692349, | |
| "learning_rate": 5.386346293357242e-06, | |
| "loss": 0.2313, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.5027628517881846, | |
| "learning_rate": 5.3633646801467255e-06, | |
| "loss": 0.3819, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.813344312078274, | |
| "learning_rate": 5.340414219744451e-06, | |
| "loss": 0.2589, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.2453999187971676, | |
| "learning_rate": 5.31749506635086e-06, | |
| "loss": 0.2981, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.8015076932483307, | |
| "learning_rate": 5.294607373956071e-06, | |
| "loss": 0.2588, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.1720611042347624, | |
| "learning_rate": 5.271751296338823e-06, | |
| "loss": 0.2953, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.822196770831624, | |
| "learning_rate": 5.248926987065417e-06, | |
| "loss": 0.2931, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.0288907542049603, | |
| "learning_rate": 5.226134599488728e-06, | |
| "loss": 0.3015, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.606122975712571, | |
| "learning_rate": 5.2033742867471586e-06, | |
| "loss": 0.2496, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.926511830402839, | |
| "learning_rate": 5.1806462017635775e-06, | |
| "loss": 0.2775, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.7046572764822447, | |
| "learning_rate": 5.15795049724435e-06, | |
| "loss": 0.2097, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.139068743994829, | |
| "learning_rate": 5.135287325678271e-06, | |
| "loss": 0.2945, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.3382499483296915, | |
| "learning_rate": 5.112656839335544e-06, | |
| "loss": 0.2717, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 4.5489341490977715, | |
| "learning_rate": 5.090059190266779e-06, | |
| "loss": 0.2297, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.929739578300716, | |
| "learning_rate": 5.067494530301953e-06, | |
| "loss": 0.2937, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.688606273121372, | |
| "learning_rate": 5.044963011049384e-06, | |
| "loss": 0.2313, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 2.345953555234023, | |
| "learning_rate": 5.022464783894743e-06, | |
| "loss": 0.372, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.8505091067979924, | |
| "learning_rate": 5.000000000000003e-06, | |
| "loss": 0.281, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.506976721199778, | |
| "learning_rate": 4.977568810302432e-06, | |
| "loss": 0.232, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.6407950451029856, | |
| "learning_rate": 4.955171365513603e-06, | |
| "loss": 0.2228, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.599770395848796, | |
| "learning_rate": 4.932807816118347e-06, | |
| "loss": 0.2348, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 2.1228077377531895, | |
| "learning_rate": 4.910478312373757e-06, | |
| "loss": 0.2572, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.7090102227337063, | |
| "learning_rate": 4.88818300430819e-06, | |
| "loss": 0.2325, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.8950875251067498, | |
| "learning_rate": 4.865922041720239e-06, | |
| "loss": 0.2548, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.5009240917340398, | |
| "learning_rate": 4.843695574177737e-06, | |
| "loss": 0.1798, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.5171623126881457, | |
| "learning_rate": 4.821503751016746e-06, | |
| "loss": 0.2368, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 2.2217784851424653, | |
| "learning_rate": 4.799346721340571e-06, | |
| "loss": 0.2635, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.6726727263291783, | |
| "learning_rate": 4.777224634018732e-06, | |
| "loss": 0.2305, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.5970766816679292, | |
| "learning_rate": 4.7551376376859794e-06, | |
| "loss": 0.2486, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.8282756770609114, | |
| "learning_rate": 4.733085880741301e-06, | |
| "loss": 0.2258, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.8021282161394163, | |
| "learning_rate": 4.711069511346909e-06, | |
| "loss": 0.2533, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.8622337232706356, | |
| "learning_rate": 4.689088677427249e-06, | |
| "loss": 0.2628, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.5580543061415226, | |
| "learning_rate": 4.667143526668022e-06, | |
| "loss": 0.3121, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.6231857953982278, | |
| "learning_rate": 4.645234206515171e-06, | |
| "loss": 0.2643, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.5262446570330437, | |
| "learning_rate": 4.623360864173893e-06, | |
| "loss": 0.2512, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.522437154126145, | |
| "learning_rate": 4.601523646607675e-06, | |
| "loss": 0.2477, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.5879991031824943, | |
| "learning_rate": 4.579722700537268e-06, | |
| "loss": 0.2674, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 3.013364625188082, | |
| "learning_rate": 4.557958172439726e-06, | |
| "loss": 0.4444, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.799358920343576, | |
| "learning_rate": 4.536230208547425e-06, | |
| "loss": 0.2368, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.956106849110676, | |
| "learning_rate": 4.5145389548470645e-06, | |
| "loss": 0.2445, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.5453317819490386, | |
| "learning_rate": 4.492884557078688e-06, | |
| "loss": 0.2464, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.477291903477527, | |
| "learning_rate": 4.471267160734731e-06, | |
| "loss": 0.2515, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.810092610902386, | |
| "learning_rate": 4.449686911058992e-06, | |
| "loss": 0.2465, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.3340906296515653, | |
| "learning_rate": 4.4281439530457174e-06, | |
| "loss": 0.2296, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.9130306552063339, | |
| "learning_rate": 4.4066384314385755e-06, | |
| "loss": 0.2599, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.776612076322738, | |
| "learning_rate": 4.385170490729712e-06, | |
| "loss": 0.3193, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.9804752262674616, | |
| "learning_rate": 4.36374027515878e-06, | |
| "loss": 0.2713, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.6897331235488673, | |
| "learning_rate": 4.342347928711953e-06, | |
| "loss": 0.2245, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.029522379259432, | |
| "learning_rate": 4.320993595120969e-06, | |
| "loss": 0.2631, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.1769099014874875, | |
| "learning_rate": 4.299677417862174e-06, | |
| "loss": 0.3171, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.690743115404439, | |
| "learning_rate": 4.278399540155536e-06, | |
| "loss": 0.2477, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 3.488705706366445, | |
| "learning_rate": 4.257160104963695e-06, | |
| "loss": 0.3136, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.6774752784282572, | |
| "learning_rate": 4.2359592549910145e-06, | |
| "loss": 0.1692, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.9676187400989076, | |
| "learning_rate": 4.214797132682597e-06, | |
| "loss": 0.2493, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.6946929404614504, | |
| "learning_rate": 4.193673880223339e-06, | |
| "loss": 0.2747, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.935665546193616, | |
| "learning_rate": 4.172589639536992e-06, | |
| "loss": 0.2747, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.938611898607862, | |
| "learning_rate": 4.151544552285178e-06, | |
| "loss": 0.3066, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.9430796788762474, | |
| "learning_rate": 4.130538759866457e-06, | |
| "loss": 0.2913, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.6893016033444104, | |
| "learning_rate": 4.109572403415386e-06, | |
| "loss": 0.23, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.6669863700924643, | |
| "learning_rate": 4.088645623801534e-06, | |
| "loss": 0.219, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.177237248640568, | |
| "learning_rate": 4.067758561628577e-06, | |
| "loss": 0.2593, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.445017111944672, | |
| "learning_rate": 4.046911357233343e-06, | |
| "loss": 0.3743, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.0263857774688745, | |
| "learning_rate": 4.026104150684835e-06, | |
| "loss": 0.2894, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.214076834154425, | |
| "learning_rate": 4.00533708178334e-06, | |
| "loss": 0.3055, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.8491534643694523, | |
| "learning_rate": 3.984610290059467e-06, | |
| "loss": 0.2483, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.0864996537735236, | |
| "learning_rate": 3.9639239147731865e-06, | |
| "loss": 0.2977, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.518977429747083, | |
| "learning_rate": 3.943278094912946e-06, | |
| "loss": 0.3909, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.8430207575686766, | |
| "learning_rate": 3.9226729691946865e-06, | |
| "loss": 0.2309, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.5685678811930743, | |
| "learning_rate": 3.902108676060937e-06, | |
| "loss": 0.2341, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.1181407558758147, | |
| "learning_rate": 3.881585353679891e-06, | |
| "loss": 0.2572, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.9186450278547378, | |
| "learning_rate": 3.861103139944448e-06, | |
| "loss": 0.265, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.646266791160965, | |
| "learning_rate": 3.840662172471315e-06, | |
| "loss": 0.2546, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.6907626028232738, | |
| "learning_rate": 3.8202625886000745e-06, | |
| "loss": 0.2462, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.079071161563802, | |
| "learning_rate": 3.799904525392251e-06, | |
| "loss": 0.231, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.6050203728815282, | |
| "learning_rate": 3.7795881196303996e-06, | |
| "loss": 0.2116, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.675470240404623, | |
| "learning_rate": 3.759313507817196e-06, | |
| "loss": 0.2679, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.850984938367741, | |
| "learning_rate": 3.739080826174498e-06, | |
| "loss": 0.3196, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.9981294449239055, | |
| "learning_rate": 3.718890210642442e-06, | |
| "loss": 0.2924, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.9295081629858126, | |
| "learning_rate": 3.6987417968785365e-06, | |
| "loss": 0.2104, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.8068361121579488, | |
| "learning_rate": 3.6786357202567367e-06, | |
| "loss": 0.3137, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.7334238116847467, | |
| "learning_rate": 3.658572115866541e-06, | |
| "loss": 0.2643, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.5092334537286647, | |
| "learning_rate": 3.638551118512089e-06, | |
| "loss": 0.2166, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.8963530539025175, | |
| "learning_rate": 3.618572862711247e-06, | |
| "loss": 0.2888, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.2997655874538818, | |
| "learning_rate": 3.5986374826947067e-06, | |
| "loss": 0.2343, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.3597778204686617, | |
| "learning_rate": 3.5787451124050832e-06, | |
| "loss": 0.199, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.685966678875132, | |
| "learning_rate": 3.558895885496023e-06, | |
| "loss": 0.2205, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.403178326462928, | |
| "learning_rate": 3.5390899353312934e-06, | |
| "loss": 0.2058, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.649215250464992, | |
| "learning_rate": 3.519327394983888e-06, | |
| "loss": 0.254, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.784007943129409, | |
| "learning_rate": 3.4996083972351514e-06, | |
| "loss": 0.2338, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.5425709234643024, | |
| "learning_rate": 3.479933074573858e-06, | |
| "loss": 0.2127, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 2.072846212502085, | |
| "learning_rate": 3.4603015591953393e-06, | |
| "loss": 0.2506, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.9617017545169801, | |
| "learning_rate": 3.440713983000601e-06, | |
| "loss": 0.2623, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 2.1949608945418095, | |
| "learning_rate": 3.421170477595419e-06, | |
| "loss": 0.287, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 2.0223352734966333, | |
| "learning_rate": 3.401671174289469e-06, | |
| "loss": 0.2437, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.726939680121309, | |
| "learning_rate": 3.3822162040954355e-06, | |
| "loss": 0.2669, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.3958468201182181, | |
| "learning_rate": 3.3628056977281456e-06, | |
| "loss": 0.1795, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.5349920574626108, | |
| "learning_rate": 3.3434397856036705e-06, | |
| "loss": 0.2277, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6265746387819953, | |
| "learning_rate": 3.3241185978384636e-06, | |
| "loss": 0.212, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.990986006877363, | |
| "learning_rate": 3.304842264248489e-06, | |
| "loss": 0.3214, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.9233533246133991, | |
| "learning_rate": 3.2856109143483316e-06, | |
| "loss": 0.275, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.5915126926393386, | |
| "learning_rate": 3.266424677350346e-06, | |
| "loss": 0.2297, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6176077751978817, | |
| "learning_rate": 3.2472836821637744e-06, | |
| "loss": 0.2224, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.5393085991541886, | |
| "learning_rate": 3.228188057393895e-06, | |
| "loss": 0.2248, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.060362274625342, | |
| "learning_rate": 3.209137931341143e-06, | |
| "loss": 0.2691, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.8804183328512907, | |
| "learning_rate": 3.190133432000252e-06, | |
| "loss": 0.2816, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.7918052113764629, | |
| "learning_rate": 3.1711746870594083e-06, | |
| "loss": 0.2553, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.0199052401985345, | |
| "learning_rate": 3.1522618238993728e-06, | |
| "loss": 0.2894, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.262932683368157, | |
| "learning_rate": 3.1333949695926323e-06, | |
| "loss": 0.3589, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.8111099043452628, | |
| "learning_rate": 3.114574250902558e-06, | |
| "loss": 0.2961, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.180703372509301, | |
| "learning_rate": 3.0957997942825337e-06, | |
| "loss": 0.2751, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.1348008623457293, | |
| "learning_rate": 3.077071725875116e-06, | |
| "loss": 0.3002, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.5856791522786344, | |
| "learning_rate": 3.0583901715111965e-06, | |
| "loss": 0.2568, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.6940017773069984, | |
| "learning_rate": 3.039755256709134e-06, | |
| "loss": 0.2467, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.974696925499119, | |
| "learning_rate": 3.021167106673928e-06, | |
| "loss": 0.2767, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.503892274988601, | |
| "learning_rate": 3.0026258462963787e-06, | |
| "loss": 0.1722, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.153554440122579, | |
| "learning_rate": 2.9841316001522345e-06, | |
| "loss": 0.3149, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.75095568108896, | |
| "learning_rate": 2.9656844925013638e-06, | |
| "loss": 0.2601, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.8607532467725405, | |
| "learning_rate": 2.94728464728693e-06, | |
| "loss": 0.3038, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.626934063795533, | |
| "learning_rate": 2.9289321881345257e-06, | |
| "loss": 0.1833, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.9274778528150571, | |
| "learning_rate": 2.910627238351383e-06, | |
| "loss": 0.2845, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.3559754751169395, | |
| "learning_rate": 2.8923699209255285e-06, | |
| "loss": 0.2273, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.6582819252008927, | |
| "learning_rate": 2.8741603585249312e-06, | |
| "loss": 0.2841, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.654622795384472, | |
| "learning_rate": 2.855998673496728e-06, | |
| "loss": 0.2547, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.168483542872676, | |
| "learning_rate": 2.837884987866363e-06, | |
| "loss": 0.2858, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.6177763948492443, | |
| "learning_rate": 2.8198194233367747e-06, | |
| "loss": 0.2487, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.8494390147186466, | |
| "learning_rate": 2.8018021012875994e-06, | |
| "loss": 0.271, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.6944744681663106, | |
| "learning_rate": 2.783833142774328e-06, | |
| "loss": 0.2145, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.356672636203177, | |
| "learning_rate": 2.7659126685275028e-06, | |
| "loss": 0.242, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.7071797812952967, | |
| "learning_rate": 2.74804079895192e-06, | |
| "loss": 0.1947, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.680836923707193, | |
| "learning_rate": 2.7302176541257984e-06, | |
| "loss": 0.2331, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.668963787322095, | |
| "learning_rate": 2.7124433537999838e-06, | |
| "loss": 0.2216, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.4706495302727334, | |
| "learning_rate": 2.694718017397151e-06, | |
| "loss": 0.2001, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.6545100367424903, | |
| "learning_rate": 2.677041764010988e-06, | |
| "loss": 0.2329, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.950623456067504, | |
| "learning_rate": 2.6594147124053983e-06, | |
| "loss": 0.2655, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.3036306131593465, | |
| "learning_rate": 2.641836981013719e-06, | |
| "loss": 0.3089, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.4138904957865215, | |
| "learning_rate": 2.6243086879379e-06, | |
| "loss": 0.4055, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.946390349319703, | |
| "learning_rate": 2.6068299509477267e-06, | |
| "loss": 0.3118, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.7931647327520022, | |
| "learning_rate": 2.5894008874800323e-06, | |
| "loss": 0.246, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.8355050420939845, | |
| "learning_rate": 2.572021614637892e-06, | |
| "loss": 0.2431, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.5108304857763393, | |
| "learning_rate": 2.5546922491898497e-06, | |
| "loss": 0.1944, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.346782967322365, | |
| "learning_rate": 2.537412907569127e-06, | |
| "loss": 0.2079, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.4907687498692637, | |
| "learning_rate": 2.5201837058728506e-06, | |
| "loss": 0.2222, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.006852991655727, | |
| "learning_rate": 2.5030047598612585e-06, | |
| "loss": 0.2125, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.8979712050124378, | |
| "learning_rate": 2.485876184956928e-06, | |
| "loss": 0.4267, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.6213096691389526, | |
| "learning_rate": 2.468798096244007e-06, | |
| "loss": 0.2109, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.9873316318148522, | |
| "learning_rate": 2.451770608467432e-06, | |
| "loss": 0.2406, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.186626149431908, | |
| "learning_rate": 2.4347938360321564e-06, | |
| "loss": 0.3419, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.7968422881940356, | |
| "learning_rate": 2.417867893002387e-06, | |
| "loss": 0.2259, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.8541653929442405, | |
| "learning_rate": 2.400992893100822e-06, | |
| "loss": 0.2574, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.0761176343433854, | |
| "learning_rate": 2.3841689497078746e-06, | |
| "loss": 0.3144, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.1785093636632977, | |
| "learning_rate": 2.3673961758609156e-06, | |
| "loss": 0.2764, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.9317088428685394, | |
| "learning_rate": 2.3506746842535244e-06, | |
| "loss": 0.5288, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.7937271040727196, | |
| "learning_rate": 2.3340045872347173e-06, | |
| "loss": 0.2357, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.838531651124889, | |
| "learning_rate": 2.317385996808195e-06, | |
| "loss": 0.256, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 2.283906968927328, | |
| "learning_rate": 2.3008190246316033e-06, | |
| "loss": 0.2639, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 2.158590692135789, | |
| "learning_rate": 2.2843037820157678e-06, | |
| "loss": 0.3043, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.6092953541482968, | |
| "learning_rate": 2.26784037992395e-06, | |
| "loss": 0.2322, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.97563915984023, | |
| "learning_rate": 2.251428928971102e-06, | |
| "loss": 0.2471, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.629050153917207, | |
| "learning_rate": 2.2350695394231346e-06, | |
| "loss": 0.2425, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.6882685576225636, | |
| "learning_rate": 2.218762321196156e-06, | |
| "loss": 0.2276, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.6805409089233097, | |
| "learning_rate": 2.2025073838557454e-06, | |
| "loss": 0.2604, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.8115589091967659, | |
| "learning_rate": 2.186304836616221e-06, | |
| "loss": 0.2522, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.9373751118106743, | |
| "learning_rate": 2.170154788339892e-06, | |
| "loss": 0.2744, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.7419361886427593, | |
| "learning_rate": 2.1540573475363402e-06, | |
| "loss": 0.2447, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.9284641554213888, | |
| "learning_rate": 2.1380126223616894e-06, | |
| "loss": 0.2173, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.1195771363082847, | |
| "learning_rate": 2.122020720617869e-06, | |
| "loss": 0.3005, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.770085685562092, | |
| "learning_rate": 2.106081749751897e-06, | |
| "loss": 0.2074, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.499879939600023, | |
| "learning_rate": 2.090195816855164e-06, | |
| "loss": 0.167, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.307606423009887, | |
| "learning_rate": 2.0743630286627005e-06, | |
| "loss": 0.3208, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.6977856293372309, | |
| "learning_rate": 2.058583491552465e-06, | |
| "loss": 0.2204, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.5998489674345802, | |
| "learning_rate": 2.0428573115446394e-06, | |
| "loss": 0.2267, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.1114488918832883, | |
| "learning_rate": 2.0271845943008984e-06, | |
| "loss": 0.3243, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.1802349543638213, | |
| "learning_rate": 2.011565445123711e-06, | |
| "loss": 0.2929, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.2640749367528925, | |
| "learning_rate": 1.9959999689556407e-06, | |
| "loss": 0.3503, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.905369014558016, | |
| "learning_rate": 1.9804882703786122e-06, | |
| "loss": 0.273, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.9385398442468922, | |
| "learning_rate": 1.9650304536132426e-06, | |
| "loss": 0.2166, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.4130939284561759, | |
| "learning_rate": 1.9496266225181247e-06, | |
| "loss": 0.2331, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.554232419563932, | |
| "learning_rate": 1.9342768805891176e-06, | |
| "loss": 0.1955, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.7286117804200054, | |
| "learning_rate": 1.918981330958678e-06, | |
| "loss": 0.2455, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 2.344247788752057, | |
| "learning_rate": 1.9037400763951508e-06, | |
| "loss": 0.2516, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 2.116552663192002, | |
| "learning_rate": 1.8885532193020706e-06, | |
| "loss": 0.2739, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5784074402197446, | |
| "learning_rate": 1.8734208617174986e-06, | |
| "loss": 0.2572, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.6104349407900382, | |
| "learning_rate": 1.8583431053133127e-06, | |
| "loss": 0.2411, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.6986377833707487, | |
| "learning_rate": 1.8433200513945338e-06, | |
| "loss": 0.2578, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5459871249460182, | |
| "learning_rate": 1.8283518008986566e-06, | |
| "loss": 0.1919, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.4656179228758885, | |
| "learning_rate": 1.813438454394948e-06, | |
| "loss": 0.2173, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 2.2051276084515803, | |
| "learning_rate": 1.7985801120837865e-06, | |
| "loss": 0.2693, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 3.9929337623116723, | |
| "learning_rate": 1.7837768737959937e-06, | |
| "loss": 0.2439, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 2.3310422723848814, | |
| "learning_rate": 1.7690288389921495e-06, | |
| "loss": 0.3406, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5929192652600608, | |
| "learning_rate": 1.7543361067619269e-06, | |
| "loss": 0.1961, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.253150783294225, | |
| "learning_rate": 1.7396987758234418e-06, | |
| "loss": 0.2971, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.9551151379061291, | |
| "learning_rate": 1.7251169445225658e-06, | |
| "loss": 0.2702, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.8443350566644119, | |
| "learning_rate": 1.7105907108322816e-06, | |
| "loss": 0.2168, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.8953709979345807, | |
| "learning_rate": 1.6961201723520248e-06, | |
| "loss": 0.2209, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.6933804163691488, | |
| "learning_rate": 1.6817054263070176e-06, | |
| "loss": 0.2651, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.9741371681122017, | |
| "learning_rate": 1.6673465695476233e-06, | |
| "loss": 0.2624, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.7937690474707826, | |
| "learning_rate": 1.6530436985486997e-06, | |
| "loss": 0.227, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.0398781094509846, | |
| "learning_rate": 1.6387969094089318e-06, | |
| "loss": 0.2504, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.0034581357121066, | |
| "learning_rate": 1.6246062978502165e-06, | |
| "loss": 0.2697, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.3490783836586475, | |
| "learning_rate": 1.6104719592169905e-06, | |
| "loss": 0.1582, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.553829042342749, | |
| "learning_rate": 1.5963939884756042e-06, | |
| "loss": 0.1961, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.5629315635602483, | |
| "learning_rate": 1.5823724802136863e-06, | |
| "loss": 0.214, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.5501021255130174, | |
| "learning_rate": 1.5684075286394983e-06, | |
| "loss": 0.1857, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.6851671227244354, | |
| "learning_rate": 1.5544992275813053e-06, | |
| "loss": 0.1912, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 2.0503928029573855, | |
| "learning_rate": 1.5406476704867524e-06, | |
| "loss": 0.3187, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.611805408069118, | |
| "learning_rate": 1.5268529504222262e-06, | |
| "loss": 0.2549, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 2.0144593850719943, | |
| "learning_rate": 1.5131151600722338e-06, | |
| "loss": 0.301, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.7665966680162037, | |
| "learning_rate": 1.4994343917387854e-06, | |
| "loss": 0.2406, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.5618392868502702, | |
| "learning_rate": 1.485810737340767e-06, | |
| "loss": 0.2677, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.9288584503490473, | |
| "learning_rate": 1.4722442884133214e-06, | |
| "loss": 0.2304, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.5617186763180901, | |
| "learning_rate": 1.4587351361072455e-06, | |
| "loss": 0.2567, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.770241801620364, | |
| "learning_rate": 1.4452833711883629e-06, | |
| "loss": 0.2577, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.9382896906192402, | |
| "learning_rate": 1.4318890840369181e-06, | |
| "loss": 0.2542, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.6049626081328412, | |
| "learning_rate": 1.4185523646469822e-06, | |
| "loss": 0.2083, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.6914009615957128, | |
| "learning_rate": 1.405273302625828e-06, | |
| "loss": 0.2374, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.8576240619220135, | |
| "learning_rate": 1.3920519871933425e-06, | |
| "loss": 0.235, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.9143500706855923, | |
| "learning_rate": 1.3788885071814173e-06, | |
| "loss": 0.3512, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.885115331951092, | |
| "learning_rate": 1.3657829510333653e-06, | |
| "loss": 0.2132, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.698087441819351, | |
| "learning_rate": 1.3527354068033139e-06, | |
| "loss": 0.2516, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.6111456973292821, | |
| "learning_rate": 1.339745962155613e-06, | |
| "loss": 0.2265, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.4189744222385203, | |
| "learning_rate": 1.326814704364262e-06, | |
| "loss": 0.2044, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.8257357511262042, | |
| "learning_rate": 1.313941720312303e-06, | |
| "loss": 0.2578, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.5300273413158851, | |
| "learning_rate": 1.3011270964912458e-06, | |
| "loss": 0.2133, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.9035816080597627, | |
| "learning_rate": 1.2883709190004956e-06, | |
| "loss": 0.2663, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.4919891884156362, | |
| "learning_rate": 1.2756732735467581e-06, | |
| "loss": 0.2153, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.489188386807827, | |
| "learning_rate": 1.263034245443473e-06, | |
| "loss": 0.2072, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 2.0790616040249184, | |
| "learning_rate": 1.2504539196102438e-06, | |
| "loss": 0.3164, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.7799690128110763, | |
| "learning_rate": 1.2379323805722575e-06, | |
| "loss": 0.267, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.1522746470134906, | |
| "learning_rate": 1.2254697124597237e-06, | |
| "loss": 0.2844, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.4489560689229093, | |
| "learning_rate": 1.2130659990073146e-06, | |
| "loss": 0.1939, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 4.139743701541494, | |
| "learning_rate": 1.2007213235535785e-06, | |
| "loss": 0.4079, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.9988394205499171, | |
| "learning_rate": 1.1884357690404157e-06, | |
| "loss": 0.2531, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.4279144078503483, | |
| "learning_rate": 1.176209418012495e-06, | |
| "loss": 0.162, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.7124786659484386, | |
| "learning_rate": 1.1640423526166987e-06, | |
| "loss": 0.225, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.7979914687264473, | |
| "learning_rate": 1.1519346546015908e-06, | |
| "loss": 0.249, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.7353155680409826, | |
| "learning_rate": 1.1398864053168534e-06, | |
| "loss": 0.2624, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.8080169204151373, | |
| "learning_rate": 1.127897685712731e-06, | |
| "loss": 0.2759, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.0284757482671933, | |
| "learning_rate": 1.1159685763395113e-06, | |
| "loss": 0.2559, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.5144844908790105, | |
| "learning_rate": 1.1040991573469629e-06, | |
| "loss": 0.2071, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.2401085581760296, | |
| "learning_rate": 1.0922895084838036e-06, | |
| "loss": 0.2199, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.629115436288061, | |
| "learning_rate": 1.0805397090971738e-06, | |
| "loss": 0.2697, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.076570153773264, | |
| "learning_rate": 1.0688498381320855e-06, | |
| "loss": 0.2594, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.8580443460941836, | |
| "learning_rate": 1.057219974130903e-06, | |
| "loss": 0.2128, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.1981221023851834, | |
| "learning_rate": 1.0456501952328191e-06, | |
| "loss": 0.1426, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.607708557965466, | |
| "learning_rate": 1.0341405791733183e-06, | |
| "loss": 0.2416, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.981390470792631, | |
| "learning_rate": 1.022691203283661e-06, | |
| "loss": 0.2385, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.7331067592776395, | |
| "learning_rate": 1.0113021444903725e-06, | |
| "loss": 0.2727, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.6434505773641759, | |
| "learning_rate": 9.999734793146998e-07, | |
| "loss": 0.246, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.6784574564006565, | |
| "learning_rate": 9.887052838721322e-07, | |
| "loss": 0.2591, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.8787331378064278, | |
| "learning_rate": 9.77497633871868e-07, | |
| "loss": 0.2378, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.6362732770702062, | |
| "learning_rate": 9.663506046162986e-07, | |
| "loss": 0.2236, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.4273077402383598, | |
| "learning_rate": 9.5526427100053e-07, | |
| "loss": 0.1615, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.7635521575059474, | |
| "learning_rate": 9.44238707511862e-07, | |
| "loss": 0.2497, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 2.4497383712551444, | |
| "learning_rate": 9.332739882292752e-07, | |
| "loss": 0.3468, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.8689717300129733, | |
| "learning_rate": 9.22370186822965e-07, | |
| "loss": 0.2115, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 2.1692904506738553, | |
| "learning_rate": 9.115273765538202e-07, | |
| "loss": 0.2912, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.8477047973625857, | |
| "learning_rate": 9.0074563027294e-07, | |
| "loss": 0.254, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.6084097443123828, | |
| "learning_rate": 8.900250204211513e-07, | |
| "loss": 0.2346, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.9758442942520758, | |
| "learning_rate": 8.793656190285071e-07, | |
| "loss": 0.2914, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.5800324714803446, | |
| "learning_rate": 8.687674977138116e-07, | |
| "loss": 0.2053, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.903023910592132, | |
| "learning_rate": 8.582307276841461e-07, | |
| "loss": 0.2486, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.8721484797697183, | |
| "learning_rate": 8.477553797343729e-07, | |
| "loss": 0.2525, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 2.210408987867441, | |
| "learning_rate": 8.373415242466721e-07, | |
| "loss": 0.3476, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.8145618381264705, | |
| "learning_rate": 8.269892311900696e-07, | |
| "loss": 0.2253, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.503548842568088, | |
| "learning_rate": 8.166985701199581e-07, | |
| "loss": 0.2078, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.970993383274634, | |
| "learning_rate": 8.06469610177636e-07, | |
| "loss": 0.2661, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.2541842404634007, | |
| "learning_rate": 7.963024200898462e-07, | |
| "loss": 0.2943, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.0793321004842507, | |
| "learning_rate": 7.861970681683051e-07, | |
| "loss": 0.248, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.4323451359398107, | |
| "learning_rate": 7.761536223092459e-07, | |
| "loss": 0.219, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.223009593916491, | |
| "learning_rate": 7.661721499929753e-07, | |
| "loss": 0.3207, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.15631076631837, | |
| "learning_rate": 7.562527182833978e-07, | |
| "loss": 0.2821, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.83217141842868, | |
| "learning_rate": 7.463953938275859e-07, | |
| "loss": 0.2463, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.7945591900376303, | |
| "learning_rate": 7.366002428553154e-07, | |
| "loss": 0.2689, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.6401381233328671, | |
| "learning_rate": 7.268673311786378e-07, | |
| "loss": 0.2225, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.345183288068179, | |
| "learning_rate": 7.171967241914224e-07, | |
| "loss": 0.2579, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.154992194024568, | |
| "learning_rate": 7.07588486868922e-07, | |
| "loss": 0.3283, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.2305934639872602, | |
| "learning_rate": 6.980426837673437e-07, | |
| "loss": 0.1764, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.150422986343797, | |
| "learning_rate": 6.885593790234057e-07, | |
| "loss": 0.3109, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.9487027632840348, | |
| "learning_rate": 6.791386363539065e-07, | |
| "loss": 0.2511, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.3624494939574974, | |
| "learning_rate": 6.697805190553086e-07, | |
| "loss": 0.2182, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.9629653911358407, | |
| "learning_rate": 6.604850900032956e-07, | |
| "loss": 0.2803, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.6686070386269276, | |
| "learning_rate": 6.512524116523633e-07, | |
| "loss": 0.1942, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.8184381048891454, | |
| "learning_rate": 6.420825460353975e-07, | |
| "loss": 0.234, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.8292676950113214, | |
| "learning_rate": 6.329755547632499e-07, | |
| "loss": 0.2588, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.14785054063878, | |
| "learning_rate": 6.239314990243339e-07, | |
| "loss": 0.2758, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.445041782478947, | |
| "learning_rate": 6.149504395842087e-07, | |
| "loss": 0.1945, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.458479886365711, | |
| "learning_rate": 6.0603243678517e-07, | |
| "loss": 0.3026, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.11544638899036, | |
| "learning_rate": 5.971775505458444e-07, | |
| "loss": 0.2975, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.5675593571621884, | |
| "learning_rate": 5.883858403607967e-07, | |
| "loss": 0.2438, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.2184826624622627, | |
| "learning_rate": 5.796573653001091e-07, | |
| "loss": 0.2709, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.8643405635039205, | |
| "learning_rate": 5.709921840090072e-07, | |
| "loss": 0.2948, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.5694826821845747, | |
| "learning_rate": 5.62390354707455e-07, | |
| "loss": 0.2275, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.5156052671076892, | |
| "learning_rate": 5.538519351897575e-07, | |
| "loss": 0.2244, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.0819892972861216, | |
| "learning_rate": 5.453769828241872e-07, | |
| "loss": 0.3315, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.5892607193569708, | |
| "learning_rate": 5.369655545525909e-07, | |
| "loss": 0.3366, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.0297523191929456, | |
| "learning_rate": 5.286177068899989e-07, | |
| "loss": 0.2691, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.5852040501856501, | |
| "learning_rate": 5.203334959242634e-07, | |
| "loss": 0.2311, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.1572968911595205, | |
| "learning_rate": 5.121129773156663e-07, | |
| "loss": 0.2527, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.0453850393520736, | |
| "learning_rate": 5.039562062965508e-07, | |
| "loss": 0.2716, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.8774391499144594, | |
| "learning_rate": 4.95863237670956e-07, | |
| "loss": 0.2746, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.0000300303285536, | |
| "learning_rate": 4.878341258142349e-07, | |
| "loss": 0.2602, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.8976121097618184, | |
| "learning_rate": 4.798689246727006e-07, | |
| "loss": 0.2403, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.436462843251761, | |
| "learning_rate": 4.7196768776326397e-07, | |
| "loss": 0.2295, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.724066891804642, | |
| "learning_rate": 4.6413046817306404e-07, | |
| "loss": 0.1926, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.9793371188350892, | |
| "learning_rate": 4.563573185591219e-07, | |
| "loss": 0.2746, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5767007072650288, | |
| "learning_rate": 4.4864829114798394e-07, | |
| "loss": 0.2291, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.664299581980807, | |
| "learning_rate": 4.4100343773536226e-07, | |
| "loss": 0.2361, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.0924593265935583, | |
| "learning_rate": 4.3342280968580287e-07, | |
| "loss": 0.2756, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.6395170627049216, | |
| "learning_rate": 4.259064579323302e-07, | |
| "loss": 0.2264, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5970799593500762, | |
| "learning_rate": 4.184544329761009e-07, | |
| "loss": 0.229, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.182004710086821, | |
| "learning_rate": 4.11066784886075e-07, | |
| "loss": 0.3343, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.7698497542021756, | |
| "learning_rate": 4.037435632986786e-07, | |
| "loss": 0.2768, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.819895206197229, | |
| "learning_rate": 3.96484817417454e-07, | |
| "loss": 0.2596, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.0223853119407713, | |
| "learning_rate": 3.8929059601275463e-07, | |
| "loss": 0.2485, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.022319275762578, | |
| "learning_rate": 3.8216094742139833e-07, | |
| "loss": 0.275, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.0916811134300453, | |
| "learning_rate": 3.750959195463466e-07, | |
| "loss": 0.2983, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.751594212716712, | |
| "learning_rate": 3.6809555985639065e-07, | |
| "loss": 0.208, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.385537278039897, | |
| "learning_rate": 3.611599153858214e-07, | |
| "loss": 0.2972, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.2409653452191423, | |
| "learning_rate": 3.5428903273411865e-07, | |
| "loss": 0.3121, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.0139033846091645, | |
| "learning_rate": 3.474829580656436e-07, | |
| "loss": 0.3174, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.91488746376917, | |
| "learning_rate": 3.4074173710931804e-07, | |
| "loss": 0.2736, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.673409190170197, | |
| "learning_rate": 3.3406541515832e-07, | |
| "loss": 0.2287, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.7600273523357843, | |
| "learning_rate": 3.2745403706978876e-07, | |
| "loss": 0.2166, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.5479515229778715, | |
| "learning_rate": 3.209076472645112e-07, | |
| "loss": 0.2092, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.8353681175432979, | |
| "learning_rate": 3.1442628972662703e-07, | |
| "loss": 0.3097, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.7186461066312657, | |
| "learning_rate": 3.080100080033388e-07, | |
| "loss": 0.2302, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.490131470295163, | |
| "learning_rate": 3.016588452046132e-07, | |
| "loss": 0.2988, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.8254395188861843, | |
| "learning_rate": 2.9537284400289354e-07, | |
| "loss": 0.2099, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.9317831257279952, | |
| "learning_rate": 2.8915204663281014e-07, | |
| "loss": 0.2755, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.9044530009379896, | |
| "learning_rate": 2.829964948909048e-07, | |
| "loss": 0.2599, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.028039260610861, | |
| "learning_rate": 2.769062301353398e-07, | |
| "loss": 0.2548, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.6524032088131855, | |
| "learning_rate": 2.708812932856253e-07, | |
| "loss": 0.1937, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.573824843820764, | |
| "learning_rate": 2.649217248223468e-07, | |
| "loss": 0.2193, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.091481605696416, | |
| "learning_rate": 2.5902756478688674e-07, | |
| "loss": 0.3083, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.489498923190435, | |
| "learning_rate": 2.5319885278115907e-07, | |
| "loss": 0.1469, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.9782590951388326, | |
| "learning_rate": 2.474356279673462e-07, | |
| "loss": 0.2463, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.4647787821345215, | |
| "learning_rate": 2.4173792906762806e-07, | |
| "loss": 0.2227, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.034144523410484, | |
| "learning_rate": 2.3610579436392999e-07, | |
| "loss": 0.2374, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.7225315166453739, | |
| "learning_rate": 2.3053926169765984e-07, | |
| "loss": 0.218, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.3404018893048908, | |
| "learning_rate": 2.2503836846945792e-07, | |
| "loss": 0.198, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.3954420631646602, | |
| "learning_rate": 2.1960315163894075e-07, | |
| "loss": 0.2733, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.934892509654841, | |
| "learning_rate": 2.1423364772445886e-07, | |
| "loss": 0.2712, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.690523390032749, | |
| "learning_rate": 2.0892989280284825e-07, | |
| "loss": 0.2585, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.2024579481976647, | |
| "learning_rate": 2.036919225091827e-07, | |
| "loss": 0.2626, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.8870857206700493, | |
| "learning_rate": 1.9851977203654839e-07, | |
| "loss": 0.1944, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.781749146894803, | |
| "learning_rate": 1.9341347613579086e-07, | |
| "loss": 0.2071, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.948549718871209, | |
| "learning_rate": 1.8837306911529185e-07, | |
| "loss": 0.295, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.6870970988575948, | |
| "learning_rate": 1.8339858484073935e-07, | |
| "loss": 0.226, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.9419992404111, | |
| "learning_rate": 1.784900567348913e-07, | |
| "loss": 0.2746, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.1023113751631537, | |
| "learning_rate": 1.7364751777736334e-07, | |
| "loss": 0.3058, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.476859237886749, | |
| "learning_rate": 1.6887100050439587e-07, | |
| "loss": 0.2365, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.9830005254933283, | |
| "learning_rate": 1.6416053700863965e-07, | |
| "loss": 0.2764, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.7476064815606922, | |
| "learning_rate": 1.595161589389449e-07, | |
| "loss": 0.2578, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.2919070393253038, | |
| "learning_rate": 1.5493789750014032e-07, | |
| "loss": 0.1889, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.248108868009186, | |
| "learning_rate": 1.504257834528311e-07, | |
| "loss": 0.2576, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.770475360150387, | |
| "learning_rate": 1.459798471131868e-07, | |
| "loss": 0.2445, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.93257359669872, | |
| "learning_rate": 1.4160011835273936e-07, | |
| "loss": 0.2886, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.0547275163131533, | |
| "learning_rate": 1.3728662659818205e-07, | |
| "loss": 0.3056, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.05653058843975, | |
| "learning_rate": 1.3303940083117527e-07, | |
| "loss": 0.2307, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.8564552698989694, | |
| "learning_rate": 1.2885846958814673e-07, | |
| "loss": 0.2447, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.4862371640366514, | |
| "learning_rate": 1.2474386096010037e-07, | |
| "loss": 0.1962, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.3335426432602813, | |
| "learning_rate": 1.206956025924333e-07, | |
| "loss": 0.2149, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.9383374912058284, | |
| "learning_rate": 1.1671372168474137e-07, | |
| "loss": 0.2511, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.0159139844573124, | |
| "learning_rate": 1.1279824499064396e-07, | |
| "loss": 0.2569, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.8027140386415839, | |
| "learning_rate": 1.0894919881760168e-07, | |
| "loss": 0.2752, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.7497164453237868, | |
| "learning_rate": 1.0516660902673448e-07, | |
| "loss": 0.2237, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.7524906394461068, | |
| "learning_rate": 1.014505010326583e-07, | |
| "loss": 0.2426, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.9015157578913895, | |
| "learning_rate": 9.780089980330643e-08, | |
| "loss": 0.2367, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.8091956868397134, | |
| "learning_rate": 9.42178298597607e-08, | |
| "loss": 0.2108, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.7079179710567602, | |
| "learning_rate": 9.070131527609604e-08, | |
| "loss": 0.2549, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 2.0012786450489655, | |
| "learning_rate": 8.725137967920739e-08, | |
| "loss": 0.2362, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.6799446307225845, | |
| "learning_rate": 8.386804624865851e-08, | |
| "loss": 0.2287, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.2707413035131545, | |
| "learning_rate": 8.055133771652346e-08, | |
| "loss": 0.1707, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 2.240844331643583, | |
| "learning_rate": 7.730127636723539e-08, | |
| "loss": 0.2938, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.8077151139509218, | |
| "learning_rate": 7.411788403743236e-08, | |
| "loss": 0.2064, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.463428596541932, | |
| "learning_rate": 7.100118211581852e-08, | |
| "loss": 0.216, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.8579701300807716, | |
| "learning_rate": 6.795119154301199e-08, | |
| "loss": 0.2622, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.889983109137829, | |
| "learning_rate": 6.496793281141056e-08, | |
| "loss": 0.2207, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.9249117499497341, | |
| "learning_rate": 6.205142596505177e-08, | |
| "loss": 0.2789, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.0731734610871966, | |
| "learning_rate": 5.920169059947412e-08, | |
| "loss": 0.2718, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.476765494755492, | |
| "learning_rate": 5.6418745861593905e-08, | |
| "loss": 0.4248, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.1868528878565128, | |
| "learning_rate": 5.37026104495697e-08, | |
| "loss": 0.3682, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.60588326536286, | |
| "learning_rate": 5.105330261267916e-08, | |
| "loss": 0.2397, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.988214848749438, | |
| "learning_rate": 4.8470840151195745e-08, | |
| "loss": 0.2641, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.427518099981952, | |
| "learning_rate": 4.595524041627109e-08, | |
| "loss": 0.2897, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.4469407635891465, | |
| "learning_rate": 4.350652030981395e-08, | |
| "loss": 0.2708, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.61869636519982, | |
| "learning_rate": 4.1124696284383644e-08, | |
| "loss": 0.1713, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.7304087932509222, | |
| "learning_rate": 3.8809784343072364e-08, | |
| "loss": 0.2796, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.3509790467777107, | |
| "learning_rate": 3.6561800039403016e-08, | |
| "loss": 0.2884, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.6569368307990178, | |
| "learning_rate": 3.438075847721933e-08, | |
| "loss": 0.2357, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.377016569549657, | |
| "learning_rate": 3.2266674310589276e-08, | |
| "loss": 0.214, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.8801587671172033, | |
| "learning_rate": 3.0219561743707326e-08, | |
| "loss": 0.206, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.222157784273952, | |
| "learning_rate": 2.8239434530792364e-08, | |
| "loss": 0.2952, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.8976048919400084, | |
| "learning_rate": 2.6326305976001054e-08, | |
| "loss": 0.3109, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.7465668776876373, | |
| "learning_rate": 2.4480188933336812e-08, | |
| "loss": 0.2183, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.8655547145953026, | |
| "learning_rate": 2.2701095806565432e-08, | |
| "loss": 0.2624, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.9202134401828403, | |
| "learning_rate": 2.0989038549125152e-08, | |
| "loss": 0.3012, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.8141355211746353, | |
| "learning_rate": 1.9344028664056715e-08, | |
| "loss": 0.271, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.149284486477867, | |
| "learning_rate": 1.7766077203915655e-08, | |
| "loss": 0.3302, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.3524206969808823, | |
| "learning_rate": 1.6255194770704586e-08, | |
| "loss": 0.3171, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.012305463045916, | |
| "learning_rate": 1.4811391515799911e-08, | |
| "loss": 0.2089, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.017582806511981, | |
| "learning_rate": 1.3434677139885222e-08, | |
| "loss": 0.2611, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.613515590334642, | |
| "learning_rate": 1.2125060892881345e-08, | |
| "loss": 0.239, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.454180215027866, | |
| "learning_rate": 1.0882551573891953e-08, | |
| "loss": 0.173, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.606757287726453, | |
| "learning_rate": 9.707157531134714e-09, | |
| "loss": 0.2215, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.538723066462182, | |
| "learning_rate": 8.59888666189579e-09, | |
| "loss": 0.2393, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.8384817094233377, | |
| "learning_rate": 7.557746412468758e-09, | |
| "loss": 0.227, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.966983790686683, | |
| "learning_rate": 6.583743778106888e-09, | |
| "loss": 0.2717, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.0175004257834193, | |
| "learning_rate": 5.676885302978719e-09, | |
| "loss": 0.2976, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.105840626114623, | |
| "learning_rate": 4.837177080119215e-09, | |
| "loss": 0.3113, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.2993857734684404, | |
| "learning_rate": 4.064624751394242e-09, | |
| "loss": 0.2948, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.6503345660550335, | |
| "learning_rate": 3.3592335074594805e-09, | |
| "loss": 0.2222, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.2733823560068471, | |
| "learning_rate": 2.7210080877237978e-09, | |
| "loss": 0.1937, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.5863288882920181, | |
| "learning_rate": 2.149952780321485e-09, | |
| "loss": 0.1769, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.6579688873141334, | |
| "learning_rate": 1.6460714220833952e-09, | |
| "loss": 0.2163, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.805966529584832, | |
| "learning_rate": 1.209367398504746e-09, | |
| "loss": 0.2301, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.142574731152733, | |
| "learning_rate": 8.39843643731797e-10, | |
| "loss": 0.366, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.00762337546855, | |
| "learning_rate": 5.375026405352035e-10, | |
| "loss": 0.3333, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.0681991372071358, | |
| "learning_rate": 3.023464202944748e-10, | |
| "loss": 0.2887, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.3712925913849663, | |
| "learning_rate": 1.3437656298687096e-10, | |
| "loss": 0.2392, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.8748392210135587, | |
| "learning_rate": 3.3594197175190743e-11, | |
| "loss": 0.2562, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.8181806222269268, | |
| "learning_rate": 0.0, | |
| "loss": 0.301, | |
| "step": 1250 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1250, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 300, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
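
The file above is a Hugging Face `trainer_state.json`: one `log_history` entry per optimizer step (`logging_steps: 1.0`), ending at `max_steps` 1250 with the learning rate annealed exactly to zero. Below is a minimal sketch of how such a file can be inspected; the path `trainer_state.json` is an assumption, and the schedule constants (peak learning rate 2e-5, roughly 38 warmup steps) are inferred by fitting the logged values rather than read from any training config. The fit is suggested by, e.g., step 1048, whose logged rate 1.339745962155613e-06 equals 1e-5 · (1 − √3/2), the cosine point exactly five-sixths of the way through the post-warmup span.

```python
import json
import math

# Assumed location of the log shown above -- adjust as needed.
PATH = "trainer_state.json"

with open(PATH) as f:
    state = json.load(f)

# Keep only entries that carry a training loss (eval entries, if present, lack one).
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

print(f"steps logged    : {len(history)}")
print(f"first -> last   : {losses[0]:.4f} -> {losses[-1]:.4f}")
tail = losses[-50:]
print(f"mean of last 50 : {sum(tail) / len(tail):.4f}")

# Sanity-check the schedule: the logged rates are consistent with linear
# warmup to PEAK over WARMUP steps, then cosine decay to zero at max_steps.
# PEAK and WARMUP are inferred from the log itself, not from a config file.
PEAK, WARMUP, TOTAL = 2e-5, 38, state["max_steps"]

def expected_lr(step: int) -> float:
    """Linear warmup to PEAK, then cosine decay to zero at TOTAL."""
    if step < WARMUP:
        return PEAK * step / WARMUP
    progress = (step - WARMUP) / (TOTAL - WARMUP)
    return 0.5 * PEAK * (1.0 + math.cos(math.pi * progress))

gap = max(abs(lr - expected_lr(s)) for s, lr in zip(steps, lrs))
print(f"max deviation from fitted schedule: {gap:.3e}")
```

If the inferred constants are right, the printed deviation sits at floating-point noise; a large value would mean the warmup/peak guess is wrong. Since each `loss` here is a single-step training loss, the 50-step tail mean above gives a steadier read of where training ended than the final step alone.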