{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9995388034044694,
  "eval_steps": 500,
  "global_step": 745,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013416628233617039,
      "grad_norm": 7.392911121308514,
      "learning_rate": 0.0,
      "loss": 1.5209,
      "step": 1
    },
    {
      "epoch": 0.0026833256467234078,
      "grad_norm": 7.112473452836917,
      "learning_rate": 4.347826086956522e-07,
      "loss": 1.5143,
      "step": 2
    },
    {
      "epoch": 0.004024988470085112,
      "grad_norm": 7.177785683559338,
      "learning_rate": 8.695652173913044e-07,
      "loss": 1.4845,
      "step": 3
    },
    {
      "epoch": 0.0053666512934468155,
      "grad_norm": 7.134127065979566,
      "learning_rate": 1.3043478260869566e-06,
      "loss": 1.5244,
      "step": 4
    },
    {
      "epoch": 0.006708314116808519,
      "grad_norm": 7.03887090554238,
      "learning_rate": 1.7391304347826088e-06,
      "loss": 1.4937,
      "step": 5
    },
    {
      "epoch": 0.008049976940170223,
      "grad_norm": 6.431004514232511,
      "learning_rate": 2.173913043478261e-06,
      "loss": 1.4744,
      "step": 6
    },
    {
      "epoch": 0.009391639763531927,
      "grad_norm": 6.329636636805787,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 1.4811,
      "step": 7
    },
    {
      "epoch": 0.010733302586893631,
      "grad_norm": 5.167818404189778,
      "learning_rate": 3.043478260869566e-06,
      "loss": 1.4749,
      "step": 8
    },
    {
      "epoch": 0.012074965410255335,
      "grad_norm": 4.756114436376914,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 1.4369,
      "step": 9
    },
    {
      "epoch": 0.013416628233617039,
      "grad_norm": 3.1923565422063027,
      "learning_rate": 3.91304347826087e-06,
      "loss": 1.4196,
      "step": 10
    },
    {
      "epoch": 0.014758291056978743,
      "grad_norm": 3.7393628201726337,
      "learning_rate": 4.347826086956522e-06,
      "loss": 1.3848,
      "step": 11
    },
    {
      "epoch": 0.016099953880340447,
      "grad_norm": 3.820597086197902,
      "learning_rate": 4.782608695652174e-06,
      "loss": 1.3935,
      "step": 12
    },
    {
      "epoch": 0.017441616703702152,
      "grad_norm": 3.6939756049433,
      "learning_rate": 5.2173913043478265e-06,
      "loss": 1.3457,
      "step": 13
    },
    {
      "epoch": 0.018783279527063854,
      "grad_norm": 4.7768162647135535,
      "learning_rate": 5.652173913043479e-06,
      "loss": 1.3411,
      "step": 14
    },
    {
      "epoch": 0.02012494235042556,
      "grad_norm": 5.708989901082793,
      "learning_rate": 6.086956521739132e-06,
      "loss": 1.3803,
      "step": 15
    },
    {
      "epoch": 0.021466605173787262,
      "grad_norm": 5.129788229516496,
      "learning_rate": 6.521739130434783e-06,
      "loss": 1.3514,
      "step": 16
    },
    {
      "epoch": 0.022808267997148968,
      "grad_norm": 3.957950873594244,
      "learning_rate": 6.956521739130435e-06,
      "loss": 1.3337,
      "step": 17
    },
    {
      "epoch": 0.02414993082051067,
      "grad_norm": 3.2380219205405716,
      "learning_rate": 7.391304347826087e-06,
      "loss": 1.3109,
      "step": 18
    },
    {
      "epoch": 0.025491593643872375,
      "grad_norm": 3.3372390298941963,
      "learning_rate": 7.82608695652174e-06,
      "loss": 1.3058,
      "step": 19
    },
    {
      "epoch": 0.026833256467234078,
      "grad_norm": 3.5871280327198463,
      "learning_rate": 8.260869565217392e-06,
      "loss": 1.3026,
      "step": 20
    },
    {
      "epoch": 0.028174919290595783,
      "grad_norm": 3.22326237900213,
      "learning_rate": 8.695652173913044e-06,
      "loss": 1.2918,
      "step": 21
    },
    {
      "epoch": 0.029516582113957485,
      "grad_norm": 2.5369193377611867,
      "learning_rate": 9.130434782608697e-06,
      "loss": 1.2958,
      "step": 22
    },
    {
      "epoch": 0.03085824493731919,
      "grad_norm": 2.6224230361852827,
      "learning_rate": 9.565217391304349e-06,
      "loss": 1.2228,
      "step": 23
    },
    {
      "epoch": 0.03219990776068089,
      "grad_norm": 2.582819063813051,
      "learning_rate": 1e-05,
      "loss": 1.2521,
      "step": 24
    },
    {
      "epoch": 0.0335415705840426,
      "grad_norm": 2.2985705568510957,
      "learning_rate": 9.99995279777441e-06,
      "loss": 1.2341,
      "step": 25
    },
    {
      "epoch": 0.034883233407404304,
      "grad_norm": 2.0127817582932037,
      "learning_rate": 9.999811191988863e-06,
      "loss": 1.2559,
      "step": 26
    },
    {
      "epoch": 0.036224896230766,
      "grad_norm": 1.9823343228538544,
      "learning_rate": 9.999575185316994e-06,
      "loss": 1.2264,
      "step": 27
    },
    {
      "epoch": 0.03756655905412771,
      "grad_norm": 1.792201819633752,
      "learning_rate": 9.999244782214828e-06,
      "loss": 1.2036,
      "step": 28
    },
    {
      "epoch": 0.038908221877489414,
      "grad_norm": 1.7372048174282044,
      "learning_rate": 9.998819988920665e-06,
      "loss": 1.1851,
      "step": 29
    },
    {
      "epoch": 0.04024988470085112,
      "grad_norm": 1.78600915759922,
      "learning_rate": 9.998300813454981e-06,
      "loss": 1.2168,
      "step": 30
    },
    {
      "epoch": 0.04159154752421282,
      "grad_norm": 1.7629065705331128,
      "learning_rate": 9.997687265620274e-06,
      "loss": 1.1855,
      "step": 31
    },
    {
      "epoch": 0.042933210347574524,
      "grad_norm": 1.644529086310792,
      "learning_rate": 9.996979357000869e-06,
      "loss": 1.2073,
      "step": 32
    },
    {
      "epoch": 0.04427487317093623,
      "grad_norm": 2.5577223830781333,
      "learning_rate": 9.996177100962714e-06,
      "loss": 1.1595,
      "step": 33
    },
    {
      "epoch": 0.045616535994297935,
      "grad_norm": 1.7300882554626944,
      "learning_rate": 9.995280512653116e-06,
      "loss": 1.1688,
      "step": 34
    },
    {
      "epoch": 0.046958198817659634,
      "grad_norm": 1.5646971960903784,
      "learning_rate": 9.99428960900046e-06,
      "loss": 1.1868,
      "step": 35
    },
    {
      "epoch": 0.04829986164102134,
      "grad_norm": 1.685967317065919,
      "learning_rate": 9.99320440871389e-06,
      "loss": 1.1642,
      "step": 36
    },
    {
      "epoch": 0.049641524464383045,
      "grad_norm": 1.633989524662171,
      "learning_rate": 9.992024932282955e-06,
      "loss": 1.1843,
      "step": 37
    },
    {
      "epoch": 0.05098318728774475,
      "grad_norm": 1.5510371509273424,
      "learning_rate": 9.990751201977217e-06,
      "loss": 1.1819,
      "step": 38
    },
    {
      "epoch": 0.05232485011110645,
      "grad_norm": 1.5181337823607957,
      "learning_rate": 9.98938324184584e-06,
      "loss": 1.1381,
      "step": 39
    },
    {
      "epoch": 0.053666512934468155,
      "grad_norm": 1.5147318061378112,
      "learning_rate": 9.987921077717127e-06,
      "loss": 1.1375,
      "step": 40
    },
    {
      "epoch": 0.05500817575782986,
      "grad_norm": 1.855980773594425,
      "learning_rate": 9.98636473719804e-06,
      "loss": 1.1451,
      "step": 41
    },
    {
      "epoch": 0.056349838581191566,
      "grad_norm": 1.6054934665141092,
      "learning_rate": 9.984714249673676e-06,
      "loss": 1.1447,
      "step": 42
    },
    {
      "epoch": 0.057691501404553265,
      "grad_norm": 1.447759664276557,
      "learning_rate": 9.982969646306704e-06,
      "loss": 1.1369,
      "step": 43
    },
    {
      "epoch": 0.05903316422791497,
      "grad_norm": 1.4941544438032464,
      "learning_rate": 9.98113096003679e-06,
      "loss": 1.1397,
      "step": 44
    },
    {
      "epoch": 0.060374827051276676,
      "grad_norm": 1.6610959253342235,
      "learning_rate": 9.979198225579968e-06,
      "loss": 1.1345,
      "step": 45
    },
    {
      "epoch": 0.06171648987463838,
      "grad_norm": 1.5390215688694597,
      "learning_rate": 9.97717147942799e-06,
      "loss": 1.119,
      "step": 46
    },
    {
      "epoch": 0.06305815269800008,
      "grad_norm": 1.510282897486931,
      "learning_rate": 9.97505075984762e-06,
      "loss": 1.1376,
      "step": 47
    },
    {
      "epoch": 0.06439981552136179,
      "grad_norm": 1.4415829320619717,
      "learning_rate": 9.972836106879936e-06,
      "loss": 1.1324,
      "step": 48
    },
    {
      "epoch": 0.06574147834472349,
      "grad_norm": 1.72947543283011,
      "learning_rate": 9.970527562339554e-06,
      "loss": 1.1269,
      "step": 49
    },
    {
      "epoch": 0.0670831411680852,
      "grad_norm": 1.7207642200980233,
      "learning_rate": 9.968125169813855e-06,
      "loss": 1.1116,
      "step": 50
    },
    {
      "epoch": 0.0684248039914469,
      "grad_norm": 1.571820205098199,
      "learning_rate": 9.965628974662145e-06,
      "loss": 1.1258,
      "step": 51
    },
    {
      "epoch": 0.06976646681480861,
      "grad_norm": 1.5781999464670455,
      "learning_rate": 9.963039024014811e-06,
      "loss": 1.114,
      "step": 52
    },
    {
      "epoch": 0.0711081296381703,
      "grad_norm": 1.4961557725112893,
      "learning_rate": 9.96035536677243e-06,
      "loss": 1.1138,
      "step": 53
    },
    {
      "epoch": 0.072449792461532,
      "grad_norm": 1.7347021506143052,
      "learning_rate": 9.957578053604837e-06,
      "loss": 1.1141,
      "step": 54
    },
    {
      "epoch": 0.07379145528489371,
      "grad_norm": 1.438812778229738,
      "learning_rate": 9.954707136950176e-06,
      "loss": 1.1103,
      "step": 55
    },
    {
      "epoch": 0.07513311810825542,
      "grad_norm": 1.545484642307709,
      "learning_rate": 9.951742671013914e-06,
      "loss": 1.09,
      "step": 56
    },
    {
      "epoch": 0.07647478093161712,
      "grad_norm": 1.4676495438249457,
      "learning_rate": 9.9486847117678e-06,
      "loss": 1.0811,
      "step": 57
    },
    {
      "epoch": 0.07781644375497883,
      "grad_norm": 1.4950763559495723,
      "learning_rate": 9.945533316948833e-06,
      "loss": 1.0785,
      "step": 58
    },
    {
      "epoch": 0.07915810657834053,
      "grad_norm": 1.8715712469761752,
      "learning_rate": 9.942288546058148e-06,
      "loss": 1.0837,
      "step": 59
    },
    {
      "epoch": 0.08049976940170224,
      "grad_norm": 1.692239129693084,
      "learning_rate": 9.938950460359912e-06,
      "loss": 1.0914,
      "step": 60
    },
    {
      "epoch": 0.08184143222506395,
      "grad_norm": 1.4201822302921803,
      "learning_rate": 9.935519122880152e-06,
      "loss": 1.0812,
      "step": 61
    },
    {
      "epoch": 0.08318309504842564,
      "grad_norm": 1.503736803352269,
      "learning_rate": 9.931994598405576e-06,
      "loss": 1.1036,
      "step": 62
    },
    {
      "epoch": 0.08452475787178734,
      "grad_norm": 1.6032922425382767,
      "learning_rate": 9.928376953482343e-06,
      "loss": 1.0798,
      "step": 63
    },
    {
      "epoch": 0.08586642069514905,
      "grad_norm": 1.5854938428466847,
      "learning_rate": 9.924666256414812e-06,
      "loss": 1.0911,
      "step": 64
    },
    {
      "epoch": 0.08720808351851075,
      "grad_norm": 1.4675504699964215,
      "learning_rate": 9.920862577264242e-06,
      "loss": 1.0762,
      "step": 65
    },
    {
      "epoch": 0.08854974634187246,
      "grad_norm": 2.091734701831164,
      "learning_rate": 9.916965987847485e-06,
      "loss": 1.0704,
      "step": 66
    },
    {
      "epoch": 0.08989140916523417,
      "grad_norm": 1.6501153418242909,
      "learning_rate": 9.912976561735617e-06,
      "loss": 1.059,
      "step": 67
    },
    {
      "epoch": 0.09123307198859587,
      "grad_norm": 1.5290291395938629,
      "learning_rate": 9.908894374252556e-06,
      "loss": 1.074,
      "step": 68
    },
    {
      "epoch": 0.09257473481195758,
      "grad_norm": 1.8118777602247247,
      "learning_rate": 9.904719502473635e-06,
      "loss": 1.0862,
      "step": 69
    },
    {
      "epoch": 0.09391639763531927,
      "grad_norm": 1.7204147419517413,
      "learning_rate": 9.900452025224148e-06,
      "loss": 1.0707,
      "step": 70
    },
    {
      "epoch": 0.09525806045868097,
      "grad_norm": 1.6239428850208912,
      "learning_rate": 9.896092023077866e-06,
      "loss": 1.0767,
      "step": 71
    },
    {
      "epoch": 0.09659972328204268,
      "grad_norm": 1.8341002603819232,
      "learning_rate": 9.891639578355511e-06,
      "loss": 1.0725,
      "step": 72
    },
    {
      "epoch": 0.09794138610540438,
      "grad_norm": 1.8558854123237023,
      "learning_rate": 9.887094775123203e-06,
      "loss": 1.0863,
      "step": 73
    },
    {
      "epoch": 0.09928304892876609,
      "grad_norm": 1.622711136214513,
      "learning_rate": 9.882457699190874e-06,
      "loss": 1.0702,
      "step": 74
    },
    {
      "epoch": 0.1006247117521278,
      "grad_norm": 1.495142178664832,
      "learning_rate": 9.877728438110645e-06,
      "loss": 1.0701,
      "step": 75
    },
    {
      "epoch": 0.1019663745754895,
      "grad_norm": 1.4506371213965268,
      "learning_rate": 9.872907081175175e-06,
      "loss": 1.0829,
      "step": 76
    },
    {
      "epoch": 0.10330803739885121,
      "grad_norm": 1.8034795976596587,
      "learning_rate": 9.867993719415974e-06,
      "loss": 1.0654,
      "step": 77
    },
    {
      "epoch": 0.1046497002222129,
      "grad_norm": 1.785781379811563,
      "learning_rate": 9.86298844560169e-06,
      "loss": 1.0514,
      "step": 78
    },
    {
      "epoch": 0.1059913630455746,
      "grad_norm": 1.7535465757365365,
      "learning_rate": 9.857891354236342e-06,
      "loss": 1.0564,
      "step": 79
    },
    {
      "epoch": 0.10733302586893631,
      "grad_norm": 1.7337828430288411,
      "learning_rate": 9.852702541557559e-06,
      "loss": 1.0683,
      "step": 80
    },
    {
      "epoch": 0.10867468869229802,
      "grad_norm": 1.6693391553415382,
      "learning_rate": 9.847422105534739e-06,
      "loss": 1.0431,
      "step": 81
    },
    {
      "epoch": 0.11001635151565972,
      "grad_norm": 1.474045813243068,
      "learning_rate": 9.842050145867219e-06,
      "loss": 1.0564,
      "step": 82
    },
    {
      "epoch": 0.11135801433902143,
      "grad_norm": 2.7863753262947206,
      "learning_rate": 9.836586763982376e-06,
      "loss": 1.0567,
      "step": 83
    },
    {
      "epoch": 0.11269967716238313,
      "grad_norm": 1.5338677331929418,
      "learning_rate": 9.831032063033726e-06,
      "loss": 1.0308,
      "step": 84
    },
    {
      "epoch": 0.11404133998574484,
      "grad_norm": 2.179252254257764,
      "learning_rate": 9.825386147898967e-06,
      "loss": 1.0653,
      "step": 85
    },
    {
      "epoch": 0.11538300280910653,
      "grad_norm": 1.6643070967595677,
      "learning_rate": 9.819649125178004e-06,
      "loss": 1.0586,
      "step": 86
    },
    {
      "epoch": 0.11672466563246824,
      "grad_norm": 1.734693819240776,
      "learning_rate": 9.813821103190932e-06,
      "loss": 1.0671,
      "step": 87
    },
    {
      "epoch": 0.11806632845582994,
      "grad_norm": 1.9047365235771927,
      "learning_rate": 9.807902191975996e-06,
      "loss": 1.0563,
      "step": 88
    },
    {
      "epoch": 0.11940799127919165,
      "grad_norm": 1.7159087250602358,
      "learning_rate": 9.801892503287507e-06,
      "loss": 1.049,
      "step": 89
    },
    {
      "epoch": 0.12074965410255335,
      "grad_norm": 1.7365191389572339,
      "learning_rate": 9.795792150593739e-06,
      "loss": 1.0668,
      "step": 90
    },
    {
      "epoch": 0.12209131692591506,
      "grad_norm": 1.61649848507611,
      "learning_rate": 9.789601249074781e-06,
      "loss": 1.0365,
      "step": 91
    },
    {
      "epoch": 0.12343297974927676,
      "grad_norm": 1.525103477384651,
      "learning_rate": 9.783319915620365e-06,
      "loss": 1.0753,
      "step": 92
    },
    {
      "epoch": 0.12477464257263847,
      "grad_norm": 1.5468595731679113,
      "learning_rate": 9.776948268827658e-06,
      "loss": 1.0377,
      "step": 93
    },
    {
      "epoch": 0.12611630539600016,
      "grad_norm": 1.5143092484316483,
      "learning_rate": 9.770486428999026e-06,
      "loss": 1.0398,
      "step": 94
    },
    {
      "epoch": 0.12745796821936187,
      "grad_norm": 2.098110190522861,
      "learning_rate": 9.763934518139754e-06,
      "loss": 1.0321,
      "step": 95
    },
    {
      "epoch": 0.12879963104272357,
      "grad_norm": 1.596350127992628,
      "learning_rate": 9.757292659955755e-06,
      "loss": 1.0465,
      "step": 96
    },
    {
      "epoch": 0.13014129386608528,
      "grad_norm": 1.5349488332845023,
      "learning_rate": 9.750560979851222e-06,
      "loss": 1.0405,
      "step": 97
    },
    {
      "epoch": 0.13148295668944698,
      "grad_norm": 1.6094024991622389,
      "learning_rate": 9.743739604926268e-06,
      "loss": 1.0397,
      "step": 98
    },
    {
      "epoch": 0.1328246195128087,
      "grad_norm": 1.4888314569282421,
      "learning_rate": 9.736828663974527e-06,
      "loss": 1.026,
      "step": 99
    },
    {
      "epoch": 0.1341662823361704,
      "grad_norm": 1.5305760875880698,
      "learning_rate": 9.729828287480713e-06,
      "loss": 1.04,
      "step": 100
    },
    {
      "epoch": 0.1355079451595321,
      "grad_norm": 1.6171171257521297,
      "learning_rate": 9.722738607618171e-06,
      "loss": 1.0467,
      "step": 101
    },
    {
      "epoch": 0.1368496079828938,
      "grad_norm": 1.8275927631260815,
      "learning_rate": 9.715559758246363e-06,
      "loss": 1.0424,
      "step": 102
    },
    {
      "epoch": 0.1381912708062555,
      "grad_norm": 1.5808349943913245,
      "learning_rate": 9.70829187490836e-06,
      "loss": 1.052,
      "step": 103
    },
    {
      "epoch": 0.13953293362961722,
      "grad_norm": 1.599275155249567,
      "learning_rate": 9.700935094828267e-06,
      "loss": 1.0433,
      "step": 104
    },
    {
      "epoch": 0.14087459645297892,
      "grad_norm": 1.4748747611236446,
      "learning_rate": 9.693489556908641e-06,
      "loss": 1.0512,
      "step": 105
    },
    {
      "epoch": 0.1422162592763406,
      "grad_norm": 1.6060077466070208,
      "learning_rate": 9.68595540172787e-06,
      "loss": 1.01,
      "step": 106
    },
    {
      "epoch": 0.1435579220997023,
      "grad_norm": 1.8066414228236412,
      "learning_rate": 9.678332771537506e-06,
      "loss": 1.0172,
      "step": 107
    },
    {
      "epoch": 0.144899584923064,
      "grad_norm": 1.5192038597826958,
      "learning_rate": 9.670621810259596e-06,
      "loss": 1.0243,
      "step": 108
    },
    {
      "epoch": 0.14624124774642572,
      "grad_norm": 1.8566706929318617,
      "learning_rate": 9.662822663483952e-06,
      "loss": 1.0386,
      "step": 109
    },
    {
      "epoch": 0.14758291056978742,
      "grad_norm": 1.5109073208750536,
      "learning_rate": 9.654935478465409e-06,
      "loss": 1.0134,
      "step": 110
    },
    {
      "epoch": 0.14892457339314913,
      "grad_norm": 1.5248271536361229,
      "learning_rate": 9.646960404121042e-06,
      "loss": 1.0294,
      "step": 111
    },
    {
      "epoch": 0.15026623621651083,
      "grad_norm": 1.4414336657282556,
      "learning_rate": 9.638897591027355e-06,
      "loss": 1.0259,
      "step": 112
    },
    {
      "epoch": 0.15160789903987254,
      "grad_norm": 1.5329634677484316,
      "learning_rate": 9.630747191417433e-06,
      "loss": 1.0087,
      "step": 113
    },
    {
      "epoch": 0.15294956186323425,
      "grad_norm": 1.7617577737233274,
      "learning_rate": 9.62250935917808e-06,
      "loss": 1.022,
      "step": 114
    },
    {
      "epoch": 0.15429122468659595,
      "grad_norm": 1.492081973940454,
      "learning_rate": 9.614184249846903e-06,
      "loss": 1.0083,
      "step": 115
    },
    {
      "epoch": 0.15563288750995766,
      "grad_norm": 1.6947020263672135,
      "learning_rate": 9.605772020609376e-06,
      "loss": 1.0448,
      "step": 116
    },
    {
      "epoch": 0.15697455033331936,
      "grad_norm": 1.517976041310006,
      "learning_rate": 9.597272830295877e-06,
      "loss": 1.0273,
      "step": 117
    },
    {
      "epoch": 0.15831621315668107,
      "grad_norm": 1.6697358355322918,
      "learning_rate": 9.58868683937868e-06,
      "loss": 1.0388,
      "step": 118
    },
    {
      "epoch": 0.15965787598004277,
      "grad_norm": 1.628205377705706,
      "learning_rate": 9.580014209968946e-06,
      "loss": 0.9951,
      "step": 119
    },
    {
      "epoch": 0.16099953880340448,
      "grad_norm": 1.5991384820195875,
      "learning_rate": 9.571255105813632e-06,
      "loss": 1.0445,
      "step": 120
    },
    {
      "epoch": 0.16234120162676619,
      "grad_norm": 1.5239720573296276,
      "learning_rate": 9.562409692292425e-06,
      "loss": 1.0476,
      "step": 121
    },
    {
      "epoch": 0.1636828644501279,
      "grad_norm": 1.5397990887970099,
      "learning_rate": 9.553478136414606e-06,
      "loss": 1.0131,
      "step": 122
    },
    {
      "epoch": 0.16502452727348957,
      "grad_norm": 1.682926751705613,
      "learning_rate": 9.544460606815901e-06,
      "loss": 1.0215,
      "step": 123
    },
    {
      "epoch": 0.16636619009685127,
      "grad_norm": 1.5374236614326178,
      "learning_rate": 9.535357273755296e-06,
      "loss": 1.022,
      "step": 124
    },
    {
      "epoch": 0.16770785292021298,
      "grad_norm": 1.5125873221214663,
      "learning_rate": 9.526168309111827e-06,
      "loss": 1.0325,
      "step": 125
    },
    {
      "epoch": 0.16904951574357469,
      "grad_norm": 1.5020363203895621,
      "learning_rate": 9.516893886381324e-06,
      "loss": 1.003,
      "step": 126
    },
    {
      "epoch": 0.1703911785669364,
      "grad_norm": 1.5253980013545938,
      "learning_rate": 9.507534180673142e-06,
      "loss": 1.0121,
      "step": 127
    },
    {
      "epoch": 0.1717328413902981,
      "grad_norm": 1.5838384344412881,
      "learning_rate": 9.498089368706862e-06,
      "loss": 1.0173,
      "step": 128
    },
    {
      "epoch": 0.1730745042136598,
      "grad_norm": 1.5526982969322647,
      "learning_rate": 9.488559628808939e-06,
      "loss": 1.0172,
      "step": 129
    },
    {
      "epoch": 0.1744161670370215,
      "grad_norm": 1.6505573103371025,
      "learning_rate": 9.478945140909346e-06,
      "loss": 0.9963,
      "step": 130
    },
    {
      "epoch": 0.1757578298603832,
      "grad_norm": 2.4994210597949533,
      "learning_rate": 9.469246086538175e-06,
      "loss": 1.0379,
      "step": 131
    },
    {
      "epoch": 0.17709949268374492,
      "grad_norm": 1.6133167321425232,
      "learning_rate": 9.459462648822209e-06,
      "loss": 1.0112,
      "step": 132
    },
    {
      "epoch": 0.17844115550710662,
      "grad_norm": 1.6250631419898731,
      "learning_rate": 9.449595012481455e-06,
      "loss": 1.0472,
      "step": 133
    },
    {
      "epoch": 0.17978281833046833,
      "grad_norm": 1.7608651873887804,
      "learning_rate": 9.439643363825678e-06,
      "loss": 1.0345,
      "step": 134
    },
    {
      "epoch": 0.18112448115383004,
      "grad_norm": 1.4462780649585327,
      "learning_rate": 9.429607890750863e-06,
      "loss": 1.0169,
      "step": 135
    },
    {
      "epoch": 0.18246614397719174,
      "grad_norm": 1.5346110788641523,
      "learning_rate": 9.419488782735671e-06,
      "loss": 1.0252,
      "step": 136
    },
    {
      "epoch": 0.18380780680055345,
      "grad_norm": 1.4786881677807142,
      "learning_rate": 9.409286230837876e-06,
      "loss": 0.9736,
      "step": 137
    },
    {
      "epoch": 0.18514946962391515,
      "grad_norm": 1.7544449884490962,
      "learning_rate": 9.399000427690736e-06,
      "loss": 1.001,
      "step": 138
    },
    {
      "epoch": 0.18649113244727683,
      "grad_norm": 1.5443323941244775,
      "learning_rate": 9.388631567499373e-06,
      "loss": 1.0171,
      "step": 139
    },
    {
      "epoch": 0.18783279527063854,
      "grad_norm": 1.5112277113219468,
      "learning_rate": 9.378179846037096e-06,
      "loss": 0.9966,
      "step": 140
    },
    {
      "epoch": 0.18917445809400024,
      "grad_norm": 1.4469628985744891,
      "learning_rate": 9.367645460641716e-06,
      "loss": 0.9999,
      "step": 141
    },
    {
      "epoch": 0.19051612091736195,
      "grad_norm": 1.509159997421376,
      "learning_rate": 9.357028610211802e-06,
      "loss": 1.0039,
      "step": 142
    },
    {
      "epoch": 0.19185778374072365,
      "grad_norm": 1.9033920658610284,
      "learning_rate": 9.346329495202944e-06,
      "loss": 0.9927,
      "step": 143
    },
    {
      "epoch": 0.19319944656408536,
      "grad_norm": 1.8697851005515826,
      "learning_rate": 9.335548317623957e-06,
      "loss": 0.9862,
      "step": 144
    },
    {
      "epoch": 0.19454110938744706,
      "grad_norm": 1.727575777874671,
      "learning_rate": 9.324685281033073e-06,
      "loss": 0.9985,
      "step": 145
    },
    {
      "epoch": 0.19588277221080877,
      "grad_norm": 1.4973664146911145,
      "learning_rate": 9.313740590534093e-06,
      "loss": 1.0107,
      "step": 146
    },
    {
      "epoch": 0.19722443503417048,
      "grad_norm": 1.5511498919072226,
      "learning_rate": 9.302714452772515e-06,
      "loss": 1.0211,
      "step": 147
    },
    {
      "epoch": 0.19856609785753218,
      "grad_norm": 1.7549997830763724,
      "learning_rate": 9.291607075931638e-06,
      "loss": 1.0127,
      "step": 148
    },
    {
      "epoch": 0.1999077606808939,
      "grad_norm": 1.5905298977716988,
      "learning_rate": 9.280418669728625e-06,
      "loss": 0.9755,
      "step": 149
    },
    {
      "epoch": 0.2012494235042556,
      "grad_norm": 1.7118974196904215,
      "learning_rate": 9.269149445410545e-06,
      "loss": 0.9962,
      "step": 150
    },
    {
      "epoch": 0.2025910863276173,
      "grad_norm": 1.7000772782219813,
      "learning_rate": 9.257799615750384e-06,
      "loss": 1.017,
      "step": 151
    },
    {
      "epoch": 0.203932749150979,
      "grad_norm": 1.9810883869642395,
      "learning_rate": 9.246369395043033e-06,
      "loss": 0.996,
      "step": 152
    },
    {
      "epoch": 0.2052744119743407,
      "grad_norm": 1.5890509537547852,
      "learning_rate": 9.234858999101232e-06,
      "loss": 1.0012,
      "step": 153
    },
    {
      "epoch": 0.20661607479770241,
      "grad_norm": 1.5976972038680295,
      "learning_rate": 9.223268645251504e-06,
      "loss": 0.978,
      "step": 154
    },
    {
      "epoch": 0.2079577376210641,
      "grad_norm": 1.5846128874339207,
      "learning_rate": 9.211598552330047e-06,
      "loss": 1.015,
      "step": 155
    },
    {
      "epoch": 0.2092994004444258,
      "grad_norm": 1.6624623820573463,
      "learning_rate": 9.199848940678607e-06,
      "loss": 0.9916,
      "step": 156
    },
    {
      "epoch": 0.2106410632677875,
      "grad_norm": 1.519781144208935,
      "learning_rate": 9.188020032140308e-06,
      "loss": 0.9759,
      "step": 157
    },
    {
      "epoch": 0.2119827260911492,
      "grad_norm": 1.7030331960727392,
      "learning_rate": 9.17611205005548e-06,
      "loss": 1.0027,
      "step": 158
    },
    {
      "epoch": 0.21332438891451092,
      "grad_norm": 2.3227823126647986,
      "learning_rate": 9.164125219257419e-06,
      "loss": 1.0084,
      "step": 159
    },
    {
      "epoch": 0.21466605173787262,
      "grad_norm": 1.5950630942235617,
      "learning_rate": 9.152059766068164e-06,
      "loss": 1.0171,
      "step": 160
    },
    {
      "epoch": 0.21600771456123433,
      "grad_norm": 1.5097514171757036,
      "learning_rate": 9.139915918294213e-06,
      "loss": 1.0052,
      "step": 161
    },
    {
      "epoch": 0.21734937738459603,
      "grad_norm": 1.668749714662694,
      "learning_rate": 9.127693905222223e-06,
      "loss": 0.9837,
      "step": 162
    },
    {
      "epoch": 0.21869104020795774,
      "grad_norm": 1.613501913429001,
      "learning_rate": 9.115393957614679e-06,
      "loss": 1.0006,
      "step": 163
    },
    {
      "epoch": 0.22003270303131944,
      "grad_norm": 1.548280495817311,
      "learning_rate": 9.103016307705546e-06,
      "loss": 1.0172,
      "step": 164
    },
    {
      "epoch": 0.22137436585468115,
      "grad_norm": 1.6799688152981618,
      "learning_rate": 9.09056118919587e-06,
      "loss": 1.0173,
      "step": 165
    },
    {
      "epoch": 0.22271602867804285,
      "grad_norm": 1.4892884530388883,
      "learning_rate": 9.078028837249378e-06,
      "loss": 0.9944,
      "step": 166
    },
    {
      "epoch": 0.22405769150140456,
      "grad_norm": 1.5421729124892896,
      "learning_rate": 9.065419488488029e-06,
      "loss": 1.013,
      "step": 167
    },
    {
      "epoch": 0.22539935432476627,
      "grad_norm": 1.8129370572182915,
      "learning_rate": 9.052733380987555e-06,
      "loss": 1.0076,
      "step": 168
    },
    {
      "epoch": 0.22674101714812797,
      "grad_norm": 1.76369666967373,
      "learning_rate": 9.03997075427296e-06,
      "loss": 1.0034,
      "step": 169
    },
    {
      "epoch": 0.22808267997148968,
      "grad_norm": 1.470527664070282,
      "learning_rate": 9.027131849313995e-06,
      "loss": 0.968,
      "step": 170
    },
    {
      "epoch": 0.22942434279485138,
      "grad_norm": 1.5038389192074497,
      "learning_rate": 9.014216908520619e-06,
      "loss": 0.9939,
      "step": 171
    },
    {
      "epoch": 0.23076600561821306,
      "grad_norm": 1.4538596123560616,
      "learning_rate": 9.001226175738409e-06,
      "loss": 0.9873,
      "step": 172
    },
    {
      "epoch": 0.23210766844157477,
      "grad_norm": 1.498912180760362,
      "learning_rate": 8.988159896243967e-06,
      "loss": 1.0123,
      "step": 173
    },
    {
      "epoch": 0.23344933126493647,
      "grad_norm": 1.4063632583533416,
      "learning_rate": 8.975018316740278e-06,
      "loss": 0.9543,
      "step": 174
    },
    {
      "epoch": 0.23479099408829818,
      "grad_norm": 1.4042961321205751,
      "learning_rate": 8.961801685352068e-06,
      "loss": 0.9941,
      "step": 175
    },
    {
      "epoch": 0.23613265691165988,
      "grad_norm": 1.5241960865995388,
      "learning_rate": 8.948510251621096e-06,
      "loss": 0.9907,
      "step": 176
    },
    {
      "epoch": 0.2374743197350216,
      "grad_norm": 1.6001855683643875,
      "learning_rate": 8.93514426650147e-06,
      "loss": 0.9726,
      "step": 177
    },
    {
      "epoch": 0.2388159825583833,
      "grad_norm": 1.6999875656481869,
      "learning_rate": 8.921703982354886e-06,
      "loss": 0.9979,
      "step": 178
    },
    {
      "epoch": 0.240157645381745,
      "grad_norm": 1.5296332672890027,
      "learning_rate": 8.908189652945871e-06,
      "loss": 1.0019,
      "step": 179
    },
    {
      "epoch": 0.2414993082051067,
      "grad_norm": 1.664067570536287,
      "learning_rate": 8.894601533437e-06,
      "loss": 0.9698,
      "step": 180
    },
    {
      "epoch": 0.2428409710284684,
      "grad_norm": 2.102340889974168,
      "learning_rate": 8.880939880384061e-06,
      "loss": 0.999,
      "step": 181
    },
    {
      "epoch": 0.24418263385183012,
      "grad_norm": 1.7753482498612518,
      "learning_rate": 8.867204951731227e-06,
      "loss": 0.9975,
      "step": 182
    },
    {
      "epoch": 0.24552429667519182,
      "grad_norm": 1.6954057018557689,
      "learning_rate": 8.853397006806183e-06,
      "loss": 0.9595,
      "step": 183
    },
    {
      "epoch": 0.24686595949855353,
      "grad_norm": 1.6220416310433559,
      "learning_rate": 8.839516306315216e-06,
      "loss": 0.9772,
      "step": 184
    },
    {
      "epoch": 0.24820762232191523,
      "grad_norm": 1.4344699633715252,
      "learning_rate": 8.825563112338308e-06,
      "loss": 0.9915,
      "step": 185
    },
    {
      "epoch": 0.24954928514527694,
      "grad_norm": 1.5652210870327667,
      "learning_rate": 8.811537688324187e-06,
      "loss": 0.9884,
      "step": 186
    },
    {
      "epoch": 0.25089094796863864,
      "grad_norm": 1.5480421042178967,
      "learning_rate": 8.797440299085344e-06,
      "loss": 0.9912,
      "step": 187
    },
    {
      "epoch": 0.2522326107920003,
      "grad_norm": 1.5815223622058714,
      "learning_rate": 8.783271210793034e-06,
      "loss": 0.9712,
      "step": 188
    },
    {
      "epoch": 0.25357427361536206,
      "grad_norm": 1.6906168223241804,
      "learning_rate": 8.769030690972262e-06,
      "loss": 0.9751,
      "step": 189
    },
    {
      "epoch": 0.25491593643872373,
      "grad_norm": 1.6673317361003395,
      "learning_rate": 8.754719008496718e-06,
      "loss": 0.9857,
      "step": 190
    },
    {
      "epoch": 0.25625759926208547,
      "grad_norm": 1.5742573500825576,
      "learning_rate": 8.740336433583704e-06,
      "loss": 0.988,
      "step": 191
    },
    {
      "epoch": 0.25759926208544714,
      "grad_norm": 1.5087573984013583,
      "learning_rate": 8.725883237789046e-06,
      "loss": 0.987,
      "step": 192
    },
    {
      "epoch": 0.2589409249088089,
      "grad_norm": 1.6052629099050266,
      "learning_rate": 8.711359694001941e-06,
      "loss": 0.9853,
      "step": 193
    },
    {
      "epoch": 0.26028258773217056,
      "grad_norm": 16.444996469677516,
      "learning_rate": 8.696766076439826e-06,
      "loss": 0.9776,
      "step": 194
    },
    {
      "epoch": 0.2616242505555323,
      "grad_norm": 1.9102037969030812,
      "learning_rate": 8.682102660643196e-06,
      "loss": 1.0172,
      "step": 195
    },
    {
      "epoch": 0.26296591337889397,
      "grad_norm": 2.9756746121109314,
      "learning_rate": 8.667369723470393e-06,
      "loss": 0.9947,
      "step": 196
    },
    {
      "epoch": 0.26430757620225565,
      "grad_norm": 1.6204007709317712,
      "learning_rate": 8.652567543092385e-06,
      "loss": 0.9799,
      "step": 197
    },
    {
      "epoch": 0.2656492390256174,
      "grad_norm": 1.5303211756166764,
      "learning_rate": 8.637696398987517e-06,
      "loss": 0.9783,
      "step": 198
    },
    {
      "epoch": 0.26699090184897906,
      "grad_norm": 1.6840365379900046,
      "learning_rate": 8.622756571936229e-06,
      "loss": 0.9736,
      "step": 199
    },
    {
      "epoch": 0.2683325646723408,
      "grad_norm": 1.683788142218529,
      "learning_rate": 8.607748344015752e-06,
      "loss": 0.9702,
      "step": 200
    },
    {
      "epoch": 0.26967422749570247,
      "grad_norm": 1.457488972413562,
      "learning_rate": 8.592671998594794e-06,
      "loss": 0.9806,
      "step": 201
    },
    {
      "epoch": 0.2710158903190642,
      "grad_norm": 1.547971757217976,
      "learning_rate": 8.577527820328176e-06,
      "loss": 0.9634,
      "step": 202
    },
    {
      "epoch": 0.2723575531424259,
      "grad_norm": 1.3316553082460927,
      "learning_rate": 8.562316095151468e-06,
      "loss": 0.9512,
      "step": 203
    },
    {
      "epoch": 0.2736992159657876,
      "grad_norm": 1.5661643370029357,
      "learning_rate": 8.54703711027558e-06,
      "loss": 0.9855,
      "step": 204
    },
    {
      "epoch": 0.2750408787891493,
      "grad_norm": 1.4810435360919245,
      "learning_rate": 8.53169115418135e-06,
      "loss": 0.9658,
      "step": 205
    },
    {
      "epoch": 0.276382541612511,
      "grad_norm": 1.5394045600056434,
      "learning_rate": 8.516278516614092e-06,
      "loss": 0.9734,
      "step": 206
    },
    {
      "epoch": 0.2777242044358727,
      "grad_norm": 1.4871363595645701,
      "learning_rate": 8.50079948857812e-06,
      "loss": 0.9701,
      "step": 207
    },
    {
      "epoch": 0.27906586725923443,
      "grad_norm": 1.6508266936645926,
      "learning_rate": 8.485254362331268e-06,
      "loss": 0.9891,
      "step": 208
    },
    {
      "epoch": 0.2804075300825961,
      "grad_norm": 1.4599758131575469,
      "learning_rate": 8.469643431379355e-06,
      "loss": 0.9689,
      "step": 209
    },
    {
      "epoch": 0.28174919290595785,
      "grad_norm": 1.4858135833977075,
      "learning_rate": 8.453966990470656e-06,
      "loss": 0.9735,
      "step": 210
    },
    {
      "epoch": 0.2830908557293195,
      "grad_norm": 1.5170470501397098,
      "learning_rate": 8.438225335590334e-06,
      "loss": 0.9745,
      "step": 211
    },
    {
      "epoch": 0.2844325185526812,
      "grad_norm": 1.5641985728172763,
      "learning_rate": 8.422418763954841e-06,
      "loss": 0.9618,
      "step": 212
    },
    {
      "epoch": 0.28577418137604294,
      "grad_norm": 1.6783440611678353,
      "learning_rate": 8.406547574006326e-06,
      "loss": 0.9783,
      "step": 213
    },
    {
      "epoch": 0.2871158441994046,
      "grad_norm": 1.579120726370794,
      "learning_rate": 8.390612065406983e-06,
      "loss": 0.9567,
      "step": 214
    },
    {
      "epoch": 0.28845750702276635,
      "grad_norm": 1.5939955792145337,
      "learning_rate": 8.374612539033398e-06,
      "loss": 0.9746,
      "step": 215
    },
    {
      "epoch": 0.289799169846128,
      "grad_norm": 1.4721387285954308,
      "learning_rate": 8.358549296970877e-06,
      "loss": 0.9691,
      "step": 216
    },
    {
      "epoch": 0.29114083266948976,
      "grad_norm": 1.5457271723104407,
      "learning_rate": 8.342422642507727e-06,
      "loss": 0.9878,
      "step": 217
    },
    {
      "epoch": 0.29248249549285144,
      "grad_norm": 1.3692684340326473,
      "learning_rate": 8.32623288012954e-06,
      "loss": 0.9594,
      "step": 218
    },
    {
      "epoch": 0.29382415831621317,
      "grad_norm": 1.5163724984197753,
      "learning_rate": 8.309980315513444e-06,
      "loss": 0.972,
      "step": 219
    },
    {
      "epoch": 0.29516582113957485,
      "grad_norm": 1.6114780037861143,
      "learning_rate": 8.29366525552233e-06,
      "loss": 0.9603,
      "step": 220
    },
    {
      "epoch": 0.2965074839629366,
      "grad_norm": 1.4762572081466976,
      "learning_rate": 8.27728800819905e-06,
      "loss": 0.9727,
      "step": 221
    },
    {
      "epoch": 0.29784914678629826,
      "grad_norm": 1.4684362587718065,
      "learning_rate": 8.260848882760616e-06,
      "loss": 0.9785,
      "step": 222
    },
    {
      "epoch": 0.29919080960966,
      "grad_norm": 1.5477609027947252,
      "learning_rate": 8.24434818959235e-06,
      "loss": 0.9457,
      "step": 223
    },
    {
      "epoch": 0.30053247243302167,
      "grad_norm": 1.4186396792517242,
      "learning_rate": 8.22778624024203e-06,
      "loss": 0.9743,
      "step": 224
    },
    {
      "epoch": 0.3018741352563834,
      "grad_norm": 1.507163119567676,
      "learning_rate": 8.211163347414005e-06,
      "loss": 0.9723,
      "step": 225
    },
    {
      "epoch": 0.3032157980797451,
      "grad_norm": 2.061573031593893,
      "learning_rate": 8.194479824963284e-06,
      "loss": 0.9695,
      "step": 226
    },
    {
      "epoch": 0.3045574609031068,
      "grad_norm": 1.7101381416686336,
      "learning_rate": 8.177735987889628e-06,
      "loss": 0.9639,
      "step": 227
    },
    {
      "epoch": 0.3058991237264685,
      "grad_norm": 1.4858650389281058,
      "learning_rate": 8.160932152331587e-06,
      "loss": 0.9707,
      "step": 228
    },
    {
      "epoch": 0.30724078654983017,
      "grad_norm": 1.6965951272902517,
      "learning_rate": 8.144068635560533e-06,
      "loss": 0.9893,
      "step": 229
    },
    {
      "epoch": 0.3085824493731919,
      "grad_norm": 1.388920287350121,
      "learning_rate": 8.127145755974676e-06,
      "loss": 0.9717,
      "step": 230
    },
    {
      "epoch": 0.3099241121965536,
      "grad_norm": 1.5510344042881328,
      "learning_rate": 8.11016383309305e-06,
      "loss": 0.9637,
      "step": 231
    },
    {
      "epoch": 0.3112657750199153,
      "grad_norm": 1.3951378486816117,
      "learning_rate": 8.093123187549475e-06,
      "loss": 0.9798,
      "step": 232
    },
    {
      "epoch": 0.312607437843277,
      "grad_norm": 1.3958446772921527,
      "learning_rate": 8.07602414108651e-06,
      "loss": 0.9683,
      "step": 233
    },
    {
      "epoch": 0.3139491006666387,
      "grad_norm": 1.466094331759861,
      "learning_rate": 8.058867016549372e-06,
      "loss": 0.9707,
      "step": 234
    },
    {
      "epoch": 0.3152907634900004,
      "grad_norm": 1.8036674074115169,
      "learning_rate": 8.04165213787985e-06,
      "loss": 0.9489,
      "step": 235
    },
    {
      "epoch": 0.31663242631336214,
      "grad_norm": 1.5957322487948662,
      "learning_rate": 8.024379830110176e-06,
      "loss": 0.9452,
      "step": 236
    },
    {
      "epoch": 0.3179740891367238,
      "grad_norm": 1.4680343088229408,
      "learning_rate": 8.007050419356898e-06,
      "loss": 0.9683,
      "step": 237
    },
    {
      "epoch": 0.31931575196008555,
      "grad_norm": 1.5328747678300108,
      "learning_rate": 7.989664232814718e-06,
      "loss": 0.9741,
      "step": 238
    },
    {
      "epoch": 0.3206574147834472,
      "grad_norm": 1.374414004947906,
      "learning_rate": 7.972221598750316e-06,
      "loss": 0.9535,
      "step": 239
    },
    {
      "epoch": 0.32199907760680896,
      "grad_norm": 1.3346744700214859,
      "learning_rate": 7.95472284649615e-06,
      "loss": 0.9654,
      "step": 240
    },
    {
      "epoch": 0.32334074043017064,
      "grad_norm": 1.4669968920031307,
      "learning_rate": 7.937168306444243e-06,
      "loss": 0.9507,
      "step": 241
    },
    {
      "epoch": 0.32468240325353237,
      "grad_norm": 1.5766148114998875,
      "learning_rate": 7.919558310039937e-06,
      "loss": 0.965,
      "step": 242
    },
    {
      "epoch": 0.32602406607689405,
      "grad_norm": 1.4875838167859463,
      "learning_rate": 7.90189318977564e-06,
      "loss": 0.9717,
      "step": 243
    },
    {
      "epoch": 0.3273657289002558,
      "grad_norm": 1.7148717900145576,
      "learning_rate": 7.884173279184552e-06,
      "loss": 0.9787,
      "step": 244
    },
    {
      "epoch": 0.32870739172361746,
      "grad_norm": 1.6903792913472557,
      "learning_rate": 7.866398912834357e-06,
      "loss": 0.981,
      "step": 245
    },
    {
      "epoch": 0.33004905454697914,
      "grad_norm": 1.5756216364889237,
      "learning_rate": 7.848570426320918e-06,
      "loss": 0.9705,
      "step": 246
    },
    {
      "epoch": 0.33139071737034087,
      "grad_norm": 1.3681608333123005,
      "learning_rate": 7.830688156261927e-06,
      "loss": 0.9794,
      "step": 247
    },
    {
      "epoch": 0.33273238019370255,
      "grad_norm": 1.4816370422331064,
      "learning_rate": 7.812752440290568e-06,
      "loss": 0.9524,
      "step": 248
    },
    {
      "epoch": 0.3340740430170643,
      "grad_norm": 1.5218600193401652,
      "learning_rate": 7.794763617049124e-06,
      "loss": 0.9618,
      "step": 249
    },
    {
      "epoch": 0.33541570584042596,
      "grad_norm": 1.4743104016838262,
      "learning_rate": 7.776722026182588e-06,
      "loss": 0.9744,
      "step": 250
    },
    {
      "epoch": 0.3367573686637877,
      "grad_norm": 1.5788453081737652,
      "learning_rate": 7.758628008332262e-06,
      "loss": 0.9665,
      "step": 251
    },
    {
      "epoch": 0.33809903148714937,
      "grad_norm": 1.5865308150096813,
      "learning_rate": 7.740481905129307e-06,
      "loss": 0.9609,
      "step": 252
    },
    {
      "epoch": 0.3394406943105111,
      "grad_norm": 1.3752322651899374,
      "learning_rate": 7.722284059188308e-06,
      "loss": 0.9454,
      "step": 253
    },
    {
      "epoch": 0.3407823571338728,
      "grad_norm": 1.5628688780654367,
      "learning_rate": 7.704034814100798e-06,
      "loss": 0.9453,
      "step": 254
    },
    {
      "epoch": 0.3421240199572345,
      "grad_norm": 1.3995041103502648,
      "learning_rate": 7.685734514428767e-06,
      "loss": 0.9712,
      "step": 255
    },
    {
      "epoch": 0.3434656827805962,
      "grad_norm": 1.5469450835726162,
      "learning_rate": 7.667383505698165e-06,
      "loss": 0.9602,
      "step": 256
    },
    {
      "epoch": 0.3448073456039579,
      "grad_norm": 1.4254233488403434,
      "learning_rate": 7.648982134392378e-06,
      "loss": 0.9476,
      "step": 257
    },
    {
      "epoch": 0.3461490084273196,
      "grad_norm": 1.4496312090906014,
      "learning_rate": 7.630530747945672e-06,
      "loss": 0.9447,
      "step": 258
    },
    {
      "epoch": 0.34749067125068134,
      "grad_norm": 1.5432996608177838,
      "learning_rate": 7.612029694736652e-06,
      "loss": 0.953,
      "step": 259
    },
    {
      "epoch": 0.348832334074043,
      "grad_norm": 1.4528733449175,
      "learning_rate": 7.593479324081672e-06,
      "loss": 0.9779,
      "step": 260
    },
    {
      "epoch": 0.3501739968974047,
      "grad_norm": 1.585347953236908,
      "learning_rate": 7.574879986228245e-06,
      "loss": 0.9596,
      "step": 261
    },
    {
      "epoch": 0.3515156597207664,
      "grad_norm": 2.2500708161081877,
      "learning_rate": 7.556232032348429e-06,
      "loss": 0.9627,
      "step": 262
    },
    {
      "epoch": 0.3528573225441281,
      "grad_norm": 1.5763755196223852,
      "learning_rate": 7.537535814532191e-06,
      "loss": 0.9607,
      "step": 263
    },
    {
      "epoch": 0.35419898536748984,
      "grad_norm": 1.771846845155408,
      "learning_rate": 7.518791685780769e-06,
      "loss": 0.9434,
      "step": 264
    },
    {
      "epoch": 0.3555406481908515,
      "grad_norm": 1.5224566041251197,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9691,
      "step": 265
    },
    {
      "epoch": 0.35688231101421325,
      "grad_norm": 1.4640993296882345,
      "learning_rate": 7.481161111993642e-06,
      "loss": 0.9595,
      "step": 266
    },
    {
      "epoch": 0.3582239738375749,
      "grad_norm": 1.6809152959115363,
      "learning_rate": 7.462275377456671e-06,
      "loss": 0.9563,
      "step": 267
    },
    {
      "epoch": 0.35956563666093666,
      "grad_norm": 1.6675443282439029,
      "learning_rate": 7.443343152968566e-06,
      "loss": 0.9785,
      "step": 268
    },
    {
      "epoch": 0.36090729948429834,
      "grad_norm": 1.4792935920603045,
      "learning_rate": 7.424364795986582e-06,
      "loss": 0.9609,
      "step": 269
    },
    {
      "epoch": 0.36224896230766007,
      "grad_norm": 1.5483209952088646,
      "learning_rate": 7.405340664838994e-06,
      "loss": 0.9485,
      "step": 270
    },
    {
      "epoch": 0.36359062513102175,
      "grad_norm": 1.4340034591760151,
      "learning_rate": 7.38627111871833e-06,
      "loss": 0.9534,
      "step": 271
    },
    {
      "epoch": 0.3649322879543835,
      "grad_norm": 1.613866321606026,
      "learning_rate": 7.3671565176746025e-06,
      "loss": 0.9725,
      "step": 272
    },
    {
      "epoch": 0.36627395077774516,
      "grad_norm": 1.4824243681022105,
      "learning_rate": 7.3479972226084925e-06,
      "loss": 0.958,
      "step": 273
    },
    {
      "epoch": 0.3676156136011069,
      "grad_norm": 1.4427286368890009,
      "learning_rate": 7.328793595264549e-06,
      "loss": 0.9435,
      "step": 274
    },
    {
      "epoch": 0.36895727642446857,
      "grad_norm": 1.3805561786232567,
      "learning_rate": 7.309545998224351e-06,
      "loss": 0.9458,
      "step": 275
    },
    {
      "epoch": 0.3702989392478303,
      "grad_norm": 1.8516381642259134,
      "learning_rate": 7.290254794899665e-06,
      "loss": 0.9678,
      "step": 276
    },
    {
      "epoch": 0.371640602071192,
      "grad_norm": 1.5815466257148956,
      "learning_rate": 7.270920349525584e-06,
      "loss": 0.9599,
      "step": 277
    },
    {
      "epoch": 0.37298226489455366,
      "grad_norm": 1.3960905267997115,
      "learning_rate": 7.2515430271536495e-06,
      "loss": 0.9484,
      "step": 278
    },
    {
      "epoch": 0.3743239277179154,
      "grad_norm": 1.5277524276047343,
      "learning_rate": 7.232123193644957e-06,
      "loss": 0.9603,
      "step": 279
    },
    {
      "epoch": 0.3756655905412771,
      "grad_norm": 1.3938189518246926,
      "learning_rate": 7.212661215663252e-06,
      "loss": 0.9435,
      "step": 280
    },
    {
      "epoch": 0.3770072533646388,
      "grad_norm": 1.5608424878074654,
      "learning_rate": 7.193157460668005e-06,
      "loss": 0.9385,
      "step": 281
    },
    {
      "epoch": 0.3783489161880005,
      "grad_norm": 1.3994956171633623,
      "learning_rate": 7.173612296907473e-06,
      "loss": 0.9508,
      "step": 282
    },
    {
      "epoch": 0.3796905790113622,
      "grad_norm": 1.4666586737314051,
      "learning_rate": 7.154026093411747e-06,
      "loss": 0.9671,
      "step": 283
    },
    {
      "epoch": 0.3810322418347239,
      "grad_norm": 1.4337201807932214,
      "learning_rate": 7.134399219985786e-06,
      "loss": 0.9666,
      "step": 284
    },
    {
      "epoch": 0.38237390465808563,
      "grad_norm": 1.4728179477625383,
      "learning_rate": 7.114732047202433e-06,
      "loss": 0.9671,
      "step": 285
    },
    {
      "epoch": 0.3837155674814473,
      "grad_norm": 1.360568477796925,
      "learning_rate": 7.095024946395419e-06,
      "loss": 0.9571,
      "step": 286
    },
    {
      "epoch": 0.38505723030480904,
      "grad_norm": 1.5631552939635314,
      "learning_rate": 7.075278289652349e-06,
      "loss": 0.9562,
      "step": 287
    },
    {
      "epoch": 0.3863988931281707,
      "grad_norm": 1.566734610660415,
      "learning_rate": 7.055492449807684e-06,
      "loss": 0.9368,
      "step": 288
    },
    {
      "epoch": 0.38774055595153245,
      "grad_norm": 1.4515435817786102,
      "learning_rate": 7.035667800435694e-06,
      "loss": 0.9416,
      "step": 289
    },
    {
      "epoch": 0.38908221877489413,
      "grad_norm": 1.5035435190437387,
      "learning_rate": 7.015804715843404e-06,
      "loss": 0.9462,
      "step": 290
    },
    {
      "epoch": 0.39042388159825586,
      "grad_norm": 1.6249008432001628,
      "learning_rate": 6.995903571063541e-06,
      "loss": 0.9507,
      "step": 291
    },
    {
      "epoch": 0.39176554442161754,
      "grad_norm": 1.5168673161442563,
      "learning_rate": 6.975964741847427e-06,
      "loss": 0.9626,
      "step": 292
    },
    {
      "epoch": 0.3931072072449793,
      "grad_norm": 1.4864183283216745,
      "learning_rate": 6.955988604657914e-06,
      "loss": 0.9436,
      "step": 293
    },
    {
      "epoch": 0.39444887006834095,
      "grad_norm": 1.398353146411846,
      "learning_rate": 6.935975536662254e-06,
      "loss": 0.943,
      "step": 294
    },
    {
      "epoch": 0.39579053289170263,
      "grad_norm": 1.4161271859037246,
      "learning_rate": 6.915925915724987e-06,
      "loss": 0.928,
      "step": 295
    },
    {
      "epoch": 0.39713219571506436,
      "grad_norm": 1.6833911607161969,
      "learning_rate": 6.895840120400804e-06,
      "loss": 0.9329,
      "step": 296
    },
    {
      "epoch": 0.39847385853842604,
      "grad_norm": 1.487710183205637,
      "learning_rate": 6.875718529927404e-06,
      "loss": 0.9466,
      "step": 297
    },
    {
      "epoch": 0.3998155213617878,
      "grad_norm": 1.549467448109303,
      "learning_rate": 6.855561524218326e-06,
      "loss": 0.9821,
      "step": 298
    },
    {
      "epoch": 0.40115718418514945,
      "grad_norm": 1.3745294880173118,
      "learning_rate": 6.835369483855783e-06,
      "loss": 0.9161,
      "step": 299
    },
    {
      "epoch": 0.4024988470085112,
      "grad_norm": 1.4224517987313379,
      "learning_rate": 6.815142790083473e-06,
      "loss": 0.9469,
      "step": 300
    },
    {
      "epoch": 0.40384050983187286,
      "grad_norm": 1.5056244514305563,
      "learning_rate": 6.79488182479938e-06,
      "loss": 0.9662,
      "step": 301
    },
    {
      "epoch": 0.4051821726552346,
      "grad_norm": 1.4054898081789005,
      "learning_rate": 6.774586970548567e-06,
      "loss": 0.9489,
      "step": 302
    },
    {
      "epoch": 0.4065238354785963,
      "grad_norm": 1.4511626851951822,
      "learning_rate": 6.754258610515949e-06,
      "loss": 0.9496,
      "step": 303
    },
    {
      "epoch": 0.407865498301958,
      "grad_norm": 3.3319586578353655,
      "learning_rate": 6.73389712851906e-06,
      "loss": 0.9405,
      "step": 304
    },
    {
      "epoch": 0.4092071611253197,
      "grad_norm": 2.1898520431778246,
      "learning_rate": 6.713502909000808e-06,
      "loss": 0.923,
      "step": 305
    },
    {
      "epoch": 0.4105488239486814,
      "grad_norm": 1.4488374448148915,
      "learning_rate": 6.6930763370222104e-06,
      "loss": 0.9751,
      "step": 306
    },
    {
      "epoch": 0.4118904867720431,
      "grad_norm": 1.4191601427296872,
      "learning_rate": 6.672617798255135e-06,
      "loss": 0.9398,
      "step": 307
    },
    {
      "epoch": 0.41323214959540483,
      "grad_norm": 1.428689176709015,
      "learning_rate": 6.652127678975002e-06,
      "loss": 0.941,
      "step": 308
    },
    {
      "epoch": 0.4145738124187665,
      "grad_norm": 1.4795595116700593,
      "learning_rate": 6.631606366053507e-06,
      "loss": 0.9466,
      "step": 309
    },
    {
      "epoch": 0.4159154752421282,
      "grad_norm": 1.51175705146348,
      "learning_rate": 6.6110542469513065e-06,
      "loss": 0.9384,
      "step": 310
    },
    {
      "epoch": 0.4172571380654899,
      "grad_norm": 1.4180114835407127,
      "learning_rate": 6.590471709710704e-06,
      "loss": 0.9413,
      "step": 311
    },
    {
      "epoch": 0.4185988008888516,
      "grad_norm": 1.4988278891464717,
      "learning_rate": 6.5698591429483286e-06,
      "loss": 0.9516,
      "step": 312
    },
    {
      "epoch": 0.41994046371221333,
      "grad_norm": 1.5791863550483363,
      "learning_rate": 6.549216935847786e-06,
      "loss": 0.9372,
      "step": 313
    },
    {
      "epoch": 0.421282126535575,
      "grad_norm": 1.620573210066323,
      "learning_rate": 6.52854547815233e-06,
      "loss": 0.9286,
      "step": 314
    },
    {
      "epoch": 0.42262378935893674,
      "grad_norm": 1.4040060176512321,
      "learning_rate": 6.507845160157476e-06,
      "loss": 0.951,
      "step": 315
    },
    {
      "epoch": 0.4239654521822984,
      "grad_norm": 1.6033577516875774,
      "learning_rate": 6.487116372703663e-06,
      "loss": 0.9546,
      "step": 316
    },
    {
      "epoch": 0.42530711500566015,
      "grad_norm": 1.530825634158125,
      "learning_rate": 6.466359507168849e-06,
      "loss": 0.9407,
      "step": 317
    },
    {
      "epoch": 0.42664877782902183,
      "grad_norm": 1.462628543689146,
      "learning_rate": 6.445574955461134e-06,
      "loss": 0.9346,
      "step": 318
    },
    {
      "epoch": 0.42799044065238356,
      "grad_norm": 1.4846164727489912,
      "learning_rate": 6.424763110011356e-06,
      "loss": 0.9512,
      "step": 319
    },
    {
      "epoch": 0.42933210347574524,
      "grad_norm": 1.6348569990232538,
      "learning_rate": 6.403924363765686e-06,
      "loss": 0.9426,
      "step": 320
    },
    {
      "epoch": 0.430673766299107,
      "grad_norm": 1.3033058347347706,
      "learning_rate": 6.383059110178205e-06,
      "loss": 0.9367,
      "step": 321
    },
    {
      "epoch": 0.43201542912246865,
      "grad_norm": 1.3976319401939663,
      "learning_rate": 6.362167743203474e-06,
      "loss": 0.9478,
      "step": 322
    },
    {
      "epoch": 0.4333570919458304,
      "grad_norm": 2.240174630411269,
      "learning_rate": 6.3412506572891e-06,
      "loss": 0.9493,
      "step": 323
    },
    {
      "epoch": 0.43469875476919206,
      "grad_norm": 1.3498981951443567,
      "learning_rate": 6.320308247368285e-06,
      "loss": 0.9352,
      "step": 324
    },
    {
      "epoch": 0.4360404175925538,
      "grad_norm": 1.5036960555030219,
      "learning_rate": 6.2993409088523774e-06,
      "loss": 0.9537,
      "step": 325
    },
    {
      "epoch": 0.4373820804159155,
      "grad_norm": 1.4501884398705591,
      "learning_rate": 6.278349037623388e-06,
      "loss": 0.9389,
      "step": 326
    },
    {
      "epoch": 0.43872374323927715,
      "grad_norm": 1.8643264437967426,
      "learning_rate": 6.2573330300265375e-06,
      "loss": 0.9589,
      "step": 327
    },
    {
      "epoch": 0.4400654060626389,
      "grad_norm": 1.6346684956995083,
      "learning_rate": 6.236293282862756e-06,
      "loss": 0.95,
      "step": 328
    },
    {
      "epoch": 0.44140706888600056,
      "grad_norm": 1.468976590448429,
      "learning_rate": 6.215230193381203e-06,
      "loss": 0.9173,
      "step": 329
    },
    {
      "epoch": 0.4427487317093623,
      "grad_norm": 1.570877242399236,
      "learning_rate": 6.1941441592717564e-06,
      "loss": 0.9437,
      "step": 330
    },
    {
      "epoch": 0.444090394532724,
      "grad_norm": 1.4652863203353141,
      "learning_rate": 6.173035578657513e-06,
      "loss": 0.9316,
      "step": 331
    },
    {
      "epoch": 0.4454320573560857,
      "grad_norm": 1.3924224386568063,
      "learning_rate": 6.151904850087265e-06,
      "loss": 0.9435,
      "step": 332
    },
    {
      "epoch": 0.4467737201794474,
      "grad_norm": 1.4651748132936537,
      "learning_rate": 6.130752372527981e-06,
      "loss": 0.9252,
      "step": 333
    },
    {
      "epoch": 0.4481153830028091,
      "grad_norm": 1.4286663324788682,
      "learning_rate": 6.109578545357268e-06,
      "loss": 0.9457,
      "step": 334
    },
    {
      "epoch": 0.4494570458261708,
      "grad_norm": 1.740501083768386,
      "learning_rate": 6.088383768355829e-06,
      "loss": 0.9432,
      "step": 335
    },
    {
      "epoch": 0.45079870864953253,
      "grad_norm": 1.4961395872275942,
      "learning_rate": 6.067168441699927e-06,
      "loss": 0.9676,
      "step": 336
    },
    {
      "epoch": 0.4521403714728942,
      "grad_norm": 1.3752313492049115,
      "learning_rate": 6.045932965953813e-06,
      "loss": 0.9433,
      "step": 337
    },
    {
      "epoch": 0.45348203429625594,
      "grad_norm": 1.3820484972229559,
      "learning_rate": 6.024677742062176e-06,
      "loss": 0.944,
      "step": 338
    },
    {
      "epoch": 0.4548236971196176,
      "grad_norm": 2.0893492032041827,
      "learning_rate": 6.0034031713425636e-06,
      "loss": 0.9434,
      "step": 339
    },
    {
      "epoch": 0.45616535994297935,
      "grad_norm": 1.549871037483782,
      "learning_rate": 5.98210965547781e-06,
      "loss": 0.9748,
      "step": 340
    },
    {
      "epoch": 0.45750702276634103,
| "grad_norm": 1.5298386519614102, | |
| "learning_rate": 5.960797596508453e-06, | |
| "loss": 0.9508, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.45884868558970276, | |
| "grad_norm": 1.413672937879993, | |
| "learning_rate": 5.939467396825137e-06, | |
| "loss": 0.9397, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.46019034841306444, | |
| "grad_norm": 1.4329767730242147, | |
| "learning_rate": 5.918119459161021e-06, | |
| "loss": 0.9666, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.4615320112364261, | |
| "grad_norm": 1.3700806796060583, | |
| "learning_rate": 5.896754186584173e-06, | |
| "loss": 0.9415, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.46287367405978785, | |
| "grad_norm": 1.6410590057880081, | |
| "learning_rate": 5.875371982489959e-06, | |
| "loss": 0.9405, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.46421533688314953, | |
| "grad_norm": 1.4182156518065911, | |
| "learning_rate": 5.8539732505934266e-06, | |
| "loss": 0.9198, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.46555699970651127, | |
| "grad_norm": 1.4338567504194042, | |
| "learning_rate": 5.832558394921688e-06, | |
| "loss": 0.9207, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.46689866252987294, | |
| "grad_norm": 1.4666500410104721, | |
| "learning_rate": 5.811127819806277e-06, | |
| "loss": 0.9286, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.4682403253532347, | |
| "grad_norm": 1.3913442891866983, | |
| "learning_rate": 5.7896819298755346e-06, | |
| "loss": 0.9398, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.46958198817659635, | |
| "grad_norm": 1.944673612051866, | |
| "learning_rate": 5.768221130046954e-06, | |
| "loss": 0.9165, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.4709236509999581, | |
| "grad_norm": 1.9768029373235239, | |
| "learning_rate": 5.746745825519539e-06, | |
| "loss": 0.9324, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.47226531382331977, | |
| "grad_norm": 1.522618389240632, | |
| "learning_rate": 5.725256421766158e-06, | |
| "loss": 0.9353, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.4736069766466815, | |
| "grad_norm": 1.3101805254428442, | |
| "learning_rate": 5.703753324525885e-06, | |
| "loss": 0.9294, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.4749486394700432, | |
| "grad_norm": 1.4723714938359946, | |
| "learning_rate": 5.682236939796337e-06, | |
| "loss": 0.9263, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.4762903022934049, | |
| "grad_norm": 1.4879677997320575, | |
| "learning_rate": 5.660707673826016e-06, | |
| "loss": 0.9187, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.4776319651167666, | |
| "grad_norm": 1.3341527207031307, | |
| "learning_rate": 5.639165933106626e-06, | |
| "loss": 0.9324, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.4789736279401283, | |
| "grad_norm": 1.4232186684864636, | |
| "learning_rate": 5.617612124365411e-06, | |
| "loss": 0.945, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.48031529076349, | |
| "grad_norm": 1.5777179718248369, | |
| "learning_rate": 5.596046654557467e-06, | |
| "loss": 0.9165, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.4816569535868517, | |
| "grad_norm": 1.4509824338743527, | |
| "learning_rate": 5.574469930858062e-06, | |
| "loss": 0.9367, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.4829986164102134, | |
| "grad_norm": 1.43523407700473, | |
| "learning_rate": 5.55288236065495e-06, | |
| "loss": 0.9381, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.4843402792335751, | |
| "grad_norm": 1.2941141889852268, | |
| "learning_rate": 5.5312843515406725e-06, | |
| "loss": 0.9294, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.4856819420569368, | |
| "grad_norm": 3.499123796935252, | |
| "learning_rate": 5.509676311304869e-06, | |
| "loss": 0.9485, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.4870236048802985, | |
| "grad_norm": 1.4060828658646398, | |
| "learning_rate": 5.4880586479265774e-06, | |
| "loss": 0.9334, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.48836526770366023, | |
| "grad_norm": 1.5632703720203085, | |
| "learning_rate": 5.466431769566525e-06, | |
| "loss": 0.9317, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.4897069305270219, | |
| "grad_norm": 1.3794201306987215, | |
| "learning_rate": 5.4447960845594286e-06, | |
| "loss": 0.9581, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.49104859335038364, | |
| "grad_norm": 1.4139859532152406, | |
| "learning_rate": 5.423152001406282e-06, | |
| "loss": 0.9272, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.4923902561737453, | |
| "grad_norm": 1.3607081182885088, | |
| "learning_rate": 5.401499928766644e-06, | |
| "loss": 0.9083, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.49373191899710706, | |
| "grad_norm": 1.3139396936274126, | |
| "learning_rate": 5.379840275450922e-06, | |
| "loss": 0.9125, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.49507358182046873, | |
| "grad_norm": 1.720881483335334, | |
| "learning_rate": 5.358173450412649e-06, | |
| "loss": 0.9284, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.49641524464383047, | |
| "grad_norm": 1.4961481743297107, | |
| "learning_rate": 5.3364998627407746e-06, | |
| "loss": 0.9333, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.49775690746719214, | |
| "grad_norm": 1.291001773361284, | |
| "learning_rate": 5.314819921651928e-06, | |
| "loss": 0.9297, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.4990985702905539, | |
| "grad_norm": 1.4874335774584129, | |
| "learning_rate": 5.293134036482697e-06, | |
| "loss": 0.926, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5004402331139156, | |
| "grad_norm": 1.3249150101616787, | |
| "learning_rate": 5.271442616681898e-06, | |
| "loss": 0.9477, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5017818959372773, | |
| "grad_norm": 1.4314814462436254, | |
| "learning_rate": 5.249746071802849e-06, | |
| "loss": 0.9158, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.503123558760639, | |
| "grad_norm": 1.4850215366796362, | |
| "learning_rate": 5.228044811495632e-06, | |
| "loss": 0.9139, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5044652215840006, | |
| "grad_norm": 1.4346788643421962, | |
| "learning_rate": 5.2063392454993585e-06, | |
| "loss": 0.9275, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5058068844073623, | |
| "grad_norm": 1.3913557969729673, | |
| "learning_rate": 5.184629783634441e-06, | |
| "loss": 0.9252, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5071485472307241, | |
| "grad_norm": 1.3664085740854548, | |
| "learning_rate": 5.162916835794843e-06, | |
| "loss": 0.9318, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5084902100540858, | |
| "grad_norm": 1.32531510438658, | |
| "learning_rate": 5.141200811940352e-06, | |
| "loss": 0.9163, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5098318728774475, | |
| "grad_norm": 1.4833763087565515, | |
| "learning_rate": 5.119482122088828e-06, | |
| "loss": 0.9437, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5111735357008091, | |
| "grad_norm": 1.455536383105933, | |
| "learning_rate": 5.097761176308471e-06, | |
| "loss": 0.9353, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5125151985241709, | |
| "grad_norm": 1.4018575883339595, | |
| "learning_rate": 5.076038384710077e-06, | |
| "loss": 0.9378, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5138568613475326, | |
| "grad_norm": 1.529443325382905, | |
| "learning_rate": 5.054314157439286e-06, | |
| "loss": 0.9518, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5151985241708943, | |
| "grad_norm": 1.3333546144124035, | |
| "learning_rate": 5.032588904668851e-06, | |
| "loss": 0.9121, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.516540186994256, | |
| "grad_norm": 1.4024032505431785, | |
| "learning_rate": 5.010863036590885e-06, | |
| "loss": 0.92, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5178818498176178, | |
| "grad_norm": 1.3464025585023112, | |
| "learning_rate": 4.989136963409117e-06, | |
| "loss": 0.933, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.5192235126409794, | |
| "grad_norm": 1.380359478548665, | |
| "learning_rate": 4.967411095331149e-06, | |
| "loss": 0.9157, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.5205651754643411, | |
| "grad_norm": 1.4753034718726812, | |
| "learning_rate": 4.945685842560715e-06, | |
| "loss": 0.9145, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.5219068382877028, | |
| "grad_norm": 1.6175730490704796, | |
| "learning_rate": 4.923961615289924e-06, | |
| "loss": 0.9269, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.5232485011110646, | |
| "grad_norm": 1.3437231689169467, | |
| "learning_rate": 4.9022388236915306e-06, | |
| "loss": 0.953, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.5245901639344263, | |
| "grad_norm": 1.3084234752713977, | |
| "learning_rate": 4.880517877911173e-06, | |
| "loss": 0.9183, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.5259318267577879, | |
| "grad_norm": 1.5145801721587038, | |
| "learning_rate": 4.858799188059651e-06, | |
| "loss": 0.9367, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.5272734895811496, | |
| "grad_norm": 1.4077045421921577, | |
| "learning_rate": 4.837083164205159e-06, | |
| "loss": 0.9345, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.5286151524045113, | |
| "grad_norm": 1.4157771272811692, | |
| "learning_rate": 4.815370216365562e-06, | |
| "loss": 0.942, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.5299568152278731, | |
| "grad_norm": 1.2955482603917654, | |
| "learning_rate": 4.793660754500644e-06, | |
| "loss": 0.944, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.5312984780512348, | |
| "grad_norm": 1.4171045612753872, | |
| "learning_rate": 4.771955188504371e-06, | |
| "loss": 0.9277, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.5326401408745964, | |
| "grad_norm": 1.7506953727580603, | |
| "learning_rate": 4.75025392819715e-06, | |
| "loss": 0.9593, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.5339818036979581, | |
| "grad_norm": 1.3630627845724446, | |
| "learning_rate": 4.7285573833181016e-06, | |
| "loss": 0.929, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.5353234665213199, | |
| "grad_norm": 1.4941901177118797, | |
| "learning_rate": 4.7068659635173034e-06, | |
| "loss": 0.938, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.5366651293446816, | |
| "grad_norm": 1.3837582089973754, | |
| "learning_rate": 4.685180078348072e-06, | |
| "loss": 0.9272, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.5380067921680433, | |
| "grad_norm": 1.6311738523492914, | |
| "learning_rate": 4.663500137259226e-06, | |
| "loss": 0.9257, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.5393484549914049, | |
| "grad_norm": 1.7826160967355433, | |
| "learning_rate": 4.641826549587352e-06, | |
| "loss": 0.9209, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.5406901178147667, | |
| "grad_norm": 1.4426572030982026, | |
| "learning_rate": 4.62015972454908e-06, | |
| "loss": 0.9494, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.5420317806381284, | |
| "grad_norm": 1.5231558399676566, | |
| "learning_rate": 4.598500071233358e-06, | |
| "loss": 0.9165, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.5433734434614901, | |
| "grad_norm": 1.2912843216495555, | |
| "learning_rate": 4.57684799859372e-06, | |
| "loss": 0.9327, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.5447151062848518, | |
| "grad_norm": 1.3285466026775592, | |
| "learning_rate": 4.555203915440573e-06, | |
| "loss": 0.9158, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.5460567691082135, | |
| "grad_norm": 1.4181531717310327, | |
| "learning_rate": 4.533568230433477e-06, | |
| "loss": 0.8959, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.5473984319315752, | |
| "grad_norm": 1.3759849226932546, | |
| "learning_rate": 4.511941352073424e-06, | |
| "loss": 0.9203, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.5487400947549369, | |
| "grad_norm": 1.5030252203208716, | |
| "learning_rate": 4.4903236886951315e-06, | |
| "loss": 0.9218, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.5500817575782986, | |
| "grad_norm": 1.4120496050942768, | |
| "learning_rate": 4.468715648459329e-06, | |
| "loss": 0.9271, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.5514234204016603, | |
| "grad_norm": 1.6150466333531532, | |
| "learning_rate": 4.447117639345052e-06, | |
| "loss": 0.9352, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.552765083225022, | |
| "grad_norm": 1.3313158710584112, | |
| "learning_rate": 4.42553006914194e-06, | |
| "loss": 0.9424, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.5541067460483837, | |
| "grad_norm": 1.4173966801171765, | |
| "learning_rate": 4.403953345442535e-06, | |
| "loss": 0.9059, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.5554484088717454, | |
| "grad_norm": 1.4219127140605912, | |
| "learning_rate": 4.382387875634592e-06, | |
| "loss": 0.8982, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.5567900716951071, | |
| "grad_norm": 1.4807768234573824, | |
| "learning_rate": 4.360834066893376e-06, | |
| "loss": 0.9112, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.5581317345184689, | |
| "grad_norm": 1.4934876617789734, | |
| "learning_rate": 4.339292326173987e-06, | |
| "loss": 0.9394, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.5594733973418305, | |
| "grad_norm": 1.2964568666580691, | |
| "learning_rate": 4.317763060203665e-06, | |
| "loss": 0.9087, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.5608150601651922, | |
| "grad_norm": 1.5071257220649843, | |
| "learning_rate": 4.296246675474119e-06, | |
| "loss": 0.9273, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.5621567229885539, | |
| "grad_norm": 1.389386966271088, | |
| "learning_rate": 4.274743578233846e-06, | |
| "loss": 0.9264, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.5634983858119157, | |
| "grad_norm": 1.7541684817806704, | |
| "learning_rate": 4.253254174480462e-06, | |
| "loss": 0.9148, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.5648400486352774, | |
| "grad_norm": 1.6521682610990946, | |
| "learning_rate": 4.231778869953047e-06, | |
| "loss": 0.9324, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.566181711458639, | |
| "grad_norm": 1.4036410472707417, | |
| "learning_rate": 4.210318070124465e-06, | |
| "loss": 0.9175, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.5675233742820007, | |
| "grad_norm": 1.4760260423378482, | |
| "learning_rate": 4.188872180193723e-06, | |
| "loss": 0.9301, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.5688650371053624, | |
| "grad_norm": 1.4915112006125035, | |
| "learning_rate": 4.167441605078314e-06, | |
| "loss": 0.9192, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.5702066999287242, | |
| "grad_norm": 1.4529343235561802, | |
| "learning_rate": 4.146026749406574e-06, | |
| "loss": 0.9237, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.5715483627520859, | |
| "grad_norm": 1.5441157122225924, | |
| "learning_rate": 4.124628017510043e-06, | |
| "loss": 0.9382, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.5728900255754475, | |
| "grad_norm": 1.4800738041606645, | |
| "learning_rate": 4.10324581341583e-06, | |
| "loss": 0.9259, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.5742316883988092, | |
| "grad_norm": 1.244794516407526, | |
| "learning_rate": 4.081880540838981e-06, | |
| "loss": 0.9147, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.575573351222171, | |
| "grad_norm": 1.9868472108932922, | |
| "learning_rate": 4.060532603174865e-06, | |
| "loss": 0.9249, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.5769150140455327, | |
| "grad_norm": 2.043155859552025, | |
| "learning_rate": 4.039202403491548e-06, | |
| "loss": 0.914, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.5782566768688944, | |
| "grad_norm": 1.4635852855825318, | |
| "learning_rate": 4.0178903445221905e-06, | |
| "loss": 0.9268, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.579598339692256, | |
| "grad_norm": 1.4320508916527583, | |
| "learning_rate": 3.996596828657437e-06, | |
| "loss": 0.9288, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.5809400025156178, | |
| "grad_norm": 1.6077840908976693, | |
| "learning_rate": 3.975322257937825e-06, | |
| "loss": 0.93, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.5822816653389795, | |
| "grad_norm": 1.3697545321413767, | |
| "learning_rate": 3.9540670340461875e-06, | |
| "loss": 0.9037, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.5836233281623412, | |
| "grad_norm": 1.509438713977569, | |
| "learning_rate": 3.932831558300074e-06, | |
| "loss": 0.9233, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.5849649909857029, | |
| "grad_norm": 1.4813174251922685, | |
| "learning_rate": 3.911616231644172e-06, | |
| "loss": 0.9214, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.5863066538090647, | |
| "grad_norm": 1.3554393686833108, | |
| "learning_rate": 3.8904214546427355e-06, | |
| "loss": 0.9217, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.5876483166324263, | |
| "grad_norm": 1.4327778536512215, | |
| "learning_rate": 3.869247627472021e-06, | |
| "loss": 0.9381, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.588989979455788, | |
| "grad_norm": 1.4341323896035245, | |
| "learning_rate": 3.848095149912737e-06, | |
| "loss": 0.9346, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.5903316422791497, | |
| "grad_norm": 1.4824059212863905, | |
| "learning_rate": 3.82696442134249e-06, | |
| "loss": 0.9436, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.5916733051025114, | |
| "grad_norm": 1.4570331826042282, | |
| "learning_rate": 3.8058558407282465e-06, | |
| "loss": 0.9106, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.5930149679258732, | |
| "grad_norm": 1.308814793199382, | |
| "learning_rate": 3.7847698066187975e-06, | |
| "loss": 0.9165, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.5943566307492348, | |
| "grad_norm": 1.378524196980017, | |
| "learning_rate": 3.7637067171372437e-06, | |
| "loss": 0.9233, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.5956982935725965, | |
| "grad_norm": 1.4319296775411137, | |
| "learning_rate": 3.742666969973463e-06, | |
| "loss": 0.9328, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.5970399563959582, | |
| "grad_norm": 1.3595089447285513, | |
| "learning_rate": 3.721650962376612e-06, | |
| "loss": 0.9237, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.59838161921932, | |
| "grad_norm": 1.3018802984460596, | |
| "learning_rate": 3.7006590911476242e-06, | |
| "loss": 0.9075, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.5997232820426817, | |
| "grad_norm": 1.3741192831894566, | |
| "learning_rate": 3.6796917526317153e-06, | |
| "loss": 0.9242, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6010649448660433, | |
| "grad_norm": 1.5039382896600555, | |
| "learning_rate": 3.6587493427109015e-06, | |
| "loss": 0.9005, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.602406607689405, | |
| "grad_norm": 2.594160580170954, | |
| "learning_rate": 3.6378322567965284e-06, | |
| "loss": 0.9419, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6037482705127668, | |
| "grad_norm": 1.5181333868507627, | |
| "learning_rate": 3.6169408898217973e-06, | |
| "loss": 0.9126, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.6050899333361285, | |
| "grad_norm": 1.2916775825580151, | |
| "learning_rate": 3.596075636234315e-06, | |
| "loss": 0.9203, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.6064315961594902, | |
| "grad_norm": 1.3505432254066787, | |
| "learning_rate": 3.575236889988646e-06, | |
| "loss": 0.9098, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.6077732589828518, | |
| "grad_norm": 1.4144638543833534, | |
| "learning_rate": 3.554425044538868e-06, | |
| "loss": 0.9233, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.6091149218062136, | |
| "grad_norm": 1.374872140252449, | |
| "learning_rate": 3.5336404928311518e-06, | |
| "loss": 0.9023, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.6104565846295753, | |
| "grad_norm": 1.361948134387405, | |
| "learning_rate": 3.5128836272963377e-06, | |
| "loss": 0.9235, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.611798247452937, | |
| "grad_norm": 1.5371710757092296, | |
| "learning_rate": 3.4921548398425246e-06, | |
| "loss": 0.9306, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.6131399102762987, | |
| "grad_norm": 1.3466506760555965, | |
| "learning_rate": 3.4714545218476727e-06, | |
| "loss": 0.922, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.6144815730996603, | |
| "grad_norm": 1.4262251212000858, | |
| "learning_rate": 3.4507830641522144e-06, | |
| "loss": 0.9236, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.6158232359230221, | |
| "grad_norm": 1.3179672517423986, | |
| "learning_rate": 3.430140857051675e-06, | |
| "loss": 0.9231, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.6171648987463838, | |
| "grad_norm": 1.2130034269192478, | |
| "learning_rate": 3.4095282902892982e-06, | |
| "loss": 0.9127, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.6185065615697455, | |
| "grad_norm": 1.251798831435258, | |
| "learning_rate": 3.388945753048697e-06, | |
| "loss": 0.8986, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.6198482243931072, | |
| "grad_norm": 1.3580683945289906, | |
| "learning_rate": 3.3683936339464957e-06, | |
| "loss": 0.9437, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.621189887216469, | |
| "grad_norm": 1.4318866735343905, | |
| "learning_rate": 3.3478723210250006e-06, | |
| "loss": 0.911, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.6225315500398306, | |
| "grad_norm": 1.4492417004844953, | |
| "learning_rate": 3.3273822017448683e-06, | |
| "loss": 0.9233, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.6238732128631923, | |
| "grad_norm": 1.32995831571142, | |
| "learning_rate": 3.306923662977789e-06, | |
| "loss": 0.9054, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.625214875686554, | |
| "grad_norm": 1.3917139306065034, | |
| "learning_rate": 3.2864970909991927e-06, | |
| "loss": 0.9088, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.6265565385099158, | |
| "grad_norm": 1.326251399568166, | |
| "learning_rate": 3.2661028714809405e-06, | |
| "loss": 0.9164, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.6278982013332775, | |
| "grad_norm": 1.362806929059327, | |
| "learning_rate": 3.2457413894840516e-06, | |
| "loss": 0.8841, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.6292398641566391, | |
| "grad_norm": 1.5947392064771095, | |
| "learning_rate": 3.225413029451434e-06, | |
| "loss": 0.9128, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.6305815269800008, | |
| "grad_norm": 1.816586346006587, | |
| "learning_rate": 3.20511817520062e-06, | |
| "loss": 0.8956, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.6319231898033626, | |
| "grad_norm": 1.414680996959683, | |
| "learning_rate": 3.184857209916528e-06, | |
| "loss": 0.9163, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.6332648526267243, | |
| "grad_norm": 1.3021277778693485, | |
| "learning_rate": 3.1646305161442183e-06, | |
| "loss": 0.9021, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.634606515450086, | |
| "grad_norm": 1.7791142325104827, | |
| "learning_rate": 3.144438475781676e-06, | |
| "loss": 0.9261, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.6359481782734476, | |
| "grad_norm": 1.5515962833422756, | |
| "learning_rate": 3.1242814700725977e-06, | |
| "loss": 0.919, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.6372898410968093, | |
| "grad_norm": 1.3625300760427628, | |
| "learning_rate": 3.1041598795991967e-06, | |
| "loss": 0.9244, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.6386315039201711, | |
| "grad_norm": 1.2420275075734377, | |
| "learning_rate": 3.0840740842750145e-06, | |
| "loss": 0.9062, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.6399731667435328, | |
| "grad_norm": 2.2279397674834205, | |
| "learning_rate": 3.064024463337747e-06, | |
| "loss": 0.9084, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.6413148295668945, | |
| "grad_norm": 1.4768686245810154, | |
| "learning_rate": 3.044011395342087e-06, | |
| "loss": 0.9296, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.6426564923902561, | |
| "grad_norm": 1.255495856439165, | |
| "learning_rate": 3.0240352581525735e-06, | |
| "loss": 0.9226, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.6439981552136179, | |
| "grad_norm": 1.3311864368234478, | |
| "learning_rate": 3.0040964289364618e-06, | |
| "loss": 0.9174, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.6453398180369796, | |
| "grad_norm": 1.4180453328625617, | |
| "learning_rate": 2.984195284156597e-06, | |
| "loss": 0.9018, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.6466814808603413, | |
| "grad_norm": 1.454713670842971, | |
| "learning_rate": 2.964332199564309e-06, | |
| "loss": 0.9171, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.648023143683703, | |
| "grad_norm": 1.3635198053975708, | |
| "learning_rate": 2.944507550192318e-06, | |
| "loss": 0.927, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.6493648065070647, | |
| "grad_norm": 1.279867079899654, | |
| "learning_rate": 2.9247217103476527e-06, | |
| "loss": 0.9028, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.6507064693304264, | |
| "grad_norm": 1.445182274301436, | |
| "learning_rate": 2.904975053604584e-06, | |
| "loss": 0.9098, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.6520481321537881, | |
| "grad_norm": 1.397463498237059, | |
| "learning_rate": 2.885267952797569e-06, | |
| "loss": 0.9216, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.6533897949771498, | |
| "grad_norm": 1.4525135526307655, | |
| "learning_rate": 2.865600780014216e-06, | |
| "loss": 0.9289, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.6547314578005116, | |
| "grad_norm": 1.6042241879134849, | |
| "learning_rate": 2.8459739065882537e-06, | |
| "loss": 0.9245, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.6560731206238732, | |
| "grad_norm": 1.4445341641370009, | |
| "learning_rate": 2.826387703092528e-06, | |
| "loss": 0.9208, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.6574147834472349, | |
| "grad_norm": 2.448108575133241, | |
| "learning_rate": 2.8068425393319965e-06, | |
| "loss": 0.9254, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.6587564462705966, | |
| "grad_norm": 1.4669057051144683, | |
| "learning_rate": 2.7873387843367494e-06, | |
| "loss": 0.8776, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.6600981090939583, | |
| "grad_norm": 1.4674760507750413, | |
| "learning_rate": 2.7678768063550454e-06, | |
| "loss": 0.9223, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.6614397719173201, | |
| "grad_norm": 1.363895109336093, | |
| "learning_rate": 2.748456972846353e-06, | |
| "loss": 0.9282, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.6627814347406817, | |
| "grad_norm": 1.2507979608624489, | |
| "learning_rate": 2.7290796504744184e-06, | |
| "loss": 0.9114, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.6641230975640434, | |
| "grad_norm": 1.357510836988213, | |
| "learning_rate": 2.7097452051003375e-06, | |
| "loss": 0.9045, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.6654647603874051, | |
| "grad_norm": 1.352458443420182, | |
| "learning_rate": 2.69045400177565e-06, | |
| "loss": 0.9277, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.6668064232107669, | |
| "grad_norm": 1.3923375575907286, | |
| "learning_rate": 2.6712064047354515e-06, | |
| "loss": 0.9063, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.6681480860341286, | |
| "grad_norm": 1.6237983373253484, | |
| "learning_rate": 2.6520027773915075e-06, | |
| "loss": 0.9255, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.6694897488574902, | |
| "grad_norm": 1.5156068144660368, | |
| "learning_rate": 2.632843482325398e-06, | |
| "loss": 0.9217, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.6708314116808519, | |
| "grad_norm": 1.2845138737825397, | |
| "learning_rate": 2.6137288812816695e-06, | |
| "loss": 0.9361, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.6721730745042137, | |
| "grad_norm": 1.3411532615795354, | |
| "learning_rate": 2.594659335161008e-06, | |
| "loss": 0.8976, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.6735147373275754, | |
| "grad_norm": 1.3161918955262504, | |
| "learning_rate": 2.5756352040134193e-06, | |
| "loss": 0.9032, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.6748564001509371, | |
| "grad_norm": 1.2808948711754613, | |
| "learning_rate": 2.5566568470314345e-06, | |
| "loss": 0.876, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.6761980629742987, | |
| "grad_norm": 1.2683456388164283, | |
| "learning_rate": 2.5377246225433306e-06, | |
| "loss": 0.9133, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.6775397257976605, | |
| "grad_norm": 1.4083617008244946, | |
| "learning_rate": 2.5188388880063595e-06, | |
| "loss": 0.8979, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.6788813886210222, | |
| "grad_norm": 1.3198518834409616, | |
| "learning_rate": 2.5000000000000015e-06, | |
| "loss": 0.9163, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.6802230514443839, | |
| "grad_norm": 1.2627840763757283, | |
| "learning_rate": 2.481208314219233e-06, | |
| "loss": 0.8808, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.6815647142677456, | |
| "grad_norm": 1.5321601221415106, | |
| "learning_rate": 2.462464185467811e-06, | |
| "loss": 0.9326, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.6829063770911072, | |
| "grad_norm": 1.4336529527711754, | |
| "learning_rate": 2.4437679676515735e-06, | |
| "loss": 0.9273, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.684248039914469, | |
| "grad_norm": 1.301319565756621, | |
| "learning_rate": 2.4251200137717545e-06, | |
| "loss": 0.9144, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.6855897027378307, | |
| "grad_norm": 1.450888662732102, | |
| "learning_rate": 2.4065206759183284e-06, | |
| "loss": 0.9163, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.6869313655611924, | |
| "grad_norm": 1.5629774444609523, | |
| "learning_rate": 2.387970305263349e-06, | |
| "loss": 0.9079, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.6882730283845541, | |
| "grad_norm": 1.419799211127572, | |
| "learning_rate": 2.3694692520543293e-06, | |
| "loss": 0.9176, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.6896146912079159, | |
| "grad_norm": 1.3850306280239242, | |
| "learning_rate": 2.3510178656076245e-06, | |
| "loss": 0.9203, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.6909563540312775, | |
| "grad_norm": 1.4448336778378281, | |
| "learning_rate": 2.3326164943018353e-06, | |
| "loss": 0.9224, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.6922980168546392, | |
| "grad_norm": 1.5296983904894126, | |
| "learning_rate": 2.3142654855712353e-06, | |
| "loss": 0.9003, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.6936396796780009, | |
| "grad_norm": 1.511761151648842, | |
| "learning_rate": 2.295965185899205e-06, | |
| "loss": 0.9116, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.6949813425013627, | |
| "grad_norm": 1.2910704562876771, | |
| "learning_rate": 2.277715940811692e-06, | |
| "loss": 0.9047, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.6963230053247244, | |
| "grad_norm": 1.2875242298776879, | |
| "learning_rate": 2.259518094870693e-06, | |
| "loss": 0.9144, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.697664668148086, | |
| "grad_norm": 1.3530863365538197, | |
| "learning_rate": 2.241371991667739e-06, | |
| "loss": 0.9102, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.6990063309714477, | |
| "grad_norm": 1.7870727665566009, | |
| "learning_rate": 2.2232779738174126e-06, | |
| "loss": 0.9147, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.7003479937948094, | |
| "grad_norm": 1.2954206040170222, | |
| "learning_rate": 2.2052363829508776e-06, | |
| "loss": 0.9151, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.7016896566181712, | |
| "grad_norm": 1.2518430383457515, | |
| "learning_rate": 2.1872475597094323e-06, | |
| "loss": 0.9096, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.7030313194415329, | |
| "grad_norm": 1.2480131310096585, | |
| "learning_rate": 2.1693118437380727e-06, | |
| "loss": 0.9363, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.7043729822648945, | |
| "grad_norm": 1.2545817095323775, | |
| "learning_rate": 2.151429573679084e-06, | |
| "loss": 0.887, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.7057146450882562, | |
| "grad_norm": 1.3238327616598924, | |
| "learning_rate": 2.133601087165644e-06, | |
| "loss": 0.9247, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.707056307911618, | |
| "grad_norm": 1.2204792393828976, | |
| "learning_rate": 2.1158267208154497e-06, | |
| "loss": 0.9005, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.7083979707349797, | |
| "grad_norm": 1.8336414137905717, | |
| "learning_rate": 2.098106810224362e-06, | |
| "loss": 0.9074, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.7097396335583414, | |
| "grad_norm": 1.3213871439009308, | |
| "learning_rate": 2.080441689960066e-06, | |
| "loss": 0.9003, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.711081296381703, | |
| "grad_norm": 1.5959521305555897, | |
| "learning_rate": 2.0628316935557595e-06, | |
| "loss": 0.9092, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.7124229592050648, | |
| "grad_norm": 1.22786859984951, | |
| "learning_rate": 2.0452771535038518e-06, | |
| "loss": 0.9035, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.7137646220284265, | |
| "grad_norm": 1.2187462970114127, | |
| "learning_rate": 2.0277784012496865e-06, | |
| "loss": 0.9129, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.7151062848517882, | |
| "grad_norm": 1.3041611731183413, | |
| "learning_rate": 2.010335767185283e-06, | |
| "loss": 0.9262, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.7164479476751499, | |
| "grad_norm": 1.2636919161262752, | |
| "learning_rate": 1.9929495806431024e-06, | |
| "loss": 0.9065, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.7177896104985116, | |
| "grad_norm": 1.4815138266505345, | |
| "learning_rate": 1.9756201698898246e-06, | |
| "loss": 0.9026, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.7191312733218733, | |
| "grad_norm": 1.3422685731450896, | |
| "learning_rate": 1.958347862120151e-06, | |
| "loss": 0.8931, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.720472936145235, | |
| "grad_norm": 1.2736773153988343, | |
| "learning_rate": 1.9411329834506286e-06, | |
| "loss": 0.9189, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.7218145989685967, | |
| "grad_norm": 1.337143595246076, | |
| "learning_rate": 1.923975858913492e-06, | |
| "loss": 0.8973, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.7231562617919584, | |
| "grad_norm": 1.384362022652147, | |
| "learning_rate": 1.9068768124505265e-06, | |
| "loss": 0.9113, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.7244979246153201, | |
| "grad_norm": 1.3520897803445997, | |
| "learning_rate": 1.8898361669069497e-06, | |
| "loss": 0.9008, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.7258395874386818, | |
| "grad_norm": 1.2796126814289661, | |
| "learning_rate": 1.8728542440253232e-06, | |
| "loss": 0.8952, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.7271812502620435, | |
| "grad_norm": 1.2835543746380815, | |
| "learning_rate": 1.8559313644394677e-06, | |
| "loss": 0.9304, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.7285229130854052, | |
| "grad_norm": 1.4028148598362826, | |
| "learning_rate": 1.8390678476684143e-06, | |
| "loss": 0.911, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.729864575908767, | |
| "grad_norm": 1.7653722051119831, | |
| "learning_rate": 1.8222640121103724e-06, | |
| "loss": 0.9123, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.7312062387321286, | |
| "grad_norm": 1.761958414062295, | |
| "learning_rate": 1.8055201750367163e-06, | |
| "loss": 0.8993, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.7325479015554903, | |
| "grad_norm": 1.4012619406304154, | |
| "learning_rate": 1.7888366525859968e-06, | |
| "loss": 0.9051, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.733889564378852, | |
| "grad_norm": 1.2707818756082008, | |
| "learning_rate": 1.7722137597579698e-06, | |
| "loss": 0.9121, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.7352312272022138, | |
| "grad_norm": 1.2914565300838898, | |
| "learning_rate": 1.7556518104076497e-06, | |
| "loss": 0.9213, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.7365728900255755, | |
| "grad_norm": 1.2704401187441536, | |
| "learning_rate": 1.7391511172393849e-06, | |
| "loss": 0.8885, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.7379145528489371, | |
| "grad_norm": 1.2933241809008478, | |
| "learning_rate": 1.7227119918009516e-06, | |
| "loss": 0.9147, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.7392562156722988, | |
| "grad_norm": 1.2465347261670734, | |
| "learning_rate": 1.706334744477673e-06, | |
| "loss": 0.9163, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.7405978784956606, | |
| "grad_norm": 1.6435915576463558, | |
| "learning_rate": 1.6900196844865575e-06, | |
| "loss": 0.9097, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.7419395413190223, | |
| "grad_norm": 1.490867528585643, | |
| "learning_rate": 1.6737671198704625e-06, | |
| "loss": 0.9133, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.743281204142384, | |
| "grad_norm": 1.2585765466201615, | |
| "learning_rate": 1.657577357492277e-06, | |
| "loss": 0.9059, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.7446228669657456, | |
| "grad_norm": 1.301408224002227, | |
| "learning_rate": 1.6414507030291249e-06, | |
| "loss": 0.9005, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.7459645297891073, | |
| "grad_norm": 1.2519314889957627, | |
| "learning_rate": 1.6253874609666033e-06, | |
| "loss": 0.9055, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.7473061926124691, | |
| "grad_norm": 1.2463068016774805, | |
| "learning_rate": 1.609387934593019e-06, | |
| "loss": 0.9102, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.7486478554358308, | |
| "grad_norm": 1.2928441743340047, | |
| "learning_rate": 1.5934524259936757e-06, | |
| "loss": 0.899, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.7499895182591925, | |
| "grad_norm": 1.4106688088793764, | |
| "learning_rate": 1.5775812360451598e-06, | |
| "loss": 0.9166, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.7513311810825541, | |
| "grad_norm": 1.2624680052602721, | |
| "learning_rate": 1.5617746644096681e-06, | |
| "loss": 0.9004, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.7526728439059159, | |
| "grad_norm": 1.3318884983797796, | |
| "learning_rate": 1.5460330095293447e-06, | |
| "loss": 0.8796, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.7540145067292776, | |
| "grad_norm": 1.2843333775104402, | |
| "learning_rate": 1.5303565686206452e-06, | |
| "loss": 0.8792, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.7553561695526393, | |
| "grad_norm": 1.5718745193782164, | |
| "learning_rate": 1.514745637668733e-06, | |
| "loss": 0.9076, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.756697832376001, | |
| "grad_norm": 1.3199412911960229, | |
| "learning_rate": 1.4992005114218805e-06, | |
| "loss": 0.8985, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.7580394951993628, | |
| "grad_norm": 1.5059559020554452, | |
| "learning_rate": 1.4837214833859099e-06, | |
| "loss": 0.9307, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.7593811580227244, | |
| "grad_norm": 1.5423291317821797, | |
| "learning_rate": 1.4683088458186512e-06, | |
| "loss": 0.9048, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.7607228208460861, | |
| "grad_norm": 2.410355060033747, | |
| "learning_rate": 1.4529628897244214e-06, | |
| "loss": 0.908, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.7620644836694478, | |
| "grad_norm": 1.3044233422661902, | |
| "learning_rate": 1.4376839048485335e-06, | |
| "loss": 0.8743, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.7634061464928096, | |
| "grad_norm": 1.4222830676115559, | |
| "learning_rate": 1.422472179671825e-06, | |
| "loss": 0.9205, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.7647478093161713, | |
| "grad_norm": 1.3510955600944414, | |
| "learning_rate": 1.4073280014052077e-06, | |
| "loss": 0.9395, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.7660894721395329, | |
| "grad_norm": 1.2345880688085022, | |
| "learning_rate": 1.3922516559842498e-06, | |
| "loss": 0.9, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.7674311349628946, | |
| "grad_norm": 1.3057826578608795, | |
| "learning_rate": 1.3772434280637737e-06, | |
| "loss": 0.9206, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.7687727977862563, | |
| "grad_norm": 1.715585212025323, | |
| "learning_rate": 1.3623036010124845e-06, | |
| "loss": 0.897, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.7701144606096181, | |
| "grad_norm": 1.3750530619640935, | |
| "learning_rate": 1.347432456907617e-06, | |
| "loss": 0.8954, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.7714561234329798, | |
| "grad_norm": 1.3395553063427457, | |
| "learning_rate": 1.3326302765296096e-06, | |
| "loss": 0.9226, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.7727977862563414, | |
| "grad_norm": 1.4774831354066553, | |
| "learning_rate": 1.3178973393568055e-06, | |
| "loss": 0.9076, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.7741394490797031, | |
| "grad_norm": 1.2722467324017726, | |
| "learning_rate": 1.3032339235601749e-06, | |
| "loss": 0.8837, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.7754811119030649, | |
| "grad_norm": 1.4000195103653286, | |
| "learning_rate": 1.2886403059980602e-06, | |
| "loss": 0.8932, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.7768227747264266, | |
| "grad_norm": 1.4528378906964872, | |
| "learning_rate": 1.2741167622109557e-06, | |
| "loss": 0.8949, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.7781644375497883, | |
| "grad_norm": 1.2457551580747528, | |
| "learning_rate": 1.259663566416296e-06, | |
| "loss": 0.9079, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.7795061003731499, | |
| "grad_norm": 1.9720344272287313, | |
| "learning_rate": 1.2452809915032843e-06, | |
| "loss": 0.9189, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.7808477631965117, | |
| "grad_norm": 1.270969125072813, | |
| "learning_rate": 1.2309693090277392e-06, | |
| "loss": 0.9054, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.7821894260198734, | |
| "grad_norm": 1.2484963540929594, | |
| "learning_rate": 1.2167287892069662e-06, | |
| "loss": 0.9052, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.7835310888432351, | |
| "grad_norm": 1.3514106246708202, | |
| "learning_rate": 1.202559700914656e-06, | |
| "loss": 0.9307, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.7848727516665968, | |
| "grad_norm": 1.579563869491601, | |
| "learning_rate": 1.1884623116758121e-06, | |
| "loss": 0.9126, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.7862144144899585, | |
| "grad_norm": 1.240608735446977, | |
| "learning_rate": 1.1744368876616913e-06, | |
| "loss": 0.8825, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.7875560773133202, | |
| "grad_norm": 1.3475713796286928, | |
| "learning_rate": 1.1604836936847852e-06, | |
| "loss": 0.9356, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.7888977401366819, | |
| "grad_norm": 1.3683933074436645, | |
| "learning_rate": 1.1466029931938182e-06, | |
| "loss": 0.9034, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.7902394029600436, | |
| "grad_norm": 1.2934899729517133, | |
| "learning_rate": 1.1327950482687727e-06, | |
| "loss": 0.9292, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.7915810657834053, | |
| "grad_norm": 1.5461960865270319, | |
| "learning_rate": 1.1190601196159406e-06, | |
| "loss": 0.8924, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.792922728606767, | |
| "grad_norm": 1.377453051187854, | |
| "learning_rate": 1.1053984665630025e-06, | |
| "loss": 0.8993, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.7942643914301287, | |
| "grad_norm": 1.2032585780385652, | |
| "learning_rate": 1.0918103470541297e-06, | |
| "loss": 0.9156, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.7956060542534904, | |
| "grad_norm": 1.2989113276114335, | |
| "learning_rate": 1.0782960176451164e-06, | |
| "loss": 0.8946, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.7969477170768521, | |
| "grad_norm": 1.244245982468071, | |
| "learning_rate": 1.064855733498531e-06, | |
| "loss": 0.9097, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.7982893799002139, | |
| "grad_norm": 1.3280190656349367, | |
| "learning_rate": 1.051489748378905e-06, | |
| "loss": 0.91, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.7996310427235755, | |
| "grad_norm": 1.3113261246318444, | |
| "learning_rate": 1.0381983146479352e-06, | |
| "loss": 0.8962, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.8009727055469372, | |
| "grad_norm": 1.331281912168968, | |
| "learning_rate": 1.024981683259723e-06, | |
| "loss": 0.9035, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.8023143683702989, | |
| "grad_norm": 1.6549084680343515, | |
| "learning_rate": 1.0118401037560354e-06, | |
| "loss": 0.9181, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.8036560311936607, | |
| "grad_norm": 1.2586619576665807, | |
| "learning_rate": 9.987738242615924e-07, | |
| "loss": 0.8948, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.8049976940170224, | |
| "grad_norm": 1.309953776979878, | |
| "learning_rate": 9.857830914793827e-07, | |
| "loss": 0.901, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.806339356840384, | |
| "grad_norm": 1.2433181727297853, | |
| "learning_rate": 9.72868150686005e-07, | |
| "loss": 0.9124, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.8076810196637457, | |
| "grad_norm": 1.1983249619659875, | |
| "learning_rate": 9.600292457270416e-07, | |
| "loss": 0.8996, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.8090226824871075, | |
| "grad_norm": 1.2523970949426702, | |
| "learning_rate": 9.472666190124457e-07, | |
| "loss": 0.9258, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.8103643453104692, | |
| "grad_norm": 1.1859328500508861, | |
| "learning_rate": 9.345805115119722e-07, | |
| "loss": 0.9052, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.8117060081338309, | |
| "grad_norm": 1.3744025315238007, | |
| "learning_rate": 9.219711627506239e-07, | |
| "loss": 0.9139, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.8130476709571925, | |
| "grad_norm": 1.192545092485379, | |
| "learning_rate": 9.094388108041302e-07, | |
| "loss": 0.9057, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.8143893337805542, | |
| "grad_norm": 1.3291859242744368, | |
| "learning_rate": 8.969836922944536e-07, | |
| "loss": 0.902, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.815730996603916, | |
| "grad_norm": 1.2686568646382825, | |
| "learning_rate": 8.846060423853198e-07, | |
| "loss": 0.8994, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.8170726594272777, | |
| "grad_norm": 1.4200335252538632, | |
| "learning_rate": 8.723060947777778e-07, | |
| "loss": 0.9067, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.8184143222506394, | |
| "grad_norm": 1.5265855250125198, | |
| "learning_rate": 8.600840817057877e-07, | |
| "loss": 0.91, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.819755985074001, | |
| "grad_norm": 1.3838763518737331, | |
| "learning_rate": 8.479402339318372e-07, | |
| "loss": 0.906, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.8210976478973628, | |
| "grad_norm": 1.2817254846160717, | |
| "learning_rate": 8.358747807425827e-07, | |
| "loss": 0.9106, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.8224393107207245, | |
| "grad_norm": 1.3157115651741536, | |
| "learning_rate": 8.238879499445224e-07, | |
| "loss": 0.9109, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.8237809735440862, | |
| "grad_norm": 2.370081298583623, | |
| "learning_rate": 8.119799678596918e-07, | |
| "loss": 0.8968, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.8251226363674479, | |
| "grad_norm": 1.163618291049878, | |
| "learning_rate": 8.001510593213946e-07, | |
| "loss": 0.9041, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.8264642991908097, | |
| "grad_norm": 1.1913159812321341, | |
| "learning_rate": 7.884014476699541e-07, | |
| "loss": 0.9075, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.8278059620141713, | |
| "grad_norm": 1.3224358409661805, | |
| "learning_rate": 7.767313547484979e-07, | |
| "loss": 0.9177, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.829147624837533, | |
| "grad_norm": 1.5777854124699655, | |
| "learning_rate": 7.651410008987698e-07, | |
| "loss": 0.9069, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.8304892876608947, | |
| "grad_norm": 1.2325653285194271, | |
| "learning_rate": 7.536306049569686e-07, | |
| "loss": 0.9089, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.8318309504842564, | |
| "grad_norm": 1.2072805741779278, | |
| "learning_rate": 7.422003842496167e-07, | |
| "loss": 0.9009, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.8331726133076182, | |
| "grad_norm": 1.2312294895632327, | |
| "learning_rate": 7.308505545894567e-07, | |
| "loss": 0.9114, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.8345142761309798, | |
| "grad_norm": 1.2389500012630505, | |
| "learning_rate": 7.195813302713761e-07, | |
| "loss": 0.8884, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.8358559389543415, | |
| "grad_norm": 1.2610850320698639, | |
| "learning_rate": 7.083929240683618e-07, | |
| "loss": 0.9106, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.8371976017777032, | |
| "grad_norm": 1.5973319577372662, | |
| "learning_rate": 6.972855472274853e-07, | |
| "loss": 0.9064, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.838539264601065, | |
| "grad_norm": 1.3932907377877346, | |
| "learning_rate": 6.862594094659086e-07, | |
| "loss": 0.9242, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.8398809274244267, | |
| "grad_norm": 1.2694477207687995, | |
| "learning_rate": 6.753147189669279e-07, | |
| "loss": 0.9001, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.8412225902477883, | |
| "grad_norm": 1.4356848330613432, | |
| "learning_rate": 6.644516823760439e-07, | |
| "loss": 0.8741, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.84256425307115, | |
| "grad_norm": 1.3465335502199314, | |
| "learning_rate": 6.536705047970566e-07, | |
| "loss": 0.8948, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.8439059158945118, | |
| "grad_norm": 1.393718175289126, | |
| "learning_rate": 6.429713897881984e-07, | |
| "loss": 0.9163, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.8452475787178735, | |
| "grad_norm": 1.2433002110204077, | |
| "learning_rate": 6.323545393582847e-07, | |
| "loss": 0.9088, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.8465892415412352, | |
| "grad_norm": 1.3283975816353129, | |
| "learning_rate": 6.218201539629032e-07, | |
| "loss": 0.9233, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.8479309043645968, | |
| "grad_norm": 1.211143540762075, | |
| "learning_rate": 6.113684325006286e-07, | |
| "loss": 0.9134, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.8492725671879586, | |
| "grad_norm": 1.2705343275484424, | |
| "learning_rate": 6.009995723092655e-07, | |
| "loss": 0.9185, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.8506142300113203, | |
| "grad_norm": 1.3957017176759934, | |
| "learning_rate": 5.907137691621256e-07, | |
| "loss": 0.9024, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.851955892834682, | |
| "grad_norm": 1.3527649104045, | |
| "learning_rate": 5.805112172643296e-07, | |
| "loss": 0.9044, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.8532975556580437, | |
| "grad_norm": 1.261750149081806, | |
| "learning_rate": 5.703921092491393e-07, | |
| "loss": 0.9221, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.8546392184814053, | |
| "grad_norm": 1.3055595818097723, | |
| "learning_rate": 5.603566361743229e-07, | |
| "loss": 0.908, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.8559808813047671, | |
| "grad_norm": 1.4280255737874952, | |
| "learning_rate": 5.504049875185458e-07, | |
| "loss": 0.8873, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.8573225441281288, | |
| "grad_norm": 1.2949770398691693, | |
| "learning_rate": 5.405373511777939e-07, | |
| "loss": 0.9042, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.8586642069514905, | |
| "grad_norm": 1.2916262406127108, | |
| "learning_rate": 5.307539134618256e-07, | |
| "loss": 0.8978, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.8600058697748522, | |
| "grad_norm": 1.3529046928197803, | |
| "learning_rate": 5.21054859090655e-07, | |
| "loss": 0.8892, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.861347532598214, | |
| "grad_norm": 1.282419973354587, | |
| "learning_rate": 5.114403711910631e-07, | |
| "loss": 0.9179, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.8626891954215756, | |
| "grad_norm": 1.3420938264698856, | |
| "learning_rate": 5.019106312931399e-07, | |
| "loss": 0.9155, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.8640308582449373, | |
| "grad_norm": 1.498460165413374, | |
| "learning_rate": 4.924658193268595e-07, | |
| "loss": 0.9068, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.865372521068299, | |
| "grad_norm": 1.3390119903047981, | |
| "learning_rate": 4.831061136186787e-07, | |
| "loss": 0.9028, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.8667141838916608, | |
| "grad_norm": 1.3139597887405536, | |
| "learning_rate": 4.7383169088817407e-07, | |
| "loss": 0.8899, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.8680558467150224, | |
| "grad_norm": 1.2345562311100178, | |
| "learning_rate": 4.646427262447034e-07, | |
| "loss": 0.9113, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.8693975095383841, | |
| "grad_norm": 1.2810485954457775, | |
| "learning_rate": 4.555393931841001e-07, | |
| "loss": 0.9059, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.8707391723617458, | |
| "grad_norm": 1.4327167922145774, | |
| "learning_rate": 4.4652186358539573e-07, | |
| "loss": 0.8924, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.8720808351851076, | |
| "grad_norm": 1.4173807953026714, | |
| "learning_rate": 4.3759030770757606e-07, | |
| "loss": 0.9062, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.8734224980084693, | |
| "grad_norm": 1.2049321963818846, | |
| "learning_rate": 4.287448941863692e-07, | |
| "loss": 0.9088, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.874764160831831, | |
| "grad_norm": 1.2315593362133814, | |
| "learning_rate": 4.1998579003105553e-07, | |
| "loss": 0.8958, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.8761058236551926, | |
| "grad_norm": 1.2723610492335584, | |
| "learning_rate": 4.1131316062131944e-07, | |
| "loss": 0.91, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.8774474864785543, | |
| "grad_norm": 1.2008077017130754, | |
| "learning_rate": 4.0272716970412516e-07, | |
| "loss": 0.9006, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.8787891493019161, | |
| "grad_norm": 1.20510795448871, | |
| "learning_rate": 3.9422797939062505e-07, | |
| "loss": 0.9049, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.8801308121252778, | |
| "grad_norm": 1.5762215699625823, | |
| "learning_rate": 3.858157501530974e-07, | |
| "loss": 0.8951, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.8814724749486395, | |
| "grad_norm": 1.3538553053953095, | |
| "learning_rate": 3.7749064082191976e-07, | |
| "loss": 0.8873, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.8828141377720011, | |
| "grad_norm": 1.3062999793490424, | |
| "learning_rate": 3.692528085825675e-07, | |
| "loss": 0.8937, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.8841558005953629, | |
| "grad_norm": 1.2757504728646711, | |
| "learning_rate": 3.6110240897264727e-07, | |
| "loss": 0.9226, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.8854974634187246, | |
| "grad_norm": 2.1158439115081573, | |
| "learning_rate": 3.53039595878959e-07, | |
| "loss": 0.8921, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.8868391262420863, | |
| "grad_norm": 1.5194549778504565, | |
| "learning_rate": 3.450645215345921e-07, | |
| "loss": 0.9407, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.888180789065448, | |
| "grad_norm": 1.1961113555111094, | |
| "learning_rate": 3.3717733651604967e-07, | |
| "loss": 0.9054, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.8895224518888097, | |
| "grad_norm": 1.2448619199933089, | |
| "learning_rate": 3.2937818974040637e-07, | |
| "loss": 0.8897, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.8908641147121714, | |
| "grad_norm": 1.2228819784956602, | |
| "learning_rate": 3.216672284624961e-07, | |
| "loss": 0.8962, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.8922057775355331, | |
| "grad_norm": 1.2302573473318397, | |
| "learning_rate": 3.140445982721324e-07, | |
| "loss": 0.8973, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.8935474403588948, | |
| "grad_norm": 1.232916261356426, | |
| "learning_rate": 3.0651044309136016e-07, | |
| "loss": 0.8963, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.8948891031822566, | |
| "grad_norm": 1.3879794683807776, | |
| "learning_rate": 2.990649051717348e-07, | |
| "loss": 0.895, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.8962307660056182, | |
| "grad_norm": 1.3094666189869715, | |
| "learning_rate": 2.917081250916415e-07, | |
| "loss": 0.911, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.8975724288289799, | |
| "grad_norm": 2.1986551459756885, | |
| "learning_rate": 2.844402417536374e-07, | |
| "loss": 0.9189, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.8989140916523416, | |
| "grad_norm": 1.247107904720917, | |
| "learning_rate": 2.772613923818301e-07, | |
| "loss": 0.9175, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.9002557544757033, | |
| "grad_norm": 1.2265952393127215, | |
| "learning_rate": 2.701717125192865e-07, | |
| "loss": 0.8806, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.9015974172990651, | |
| "grad_norm": 1.1972138465547026, | |
| "learning_rate": 2.631713360254734e-07, | |
| "loss": 0.8968, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.9029390801224267, | |
| "grad_norm": 1.808757745752747, | |
| "learning_rate": 2.562603950737319e-07, | |
| "loss": 0.9065, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.9042807429457884, | |
| "grad_norm": 1.377558178542473, | |
| "learning_rate": 2.494390201487795e-07, | |
| "loss": 0.9007, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.9056224057691501, | |
| "grad_norm": 1.4113034956571244, | |
| "learning_rate": 2.4270734004424643e-07, | |
| "loss": 0.9001, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.9069640685925119, | |
| "grad_norm": 1.2994213819698734, | |
| "learning_rate": 2.3606548186024702e-07, | |
| "loss": 0.9088, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.9083057314158736, | |
| "grad_norm": 1.3098020957658707, | |
| "learning_rate": 2.295135710009755e-07, | |
| "loss": 0.9084, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.9096473942392352, | |
| "grad_norm": 1.2442584029120756, | |
| "learning_rate": 2.2305173117234236e-07, | |
| "loss": 0.899, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.9109890570625969, | |
| "grad_norm": 1.16625420459152, | |
| "learning_rate": 2.166800843796357e-07, | |
| "loss": 0.9098, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.9123307198859587, | |
| "grad_norm": 1.1965287831013598, | |
| "learning_rate": 2.1039875092521978e-07, | |
| "loss": 0.8831, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.9136723827093204, | |
| "grad_norm": 1.369985402269482, | |
| "learning_rate": 2.042078494062616e-07, | |
| "loss": 0.8911, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.9150140455326821, | |
| "grad_norm": 1.1922261874260034, | |
| "learning_rate": 1.9810749671249353e-07, | |
| "loss": 0.8933, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.9163557083560437, | |
| "grad_norm": 1.1791520718486093, | |
| "learning_rate": 1.920978080240049e-07, | |
| "loss": 0.9004, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.9176973711794055, | |
| "grad_norm": 1.252972974339097, | |
| "learning_rate": 1.861788968090683e-07, | |
| "loss": 0.9109, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.9190390340027672, | |
| "grad_norm": 1.210490712141151, | |
| "learning_rate": 1.8035087482199676e-07, | |
| "loss": 0.9012, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.9203806968261289, | |
| "grad_norm": 1.3085615609680596, | |
| "learning_rate": 1.7461385210103377e-07, | |
| "loss": 0.9226, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.9217223596494906, | |
| "grad_norm": 1.245267818500846, | |
| "learning_rate": 1.68967936966275e-07, | |
| "loss": 0.9068, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.9230640224728522, | |
| "grad_norm": 1.401189629029748, | |
| "learning_rate": 1.6341323601762548e-07, | |
| "loss": 0.9151, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.924405685296214, | |
| "grad_norm": 1.5915899329635883, | |
| "learning_rate": 1.579498541327834e-07, | |
| "loss": 0.8845, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.9257473481195757, | |
| "grad_norm": 1.2173104542610342, | |
| "learning_rate": 1.5257789446526172e-07, | |
| "loss": 0.9003, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.9270890109429374, | |
| "grad_norm": 1.2187813291272873, | |
| "learning_rate": 1.4729745844244302e-07, | |
| "loss": 0.9071, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.9284306737662991, | |
| "grad_norm": 1.3906288260053012, | |
| "learning_rate": 1.4210864576365891e-07, | |
| "loss": 0.9, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.9297723365896609, | |
| "grad_norm": 1.1874827126453622, | |
| "learning_rate": 1.3701155439831249e-07, | |
| "loss": 0.8927, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.9311139994130225, | |
| "grad_norm": 1.2552339140761175, | |
| "learning_rate": 1.320062805840261e-07, | |
| "loss": 0.8997, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.9324556622363842, | |
| "grad_norm": 1.2017347339152205, | |
| "learning_rate": 1.27092918824826e-07, | |
| "loss": 0.9131, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.9337973250597459, | |
| "grad_norm": 1.3162069081963632, | |
| "learning_rate": 1.2227156188935552e-07, | |
| "loss": 0.9098, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.9351389878831077, | |
| "grad_norm": 1.2473748551780643, | |
| "learning_rate": 1.1754230080912588e-07, | |
| "loss": 0.905, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.9364806507064694, | |
| "grad_norm": 1.203625706135816, | |
| "learning_rate": 1.12905224876797e-07, | |
| "loss": 0.9108, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.937822313529831, | |
| "grad_norm": 1.2488624887801947, | |
| "learning_rate": 1.0836042164448945e-07, | |
| "loss": 0.8915, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.9391639763531927, | |
| "grad_norm": 1.2552228912980437, | |
| "learning_rate": 1.0390797692213516e-07, | |
| "loss": 0.8811, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.9405056391765545, | |
| "grad_norm": 1.8280475750688994, | |
| "learning_rate": 9.954797477585376e-08, | |
| "loss": 0.9137, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.9418473019999162, | |
| "grad_norm": 1.2071040198044656, | |
| "learning_rate": 9.528049752636714e-08, | |
| "loss": 0.9038, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.9431889648232779, | |
| "grad_norm": 1.2674976745179707, | |
| "learning_rate": 9.110562574744519e-08, | |
| "loss": 0.9179, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.9445306276466395, | |
| "grad_norm": 1.2141438354939842, | |
| "learning_rate": 8.702343826438364e-08, | |
| "loss": 0.9125, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.9458722904700012, | |
| "grad_norm": 1.5780774480631814, | |
| "learning_rate": 8.303401215251583e-08, | |
| "loss": 0.8969, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.947213953293363, | |
| "grad_norm": 1.310428875382451, | |
| "learning_rate": 7.913742273575886e-08, | |
| "loss": 0.8908, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.9485556161167247, | |
| "grad_norm": 1.1664669903330205, | |
| "learning_rate": 7.533374358518974e-08, | |
| "loss": 0.9021, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.9498972789400864, | |
| "grad_norm": 1.3412312405769142, | |
| "learning_rate": 7.16230465176565e-08, | |
| "loss": 0.9197, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.951238941763448, | |
| "grad_norm": 1.1188094863277196, | |
| "learning_rate": 6.80054015944237e-08, | |
| "loss": 0.9124, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.9525806045868098, | |
| "grad_norm": 1.2264327425075252, | |
| "learning_rate": 6.448087711984796e-08, | |
| "loss": 0.9218, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.9539222674101715, | |
| "grad_norm": 1.2728923335730993, | |
| "learning_rate": 6.104953964008897e-08, | |
| "loss": 0.9032, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.9552639302335332, | |
| "grad_norm": 1.3013816313631277, | |
| "learning_rate": 5.7711453941852736e-08, | |
| "loss": 0.9178, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.9566055930568949, | |
| "grad_norm": 1.2072792472853993, | |
| "learning_rate": 5.446668305116865e-08, | |
| "loss": 0.9038, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.9579472558802566, | |
| "grad_norm": 1.2293045934199316, | |
| "learning_rate": 5.1315288232201e-08, | |
| "loss": 0.9061, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.9592889187036183, | |
| "grad_norm": 1.2938337912720965, | |
| "learning_rate": 4.825732898608826e-08, | |
| "loss": 0.9242, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.96063058152698, | |
| "grad_norm": 1.2703267261050453, | |
| "learning_rate": 4.529286304982394e-08, | |
| "loss": 0.9166, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.9619722443503417, | |
| "grad_norm": 1.1662442465906284, | |
| "learning_rate": 4.2421946395164174e-08, | |
| "loss": 0.9148, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.9633139071737034, | |
| "grad_norm": 1.325544575496581, | |
| "learning_rate": 3.964463322757017e-08, | |
| "loss": 0.8958, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.9646555699970651, | |
| "grad_norm": 1.3596657315473117, | |
| "learning_rate": 3.696097598518855e-08, | |
| "loss": 0.9062, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.9659972328204268, | |
| "grad_norm": 1.2269416950521412, | |
| "learning_rate": 3.437102533785541e-08, | |
| "loss": 0.8801, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.9673388956437885, | |
| "grad_norm": 1.2795185757363334, | |
| "learning_rate": 3.187483018614601e-08, | |
| "loss": 0.9012, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.9686805584671502, | |
| "grad_norm": 1.2125366854934336, | |
| "learning_rate": 2.9472437660446605e-08, | |
| "loss": 0.9038, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.970022221290512, | |
| "grad_norm": 1.2636902281710753, | |
| "learning_rate": 2.7163893120066288e-08, | |
| "loss": 0.9043, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.9713638841138736, | |
| "grad_norm": 1.149530053551484, | |
| "learning_rate": 2.4949240152381536e-08, | |
| "loss": 0.9169, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.9727055469372353, | |
| "grad_norm": 1.2416891050976275, | |
| "learning_rate": 2.2828520572011902e-08, | |
| "loss": 0.8992, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.974047209760597, | |
| "grad_norm": 1.2065853371634816, | |
| "learning_rate": 2.0801774420031172e-08, | |
| "loss": 0.8896, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.9753888725839588, | |
| "grad_norm": 1.3596342258401561, | |
| "learning_rate": 1.8869039963210766e-08, | |
| "loss": 0.9199, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.9767305354073205, | |
| "grad_norm": 1.1746597148679385, | |
| "learning_rate": 1.7030353693298086e-08, | |
| "loss": 0.9148, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.9780721982306821, | |
| "grad_norm": 1.4018834174480932, | |
| "learning_rate": 1.5285750326325953e-08, | |
| "loss": 0.9202, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.9794138610540438, | |
| "grad_norm": 1.1702727095870482, | |
| "learning_rate": 1.3635262801960369e-08, | |
| "loss": 0.8974, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.9807555238774056, | |
| "grad_norm": 1.318215740498902, | |
| "learning_rate": 1.2078922282873773e-08, | |
| "loss": 0.8951, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.9820971867007673, | |
| "grad_norm": 1.221980524500853, | |
| "learning_rate": 1.0616758154161633e-08, | |
| "loss": 0.8947, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.983438849524129, | |
| "grad_norm": 1.148112060120653, | |
| "learning_rate": 9.248798022784001e-09, | |
| "loss": 0.8849, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.9847805123474906, | |
| "grad_norm": 1.2053514295055652, | |
| "learning_rate": 7.975067717045926e-09, | |
| "loss": 0.9117, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.9861221751708523, | |
| "grad_norm": 1.3574322437625919, | |
| "learning_rate": 6.7955912861095155e-09, | |
| "loss": 0.918, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.9874638379942141, | |
| "grad_norm": 1.3563827012833976, | |
| "learning_rate": 5.71039099953985e-09, | |
| "loss": 0.8986, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.9888055008175758, | |
| "grad_norm": 1.16006022136295, | |
| "learning_rate": 4.719487346884211e-09, | |
| "loss": 0.887, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.9901471636409375, | |
| "grad_norm": 1.3750630982606136, | |
| "learning_rate": 3.822899037286276e-09, | |
| "loss": 0.8995, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.9914888264642991, | |
| "grad_norm": 2.131815418813632, | |
| "learning_rate": 3.0206429991314067e-09, | |
| "loss": 0.9266, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.9928304892876609, | |
| "grad_norm": 1.206285530756936, | |
| "learning_rate": 2.3127343797269e-09, | |
| "loss": 0.8972, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.9941721521110226, | |
| "grad_norm": 1.2109701207143455, | |
| "learning_rate": 1.6991865450188827e-09, | |
| "loss": 0.8776, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.9955138149343843, | |
| "grad_norm": 1.2479774386935207, | |
| "learning_rate": 1.1800110793358521e-09, | |
| "loss": 0.8874, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.996855477757746, | |
| "grad_norm": 1.279395306153215, | |
| "learning_rate": 7.552177851732901e-10, | |
| "loss": 0.9076, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.9981971405811078, | |
| "grad_norm": 1.2092532151319206, | |
| "learning_rate": 4.2481468300603625e-10, | |
| "loss": 0.9312, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.9995388034044694, | |
| "grad_norm": 1.2355454119392744, | |
| "learning_rate": 1.8880801113951853e-10, | |
| "loss": 0.887, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.9995388034044694, | |
| "step": 745, | |
| "total_flos": 1422828632670208.0, | |
| "train_loss": 0.05509326341968255, | |
| "train_runtime": 2517.145, | |
| "train_samples_per_second": 151.606, | |
| "train_steps_per_second": 0.296 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 746, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 25, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1422828632670208.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
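
The state above is the `trainer_state.json` that the Hugging Face `Trainer` writes into each checkpoint directory: `log_history` holds one record per logged step (with `logging_steps` at 1.0, that is every optimizer step), and the closing record carries run-level totals such as `train_runtime`, `total_flos`, and `train_samples_per_second`. As a minimal sketch for replaying the curves in this file — the `trainer_state.json` path and the matplotlib dependency are assumptions, not part of the state file itself:

```python
# Minimal sketch: parse a Trainer trainer_state.json like the one above and
# plot its loss / learning-rate curves. The path is hypothetical; adjust it
# to wherever your checkpoint directory lives.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step entries; the final summary record has no "loss" key
# (it carries train_runtime, total_flos, etc.) and is skipped here.
history = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, color="tab:blue", label="train loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

# The learning rate gets a second y-axis: it warms up early and then decays
# toward zero by the final step, spanning several orders of magnitude.
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.legend(loc="upper right")
fig.tight_layout()
plt.show()
```

One reading note, inferred rather than recorded in the file: the summary `train_loss` (0.055) sits far below the per-step losses (~0.9). A plausible explanation is that the run was resumed from a checkpoint; the Trainer's accumulated loss counter resets on resume while the step count does not, which deflates the reported average. The per-step `loss` values in `log_history` are the reliable curve to inspect.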