| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 3.0, | |
| "eval_steps": 500, | |
| "global_step": 5535, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.005420054200542005, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8050541516245488e-06, | |
| "loss": 1.3502, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01084010840108401, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6101083032490977e-06, | |
| "loss": 1.3193, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.016260162601626018, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.4151624548736465e-06, | |
| "loss": 1.3226, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.02168021680216802, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.220216606498195e-06, | |
| "loss": 1.2298, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.02710027100271003, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.025270758122744e-06, | |
| "loss": 1.1955, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.032520325203252036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0830324909747293e-05, | |
| "loss": 1.3366, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.037940379403794036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.263537906137184e-05, | |
| "loss": 1.2038, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.04336043360433604, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.444043321299639e-05, | |
| "loss": 1.2876, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.04878048780487805, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.624548736462094e-05, | |
| "loss": 1.0663, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.05420054200542006, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.805054151624549e-05, | |
| "loss": 1.0746, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.05962059620596206, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9855595667870036e-05, | |
| "loss": 1.1507, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.06504065040650407, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1660649819494586e-05, | |
| "loss": 1.108, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.07046070460704607, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3465703971119137e-05, | |
| "loss": 1.0929, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.07588075880758807, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.527075812274368e-05, | |
| "loss": 1.0056, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.08130081300813008, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7075812274368234e-05, | |
| "loss": 1.2323, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.08672086720867209, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.888086642599278e-05, | |
| "loss": 1.1624, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.0921409214092141, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0685920577617325e-05, | |
| "loss": 1.1477, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.0975609756097561, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.249097472924188e-05, | |
| "loss": 1.1886, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.10298102981029811, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.4296028880866426e-05, | |
| "loss": 1.0738, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.10840108401084012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.610108303249098e-05, | |
| "loss": 1.103, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.11382113821138211, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.790613718411553e-05, | |
| "loss": 1.25, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.11924119241192412, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.971119133574007e-05, | |
| "loss": 1.188, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.12466124661246612, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.151624548736462e-05, | |
| "loss": 1.11, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.13008130081300814, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.332129963898917e-05, | |
| "loss": 1.0991, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.13550135501355012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.5126353790613716e-05, | |
| "loss": 1.07, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.14092140921409213, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.693140794223827e-05, | |
| "loss": 1.2724, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.14634146341463414, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.873646209386282e-05, | |
| "loss": 1.1382, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.15176151761517614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.054151624548736e-05, | |
| "loss": 1.1774, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.15718157181571815, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.234657039711192e-05, | |
| "loss": 1.1395, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.16260162601626016, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.415162454873647e-05, | |
| "loss": 1.192, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.16802168021680217, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.595667870036101e-05, | |
| "loss": 1.1848, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.17344173441734417, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.776173285198556e-05, | |
| "loss": 1.1632, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.17886178861788618, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9566787003610113e-05, | |
| "loss": 1.1659, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.1842818428184282, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.137184115523465e-05, | |
| "loss": 1.0097, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.1897018970189702, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.31768953068592e-05, | |
| "loss": 1.068, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.1951219512195122, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.498194945848377e-05, | |
| "loss": 1.1652, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.2005420054200542, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.678700361010832e-05, | |
| "loss": 1.0985, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.20596205962059622, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.859205776173285e-05, | |
| "loss": 1.0488, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.21138211382113822, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.03971119133574e-05, | |
| "loss": 1.1854, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.21680216802168023, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.220216606498195e-05, | |
| "loss": 1.1238, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.2222222222222222, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.40072202166065e-05, | |
| "loss": 1.145, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.22764227642276422, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.581227436823105e-05, | |
| "loss": 1.2215, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.23306233062330622, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.76173285198556e-05, | |
| "loss": 1.143, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.23848238482384823, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.942238267148014e-05, | |
| "loss": 1.1652, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.24390243902439024, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.122743682310469e-05, | |
| "loss": 1.2227, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.24932249322493225, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.303249097472924e-05, | |
| "loss": 1.1247, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.25474254742547425, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.48375451263538e-05, | |
| "loss": 1.1026, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.2601626016260163, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.664259927797834e-05, | |
| "loss": 1.0836, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.26558265582655827, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.84476534296029e-05, | |
| "loss": 1.0682, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.27100271002710025, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.025270758122743e-05, | |
| "loss": 1.0829, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.2764227642276423, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.205776173285198e-05, | |
| "loss": 1.0599, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.28184281842818426, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.386281588447655e-05, | |
| "loss": 1.1322, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.2872628726287263, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.56678700361011e-05, | |
| "loss": 1.1264, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.2926829268292683, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.747292418772563e-05, | |
| "loss": 1.1008, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.2981029810298103, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.927797833935018e-05, | |
| "loss": 1.1783, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.3035230352303523, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999964197887486e-05, | |
| "loss": 1.1658, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.3089430894308943, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999745409056648e-05, | |
| "loss": 1.1876, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.3143631436314363, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999327730150358e-05, | |
| "loss": 1.0611, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.31978319783197834, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.998711177783901e-05, | |
| "loss": 1.0863, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.3252032520325203, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.997895776483754e-05, | |
| "loss": 1.114, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.33062330623306235, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.996881558686615e-05, | |
| "loss": 1.0857, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.33604336043360433, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.995668564738108e-05, | |
| "loss": 1.0991, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.34146341463414637, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.994256842891181e-05, | |
| "loss": 1.0993, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.34688346883468835, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.992646449304188e-05, | |
| "loss": 1.0537, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.3523035230352303, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.990837448038653e-05, | |
| "loss": 1.0829, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.35772357723577236, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.988829911056716e-05, | |
| "loss": 1.0441, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.36314363143631434, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.98662391821828e-05, | |
| "loss": 1.0902, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.3685636856368564, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.984219557277832e-05, | |
| "loss": 1.1888, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.37398373983739835, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.981616923880947e-05, | |
| "loss": 1.0265, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.3794037940379404, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.978816121560487e-05, | |
| "loss": 1.1118, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.38482384823848237, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.975817261732483e-05, | |
| "loss": 1.0043, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.3902439024390244, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.972620463691699e-05, | |
| "loss": 1.0356, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.3956639566395664, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.969225854606892e-05, | |
| "loss": 1.1946, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.4010840108401084, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.965633569515749e-05, | |
| "loss": 1.0684, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.4065040650406504, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.961843751319517e-05, | |
| "loss": 1.1165, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.41192411924119243, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.957856550777318e-05, | |
| "loss": 1.106, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.4173441734417344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.95367212650015e-05, | |
| "loss": 1.0912, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.42276422764227645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.949290644944585e-05, | |
| "loss": 1.0344, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.4281842818428184, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.944712280406132e-05, | |
| "loss": 1.148, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.43360433604336046, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.93993721501232e-05, | |
| "loss": 1.0802, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.43902439024390244, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.934965638715446e-05, | |
| "loss": 1.0527, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.4444444444444444, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.929797749285015e-05, | |
| "loss": 1.0383, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.44986449864498645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.92443375229988e-05, | |
| "loss": 1.0799, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.45528455284552843, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.918873861140058e-05, | |
| "loss": 1.0581, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.46070460704607047, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.913118296978246e-05, | |
| "loss": 1.0526, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.46612466124661245, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.907167288771019e-05, | |
| "loss": 1.1422, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.4715447154471545, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.901021073249725e-05, | |
| "loss": 1.0433, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.47696476964769646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.89467989491107e-05, | |
| "loss": 1.1253, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.4823848238482385, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.888144006007382e-05, | |
| "loss": 1.0904, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.4878048780487805, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.881413666536589e-05, | |
| "loss": 1.0581, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.4932249322493225, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.874489144231869e-05, | |
| "loss": 1.0418, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.4986449864498645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.867370714551e-05, | |
| "loss": 1.191, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.5040650406504065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.860058660665404e-05, | |
| "loss": 1.0701, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.5094850948509485, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.852553273448879e-05, | |
| "loss": 1.1752, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.5149051490514905, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.844854851466037e-05, | |
| "loss": 1.0127, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.5203252032520326, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.836963700960416e-05, | |
| "loss": 1.1023, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.5257452574525745, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.828880135842302e-05, | |
| "loss": 1.1257, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.5311653116531165, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.820604477676247e-05, | |
| "loss": 1.0576, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.5365853658536586, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.812137055668267e-05, | |
| "loss": 1.0565, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.5420054200542005, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.803478206652752e-05, | |
| "loss": 1.0173, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.5474254742547425, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.794628275079069e-05, | |
| "loss": 1.0592, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.5528455284552846, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.785587612997857e-05, | |
| "loss": 0.9735, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.5582655826558266, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.776356580047019e-05, | |
| "loss": 0.9913, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.5636856368563685, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.766935543437421e-05, | |
| "loss": 1.0708, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.5691056910569106, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.757324877938283e-05, | |
| "loss": 1.1463, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.5745257452574526, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.747524965862267e-05, | |
| "loss": 1.0462, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.5799457994579946, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.737536197050275e-05, | |
| "loss": 1.0178, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.5853658536585366, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.727358968855932e-05, | |
| "loss": 0.8617, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.5907859078590786, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.716993686129796e-05, | |
| "loss": 0.9766, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.5962059620596206, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.706440761203232e-05, | |
| "loss": 1.0065, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.6016260162601627, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.695700613872019e-05, | |
| "loss": 0.8501, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.6070460704607046, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.684773671379658e-05, | |
| "loss": 1.1225, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.6124661246612466, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.673660368400364e-05, | |
| "loss": 1.0852, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.6178861788617886, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.662361147021779e-05, | |
| "loss": 0.9858, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.6233062330623306, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.650876456727392e-05, | |
| "loss": 1.0293, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.6287262872628726, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.639206754378646e-05, | |
| "loss": 1.06, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.6341463414634146, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.627352504196777e-05, | |
| "loss": 1.1035, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.6395663956639567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.615314177744337e-05, | |
| "loss": 1.0686, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.6449864498644986, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.603092253906441e-05, | |
| "loss": 0.9771, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.6504065040650406, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.590687218871719e-05, | |
| "loss": 1.0134, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.6558265582655827, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.578099566112964e-05, | |
| "loss": 1.013, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.6612466124661247, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.565329796367516e-05, | |
| "loss": 1.012, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.552378417617336e-05, | |
| "loss": 0.9936, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.6720867208672087, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.539245945068794e-05, | |
| "loss": 0.9983, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.6775067750677507, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.525932901132183e-05, | |
| "loss": 0.989, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.6829268292682927, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.512439815400931e-05, | |
| "loss": 1.0803, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.6883468834688347, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.498767224630537e-05, | |
| "loss": 1.0493, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.6937669376693767, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.484915672717213e-05, | |
| "loss": 1.0222, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.6991869918699187, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.470885710676265e-05, | |
| "loss": 0.9564, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.7046070460704607, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.456677896620145e-05, | |
| "loss": 0.9883, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.7100271002710027, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.442292795736278e-05, | |
| "loss": 1.0013, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.7154471544715447, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.427730980264559e-05, | |
| "loss": 0.9472, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.7208672086720868, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.412993029474602e-05, | |
| "loss": 1.0772, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.7262872628726287, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.398079529642685e-05, | |
| "loss": 1.0644, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.7317073170731707, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.382991074028436e-05, | |
| "loss": 1.1249, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.7371273712737128, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.367728262851237e-05, | |
| "loss": 0.9928, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.7425474254742548, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.352291703266331e-05, | |
| "loss": 1.1657, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.7479674796747967, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.33668200934069e-05, | |
| "loss": 1.0173, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.7533875338753387, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.32089980202857e-05, | |
| "loss": 0.9503, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.7588075880758808, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.304945709146819e-05, | |
| "loss": 1.1083, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.7642276422764228, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.2888203653499e-05, | |
| "loss": 0.9929, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.7696476964769647, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.272524412104645e-05, | |
| "loss": 1.1123, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.7750677506775068, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.256058497664734e-05, | |
| "loss": 1.0397, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.7804878048780488, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.239423277044917e-05, | |
| "loss": 1.0311, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.7859078590785907, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.22261941199494e-05, | |
| "loss": 0.9421, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.7913279132791328, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.205647570973239e-05, | |
| "loss": 0.9313, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.7967479674796748, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.188508429120342e-05, | |
| "loss": 1.0034, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.8021680216802168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.171202668232007e-05, | |
| "loss": 1.036, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.8075880758807588, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.153730976732101e-05, | |
| "loss": 1.0326, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.8130081300813008, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.136094049645226e-05, | |
| "loss": 0.8734, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.8184281842818428, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.118292588569057e-05, | |
| "loss": 0.8989, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.8238482384823849, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.100327301646439e-05, | |
| "loss": 1.0225, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.8292682926829268, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.082198903537213e-05, | |
| "loss": 1.0594, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.8346883468834688, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.063908115389794e-05, | |
| "loss": 0.9244, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.8401084010840109, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.045455664812478e-05, | |
| "loss": 1.0654, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.8455284552845529, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.026842285844499e-05, | |
| "loss": 1.0126, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.8509485094850948, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.008068718926825e-05, | |
| "loss": 1.0821, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.8563685636856369, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.989135710872713e-05, | |
| "loss": 1.0105, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.8617886178861789, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.970044014837994e-05, | |
| "loss": 0.9684, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.8672086720867209, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.950794390291108e-05, | |
| "loss": 1.0457, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.8726287262872628, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.931387602982903e-05, | |
| "loss": 1.0847, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.8780487804878049, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.911824424916169e-05, | |
| "loss": 1.1106, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.8834688346883469, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.89210563431492e-05, | |
| "loss": 1.6764, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.872232015593447e-05, | |
| "loss": 0.9737, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.8943089430894309, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.852204359325115e-05, | |
| "loss": 1.0723, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.8997289972899729, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.8320234622109e-05, | |
| "loss": 1.026, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.9051490514905149, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.811690127047707e-05, | |
| "loss": 0.9524, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.9105691056910569, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.791205162696439e-05, | |
| "loss": 0.9443, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.9159891598915989, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.770569384049806e-05, | |
| "loss": 1.0877, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.9214092140921409, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.749783611999924e-05, | |
| "loss": 1.0068, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.926829268292683, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.72884867340565e-05, | |
| "loss": 1.0288, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.9322493224932249, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.707765401059689e-05, | |
| "loss": 1.0115, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.9376693766937669, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.68653463365547e-05, | |
| "loss": 0.9857, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.943089430894309, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.665157215753788e-05, | |
| "loss": 0.9941, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.948509485094851, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.64363399774919e-05, | |
| "loss": 0.9, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.9539295392953929, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.621965835836165e-05, | |
| "loss": 0.8777, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.959349593495935, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.600153591975073e-05, | |
| "loss": 0.952, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.964769647696477, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.578198133857863e-05, | |
| "loss": 0.9876, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.9701897018970189, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.556100334873549e-05, | |
| "loss": 0.9553, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.975609756097561, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.533861074073472e-05, | |
| "loss": 0.9153, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.981029810298103, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.511481236136329e-05, | |
| "loss": 0.9828, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.986449864498645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.488961711332986e-05, | |
| "loss": 0.98, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.991869918699187, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.466303395491051e-05, | |
| "loss": 0.9471, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.997289972899729, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.443507189959248e-05, | |
| "loss": 0.9981, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 1.002710027100271, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.420574001571561e-05, | |
| "loss": 0.9234, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 1.008130081300813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.397504742611155e-05, | |
| "loss": 0.8865, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.013550135501355, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.37430033077409e-05, | |
| "loss": 0.8455, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.018970189701897, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.350961689132808e-05, | |
| "loss": 0.8807, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 1.024390243902439, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.327489746099422e-05, | |
| "loss": 0.8296, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 1.029810298102981, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.303885435388783e-05, | |
| "loss": 0.8678, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 1.0352303523035231, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.280149695981325e-05, | |
| "loss": 0.7886, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 1.040650406504065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.25628347208573e-05, | |
| "loss": 0.8248, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 1.046070460704607, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.232287713101351e-05, | |
| "loss": 0.8579, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.051490514905149, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.208163373580456e-05, | |
| "loss": 0.7809, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.056910569105691, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.183911413190251e-05, | |
| "loss": 0.7519, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.062330623306233, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.159532796674706e-05, | |
| "loss": 0.8322, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.067750677506775, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.135028493816174e-05, | |
| "loss": 0.8875, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.0731707317073171, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.110399479396822e-05, | |
| "loss": 0.6955, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.0785907859078592, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.085646733159841e-05, | |
| "loss": 0.8317, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.084010840108401, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.060771239770482e-05, | |
| "loss": 0.9015, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.089430894308943, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.035773988776883e-05, | |
| "loss": 0.9467, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.094850948509485, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.010655974570703e-05, | |
| "loss": 0.9237, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.100271002710027, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.985418196347568e-05, | |
| "loss": 0.8042, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.1056910569105691, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.960061658067322e-05, | |
| "loss": 0.7585, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.1111111111111112, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.934587368414084e-05, | |
| "loss": 0.8525, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.1165311653116532, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.908996340756136e-05, | |
| "loss": 0.8898, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.1219512195121952, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.883289593105597e-05, | |
| "loss": 0.851, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.127371273712737, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.857468148077926e-05, | |
| "loss": 0.7651, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.132791327913279, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.831533032851264e-05, | |
| "loss": 0.8294, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.1382113821138211, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.805485279125543e-05, | |
| "loss": 0.7916, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.1436314363143631, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.779325923081465e-05, | |
| "loss": 0.9365, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.1490514905149052, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.753056005339277e-05, | |
| "loss": 0.8456, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.1544715447154472, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.726676570917372e-05, | |
| "loss": 0.8459, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.1598915989159893, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.700188669190727e-05, | |
| "loss": 0.8237, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.165311653116531, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.673593353849143e-05, | |
| "loss": 0.8186, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 1.170731707317073, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.646891682855347e-05, | |
| "loss": 0.8047, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 1.1761517615176151, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.620084718402895e-05, | |
| "loss": 0.8191, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 1.1815718157181572, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.593173526873925e-05, | |
| "loss": 0.8245, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 1.1869918699186992, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.566159178796726e-05, | |
| "loss": 0.9024, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 1.1924119241192412, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.539042748803162e-05, | |
| "loss": 0.7802, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 1.1978319783197833, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.511825315585921e-05, | |
| "loss": 0.7251, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 1.203252032520325, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.484507961855598e-05, | |
| "loss": 0.8132, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 1.2086720867208671, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.45709177429764e-05, | |
| "loss": 0.7704, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 1.2140921409214092, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.429577843529096e-05, | |
| "loss": 0.7359, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 1.2195121951219512, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.401967264055253e-05, | |
| "loss": 0.8472, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 1.2249322493224932, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.374261134226083e-05, | |
| "loss": 0.7575, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 1.2303523035230353, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.346460556192557e-05, | |
| "loss": 0.8207, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 1.2357723577235773, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.318566635862797e-05, | |
| "loss": 0.863, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 1.2411924119241193, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.290580482858088e-05, | |
| "loss": 0.9107, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 1.2466124661246614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.262503210468728e-05, | |
| "loss": 0.7615, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 1.2520325203252032, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.234335935609761e-05, | |
| "loss": 0.8431, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 1.2574525745257452, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.20607977877652e-05, | |
| "loss": 0.8447, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 1.2628726287262872, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.177735864000073e-05, | |
| "loss": 0.8853, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 1.2682926829268293, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.149305318802502e-05, | |
| "loss": 0.9468, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 1.2737127371273713, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.120789274152049e-05, | |
| "loss": 0.7821, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 1.2791327913279134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.092188864418127e-05, | |
| "loss": 0.6905, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 1.2845528455284554, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.063505227326198e-05, | |
| "loss": 0.7971, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 1.2899728997289972, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.03473950391251e-05, | |
| "loss": 0.6959, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 1.2953929539295392, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.005892838478711e-05, | |
| "loss": 0.8096, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 1.3008130081300813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.976966378546322e-05, | |
| "loss": 0.93, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.3062330623306233, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.947961274811093e-05, | |
| "loss": 0.7973, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.3116531165311653, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.918878681097232e-05, | |
| "loss": 0.85, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.3170731707317074, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.889719754311495e-05, | |
| "loss": 0.7707, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 1.3224932249322494, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.860485654397173e-05, | |
| "loss": 0.7808, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 1.3279132791327912, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.831177544287952e-05, | |
| "loss": 0.8128, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.80179658986164e-05, | |
| "loss": 0.7977, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 1.3387533875338753, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.7723439598938e-05, | |
| "loss": 0.7492, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 1.3441734417344173, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.742820826011245e-05, | |
| "loss": 0.8064, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 1.3495934959349594, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.71322836264544e-05, | |
| "loss": 0.807, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 1.3550135501355014, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.683567746985783e-05, | |
| "loss": 0.8208, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.3604336043360434, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.653840158932769e-05, | |
| "loss": 0.7258, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.3658536585365852, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.624046781051055e-05, | |
| "loss": 0.7142, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.3712737127371275, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.594188798522428e-05, | |
| "loss": 0.7837, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.3766937669376693, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.56426739909864e-05, | |
| "loss": 0.7829, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.3821138211382114, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.53428377305418e-05, | |
| "loss": 0.7371, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.3875338753387534, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.504239113138908e-05, | |
| "loss": 0.9048, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.3929539295392954, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.474134614530617e-05, | |
| "loss": 0.8665, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.3983739837398375, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.443971474787483e-05, | |
| "loss": 0.8496, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.4037940379403793, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.413750893800432e-05, | |
| "loss": 0.7842, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.4092140921409215, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.3834740737454e-05, | |
| "loss": 0.8222, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.4146341463414633, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.353142219035521e-05, | |
| "loss": 0.7915, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.4200542005420054, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.322756536273208e-05, | |
| "loss": 0.8833, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.4254742547425474, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.292318234202155e-05, | |
| "loss": 0.8277, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.4308943089430894, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.261828523659253e-05, | |
| "loss": 0.743, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.4363143631436315, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.231288617526427e-05, | |
| "loss": 0.6826, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.4417344173441735, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.200699730682384e-05, | |
| "loss": 0.7916, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.4471544715447155, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.170063079954283e-05, | |
| "loss": 0.7353, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.4525745257452574, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.139379884069337e-05, | |
| "loss": 0.8918, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.4579945799457994, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.108651363606324e-05, | |
| "loss": 0.7956, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.4634146341463414, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.077878740947035e-05, | |
| "loss": 0.808, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.4688346883468835, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.04706324022765e-05, | |
| "loss": 0.7646, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.4742547425474255, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.016206087290038e-05, | |
| "loss": 0.8579, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.4796747967479675, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.985308509633e-05, | |
| "loss": 0.887, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.4850948509485096, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9543717363634263e-05, | |
| "loss": 0.8292, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.4905149051490514, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9233969981474156e-05, | |
| "loss": 0.8304, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.4959349593495934, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.892385527161311e-05, | |
| "loss": 0.7978, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.5013550135501355, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.861338557042691e-05, | |
| "loss": 0.7651, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.5067750677506775, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.830257322841285e-05, | |
| "loss": 0.9219, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.5121951219512195, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.799143060969854e-05, | |
| "loss": 0.7874, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.5176151761517616, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.767997009154997e-05, | |
| "loss": 0.7405, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.5230352303523036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.736820406387923e-05, | |
| "loss": 0.845, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.5284552845528454, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.705614492875153e-05, | |
| "loss": 0.8464, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.5338753387533877, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6743805099891955e-05, | |
| "loss": 0.9143, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.5392953929539295, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6431197002191594e-05, | |
| "loss": 0.804, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.5447154471544715, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.611833307121327e-05, | |
| "loss": 0.7703, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.5501355013550135, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.580522575269684e-05, | |
| "loss": 0.8818, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 1.5555555555555556, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.54918875020642e-05, | |
| "loss": 0.7634, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 1.5609756097560976, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.517833078392365e-05, | |
| "loss": 0.7817, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 1.5663956639566394, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.486456807157417e-05, | |
| "loss": 0.7829, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 1.5718157181571817, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.455061184650921e-05, | |
| "loss": 0.7587, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.5772357723577235, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.423647459792018e-05, | |
| "loss": 0.6822, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.5826558265582655, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.392216882219956e-05, | |
| "loss": 0.8737, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.5880758807588076, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.360770702244388e-05, | |
| "loss": 0.8793, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.5934959349593496, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3293101707956306e-05, | |
| "loss": 0.8142, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.5989159891598916, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.2978365393749044e-05, | |
| "loss": 0.718, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.6043360433604335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.266351060004544e-05, | |
| "loss": 0.8058, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.6097560975609757, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.234854985178196e-05, | |
| "loss": 0.8089, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.6151761517615175, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.203349567811e-05, | |
| "loss": 0.7888, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.6205962059620598, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.1718360611897374e-05, | |
| "loss": 0.761, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.6260162601626016, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.140315718922979e-05, | |
| "loss": 0.7682, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.6314363143631436, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.108789794891224e-05, | |
| "loss": 0.8024, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.6368563685636857, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0772595431970095e-05, | |
| "loss": 0.8088, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.6422764227642277, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0457262181150286e-05, | |
| "loss": 0.7911, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.6476964769647697, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.014191074042233e-05, | |
| "loss": 0.7997, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.6531165311653115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9826553654479366e-05, | |
| "loss": 0.7467, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.6585365853658538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9511203468239064e-05, | |
| "loss": 0.8015, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.6639566395663956, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9195872726344655e-05, | |
| "loss": 0.8341, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.6693766937669376, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.888057397266585e-05, | |
| "loss": 0.7597, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.6747967479674797, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.856531974979987e-05, | |
| "loss": 0.7855, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.6802168021680217, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.8250122598572537e-05, | |
| "loss": 0.7871, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.6856368563685638, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.79349950575393e-05, | |
| "loss": 0.8974, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.6910569105691056, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7619949662486554e-05, | |
| "loss": 0.8021, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.6964769647696478, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7304998945932904e-05, | |
| "loss": 0.7876, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.7018970189701896, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.699015543663066e-05, | |
| "loss": 0.901, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.7073170731707317, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.667543165906739e-05, | |
| "loss": 0.7647, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.7127371273712737, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6360840132967735e-05, | |
| "loss": 0.9019, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.7181571815718157, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.604639337279543e-05, | |
| "loss": 0.7046, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.7235772357723578, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.5732103887255354e-05, | |
| "loss": 0.7644, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.7289972899728996, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.541798417879598e-05, | |
| "loss": 0.8395, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.7344173441734418, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.5104046743112094e-05, | |
| "loss": 0.7812, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.7398373983739837, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.479030406864763e-05, | |
| "loss": 0.7638, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.7452574525745257, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.447676863609892e-05, | |
| "loss": 0.7856, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.7506775067750677, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.4163452917918194e-05, | |
| "loss": 0.8102, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.7560975609756098, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.3850369377817465e-05, | |
| "loss": 0.9204, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.7615176151761518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.353753047027269e-05, | |
| "loss": 0.8147, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.7669376693766936, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.32249486400283e-05, | |
| "loss": 0.7163, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.7723577235772359, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2912636321602186e-05, | |
| "loss": 0.7531, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.7777777777777777, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.260060593879108e-05, | |
| "loss": 0.8307, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.78319783197832, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.228886990417627e-05, | |
| "loss": 1.0805, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.7886178861788617, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.197744061862987e-05, | |
| "loss": 0.7975, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.7940379403794038, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.1666330470821536e-05, | |
| "loss": 0.6771, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.7994579945799458, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.135555183672557e-05, | |
| "loss": 0.7911, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.8048780487804879, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.104511707912866e-05, | |
| "loss": 0.9151, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.8102981029810299, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.0735038547138065e-05, | |
| "loss": 0.7033, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.8157181571815717, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.042532857569039e-05, | |
| "loss": 0.725, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.821138211382114, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.011599948506088e-05, | |
| "loss": 0.7157, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.8265582655826558, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.9807063580373314e-05, | |
| "loss": 0.8203, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.8319783197831978, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.949853315111055e-05, | |
| "loss": 0.7513, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.8373983739837398, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.919042047062558e-05, | |
| "loss": 0.7372, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.8428184281842819, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.8882737795653346e-05, | |
| "loss": 0.7178, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.848238482384824, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.857549736582316e-05, | |
| "loss": 0.7819, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.8536585365853657, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.8268711403171805e-05, | |
| "loss": 0.7438, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.859078590785908, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.796239211165733e-05, | |
| "loss": 0.7724, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.8644986449864498, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.765655167667358e-05, | |
| "loss": 0.8343, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.8699186991869918, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.735120226456551e-05, | |
| "loss": 0.7478, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.8753387533875339, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.704635602214513e-05, | |
| "loss": 0.7864, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.880758807588076, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6742025076208344e-05, | |
| "loss": 0.7534, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.886178861788618, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.643822153305252e-05, | |
| "loss": 0.7924, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.8915989159891597, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.613495747799498e-05, | |
| "loss": 0.8663, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.897018970189702, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.583224497489213e-05, | |
| "loss": 0.8439, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.9024390243902438, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.553009606565962e-05, | |
| "loss": 0.7788, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.907859078590786, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.522852276979337e-05, | |
| "loss": 0.8515, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.9132791327913279, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.492753708389134e-05, | |
| "loss": 0.7187, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.91869918699187, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.462715098117632e-05, | |
| "loss": 0.7172, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.924119241192412, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.4327376411019675e-05, | |
| "loss": 0.7059, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.9295392953929538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.4028225298465987e-05, | |
| "loss": 0.9021, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.934959349593496, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.372970954375867e-05, | |
| "loss": 0.7364, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.9403794037940378, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.343184102186655e-05, | |
| "loss": 0.8083, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.94579945799458, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.313463158201157e-05, | |
| "loss": 0.7949, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.951219512195122, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.283809304719728e-05, | |
| "loss": 0.7554, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.956639566395664, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.254223721373866e-05, | |
| "loss": 0.7353, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.962059620596206, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2247075850792774e-05, | |
| "loss": 0.6905, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.967479674796748, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.195262069989064e-05, | |
| "loss": 0.7416, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.97289972899729, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1658883474470096e-05, | |
| "loss": 0.8705, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.9783197831978319, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.136587585940991e-05, | |
| "loss": 0.8878, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.9837398373983741, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.107360951056493e-05, | |
| "loss": 0.6716, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.989159891598916, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0782096054302375e-05, | |
| "loss": 0.7695, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.994579945799458, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0491347087039363e-05, | |
| "loss": 0.8582, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.020137417478163e-05, | |
| "loss": 0.7794, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.005420054200542, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9912188852663404e-05, | |
| "loss": 0.6789, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.010840108401084, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.962380262448855e-05, | |
| "loss": 0.6068, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.016260162601626, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9336226962272904e-05, | |
| "loss": 0.5462, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.021680216802168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9049473305788045e-05, | |
| "loss": 0.5795, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.02710027100271, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.876355306210604e-05, | |
| "loss": 0.5308, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.032520325203252, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8478477605145815e-05, | |
| "loss": 0.5021, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.037940379403794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.819425827522061e-05, | |
| "loss": 0.5725, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.043360433604336, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.791090637858692e-05, | |
| "loss": 0.5371, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.048780487804878, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7628433186994705e-05, | |
| "loss": 0.4408, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.05420054200542, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7346849937238983e-05, | |
| "loss": 0.5324, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.059620596205962, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7066167830712897e-05, | |
| "loss": 0.537, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.065040650406504, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.678639803296203e-05, | |
| "loss": 0.6198, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.0704607046070462, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6507551673240295e-05, | |
| "loss": 0.6005, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.075880758807588, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.622963984406722e-05, | |
| "loss": 0.5428, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.08130081300813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.595267360078665e-05, | |
| "loss": 0.5816, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.086720867208672, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.567666396112699e-05, | |
| "loss": 0.5656, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.092140921409214, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5401621904762896e-05, | |
| "loss": 0.5424, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.097560975609756, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.512755837287859e-05, | |
| "loss": 0.5393, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.102981029810298, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.485448426773248e-05, | |
| "loss": 0.4829, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.1084010840108403, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.458241045222356e-05, | |
| "loss": 0.5376, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.113821138211382, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4311347749459252e-05, | |
| "loss": 0.5107, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.1192411924119243, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4041306942324892e-05, | |
| "loss": 0.5824, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.124661246612466, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3772298773054757e-05, | |
| "loss": 0.5408, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.130081300813008, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3504333942804728e-05, | |
| "loss": 0.4924, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.13550135501355, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.323742311122667e-05, | |
| "loss": 0.4882, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.140921409214092, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2971576896044283e-05, | |
| "loss": 0.5444, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.1463414634146343, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2706805872630816e-05, | |
| "loss": 0.4971, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.151761517615176, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2443120573588323e-05, | |
| "loss": 0.6078, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.1571815718157183, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.218053148832872e-05, | |
| "loss": 0.5503, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.16260162601626, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1919049062656476e-05, | |
| "loss": 0.4805, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.168021680216802, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1658683698353095e-05, | |
| "loss": 0.5476, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.1734417344173442, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1399445752763374e-05, | |
| "loss": 0.4429, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.178861788617886, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1141345538383306e-05, | |
| "loss": 0.5418, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.1842818428184283, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0884393322449895e-05, | |
| "loss": 0.55, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.18970189701897, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0628599326532738e-05, | |
| "loss": 0.4787, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.1951219512195124, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0373973726127378e-05, | |
| "loss": 0.5278, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.200542005420054, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0120526650250533e-05, | |
| "loss": 0.5121, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.205962059620596, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9868268181037185e-05, | |
| "loss": 0.5864, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.2113821138211383, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.961720835333949e-05, | |
| "loss": 0.6388, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.21680216802168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9367357154327576e-05, | |
| "loss": 0.576, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.2222222222222223, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.91187245230923e-05, | |
| "loss": 0.56, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.227642276422764, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8871320350249833e-05, | |
| "loss": 0.5696, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.2330623306233064, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.862515447754819e-05, | |
| "loss": 0.5115, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.238482384823848, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8380236697475832e-05, | |
| "loss": 0.5578, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.2439024390243905, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.813657675287198e-05, | |
| "loss": 0.5048, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.2493224932249323, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.789418433653912e-05, | |
| "loss": 0.5528, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.254742547425474, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7653069090857434e-05, | |
| "loss": 0.4945, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.2601626016260163, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7413240607401164e-05, | |
| "loss": 0.4937, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.265582655826558, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.717470842655715e-05, | |
| "loss": 0.5175, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.2710027100271004, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.693748203714522e-05, | |
| "loss": 0.5842, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.2764227642276422, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6701570876040777e-05, | |
| "loss": 0.5089, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.281842818428184, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6466984327799385e-05, | |
| "loss": 0.5788, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.2872628726287263, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6233731724283492e-05, | |
| "loss": 0.5136, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.292682926829268, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6001822344291124e-05, | |
| "loss": 0.4981, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.2981029810298104, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5771265413186832e-05, | |
| "loss": 0.5172, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.303523035230352, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5542070102534706e-05, | |
| "loss": 0.5126, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.3089430894308944, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5314245529733507e-05, | |
| "loss": 0.5028, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.3143631436314362, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5087800757653997e-05, | |
| "loss": 0.4974, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.3197831978319785, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.48627447942784e-05, | |
| "loss": 0.4955, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.3252032520325203, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4639086592342099e-05, | |
| "loss": 0.5425, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.330623306233062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4416835048977445e-05, | |
| "loss": 0.5073, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.3360433604336044, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.419599900535985e-05, | |
| "loss": 0.6215, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.341463414634146, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3976587246356116e-05, | |
| "loss": 0.5859, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.3468834688346885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3758608500174908e-05, | |
| "loss": 0.497, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.3523035230352303, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3542071438019622e-05, | |
| "loss": 0.5852, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.3577235772357725, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3326984673743353e-05, | |
| "loss": 0.4882, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.3631436314363143, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3113356763506363e-05, | |
| "loss": 0.5861, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.3685636856368566, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2901196205435578e-05, | |
| "loss": 0.5987, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.3739837398373984, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2690511439286623e-05, | |
| "loss": 0.5111, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.3794037940379402, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.248131084610804e-05, | |
| "loss": 0.555, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.3848238482384825, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.227360274790793e-05, | |
| "loss": 0.5821, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.3902439024390243, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.206739540732288e-05, | |
| "loss": 0.5988, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.3956639566395665, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1862697027289254e-05, | |
| "loss": 0.5745, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.4010840108401084, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1659515750716955e-05, | |
| "loss": 0.481, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.40650406504065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1457859660165404e-05, | |
| "loss": 0.5362, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.4119241192411924, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1257736777522072e-05, | |
| "loss": 0.6335, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.4173441734417342, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1059155063683357e-05, | |
| "loss": 0.5322, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.4227642276422765, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0862122418237907e-05, | |
| "loss": 0.5453, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.4281842818428183, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0666646679152365e-05, | |
| "loss": 0.5394, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.4336043360433606, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0472735622459545e-05, | |
| "loss": 0.6681, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.4390243902439024, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.028039696194919e-05, | |
| "loss": 0.5178, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.4444444444444446, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0089638348861024e-05, | |
| "loss": 0.4984, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.4498644986449865, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.900467371580407e-06, | |
| "loss": 0.5896, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.4552845528455283, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.712891555336517e-06, | |
| "loss": 0.4909, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.4607046070460705, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.52691836190293e-06, | |
| "loss": 0.6343, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.4661246612466123, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.342555189300838e-06, | |
| "loss": 0.5115, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.4715447154471546, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.159809371504723e-06, | |
| "loss": 0.5469, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.4769647696476964, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.978688178150668e-06, | |
| "loss": 0.5606, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.4823848238482387, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.79919881424709e-06, | |
| "loss": 0.5203, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.4878048780487805, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.621348419888165e-06, | |
| "loss": 0.5111, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.4932249322493227, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.445144069969812e-06, | |
| "loss": 0.5115, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.4986449864498645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.270592773908232e-06, | |
| "loss": 0.4814, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.5040650406504064, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.09770147536107e-06, | |
| "loss": 0.426, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.5094850948509486, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.926477051951192e-06, | |
| "loss": 0.4801, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.5149051490514904, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.756926314993135e-06, | |
| "loss": 0.6409, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.5203252032520327, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.589056009222089e-06, | |
| "loss": 0.5287, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.5257452574525745, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.422872812525633e-06, | |
| "loss": 0.5553, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.5311653116531163, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.258383335678098e-06, | |
| "loss": 0.5598, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.5365853658536586, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.095594122077542e-06, | |
| "loss": 0.5477, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.5420054200542004, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.934511647485503e-06, | |
| "loss": 0.5337, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.5474254742547426, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.775142319769351e-06, | |
| "loss": 0.4678, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.5528455284552845, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.61749247864743e-06, | |
| "loss": 0.5446, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.5582655826558267, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.461568395436812e-06, | |
| "loss": 0.5185, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.5636856368563685, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.307376272803861e-06, | |
| "loss": 0.5106, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.569105691056911, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.154922244517486e-06, | |
| "loss": 0.535, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.5745257452574526, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.004212375205115e-06, | |
| "loss": 0.6075, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.5799457994579944, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.855252660111482e-06, | |
| "loss": 0.4473, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.5853658536585367, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.708049024860085e-06, | |
| "loss": 0.5698, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.5907859078590785, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.562607325217533e-06, | |
| "loss": 0.534, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.5962059620596207, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.418933346860538e-06, | |
| "loss": 0.5458, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.6016260162601625, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.277032805145799e-06, | |
| "loss": 0.5138, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.6070460704607044, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.136911344882622e-06, | |
| "loss": 0.5076, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.6124661246612466, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.998574540108392e-06, | |
| "loss": 0.5299, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.617886178861789, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.862027893866822e-06, | |
| "loss": 0.4938, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.6233062330623307, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7272768379890365e-06, | |
| "loss": 0.5444, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.6287262872628725, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.594326732877524e-06, | |
| "loss": 0.5202, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.6341463414634148, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.463182867292865e-06, | |
| "loss": 0.5111, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.6395663956639566, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.333850458143357e-06, | |
| "loss": 0.5141, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.644986449864499, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.206334650277483e-06, | |
| "loss": 0.4874, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.6504065040650406, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.080640516279266e-06, | |
| "loss": 0.4867, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.6558265582655824, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.956773056266466e-06, | |
| "loss": 0.5755, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.6612466124661247, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.8347371976916516e-06, | |
| "loss": 0.5755, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.714537795146261e-06, | |
| "loss": 0.523, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.6720867208672088, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5961796301674012e-06, | |
| "loss": 0.4475, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.6775067750677506, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.479667411047677e-06, | |
| "loss": 0.4947, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.682926829268293, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.3650057726479e-06, | |
| "loss": 0.5437, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.6883468834688347, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2521992762127085e-06, | |
| "loss": 0.6523, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.693766937669377, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.141252409189099e-06, | |
| "loss": 0.5232, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.6991869918699187, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.032169585047956e-06, | |
| "loss": 0.5144, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.7046070460704605, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.92495514310846e-06, | |
| "loss": 0.5177, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.710027100271003, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.819613348365463e-06, | |
| "loss": 0.4604, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.7154471544715446, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7161483913198392e-06, | |
| "loss": 0.5745, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.720867208672087, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6145643878117744e-06, | |
| "loss": 0.4852, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.7262872628726287, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5148653788570676e-06, | |
| "loss": 0.6759, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.7317073170731705, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4170553304863287e-06, | |
| "loss": 0.5064, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.7371273712737128, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.321138133587264e-06, | |
| "loss": 0.4882, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.742547425474255, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.227117603749862e-06, | |
| "loss": 0.5716, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.747967479674797, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.13499748111462e-06, | |
| "loss": 0.5078, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.7533875338753386, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.044781430223758e-06, | |
| "loss": 0.4927, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.758807588075881, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.956473039875445e-06, | |
| "loss": 0.6156, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.7642276422764227, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8700758229810266e-06, | |
| "loss": 0.6286, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.769647696476965, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.785593216425313e-06, | |
| "loss": 0.5391, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.7750677506775068, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7030285809298107e-06, | |
| "loss": 0.5114, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.7804878048780486, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6223852009190854e-06, | |
| "loss": 0.4548, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.785907859078591, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5436662843900718e-06, | |
| "loss": 0.5117, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.7913279132791327, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4668749627844536e-06, | |
| "loss": 0.5998, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.796747967479675, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3920142908641353e-06, | |
| "loss": 0.5348, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.8021680216802167, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3190872465896786e-06, | |
| "loss": 0.5632, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.8075880758807585, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2480967310018743e-06, | |
| "loss": 0.5922, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.813008130081301, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1790455681063018e-06, | |
| "loss": 0.4896, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.818428184281843, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1119365047610409e-06, | |
| "loss": 0.5172, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.823848238482385, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.046772210567354e-06, | |
| "loss": 0.5374, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.8292682926829267, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.835552777635092e-07, | |
| "loss": 0.5543, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.834688346883469, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.222882211216588e-07, | |
| "loss": 0.5452, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.8401084010840107, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.62973477847806e-07, | |
| "loss": 0.5049, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.845528455284553, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.05613407484851e-07, | |
| "loss": 0.5682, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.850948509485095, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.502102918187203e-07, | |
| "loss": 0.448, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.8563685636856366, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.967663347876019e-07, | |
| "loss": 0.4991, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.861788617886179, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.452836623942859e-07, | |
| "loss": 0.5162, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.867208672086721, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.957643226215615e-07, | |
| "loss": 0.4567, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.872628726287263, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.482102853507809e-07, | |
| "loss": 0.5041, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.8780487804878048, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.02623442283473e-07, | |
| "loss": 0.4718, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.883468834688347, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.590056068661197e-07, | |
| "loss": 0.5498, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.888888888888889, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.1735851421796925e-07, | |
| "loss": 0.5377, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.894308943089431, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.776838210620692e-07, | |
| "loss": 0.5376, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.899728997289973, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.39983105659325e-07, | |
| "loss": 0.5345, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.9051490514905147, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0425786774572753e-07, | |
| "loss": 0.5777, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.910569105691057, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.705095284726844e-07, | |
| "loss": 0.5314, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.915989159891599, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3873943035050417e-07, | |
| "loss": 0.5036, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.921409214092141, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.089488371949888e-07, | |
| "loss": 0.5007, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.926829268292683, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8113893407713523e-07, | |
| "loss": 0.5986, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.9322493224932247, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5531082727602285e-07, | |
| "loss": 0.653, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.937669376693767, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3146554423478785e-07, | |
| "loss": 0.523, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.943089430894309, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0960403351976123e-07, | |
| "loss": 0.4574, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 2.948509485094851, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.972716478272691e-08, | |
| "loss": 0.6004, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 2.953929539295393, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.183572872632715e-08, | |
| "loss": 0.5666, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 2.959349593495935, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.593043707262102e-08, | |
| "loss": 0.5306, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 2.964769647696477, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.201192253474595e-08, | |
| "loss": 0.4908, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 2.970189701897019, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0080738791782304e-08, | |
| "loss": 0.4865, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 2.975609756097561, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0137360466704337e-08, | |
| "loss": 0.4843, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 2.9810298102981028, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2182183107506406e-08, | |
| "loss": 0.5347, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.986449864498645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.215523171465565e-09, | |
| "loss": 0.567, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 2.991869918699187, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2376180125571743e-09, | |
| "loss": 0.5512, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 2.997289972899729, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4862587201246457e-10, | |
| "loss": 0.5884, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 5535, | |
| "total_flos": 3.0626432822122906e+18, | |
| "train_loss": 0.8061501924692238, | |
| "train_runtime": 22473.7337, | |
| "train_samples_per_second": 0.246, | |
| "train_steps_per_second": 0.246 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5535, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 2500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.0626432822122906e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
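
The JSON above matches the shape of a Hugging Face Trainer state file: `log_history` holds one entry per `logging_steps` (10) with `step`, `epoch`, `loss`, and `learning_rate`, plus a final summary entry that carries `train_loss` and runtime fields instead. A minimal sketch of how one might load and plot it is below; the file name `trainer_state.json` and the use of matplotlib are assumptions, not part of this log.

```python
# Minimal sketch: plot training loss and learning-rate schedule from a
# Trainer state file. The path "trainer_state.json" is hypothetical.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic logging entries; the final summary entry in
# log_history has "train_loss" but no per-step "loss"/"learning_rate".
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
fig.savefig("training_curves.png")
```

With this log (3 epochs, 5535 steps, warmup followed by decay of the learning rate to ~2.5e-10), the plot would show the loss falling from roughly 1.3 at the start of training to roughly 0.5 near the end, consistent with the reported `train_loss` of 0.806 averaged over the whole run.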