{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.6455026455026456,
  "eval_steps": 500,
  "global_step": 3500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 1.7509137392044067,
      "learning_rate": 9.974804736709499e-06,
      "loss": 1.9586,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.5435916185379028,
      "learning_rate": 9.949609473418998e-06,
      "loss": 1.764,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.209004521369934,
      "learning_rate": 9.924414210128497e-06,
      "loss": 1.5738,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.036686897277832,
      "learning_rate": 9.899218946837995e-06,
      "loss": 1.456,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.8039250373840332,
      "learning_rate": 9.874023683547493e-06,
      "loss": 1.3517,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.668268620967865,
      "learning_rate": 9.848828420256992e-06,
      "loss": 1.2773,
      "step": 60
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.6465680003166199,
      "learning_rate": 9.823633156966492e-06,
      "loss": 1.2494,
      "step": 70
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.669222891330719,
      "learning_rate": 9.79843789367599e-06,
      "loss": 1.2465,
      "step": 80
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.7393785715103149,
      "learning_rate": 9.773242630385489e-06,
      "loss": 1.2073,
      "step": 90
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.6821632385253906,
      "learning_rate": 9.748047367094986e-06,
      "loss": 1.1725,
      "step": 100
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.698268711566925,
      "learning_rate": 9.722852103804486e-06,
      "loss": 1.1724,
      "step": 110
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.6640620231628418,
      "learning_rate": 9.697656840513983e-06,
      "loss": 1.1656,
      "step": 120
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8017461895942688,
      "learning_rate": 9.672461577223483e-06,
      "loss": 1.1764,
      "step": 130
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.6652551293373108,
      "learning_rate": 9.64726631393298e-06,
      "loss": 1.1706,
      "step": 140
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.8237988352775574,
      "learning_rate": 9.62207105064248e-06,
      "loss": 1.1231,
      "step": 150
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.8982694149017334,
      "learning_rate": 9.59687578735198e-06,
      "loss": 1.1417,
      "step": 160
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.7859475016593933,
      "learning_rate": 9.571680524061477e-06,
      "loss": 1.1382,
      "step": 170
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.9249159693717957,
      "learning_rate": 9.546485260770976e-06,
      "loss": 1.1363,
      "step": 180
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.221252202987671,
      "learning_rate": 9.521289997480474e-06,
      "loss": 1.1382,
      "step": 190
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.636524200439453,
      "learning_rate": 9.498614260519024e-06,
      "loss": 1.0979,
      "step": 200
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7521413564682007,
      "learning_rate": 9.473418997228522e-06,
      "loss": 1.0645,
      "step": 210
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.8328731060028076,
      "learning_rate": 9.44822373393802e-06,
      "loss": 1.0585,
      "step": 220
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.8342303037643433,
      "learning_rate": 9.423028470647519e-06,
      "loss": 1.0655,
      "step": 230
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.8196636438369751,
      "learning_rate": 9.397833207357018e-06,
      "loss": 1.0221,
      "step": 240
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.7511851787567139,
      "learning_rate": 9.372637944066516e-06,
      "loss": 1.046,
      "step": 250
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.8341330885887146,
      "learning_rate": 9.347442680776014e-06,
      "loss": 1.0331,
      "step": 260
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.9003998637199402,
      "learning_rate": 9.322247417485513e-06,
      "loss": 1.0386,
      "step": 270
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.79107266664505,
      "learning_rate": 9.297052154195013e-06,
      "loss": 1.0152,
      "step": 280
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.7756459712982178,
      "learning_rate": 9.27185689090451e-06,
      "loss": 1.0224,
      "step": 290
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.762424647808075,
      "learning_rate": 9.24666162761401e-06,
      "loss": 1.0517,
      "step": 300
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.7143459916114807,
      "learning_rate": 9.221466364323507e-06,
      "loss": 1.0106,
      "step": 310
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.7883962988853455,
      "learning_rate": 9.196271101033007e-06,
      "loss": 1.0004,
      "step": 320
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.7985430359840393,
      "learning_rate": 9.171075837742504e-06,
      "loss": 1.0171,
      "step": 330
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.8012735247612,
      "learning_rate": 9.145880574452004e-06,
      "loss": 1.0106,
      "step": 340
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.7331275939941406,
      "learning_rate": 9.120685311161502e-06,
      "loss": 1.0044,
      "step": 350
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.8667876124382019,
      "learning_rate": 9.095490047871001e-06,
      "loss": 1.0195,
      "step": 360
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.8839796781539917,
      "learning_rate": 9.0702947845805e-06,
      "loss": 1.0145,
      "step": 370
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.8900341391563416,
      "learning_rate": 9.045099521289998e-06,
      "loss": 1.0171,
      "step": 380
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.8628876805305481,
      "learning_rate": 9.019904257999496e-06,
      "loss": 1.0039,
      "step": 390
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.8550145626068115,
      "learning_rate": 8.994708994708995e-06,
      "loss": 1.015,
      "step": 400
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.8471419215202332,
      "learning_rate": 8.969513731418495e-06,
      "loss": 1.0114,
      "step": 410
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.8051427006721497,
      "learning_rate": 8.944318468127992e-06,
      "loss": 0.9998,
      "step": 420
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8776746392250061,
      "learning_rate": 8.919123204837492e-06,
      "loss": 1.0109,
      "step": 430
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9933453798294067,
      "learning_rate": 8.893927941546991e-06,
      "loss": 1.0099,
      "step": 440
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8829633593559265,
      "learning_rate": 8.868732678256489e-06,
      "loss": 1.0201,
      "step": 450
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.8752363324165344,
      "learning_rate": 8.843537414965987e-06,
      "loss": 1.0022,
      "step": 460
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.9361273050308228,
      "learning_rate": 8.818342151675486e-06,
      "loss": 1.0075,
      "step": 470
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8231716156005859,
      "learning_rate": 8.793146888384985e-06,
      "loss": 0.984,
      "step": 480
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8183861374855042,
      "learning_rate": 8.767951625094483e-06,
      "loss": 0.9871,
      "step": 490
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.9514871835708618,
      "learning_rate": 8.74275636180398e-06,
      "loss": 1.024,
      "step": 500
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.9753857254981995,
      "learning_rate": 8.71756109851348e-06,
      "loss": 0.9926,
      "step": 510
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.9237353205680847,
      "learning_rate": 8.69236583522298e-06,
      "loss": 0.9709,
      "step": 520
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.9065663814544678,
      "learning_rate": 8.667170571932477e-06,
      "loss": 0.9848,
      "step": 530
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.8715184926986694,
      "learning_rate": 8.641975308641975e-06,
      "loss": 1.0091,
      "step": 540
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.8322390913963318,
      "learning_rate": 8.616780045351474e-06,
      "loss": 0.9876,
      "step": 550
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.9560284614562988,
      "learning_rate": 8.591584782060974e-06,
      "loss": 1.0234,
      "step": 560
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.8381783366203308,
      "learning_rate": 8.566389518770471e-06,
      "loss": 0.9788,
      "step": 570
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.8607673645019531,
      "learning_rate": 8.541194255479971e-06,
      "loss": 0.9816,
      "step": 580
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.8169811367988586,
      "learning_rate": 8.515998992189469e-06,
      "loss": 1.0092,
      "step": 590
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.9223540425300598,
      "learning_rate": 8.490803728898968e-06,
      "loss": 0.9885,
      "step": 600
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.7612660527229309,
      "learning_rate": 8.465608465608466e-06,
      "loss": 0.97,
      "step": 610
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.860916256904602,
      "learning_rate": 8.440413202317965e-06,
      "loss": 0.9921,
      "step": 620
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.807750403881073,
      "learning_rate": 8.415217939027463e-06,
      "loss": 0.987,
      "step": 630
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.7962011694908142,
      "learning_rate": 8.390022675736962e-06,
      "loss": 1.0134,
      "step": 640
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.8323624730110168,
      "learning_rate": 8.364827412446462e-06,
      "loss": 0.9769,
      "step": 650
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.9545472860336304,
      "learning_rate": 8.33963214915596e-06,
      "loss": 0.9862,
      "step": 660
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.0507547855377197,
      "learning_rate": 8.314436885865457e-06,
      "loss": 0.9977,
      "step": 670
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.9005000591278076,
      "learning_rate": 8.289241622574956e-06,
      "loss": 0.9897,
      "step": 680
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.8091966509819031,
      "learning_rate": 8.264046359284456e-06,
      "loss": 1.0188,
      "step": 690
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.8602038025856018,
      "learning_rate": 8.238851095993954e-06,
      "loss": 0.9997,
      "step": 700
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.9347642660140991,
      "learning_rate": 8.213655832703453e-06,
      "loss": 0.9904,
      "step": 710
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.8802648782730103,
      "learning_rate": 8.188460569412952e-06,
      "loss": 0.9765,
      "step": 720
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.8487595915794373,
      "learning_rate": 8.16326530612245e-06,
      "loss": 1.0184,
      "step": 730
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.9282223582267761,
      "learning_rate": 8.138070042831948e-06,
      "loss": 1.0047,
      "step": 740
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.0162054300308228,
      "learning_rate": 8.112874779541447e-06,
      "loss": 0.9909,
      "step": 750
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.8241488337516785,
      "learning_rate": 8.087679516250947e-06,
      "loss": 0.9976,
      "step": 760
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.069097638130188,
      "learning_rate": 8.062484252960444e-06,
      "loss": 0.9874,
      "step": 770
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.9084659218788147,
      "learning_rate": 8.037288989669942e-06,
      "loss": 0.9751,
      "step": 780
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.9863806962966919,
      "learning_rate": 8.012093726379441e-06,
      "loss": 0.9815,
      "step": 790
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.9838799834251404,
      "learning_rate": 7.98689846308894e-06,
      "loss": 1.0019,
      "step": 800
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.0254228115081787,
      "learning_rate": 7.961703199798438e-06,
      "loss": 0.9918,
      "step": 810
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.9409896731376648,
      "learning_rate": 7.936507936507936e-06,
      "loss": 0.9899,
      "step": 820
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.9909108281135559,
      "learning_rate": 7.911312673217436e-06,
      "loss": 0.9666,
      "step": 830
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.9930652976036072,
      "learning_rate": 7.886117409926935e-06,
      "loss": 0.9643,
      "step": 840
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.983561098575592,
      "learning_rate": 7.860922146636433e-06,
      "loss": 0.9895,
      "step": 850
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.9257413148880005,
      "learning_rate": 7.835726883345932e-06,
      "loss": 0.9933,
      "step": 860
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.9350329041481018,
      "learning_rate": 7.81053162005543e-06,
      "loss": 0.9725,
      "step": 870
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.0069069862365723,
      "learning_rate": 7.78533635676493e-06,
      "loss": 0.9739,
      "step": 880
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.9367712736129761,
      "learning_rate": 7.760141093474427e-06,
      "loss": 0.9835,
      "step": 890
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.867667555809021,
      "learning_rate": 7.734945830183926e-06,
      "loss": 0.9937,
      "step": 900
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.9225870370864868,
      "learning_rate": 7.709750566893424e-06,
      "loss": 0.9771,
      "step": 910
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.9846392869949341,
      "learning_rate": 7.684555303602923e-06,
      "loss": 0.9988,
      "step": 920
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.8313792943954468,
      "learning_rate": 7.659360040312423e-06,
      "loss": 0.9594,
      "step": 930
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.9007426500320435,
      "learning_rate": 7.63416477702192e-06,
      "loss": 0.9641,
      "step": 940
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.9949538111686707,
      "learning_rate": 7.608969513731419e-06,
      "loss": 0.9981,
      "step": 950
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.0600723028182983,
      "learning_rate": 7.583774250440918e-06,
      "loss": 0.9799,
      "step": 960
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.124242901802063,
      "learning_rate": 7.558578987150417e-06,
      "loss": 0.9923,
      "step": 970
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.1153829097747803,
      "learning_rate": 7.533383723859915e-06,
      "loss": 0.976,
      "step": 980
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.996911346912384,
      "learning_rate": 7.508188460569413e-06,
      "loss": 0.9952,
      "step": 990
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.866919219493866,
      "learning_rate": 7.482993197278913e-06,
      "loss": 0.9708,
      "step": 1000
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.0361676216125488,
      "learning_rate": 7.457797933988411e-06,
      "loss": 0.9757,
      "step": 1010
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.0245063304901123,
      "learning_rate": 7.432602670697909e-06,
      "loss": 0.9879,
      "step": 1020
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.9136086106300354,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.962,
      "step": 1030
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.1000611782073975,
      "learning_rate": 7.382212144116907e-06,
      "loss": 0.9743,
      "step": 1040
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.0377795696258545,
      "learning_rate": 7.3570168808264054e-06,
      "loss": 0.9392,
      "step": 1050
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9714633226394653,
      "learning_rate": 7.331821617535904e-06,
      "loss": 0.964,
      "step": 1060
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.027772068977356,
      "learning_rate": 7.306626354245403e-06,
      "loss": 0.9784,
      "step": 1070
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9620116949081421,
      "learning_rate": 7.281431090954901e-06,
      "loss": 0.9803,
      "step": 1080
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.0313563346862793,
      "learning_rate": 7.2562358276644e-06,
      "loss": 0.9816,
      "step": 1090
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.058566927909851,
      "learning_rate": 7.231040564373898e-06,
      "loss": 0.9692,
      "step": 1100
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9281766414642334,
      "learning_rate": 7.205845301083398e-06,
      "loss": 0.9508,
      "step": 1110
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.960678219795227,
      "learning_rate": 7.180650037792895e-06,
      "loss": 0.9437,
      "step": 1120
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.9975042343139648,
      "learning_rate": 7.155454774502394e-06,
      "loss": 0.97,
      "step": 1130
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9827629327774048,
      "learning_rate": 7.130259511211892e-06,
      "loss": 0.9709,
      "step": 1140
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.084519386291504,
      "learning_rate": 7.105064247921392e-06,
      "loss": 0.9736,
      "step": 1150
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.1209474802017212,
      "learning_rate": 7.0798689846308895e-06,
      "loss": 0.9722,
      "step": 1160
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.0013864040374756,
      "learning_rate": 7.054673721340388e-06,
      "loss": 0.9671,
      "step": 1170
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9566646218299866,
      "learning_rate": 7.0294784580498875e-06,
      "loss": 0.9767,
      "step": 1180
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9742752909660339,
      "learning_rate": 7.004283194759386e-06,
      "loss": 0.9703,
      "step": 1190
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.1095930337905884,
      "learning_rate": 6.979087931468885e-06,
      "loss": 0.9817,
      "step": 1200
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.0295912027359009,
      "learning_rate": 6.953892668178382e-06,
      "loss": 0.9756,
      "step": 1210
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.0642352104187012,
      "learning_rate": 6.928697404887882e-06,
      "loss": 0.9503,
      "step": 1220
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.157819390296936,
      "learning_rate": 6.90350214159738e-06,
      "loss": 0.9765,
      "step": 1230
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.0977922677993774,
      "learning_rate": 6.878306878306879e-06,
      "loss": 0.9706,
      "step": 1240
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.9640308022499084,
      "learning_rate": 6.853111615016378e-06,
      "loss": 0.9578,
      "step": 1250
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.0732492208480835,
      "learning_rate": 6.827916351725876e-06,
      "loss": 0.9455,
      "step": 1260
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9846688508987427,
      "learning_rate": 6.8027210884353745e-06,
      "loss": 0.9764,
      "step": 1270
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8813304901123047,
      "learning_rate": 6.777525825144873e-06,
      "loss": 0.9842,
      "step": 1280
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.9812163710594177,
      "learning_rate": 6.752330561854372e-06,
      "loss": 0.9593,
      "step": 1290
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.0121666193008423,
      "learning_rate": 6.72713529856387e-06,
      "loss": 0.9623,
      "step": 1300
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.0399516820907593,
      "learning_rate": 6.701940035273369e-06,
      "loss": 0.9665,
      "step": 1310
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9701886773109436,
      "learning_rate": 6.676744771982868e-06,
      "loss": 0.978,
      "step": 1320
    },
    {
      "epoch": 1.01,
      "grad_norm": 1.0398330688476562,
      "learning_rate": 6.651549508692367e-06,
      "loss": 0.9515,
      "step": 1330
    },
    {
      "epoch": 1.01,
      "grad_norm": 1.038935661315918,
      "learning_rate": 6.626354245401865e-06,
      "loss": 0.958,
      "step": 1340
    },
    {
      "epoch": 1.02,
      "grad_norm": 1.0119668245315552,
      "learning_rate": 6.601158982111363e-06,
      "loss": 0.9347,
      "step": 1350
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.9660181403160095,
      "learning_rate": 6.575963718820862e-06,
      "loss": 0.9577,
      "step": 1360
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.9453220963478088,
      "learning_rate": 6.550768455530361e-06,
      "loss": 0.9375,
      "step": 1370
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.9495384693145752,
      "learning_rate": 6.525573192239859e-06,
      "loss": 0.9311,
      "step": 1380
    },
    {
      "epoch": 1.05,
      "grad_norm": 1.1077016592025757,
      "learning_rate": 6.500377928949359e-06,
      "loss": 0.9489,
      "step": 1390
    },
    {
      "epoch": 1.06,
      "grad_norm": 1.0165854692459106,
      "learning_rate": 6.4751826656588565e-06,
      "loss": 0.958,
      "step": 1400
    },
    {
      "epoch": 1.07,
      "grad_norm": 1.3001985549926758,
      "learning_rate": 6.449987402368355e-06,
      "loss": 0.9325,
      "step": 1410
    },
    {
      "epoch": 1.07,
      "grad_norm": 1.0839056968688965,
      "learning_rate": 6.424792139077854e-06,
      "loss": 0.9769,
      "step": 1420
    },
    {
      "epoch": 1.08,
      "grad_norm": 1.0319207906723022,
      "learning_rate": 6.399596875787353e-06,
      "loss": 0.9574,
      "step": 1430
    },
    {
      "epoch": 1.09,
      "grad_norm": 1.0123823881149292,
      "learning_rate": 6.374401612496851e-06,
      "loss": 0.9315,
      "step": 1440
    },
    {
      "epoch": 1.1,
      "grad_norm": 1.1985503435134888,
      "learning_rate": 6.349206349206349e-06,
      "loss": 0.9354,
      "step": 1450
    },
    {
      "epoch": 1.1,
      "grad_norm": 1.1550285816192627,
      "learning_rate": 6.324011085915849e-06,
      "loss": 0.9344,
      "step": 1460
    },
    {
      "epoch": 1.11,
      "grad_norm": 1.0712833404541016,
      "learning_rate": 6.298815822625347e-06,
      "loss": 0.9444,
      "step": 1470
    },
    {
      "epoch": 1.12,
      "grad_norm": 1.0778782367706299,
      "learning_rate": 6.273620559334846e-06,
      "loss": 0.935,
      "step": 1480
    },
    {
      "epoch": 1.13,
      "grad_norm": 1.0286056995391846,
      "learning_rate": 6.2484252960443435e-06,
      "loss": 0.9606,
      "step": 1490
    },
    {
      "epoch": 1.13,
      "grad_norm": 1.1475175619125366,
      "learning_rate": 6.223230032753843e-06,
      "loss": 0.9354,
      "step": 1500
    },
    {
      "epoch": 1.14,
      "grad_norm": 1.1092008352279663,
      "learning_rate": 6.1980347694633414e-06,
      "loss": 0.9556,
      "step": 1510
    },
    {
      "epoch": 1.15,
      "grad_norm": 1.0508161783218384,
      "learning_rate": 6.17283950617284e-06,
      "loss": 0.9388,
      "step": 1520
    },
    {
      "epoch": 1.16,
      "grad_norm": 1.0714831352233887,
      "learning_rate": 6.147644242882339e-06,
      "loss": 0.9334,
      "step": 1530
    },
    {
      "epoch": 1.16,
      "grad_norm": 1.0550581216812134,
      "learning_rate": 6.122448979591837e-06,
      "loss": 0.9398,
      "step": 1540
    },
    {
      "epoch": 1.17,
      "grad_norm": 1.1077096462249756,
      "learning_rate": 6.097253716301336e-06,
      "loss": 0.9486,
      "step": 1550
    },
    {
      "epoch": 1.18,
      "grad_norm": 1.0329755544662476,
      "learning_rate": 6.072058453010834e-06,
      "loss": 0.9635,
      "step": 1560
    },
    {
      "epoch": 1.19,
      "grad_norm": 1.085105061531067,
      "learning_rate": 6.046863189720334e-06,
      "loss": 0.9394,
      "step": 1570
    },
    {
      "epoch": 1.19,
      "grad_norm": 1.0389838218688965,
      "learning_rate": 6.021667926429831e-06,
      "loss": 0.9474,
      "step": 1580
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.1028958559036255,
      "learning_rate": 5.99647266313933e-06,
      "loss": 0.9364,
      "step": 1590
    },
    {
      "epoch": 1.21,
      "grad_norm": 1.0558817386627197,
      "learning_rate": 5.971277399848829e-06,
      "loss": 0.9276,
      "step": 1600
    },
    {
      "epoch": 1.22,
      "grad_norm": 1.0861153602600098,
      "learning_rate": 5.946082136558328e-06,
      "loss": 0.9375,
      "step": 1610
    },
    {
      "epoch": 1.22,
      "grad_norm": 1.1104577779769897,
      "learning_rate": 5.920886873267826e-06,
      "loss": 0.948,
      "step": 1620
    },
    {
      "epoch": 1.23,
      "grad_norm": 1.0775477886199951,
      "learning_rate": 5.895691609977324e-06,
      "loss": 0.9334,
      "step": 1630
    },
    {
      "epoch": 1.24,
      "grad_norm": 1.0878546237945557,
      "learning_rate": 5.8704963466868235e-06,
      "loss": 0.952,
      "step": 1640
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.0840810537338257,
      "learning_rate": 5.845301083396322e-06,
      "loss": 0.9312,
      "step": 1650
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.2151501178741455,
      "learning_rate": 5.820105820105821e-06,
      "loss": 0.9549,
      "step": 1660
    },
    {
      "epoch": 1.26,
      "grad_norm": 1.0833278894424438,
      "learning_rate": 5.79491055681532e-06,
      "loss": 0.9557,
      "step": 1670
    },
    {
      "epoch": 1.27,
      "grad_norm": 1.0161901712417603,
      "learning_rate": 5.769715293524818e-06,
      "loss": 0.9312,
      "step": 1680
    },
    {
      "epoch": 1.28,
      "grad_norm": 1.075750708580017,
      "learning_rate": 5.744520030234316e-06,
      "loss": 0.9501,
      "step": 1690
    },
    {
      "epoch": 1.28,
      "grad_norm": 1.042380928993225,
      "learning_rate": 5.719324766943815e-06,
      "loss": 0.9381,
      "step": 1700
    },
    {
      "epoch": 1.29,
      "grad_norm": 1.009562373161316,
      "learning_rate": 5.694129503653314e-06,
      "loss": 0.9305,
      "step": 1710
    },
    {
      "epoch": 1.3,
      "grad_norm": 1.1201280355453491,
      "learning_rate": 5.668934240362812e-06,
      "loss": 0.9318,
      "step": 1720
    },
    {
      "epoch": 1.31,
      "grad_norm": 1.1156421899795532,
      "learning_rate": 5.6437389770723105e-06,
      "loss": 0.9445,
      "step": 1730
    },
    {
      "epoch": 1.32,
      "grad_norm": 1.0913002490997314,
      "learning_rate": 5.61854371378181e-06,
      "loss": 0.9537,
      "step": 1740
    },
    {
      "epoch": 1.32,
      "grad_norm": 1.041400671005249,
      "learning_rate": 5.593348450491308e-06,
      "loss": 0.9464,
      "step": 1750
    },
    {
      "epoch": 1.33,
      "grad_norm": 1.0327285528182983,
      "learning_rate": 5.568153187200807e-06,
      "loss": 0.9361,
      "step": 1760
    },
    {
      "epoch": 1.34,
      "grad_norm": 1.1421902179718018,
      "learning_rate": 5.542957923910305e-06,
      "loss": 0.9764,
      "step": 1770
    },
    {
      "epoch": 1.35,
      "grad_norm": 1.202650785446167,
      "learning_rate": 5.517762660619804e-06,
      "loss": 0.9553,
      "step": 1780
    },
    {
      "epoch": 1.35,
      "grad_norm": 1.0642273426055908,
      "learning_rate": 5.492567397329303e-06,
      "loss": 0.9485,
      "step": 1790
    },
    {
      "epoch": 1.36,
      "grad_norm": 1.0749919414520264,
      "learning_rate": 5.467372134038801e-06,
      "loss": 0.9418,
      "step": 1800
    },
    {
      "epoch": 1.37,
      "grad_norm": 1.0974488258361816,
      "learning_rate": 5.442176870748301e-06,
      "loss": 0.9539,
      "step": 1810
    },
    {
      "epoch": 1.38,
      "grad_norm": 1.1204264163970947,
      "learning_rate": 5.416981607457798e-06,
      "loss": 0.9357,
      "step": 1820
    },
    {
      "epoch": 1.38,
      "grad_norm": 1.268764853477478,
      "learning_rate": 5.391786344167297e-06,
      "loss": 0.9512,
      "step": 1830
    },
    {
      "epoch": 1.39,
      "grad_norm": 1.159050703048706,
      "learning_rate": 5.366591080876795e-06,
      "loss": 0.9402,
      "step": 1840
    },
    {
      "epoch": 1.4,
      "grad_norm": 1.2613215446472168,
      "learning_rate": 5.341395817586295e-06,
      "loss": 0.9411,
      "step": 1850
    },
    {
      "epoch": 1.41,
      "grad_norm": 1.1373072862625122,
      "learning_rate": 5.3162005542957925e-06,
      "loss": 0.9752,
      "step": 1860
    },
    {
      "epoch": 1.41,
      "grad_norm": 1.144242763519287,
      "learning_rate": 5.291005291005291e-06,
      "loss": 0.9471,
      "step": 1870
    },
    {
      "epoch": 1.42,
      "grad_norm": 1.0989221334457397,
      "learning_rate": 5.2658100277147905e-06,
      "loss": 0.9348,
      "step": 1880
    },
    {
      "epoch": 1.43,
      "grad_norm": 1.0864170789718628,
      "learning_rate": 5.240614764424289e-06,
      "loss": 0.9435,
      "step": 1890
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.1140406131744385,
      "learning_rate": 5.2154195011337876e-06,
      "loss": 0.9541,
      "step": 1900
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.1856478452682495,
      "learning_rate": 5.190224237843285e-06,
      "loss": 0.9151,
      "step": 1910
    },
    {
      "epoch": 1.45,
      "grad_norm": 1.2324665784835815,
      "learning_rate": 5.165028974552785e-06,
      "loss": 0.9411,
      "step": 1920
    },
    {
      "epoch": 1.46,
      "grad_norm": 1.038913369178772,
      "learning_rate": 5.139833711262283e-06,
      "loss": 0.9487,
      "step": 1930
    },
    {
      "epoch": 1.47,
      "grad_norm": 1.040578842163086,
      "learning_rate": 5.114638447971782e-06,
      "loss": 0.943,
      "step": 1940
    },
    {
      "epoch": 1.47,
      "grad_norm": 1.289865493774414,
      "learning_rate": 5.089443184681281e-06,
      "loss": 0.9382,
      "step": 1950
    },
    {
      "epoch": 1.48,
      "grad_norm": 1.1952651739120483,
      "learning_rate": 5.064247921390779e-06,
      "loss": 0.9453,
      "step": 1960
    },
    {
      "epoch": 1.49,
      "grad_norm": 1.1922310590744019,
      "learning_rate": 5.0390526581002774e-06,
      "loss": 0.9242,
      "step": 1970
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.1691875457763672,
      "learning_rate": 5.013857394809776e-06,
      "loss": 0.932,
      "step": 1980
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.254301905632019,
      "learning_rate": 4.9886621315192745e-06,
      "loss": 0.9341,
      "step": 1990
    },
    {
      "epoch": 1.51,
      "grad_norm": 1.0548466444015503,
      "learning_rate": 4.963466868228773e-06,
      "loss": 0.9496,
      "step": 2000
    },
    {
      "epoch": 1.52,
      "grad_norm": 1.2470088005065918,
      "learning_rate": 4.938271604938272e-06,
      "loss": 0.9488,
      "step": 2010
    },
    {
      "epoch": 1.53,
      "grad_norm": 1.2311805486679077,
      "learning_rate": 4.91307634164777e-06,
      "loss": 0.942,
      "step": 2020
    },
    {
      "epoch": 1.53,
      "grad_norm": 1.1870156526565552,
      "learning_rate": 4.88788107835727e-06,
      "loss": 0.94,
      "step": 2030
    },
    {
      "epoch": 1.54,
      "grad_norm": 1.060556173324585,
      "learning_rate": 4.862685815066767e-06,
      "loss": 0.9223,
      "step": 2040
    },
    {
      "epoch": 1.55,
      "grad_norm": 1.1396002769470215,
      "learning_rate": 4.837490551776267e-06,
      "loss": 0.9588,
      "step": 2050
    },
    {
      "epoch": 1.56,
      "grad_norm": 1.2042090892791748,
      "learning_rate": 4.812295288485765e-06,
      "loss": 0.9534,
      "step": 2060
    },
    {
      "epoch": 1.56,
      "grad_norm": 1.1195515394210815,
      "learning_rate": 4.787100025195264e-06,
      "loss": 0.9528,
      "step": 2070
    },
    {
      "epoch": 1.57,
      "grad_norm": 1.290823221206665,
      "learning_rate": 4.761904761904762e-06,
      "loss": 0.9514,
      "step": 2080
    },
    {
      "epoch": 1.58,
      "grad_norm": 1.0271083116531372,
      "learning_rate": 4.736709498614261e-06,
      "loss": 0.9699,
      "step": 2090
    },
    {
      "epoch": 1.59,
      "grad_norm": 1.2110365629196167,
      "learning_rate": 4.7115142353237595e-06,
      "loss": 0.9358,
      "step": 2100
    },
    {
      "epoch": 1.59,
      "grad_norm": 1.1250057220458984,
      "learning_rate": 4.686318972033258e-06,
      "loss": 0.9491,
      "step": 2110
    },
    {
      "epoch": 1.6,
      "grad_norm": 1.1912841796875,
      "learning_rate": 4.661123708742757e-06,
      "loss": 0.9332,
      "step": 2120
    },
    {
      "epoch": 1.61,
      "grad_norm": 1.0299352407455444,
      "learning_rate": 4.635928445452255e-06,
      "loss": 0.9577,
      "step": 2130
    },
    {
      "epoch": 1.62,
      "grad_norm": 1.0603456497192383,
      "learning_rate": 4.610733182161754e-06,
      "loss": 0.9332,
      "step": 2140
    },
    {
      "epoch": 1.63,
      "grad_norm": 1.1462222337722778,
      "learning_rate": 4.585537918871252e-06,
      "loss": 0.9343,
      "step": 2150
    },
    {
      "epoch": 1.63,
      "grad_norm": 1.0181183815002441,
      "learning_rate": 4.560342655580751e-06,
      "loss": 0.9365,
      "step": 2160
    },
    {
      "epoch": 1.64,
      "grad_norm": 1.2271602153778076,
      "learning_rate": 4.53514739229025e-06,
      "loss": 0.9171,
      "step": 2170
    },
    {
      "epoch": 1.65,
      "grad_norm": 1.1205991506576538,
      "learning_rate": 4.509952128999748e-06,
      "loss": 0.9299,
      "step": 2180
    },
    {
      "epoch": 1.66,
      "grad_norm": 1.1556931734085083,
      "learning_rate": 4.484756865709247e-06,
      "loss": 0.9652,
      "step": 2190
    },
    {
      "epoch": 1.66,
      "grad_norm": 1.0930700302124023,
      "learning_rate": 4.459561602418746e-06,
      "loss": 0.9259,
      "step": 2200
    },
    {
      "epoch": 1.67,
      "grad_norm": 1.207714319229126,
      "learning_rate": 4.434366339128244e-06,
      "loss": 0.9529,
      "step": 2210
    },
    {
      "epoch": 1.68,
      "grad_norm": 1.1223605871200562,
      "learning_rate": 4.409171075837743e-06,
      "loss": 0.9397,
      "step": 2220
    },
    {
      "epoch": 1.69,
      "grad_norm": 1.2369412183761597,
      "learning_rate": 4.3839758125472415e-06,
      "loss": 0.9246,
      "step": 2230
    },
    {
      "epoch": 1.69,
      "grad_norm": 1.232971429824829,
      "learning_rate": 4.35878054925674e-06,
      "loss": 0.9349,
      "step": 2240
    },
    {
      "epoch": 1.7,
      "grad_norm": 1.1932899951934814,
      "learning_rate": 4.333585285966239e-06,
      "loss": 0.9495,
      "step": 2250
    },
    {
      "epoch": 1.71,
      "grad_norm": 1.2230783700942993,
      "learning_rate": 4.308390022675737e-06,
      "loss": 0.9205,
      "step": 2260
    },
    {
      "epoch": 1.72,
      "grad_norm": 1.2365373373031616,
      "learning_rate": 4.283194759385236e-06,
      "loss": 0.9214,
      "step": 2270
    },
    {
      "epoch": 1.72,
      "grad_norm": 1.132912278175354,
      "learning_rate": 4.257999496094734e-06,
      "loss": 0.9154,
      "step": 2280
    },
    {
      "epoch": 1.73,
      "grad_norm": 1.2143046855926514,
      "learning_rate": 4.232804232804233e-06,
      "loss": 0.9485,
      "step": 2290
    },
    {
      "epoch": 1.74,
      "grad_norm": 1.2785279750823975,
      "learning_rate": 4.207608969513731e-06,
      "loss": 0.9462,
      "step": 2300
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.3493692874908447,
      "learning_rate": 4.182413706223231e-06,
      "loss": 0.9382,
      "step": 2310
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.1612365245819092,
      "learning_rate": 4.1572184429327285e-06,
      "loss": 0.9407,
      "step": 2320
    },
    {
      "epoch": 1.76,
      "grad_norm": 1.2210137844085693,
      "learning_rate": 4.132023179642228e-06,
      "loss": 0.9196,
      "step": 2330
    },
    {
      "epoch": 1.77,
      "grad_norm": 1.1422805786132812,
      "learning_rate": 4.1068279163517265e-06,
      "loss": 0.9426,
      "step": 2340
    },
    {
      "epoch": 1.78,
      "grad_norm": 1.1776541471481323,
      "learning_rate": 4.081632653061225e-06,
      "loss": 0.9436,
      "step": 2350
    },
    {
      "epoch": 1.78,
      "grad_norm": 1.2455426454544067,
      "learning_rate": 4.0564373897707236e-06,
      "loss": 0.9299,
      "step": 2360
    },
    {
      "epoch": 1.79,
      "grad_norm": 1.146103858947754,
      "learning_rate": 4.031242126480222e-06,
      "loss": 0.9353,
      "step": 2370
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.1490944623947144,
      "learning_rate": 4.006046863189721e-06,
      "loss": 0.9373,
      "step": 2380
    },
    {
      "epoch": 1.81,
      "grad_norm": 1.1841084957122803,
      "learning_rate": 3.980851599899219e-06,
      "loss": 0.9214,
      "step": 2390
    },
    {
      "epoch": 1.81,
      "grad_norm": 1.360938310623169,
      "learning_rate": 3.955656336608718e-06,
      "loss": 0.9233,
      "step": 2400
    },
    {
      "epoch": 1.82,
      "grad_norm": 1.1501877307891846,
      "learning_rate": 3.930461073318216e-06,
      "loss": 0.9149,
      "step": 2410
    },
    {
      "epoch": 1.83,
      "grad_norm": 1.3008352518081665,
      "learning_rate": 3.905265810027715e-06,
      "loss": 0.9592,
      "step": 2420
    },
    {
      "epoch": 1.84,
      "grad_norm": 1.2323644161224365,
      "learning_rate": 3.8800705467372134e-06,
      "loss": 0.9295,
      "step": 2430
    },
    {
      "epoch": 1.84,
      "grad_norm": 1.1721792221069336,
      "learning_rate": 3.854875283446712e-06,
      "loss": 0.9431,
      "step": 2440
    },
    {
      "epoch": 1.85,
      "grad_norm": 1.2879486083984375,
      "learning_rate": 3.829680020156211e-06,
      "loss": 0.9463,
      "step": 2450
    },
    {
      "epoch": 1.86,
      "grad_norm": 1.1511075496673584,
      "learning_rate": 3.8044847568657095e-06,
      "loss": 0.9664,
      "step": 2460
    },
    {
      "epoch": 1.87,
      "grad_norm": 1.3236780166625977,
      "learning_rate": 3.7792894935752085e-06,
      "loss": 0.9368,
      "step": 2470
    },
    {
      "epoch": 1.87,
      "grad_norm": 1.1940069198608398,
      "learning_rate": 3.7540942302847066e-06,
      "loss": 0.9548,
      "step": 2480
    },
    {
      "epoch": 1.88,
      "grad_norm": 1.1677494049072266,
      "learning_rate": 3.7288989669942056e-06,
      "loss": 0.9592,
      "step": 2490
    },
    {
      "epoch": 1.89,
      "grad_norm": 1.1518640518188477,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.9497,
      "step": 2500
    },
    {
      "epoch": 1.9,
      "grad_norm": 1.159209966659546,
      "learning_rate": 3.6785084404132027e-06,
      "loss": 0.9368,
      "step": 2510
    },
    {
      "epoch": 1.9,
      "grad_norm": 1.2133903503417969,
      "learning_rate": 3.6533131771227017e-06,
      "loss": 0.9257,
      "step": 2520
    },
    {
      "epoch": 1.91,
      "grad_norm": 1.329829454421997,
      "learning_rate": 3.6281179138322e-06,
      "loss": 0.9544,
      "step": 2530
    },
    {
      "epoch": 1.92,
      "grad_norm": 1.264733910560608,
      "learning_rate": 3.602922650541699e-06,
      "loss": 0.9525,
      "step": 2540
    },
    {
      "epoch": 1.93,
      "grad_norm": 1.2355499267578125,
      "learning_rate": 3.577727387251197e-06,
      "loss": 0.95,
      "step": 2550
    },
    {
      "epoch": 1.93,
      "grad_norm": 1.1711816787719727,
      "learning_rate": 3.552532123960696e-06,
      "loss": 0.9443,
      "step": 2560
    },
    {
      "epoch": 1.94,
      "grad_norm": 1.1221129894256592,
      "learning_rate": 3.527336860670194e-06,
      "loss": 0.9422,
      "step": 2570
    },
    {
      "epoch": 1.95,
      "grad_norm": 1.154335379600525,
      "learning_rate": 3.502141597379693e-06,
      "loss": 0.9462,
      "step": 2580
    },
    {
      "epoch": 1.96,
      "grad_norm": 1.2431056499481201,
      "learning_rate": 3.476946334089191e-06,
      "loss": 0.9305,
      "step": 2590
    },
    {
      "epoch": 1.97,
      "grad_norm": 1.1966361999511719,
      "learning_rate": 3.45175107079869e-06,
      "loss": 0.9537,
      "step": 2600
    },
    {
      "epoch": 1.97,
      "grad_norm": 1.2401050329208374,
      "learning_rate": 3.426555807508189e-06,
      "loss": 0.9262,
      "step": 2610
    },
    {
      "epoch": 1.98,
      "grad_norm": 1.1483770608901978,
      "learning_rate": 3.4013605442176872e-06,
      "loss": 0.9515,
      "step": 2620
    },
    {
      "epoch": 1.99,
      "grad_norm": 1.1387736797332764,
      "learning_rate": 3.376165280927186e-06,
      "loss": 0.9293,
      "step": 2630
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.2311975955963135,
      "learning_rate": 3.3509700176366843e-06,
      "loss": 0.9201,
      "step": 2640
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.1818970441818237,
      "learning_rate": 3.3257747543461833e-06,
      "loss": 0.9439,
      "step": 2650
    },
    {
      "epoch": 2.01,
      "grad_norm": 1.3574014902114868,
      "learning_rate": 3.3005794910556814e-06,
      "loss": 0.9235,
      "step": 2660
    },
    {
      "epoch": 2.02,
      "grad_norm": 1.1797312498092651,
      "learning_rate": 3.2753842277651804e-06,
      "loss": 0.9235,
      "step": 2670
    },
    {
      "epoch": 2.03,
      "grad_norm": 1.225716233253479,
      "learning_rate": 3.2501889644746794e-06,
      "loss": 0.9363,
      "step": 2680
    },
    {
      "epoch": 2.03,
      "grad_norm": 1.1875022649765015,
      "learning_rate": 3.2249937011841775e-06,
      "loss": 0.9155,
      "step": 2690
    },
    {
      "epoch": 2.04,
      "grad_norm": 1.2818158864974976,
      "learning_rate": 3.1997984378936765e-06,
      "loss": 0.9189,
      "step": 2700
    },
    {
      "epoch": 2.05,
      "grad_norm": 1.0735368728637695,
      "learning_rate": 3.1746031746031746e-06,
      "loss": 0.9183,
      "step": 2710
    },
    {
      "epoch": 2.06,
      "grad_norm": 1.2235422134399414,
      "learning_rate": 3.1494079113126736e-06,
      "loss": 0.9383,
      "step": 2720
    },
    {
      "epoch": 2.06,
      "grad_norm": 1.2761168479919434,
      "learning_rate": 3.1242126480221717e-06,
      "loss": 0.9161,
      "step": 2730
    },
    {
      "epoch": 2.07,
      "grad_norm": 1.1925853490829468,
      "learning_rate": 3.0990173847316707e-06,
      "loss": 0.9133,
      "step": 2740
    },
    {
      "epoch": 2.08,
      "grad_norm": 1.1860272884368896,
      "learning_rate": 3.0738221214411697e-06,
      "loss": 0.9317,
      "step": 2750
    },
    {
      "epoch": 2.09,
      "grad_norm": 1.285198450088501,
      "learning_rate": 3.048626858150668e-06,
      "loss": 0.9332,
      "step": 2760
    },
    {
      "epoch": 2.09,
      "grad_norm": 1.220641851425171,
      "learning_rate": 3.023431594860167e-06,
      "loss": 0.9464,
      "step": 2770
    },
    {
      "epoch": 2.1,
      "grad_norm": 1.31607186794281,
      "learning_rate": 2.998236331569665e-06,
      "loss": 0.9233,
      "step": 2780
    },
    {
      "epoch": 2.11,
      "grad_norm": 1.2238725423812866,
      "learning_rate": 2.973041068279164e-06,
      "loss": 0.9359,
      "step": 2790
    },
    {
      "epoch": 2.12,
      "grad_norm": 1.1632503271102905,
      "learning_rate": 2.947845804988662e-06,
      "loss": 0.9117,
      "step": 2800
    },
    {
      "epoch": 2.12,
      "grad_norm": 1.3732521533966064,
      "learning_rate": 2.922650541698161e-06,
      "loss": 0.9068,
      "step": 2810
    },
    {
      "epoch": 2.13,
      "grad_norm": 1.2329943180084229,
      "learning_rate": 2.89745527840766e-06,
      "loss": 0.9208,
      "step": 2820
    },
    {
      "epoch": 2.14,
      "grad_norm": 1.331434726715088,
      "learning_rate": 2.872260015117158e-06,
      "loss": 0.9353,
      "step": 2830
    },
    {
      "epoch": 2.15,
      "grad_norm": 1.3423517942428589,
      "learning_rate": 2.847064751826657e-06,
      "loss": 0.9216,
      "step": 2840
    },
    {
      "epoch": 2.15,
      "grad_norm": 1.2505260705947876,
      "learning_rate": 2.8218694885361552e-06,
      "loss": 0.9267,
      "step": 2850
    },
    {
      "epoch": 2.16,
      "grad_norm": 1.1538835763931274,
      "learning_rate": 2.796674225245654e-06,
      "loss": 0.9195,
      "step": 2860
    },
    {
      "epoch": 2.17,
      "grad_norm": 1.3361774682998657,
      "learning_rate": 2.7714789619551523e-06,
      "loss": 0.9252,
      "step": 2870
    },
    {
      "epoch": 2.18,
      "grad_norm": 1.2870272397994995,
      "learning_rate": 2.7462836986646513e-06,
      "loss": 0.9011,
      "step": 2880
    },
    {
      "epoch": 2.18,
      "grad_norm": 1.238709807395935,
      "learning_rate": 2.7210884353741503e-06,
      "loss": 0.9203,
      "step": 2890
    },
    {
      "epoch": 2.19,
      "grad_norm": 1.1434613466262817,
      "learning_rate": 2.6958931720836484e-06,
      "loss": 0.9263,
      "step": 2900
    },
    {
      "epoch": 2.2,
      "grad_norm": 1.2599645853042603,
      "learning_rate": 2.6706979087931474e-06,
      "loss": 0.9224,
      "step": 2910
    },
    {
      "epoch": 2.21,
      "grad_norm": 1.3294990062713623,
      "learning_rate": 2.6455026455026455e-06,
      "loss": 0.9116,
      "step": 2920
    },
    {
      "epoch": 2.21,
      "grad_norm": 1.2516047954559326,
      "learning_rate": 2.6203073822121445e-06,
      "loss": 0.9287,
      "step": 2930
    },
    {
      "epoch": 2.22,
      "grad_norm": 1.2115424871444702,
      "learning_rate": 2.5951121189216426e-06,
      "loss": 0.9384,
      "step": 2940
    },
    {
      "epoch": 2.23,
      "grad_norm": 1.1633473634719849,
      "learning_rate": 2.5699168556311416e-06,
      "loss": 0.9211,
      "step": 2950
    },
    {
      "epoch": 2.24,
      "grad_norm": 1.181342363357544,
      "learning_rate": 2.5447215923406406e-06,
      "loss": 0.9122,
      "step": 2960
    },
    {
      "epoch": 2.24,
      "grad_norm": 1.2719746828079224,
      "learning_rate": 2.5195263290501387e-06,
      "loss": 0.9302,
      "step": 2970
    },
    {
      "epoch": 2.25,
      "grad_norm": 1.292691707611084,
      "learning_rate": 2.4943310657596373e-06,
      "loss": 0.9226,
      "step": 2980
    },
    {
      "epoch": 2.26,
      "grad_norm": 1.1643900871276855,
      "learning_rate": 2.469135802469136e-06,
      "loss": 0.903,
      "step": 2990
    },
    {
      "epoch": 2.27,
      "grad_norm": 1.2362549304962158,
      "learning_rate": 2.443940539178635e-06,
      "loss": 0.9238,
      "step": 3000
    },
    {
      "epoch": 2.28,
      "grad_norm": 1.1967755556106567,
      "learning_rate": 2.4187452758881334e-06,
      "loss": 0.9372,
      "step": 3010
    },
    {
      "epoch": 2.28,
      "grad_norm": 1.1664007902145386,
      "learning_rate": 2.393550012597632e-06,
      "loss": 0.9249,
      "step": 3020
    },
    {
      "epoch": 2.29,
      "grad_norm": 1.39642333984375,
      "learning_rate": 2.3683547493071305e-06,
      "loss": 0.9381,
      "step": 3030
    },
    {
      "epoch": 2.3,
      "grad_norm": 1.2738609313964844,
      "learning_rate": 2.343159486016629e-06,
      "loss": 0.9345,
      "step": 3040
    },
    {
      "epoch": 2.31,
      "grad_norm": 1.2925204038619995,
      "learning_rate": 2.3179642227261276e-06,
      "loss": 0.9156,
      "step": 3050
    },
    {
      "epoch": 2.31,
      "grad_norm": 1.470628261566162,
      "learning_rate": 2.292768959435626e-06,
      "loss": 0.9128,
      "step": 3060
    },
    {
      "epoch": 2.32,
      "grad_norm": 1.280967354774475,
      "learning_rate": 2.267573696145125e-06,
      "loss": 0.9225,
      "step": 3070
    },
    {
      "epoch": 2.33,
      "grad_norm": 1.3174177408218384,
      "learning_rate": 2.2423784328546237e-06,
      "loss": 0.9138,
      "step": 3080
    },
    {
      "epoch": 2.34,
      "grad_norm": 1.3123756647109985,
      "learning_rate": 2.217183169564122e-06,
      "loss": 0.9284,
      "step": 3090
    },
    {
      "epoch": 2.34,
      "grad_norm": 1.232643723487854,
      "learning_rate": 2.1919879062736208e-06,
      "loss": 0.9201,
      "step": 3100
    },
    {
      "epoch": 2.35,
      "grad_norm": 1.207700490951538,
      "learning_rate": 2.1667926429831193e-06,
      "loss": 0.9061,
      "step": 3110
    },
    {
      "epoch": 2.36,
      "grad_norm": 1.2038023471832275,
      "learning_rate": 2.141597379692618e-06,
      "loss": 0.9437,
      "step": 3120
    },
    {
      "epoch": 2.37,
      "grad_norm": 1.4127525091171265,
      "learning_rate": 2.1164021164021164e-06,
      "loss": 0.9142,
      "step": 3130
    },
    {
      "epoch": 2.37,
      "grad_norm": 1.3315386772155762,
      "learning_rate": 2.0912068531116154e-06,
      "loss": 0.9217,
      "step": 3140
    },
    {
      "epoch": 2.38,
      "grad_norm": 1.3743528127670288,
      "learning_rate": 2.066011589821114e-06,
      "loss": 0.8931,
      "step": 3150
    },
    {
      "epoch": 2.39,
      "grad_norm": 1.2260786294937134,
      "learning_rate": 2.0408163265306125e-06,
      "loss": 0.9224,
      "step": 3160
    },
    {
      "epoch": 2.4,
      "grad_norm": 1.3002612590789795,
      "learning_rate": 2.015621063240111e-06,
      "loss": 0.9371,
      "step": 3170
    },
    {
      "epoch": 2.4,
      "grad_norm": 1.1567788124084473,
      "learning_rate": 1.9904257999496096e-06,
      "loss": 0.9244,
      "step": 3180
    },
    {
      "epoch": 2.41,
      "grad_norm": 1.2505953311920166,
      "learning_rate": 1.965230536659108e-06,
      "loss": 0.8985,
      "step": 3190
    },
    {
      "epoch": 2.42,
      "grad_norm": 1.3488541841506958,
      "learning_rate": 1.9400352733686067e-06,
      "loss": 0.9111,
      "step": 3200
    },
    {
      "epoch": 2.43,
      "grad_norm": 1.371126651763916,
      "learning_rate": 1.9148400100781057e-06,
      "loss": 0.9094,
      "step": 3210
    },
    {
      "epoch": 2.43,
      "grad_norm": 1.3531619310379028,
      "learning_rate": 1.8896447467876043e-06,
      "loss": 0.9353,
      "step": 3220
    },
    {
      "epoch": 2.44,
      "grad_norm": 1.261234998703003,
      "learning_rate": 1.8644494834971028e-06,
      "loss": 0.9635,
      "step": 3230
    },
    {
      "epoch": 2.45,
      "grad_norm": 1.2285544872283936,
      "learning_rate": 1.8392542202066014e-06,
      "loss": 0.9104,
      "step": 3240
    },
    {
      "epoch": 2.46,
      "grad_norm": 1.2340446710586548,
      "learning_rate": 1.8140589569161e-06,
      "loss": 0.9275,
      "step": 3250
    },
    {
      "epoch": 2.46,
      "grad_norm": 1.1888856887817383,
      "learning_rate": 1.7888636936255985e-06,
      "loss": 0.8929,
      "step": 3260
    },
    {
      "epoch": 2.47,
      "grad_norm": 1.3445130586624146,
      "learning_rate": 1.763668430335097e-06,
      "loss": 0.9233,
      "step": 3270
    },
    {
      "epoch": 2.48,
      "grad_norm": 1.2736765146255493,
      "learning_rate": 1.7384731670445956e-06,
      "loss": 0.9143,
      "step": 3280
    },
    {
      "epoch": 2.49,
      "grad_norm": 1.1950249671936035,
      "learning_rate": 1.7132779037540945e-06,
      "loss": 0.8981,
      "step": 3290
    },
    {
      "epoch": 2.49,
      "grad_norm": 1.3372317552566528,
      "learning_rate": 1.688082640463593e-06,
      "loss": 0.9149,
      "step": 3300
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.2550235986709595,
      "learning_rate": 1.6628873771730917e-06,
      "loss": 0.9448,
      "step": 3310
    },
    {
      "epoch": 2.51,
      "grad_norm": 1.3083131313323975,
      "learning_rate": 1.6376921138825902e-06,
      "loss": 0.926,
      "step": 3320
    },
    {
      "epoch": 2.52,
      "grad_norm": 1.2111276388168335,
      "learning_rate": 1.6124968505920888e-06,
      "loss": 0.9237,
      "step": 3330
    },
    {
      "epoch": 2.52,
      "grad_norm": 1.1683611869812012,
      "learning_rate": 1.5873015873015873e-06,
      "loss": 0.9265,
      "step": 3340
    },
    {
      "epoch": 2.53,
      "grad_norm": 1.2934691905975342,
      "learning_rate": 1.5621063240110859e-06,
      "loss": 0.8935,
      "step": 3350
    },
    {
      "epoch": 2.54,
      "grad_norm": 1.2122243642807007,
      "learning_rate": 1.5369110607205848e-06,
      "loss": 0.9184,
      "step": 3360
    },
    {
      "epoch": 2.55,
      "grad_norm": 1.318337321281433,
      "learning_rate": 1.5117157974300834e-06,
      "loss": 0.919,
      "step": 3370
    },
    {
      "epoch": 2.55,
      "grad_norm": 1.2197314500808716,
      "learning_rate": 1.486520534139582e-06,
      "loss": 0.9118,
      "step": 3380
    },
    {
      "epoch": 2.56,
      "grad_norm": 1.1137281656265259,
      "learning_rate": 1.4613252708490805e-06,
      "loss": 0.9312,
      "step": 3390
    },
    {
      "epoch": 2.57,
      "grad_norm": 1.2075839042663574,
      "learning_rate": 1.436130007558579e-06,
      "loss": 0.9258,
      "step": 3400
    },
    {
      "epoch": 2.58,
      "grad_norm": 1.379468321800232,
      "learning_rate": 1.4109347442680776e-06,
      "loss": 0.9163,
      "step": 3410
    },
    {
      "epoch": 2.59,
      "grad_norm": 1.3524144887924194,
      "learning_rate": 1.3857394809775762e-06,
      "loss": 0.9211,
      "step": 3420
    },
    {
      "epoch": 2.59,
      "grad_norm": 1.342460036277771,
      "learning_rate": 1.3605442176870751e-06,
      "loss": 0.9107,
      "step": 3430
    },
    {
      "epoch": 2.6,
      "grad_norm": 1.3853424787521362,
      "learning_rate": 1.3353489543965737e-06,
      "loss": 0.9367,
      "step": 3440
    },
    {
      "epoch": 2.61,
      "grad_norm": 1.397850513458252,
      "learning_rate": 1.3101536911060723e-06,
      "loss": 0.9173,
      "step": 3450
    },
    {
      "epoch": 2.62,
      "grad_norm": 1.439322590827942,
      "learning_rate": 1.2849584278155708e-06,
      "loss": 0.9158,
      "step": 3460
    },
    {
      "epoch": 2.62,
      "grad_norm": 1.199365258216858,
      "learning_rate": 1.2597631645250694e-06,
      "loss": 0.9167,
      "step": 3470
    },
    {
      "epoch": 2.63,
      "grad_norm": 1.323708415031433,
      "learning_rate": 1.234567901234568e-06,
      "loss": 0.9087,
      "step": 3480
    },
    {
      "epoch": 2.64,
      "grad_norm": 1.323588252067566,
      "learning_rate": 1.2093726379440667e-06,
      "loss": 0.9217,
      "step": 3490
    },
    {
      "epoch": 2.65,
      "grad_norm": 1.3864338397979736,
      "learning_rate": 1.1841773746535652e-06,
      "loss": 0.9202,
      "step": 3500
    }
  ],
  "logging_steps": 10,
  "max_steps": 3969,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 3.1042768180419625e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}