{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.410958904109589,
  "eval_steps": 500,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013698630136986301,
      "grad_norm": 1.4028738737106323,
      "learning_rate": 3e-06,
      "loss": 2.2484,
      "step": 10
    },
    {
      "epoch": 0.0027397260273972603,
      "grad_norm": 1.4300401210784912,
      "learning_rate": 6.333333333333334e-06,
      "loss": 2.2741,
      "step": 20
    },
    {
      "epoch": 0.00410958904109589,
      "grad_norm": 4.389048099517822,
      "learning_rate": 9.666666666666667e-06,
      "loss": 2.3968,
      "step": 30
    },
    {
      "epoch": 0.005479452054794521,
      "grad_norm": 1.3157492876052856,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 2.347,
      "step": 40
    },
    {
      "epoch": 0.00684931506849315,
      "grad_norm": 1.290838360786438,
      "learning_rate": 1.6333333333333335e-05,
      "loss": 2.0741,
      "step": 50
    },
    {
      "epoch": 0.00821917808219178,
      "grad_norm": 1.7722171545028687,
      "learning_rate": 1.9666666666666666e-05,
      "loss": 1.7538,
      "step": 60
    },
    {
      "epoch": 0.009589041095890411,
      "grad_norm": 1.3361868858337402,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 1.8606,
      "step": 70
    },
    {
      "epoch": 0.010958904109589041,
      "grad_norm": 1.5917797088623047,
      "learning_rate": 2.633333333333333e-05,
      "loss": 2.0042,
      "step": 80
    },
    {
      "epoch": 0.012328767123287671,
      "grad_norm": 1.829978346824646,
      "learning_rate": 2.9666666666666672e-05,
      "loss": 1.848,
      "step": 90
    },
    {
      "epoch": 0.0136986301369863,
      "grad_norm": 1.3388415575027466,
      "learning_rate": 3.3e-05,
      "loss": 1.8417,
      "step": 100
    },
    {
      "epoch": 0.015068493150684932,
      "grad_norm": 1.4123870134353638,
      "learning_rate": 3.633333333333333e-05,
      "loss": 2.3187,
      "step": 110
    },
    {
      "epoch": 0.01643835616438356,
      "grad_norm": 1.5437504053115845,
      "learning_rate": 3.966666666666667e-05,
      "loss": 1.8643,
      "step": 120
    },
    {
      "epoch": 0.01780821917808219,
      "grad_norm": 1.496188998222351,
      "learning_rate": 4.3e-05,
      "loss": 1.6831,
      "step": 130
    },
    {
      "epoch": 0.019178082191780823,
      "grad_norm": 2.099055290222168,
      "learning_rate": 4.633333333333333e-05,
      "loss": 1.693,
      "step": 140
    },
    {
      "epoch": 0.02054794520547945,
      "grad_norm": 3.8160312175750732,
      "learning_rate": 4.966666666666667e-05,
      "loss": 1.7158,
      "step": 150
    },
    {
      "epoch": 0.021917808219178082,
      "grad_norm": 1.1084319353103638,
      "learning_rate": 5.300000000000001e-05,
      "loss": 1.78,
      "step": 160
    },
    {
      "epoch": 0.023287671232876714,
      "grad_norm": 1.714733600616455,
      "learning_rate": 5.633333333333334e-05,
      "loss": 1.8776,
      "step": 170
    },
    {
      "epoch": 0.024657534246575342,
      "grad_norm": 1.0134049654006958,
      "learning_rate": 5.966666666666667e-05,
      "loss": 1.9483,
      "step": 180
    },
    {
      "epoch": 0.026027397260273973,
      "grad_norm": 0.7298793792724609,
      "learning_rate": 6.3e-05,
      "loss": 1.7299,
      "step": 190
    },
    {
      "epoch": 0.0273972602739726,
      "grad_norm": 1.709596872329712,
      "learning_rate": 6.633333333333334e-05,
      "loss": 2.1109,
      "step": 200
    },
    {
      "epoch": 0.028767123287671233,
      "grad_norm": 1.8335596323013306,
      "learning_rate": 6.966666666666668e-05,
      "loss": 1.7318,
      "step": 210
    },
    {
      "epoch": 0.030136986301369864,
      "grad_norm": 2.340693235397339,
      "learning_rate": 7.3e-05,
      "loss": 1.9166,
      "step": 220
    },
    {
      "epoch": 0.031506849315068496,
      "grad_norm": 2.241060972213745,
      "learning_rate": 7.633333333333334e-05,
      "loss": 1.8311,
      "step": 230
    },
    {
      "epoch": 0.03287671232876712,
      "grad_norm": 0.9591296911239624,
      "learning_rate": 7.966666666666666e-05,
      "loss": 1.8488,
      "step": 240
    },
    {
      "epoch": 0.03424657534246575,
      "grad_norm": 1.4175859689712524,
      "learning_rate": 8.3e-05,
      "loss": 1.7258,
      "step": 250
    },
    {
      "epoch": 0.03561643835616438,
      "grad_norm": 2.6265478134155273,
      "learning_rate": 8.633333333333334e-05,
      "loss": 1.7014,
      "step": 260
    },
    {
      "epoch": 0.036986301369863014,
      "grad_norm": 1.3878326416015625,
      "learning_rate": 8.966666666666666e-05,
      "loss": 1.8881,
      "step": 270
    },
    {
      "epoch": 0.038356164383561646,
      "grad_norm": 1.3768271207809448,
      "learning_rate": 9.300000000000001e-05,
      "loss": 2.0887,
      "step": 280
    },
    {
      "epoch": 0.03972602739726028,
      "grad_norm": 1.9178138971328735,
      "learning_rate": 9.633333333333335e-05,
      "loss": 1.5802,
      "step": 290
    },
    {
      "epoch": 0.0410958904109589,
      "grad_norm": 2.208899736404419,
      "learning_rate": 9.966666666666667e-05,
      "loss": 2.2524,
      "step": 300
    },
    {
      "epoch": 0.04246575342465753,
      "grad_norm": 1.5130765438079834,
      "learning_rate": 9.999725846827562e-05,
      "loss": 1.9414,
      "step": 310
    },
    {
      "epoch": 0.043835616438356165,
      "grad_norm": 1.3225808143615723,
      "learning_rate": 9.998778195446311e-05,
      "loss": 1.8956,
      "step": 320
    },
    {
      "epoch": 0.045205479452054796,
      "grad_norm": 2.186032772064209,
      "learning_rate": 9.997153789515461e-05,
      "loss": 1.9144,
      "step": 330
    },
    {
      "epoch": 0.04657534246575343,
      "grad_norm": 1.4805994033813477,
      "learning_rate": 9.994852848953574e-05,
      "loss": 1.8424,
      "step": 340
    },
    {
      "epoch": 0.04794520547945205,
      "grad_norm": 1.9020469188690186,
      "learning_rate": 9.991875685271168e-05,
      "loss": 1.9322,
      "step": 350
    },
    {
      "epoch": 0.049315068493150684,
      "grad_norm": 1.9721382856369019,
      "learning_rate": 9.988222701528547e-05,
      "loss": 1.7567,
      "step": 360
    },
    {
      "epoch": 0.050684931506849315,
      "grad_norm": 2.7525320053100586,
      "learning_rate": 9.983894392281237e-05,
      "loss": 1.7599,
      "step": 370
    },
    {
      "epoch": 0.052054794520547946,
      "grad_norm": 1.7699042558670044,
      "learning_rate": 9.978891343513023e-05,
      "loss": 1.8447,
      "step": 380
    },
    {
      "epoch": 0.05342465753424658,
      "grad_norm": 1.6476136445999146,
      "learning_rate": 9.973214232556622e-05,
      "loss": 1.8282,
      "step": 390
    },
    {
      "epoch": 0.0547945205479452,
      "grad_norm": 1.5267293453216553,
      "learning_rate": 9.966863828001982e-05,
      "loss": 1.823,
      "step": 400
    },
    {
      "epoch": 0.056164383561643834,
      "grad_norm": 3.080139636993408,
      "learning_rate": 9.959840989592226e-05,
      "loss": 1.8589,
      "step": 410
    },
    {
      "epoch": 0.057534246575342465,
      "grad_norm": 1.95379638671875,
      "learning_rate": 9.952146668107254e-05,
      "loss": 1.7409,
      "step": 420
    },
    {
      "epoch": 0.0589041095890411,
      "grad_norm": 1.5778534412384033,
      "learning_rate": 9.94378190523503e-05,
      "loss": 1.7094,
      "step": 430
    },
    {
      "epoch": 0.06027397260273973,
      "grad_norm": 1.2501163482666016,
      "learning_rate": 9.934747833430547e-05,
      "loss": 1.7797,
      "step": 440
    },
    {
      "epoch": 0.06164383561643835,
      "grad_norm": 2.6678783893585205,
      "learning_rate": 9.925045675762514e-05,
      "loss": 1.7278,
      "step": 450
    },
    {
      "epoch": 0.06301369863013699,
      "grad_norm": 2.3310539722442627,
      "learning_rate": 9.914676745747772e-05,
      "loss": 1.9377,
      "step": 460
    },
    {
      "epoch": 0.06438356164383562,
      "grad_norm": 2.122758150100708,
      "learning_rate": 9.903642447173465e-05,
      "loss": 1.6575,
      "step": 470
    },
    {
      "epoch": 0.06575342465753424,
      "grad_norm": 1.6059435606002808,
      "learning_rate": 9.891944273906986e-05,
      "loss": 1.6817,
      "step": 480
    },
    {
      "epoch": 0.06712328767123288,
      "grad_norm": 2.2808139324188232,
      "learning_rate": 9.879583809693738e-05,
      "loss": 1.6041,
      "step": 490
    },
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 3.285200595855713,
      "learning_rate": 9.866562727942714e-05,
      "loss": 2.1497,
      "step": 500
    },
    {
      "epoch": 0.06986301369863014,
      "grad_norm": 2.0148391723632812,
      "learning_rate": 9.85288279149995e-05,
      "loss": 1.9,
      "step": 510
    },
    {
      "epoch": 0.07123287671232877,
      "grad_norm": 3.0683975219726562,
      "learning_rate": 9.838545852409857e-05,
      "loss": 1.9064,
      "step": 520
    },
    {
      "epoch": 0.07260273972602739,
      "grad_norm": 1.8953275680541992,
      "learning_rate": 9.823553851664489e-05,
      "loss": 2.0165,
      "step": 530
    },
    {
      "epoch": 0.07397260273972603,
      "grad_norm": 1.1835041046142578,
      "learning_rate": 9.807908818940761e-05,
      "loss": 1.7635,
      "step": 540
    },
    {
      "epoch": 0.07534246575342465,
      "grad_norm": 2.961548089981079,
      "learning_rate": 9.791612872325667e-05,
      "loss": 1.976,
      "step": 550
    },
    {
      "epoch": 0.07671232876712329,
      "grad_norm": 1.2752394676208496,
      "learning_rate": 9.77466821802952e-05,
      "loss": 1.5748,
      "step": 560
    },
    {
      "epoch": 0.07808219178082192,
      "grad_norm": 1.5959957838058472,
      "learning_rate": 9.75707715008727e-05,
      "loss": 1.5759,
      "step": 570
    },
    {
      "epoch": 0.07945205479452055,
      "grad_norm": 1.7095568180084229,
      "learning_rate": 9.73884205004793e-05,
      "loss": 1.6938,
      "step": 580
    },
    {
      "epoch": 0.08082191780821918,
      "grad_norm": 2.9951586723327637,
      "learning_rate": 9.719965386652141e-05,
      "loss": 1.7049,
      "step": 590
    },
    {
      "epoch": 0.0821917808219178,
      "grad_norm": 1.3047794103622437,
      "learning_rate": 9.700449715497961e-05,
      "loss": 1.7304,
      "step": 600
    },
    {
      "epoch": 0.08356164383561644,
      "grad_norm": 2.5035972595214844,
      "learning_rate": 9.680297678694867e-05,
      "loss": 1.6454,
      "step": 610
    },
    {
      "epoch": 0.08493150684931507,
      "grad_norm": 1.5420308113098145,
      "learning_rate": 9.659512004506057e-05,
      "loss": 1.8747,
      "step": 620
    },
    {
      "epoch": 0.0863013698630137,
      "grad_norm": 2.6465156078338623,
      "learning_rate": 9.63809550697909e-05,
      "loss": 1.2745,
      "step": 630
    },
    {
      "epoch": 0.08767123287671233,
      "grad_norm": 2.176868200302124,
      "learning_rate": 9.616051085564906e-05,
      "loss": 1.8517,
      "step": 640
    },
    {
      "epoch": 0.08904109589041095,
      "grad_norm": 2.929405689239502,
      "learning_rate": 9.593381724725285e-05,
      "loss": 1.7283,
      "step": 650
    },
    {
      "epoch": 0.09041095890410959,
      "grad_norm": 1.6962724924087524,
      "learning_rate": 9.570090493528809e-05,
      "loss": 1.7032,
      "step": 660
    },
    {
      "epoch": 0.09178082191780822,
      "grad_norm": 1.4863412380218506,
      "learning_rate": 9.546180545235344e-05,
      "loss": 1.7415,
      "step": 670
    },
    {
      "epoch": 0.09315068493150686,
      "grad_norm": 1.5905004739761353,
      "learning_rate": 9.52165511686915e-05,
      "loss": 1.9648,
      "step": 680
    },
    {
      "epoch": 0.09452054794520548,
      "grad_norm": 1.0756253004074097,
      "learning_rate": 9.496517528780637e-05,
      "loss": 1.7415,
      "step": 690
    },
    {
      "epoch": 0.0958904109589041,
      "grad_norm": 2.223599910736084,
      "learning_rate": 9.47077118419684e-05,
      "loss": 1.6522,
      "step": 700
    },
    {
      "epoch": 0.09726027397260274,
      "grad_norm": 3.08367657661438,
      "learning_rate": 9.444419568760684e-05,
      "loss": 1.7242,
      "step": 710
    },
    {
      "epoch": 0.09863013698630137,
      "grad_norm": 2.0376291275024414,
      "learning_rate": 9.417466250059073e-05,
      "loss": 1.8525,
      "step": 720
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.322006940841675,
      "learning_rate": 9.389914877139903e-05,
      "loss": 1.8933,
      "step": 730
    },
    {
      "epoch": 0.10136986301369863,
      "grad_norm": 2.1793291568756104,
      "learning_rate": 9.361769180018038e-05,
      "loss": 1.4555,
      "step": 740
    },
    {
      "epoch": 0.10273972602739725,
      "grad_norm": 1.3473883867263794,
      "learning_rate": 9.333032969170326e-05,
      "loss": 1.6996,
      "step": 750
    },
    {
      "epoch": 0.10410958904109589,
      "grad_norm": 1.4925732612609863,
      "learning_rate": 9.30371013501972e-05,
      "loss": 2.0061,
      "step": 760
    },
    {
      "epoch": 0.10547945205479452,
      "grad_norm": 2.43156099319458,
      "learning_rate": 9.273804647408575e-05,
      "loss": 2.0925,
      "step": 770
    },
    {
      "epoch": 0.10684931506849316,
      "grad_norm": 4.304801940917969,
      "learning_rate": 9.243320555061205e-05,
      "loss": 1.4975,
      "step": 780
    },
    {
      "epoch": 0.10821917808219178,
      "grad_norm": 1.838025689125061,
      "learning_rate": 9.212261985035739e-05,
      "loss": 1.8231,
      "step": 790
    },
    {
      "epoch": 0.1095890410958904,
      "grad_norm": 3.2455596923828125,
      "learning_rate": 9.180633142165384e-05,
      "loss": 1.5903,
      "step": 800
    },
    {
      "epoch": 0.11095890410958904,
      "grad_norm": 1.955100178718567,
      "learning_rate": 9.148438308489168e-05,
      "loss": 1.7339,
      "step": 810
    },
    {
      "epoch": 0.11232876712328767,
      "grad_norm": 1.5595628023147583,
      "learning_rate": 9.11568184267221e-05,
      "loss": 1.4822,
      "step": 820
    },
    {
      "epoch": 0.1136986301369863,
      "grad_norm": 2.0874927043914795,
      "learning_rate": 9.082368179415632e-05,
      "loss": 1.6121,
      "step": 830
    },
    {
      "epoch": 0.11506849315068493,
      "grad_norm": 1.7389984130859375,
      "learning_rate": 9.04850182885617e-05,
      "loss": 1.6468,
      "step": 840
    },
    {
      "epoch": 0.11643835616438356,
      "grad_norm": 2.019792318344116,
      "learning_rate": 9.014087375955573e-05,
      "loss": 1.4844,
      "step": 850
    },
    {
      "epoch": 0.1178082191780822,
      "grad_norm": 1.5820057392120361,
      "learning_rate": 8.979129479879873e-05,
      "loss": 1.6963,
      "step": 860
    },
    {
      "epoch": 0.11917808219178082,
      "grad_norm": 2.9591360092163086,
      "learning_rate": 8.943632873368611e-05,
      "loss": 1.6382,
      "step": 870
    },
    {
      "epoch": 0.12054794520547946,
      "grad_norm": 1.5505706071853638,
      "learning_rate": 8.907602362094094e-05,
      "loss": 1.8158,
      "step": 880
    },
    {
      "epoch": 0.12191780821917808,
      "grad_norm": 1.7320066690444946,
      "learning_rate": 8.871042824010791e-05,
      "loss": 1.3332,
      "step": 890
    },
    {
      "epoch": 0.1232876712328767,
      "grad_norm": 3.1696863174438477,
      "learning_rate": 8.833959208694929e-05,
      "loss": 1.6221,
      "step": 900
    },
    {
      "epoch": 0.12465753424657534,
      "grad_norm": 2.0459401607513428,
      "learning_rate": 8.796356536674403e-05,
      "loss": 1.7272,
      "step": 910
    },
    {
      "epoch": 0.12602739726027398,
      "grad_norm": 4.202193737030029,
      "learning_rate": 8.758239898749085e-05,
      "loss": 1.7802,
      "step": 920
    },
    {
      "epoch": 0.1273972602739726,
      "grad_norm": 2.30110239982605,
      "learning_rate": 8.719614455301593e-05,
      "loss": 1.6778,
      "step": 930
    },
    {
      "epoch": 0.12876712328767123,
      "grad_norm": 3.40289044380188,
      "learning_rate": 8.680485435598673e-05,
      "loss": 1.4947,
      "step": 940
    },
    {
      "epoch": 0.13013698630136986,
      "grad_norm": 2.5461068153381348,
      "learning_rate": 8.640858137083232e-05,
      "loss": 1.4905,
      "step": 950
    },
    {
      "epoch": 0.13150684931506848,
      "grad_norm": 1.995659351348877,
      "learning_rate": 8.600737924657156e-05,
      "loss": 1.4755,
      "step": 960
    },
    {
      "epoch": 0.13287671232876713,
      "grad_norm": 2.8345696926116943,
      "learning_rate": 8.560130229954984e-05,
      "loss": 1.762,
      "step": 970
    },
    {
      "epoch": 0.13424657534246576,
      "grad_norm": 2.7922985553741455,
      "learning_rate": 8.519040550608546e-05,
      "loss": 1.6869,
      "step": 980
    },
    {
      "epoch": 0.13561643835616438,
      "grad_norm": 1.7801475524902344,
      "learning_rate": 8.477474449502682e-05,
      "loss": 1.8112,
      "step": 990
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 1.9390945434570312,
      "learning_rate": 8.435437554022115e-05,
      "loss": 1.6323,
      "step": 1000
    },
    {
      "epoch": 0.13835616438356163,
      "grad_norm": 2.17783522605896,
      "learning_rate": 8.392935555289584e-05,
      "loss": 1.778,
      "step": 1010
    },
    {
      "epoch": 0.13972602739726028,
      "grad_norm": 2.8870582580566406,
      "learning_rate": 8.349974207395366e-05,
      "loss": 1.7704,
      "step": 1020
    },
    {
      "epoch": 0.1410958904109589,
      "grad_norm": 2.317627191543579,
      "learning_rate": 8.306559326618259e-05,
      "loss": 1.8989,
      "step": 1030
    },
    {
      "epoch": 0.14246575342465753,
      "grad_norm": 1.2422226667404175,
      "learning_rate": 8.26269679063816e-05,
      "loss": 1.4631,
      "step": 1040
    },
    {
      "epoch": 0.14383561643835616,
      "grad_norm": 1.404496192932129,
      "learning_rate": 8.218392537740305e-05,
      "loss": 1.8917,
      "step": 1050
    },
    {
      "epoch": 0.14520547945205478,
      "grad_norm": 2.3392229080200195,
      "learning_rate": 8.173652566011338e-05,
      "loss": 1.4527,
      "step": 1060
    },
    {
      "epoch": 0.14657534246575343,
      "grad_norm": 2.2701287269592285,
      "learning_rate": 8.128482932527255e-05,
      "loss": 1.4515,
      "step": 1070
    },
    {
      "epoch": 0.14794520547945206,
      "grad_norm": 1.7164267301559448,
      "learning_rate": 8.082889752533375e-05,
      "loss": 1.976,
      "step": 1080
    },
    {
      "epoch": 0.14931506849315068,
      "grad_norm": 1.9462798833847046,
      "learning_rate": 8.036879198616434e-05,
      "loss": 1.492,
      "step": 1090
    },
    {
      "epoch": 0.1506849315068493,
      "grad_norm": 1.8207604885101318,
      "learning_rate": 7.990457499868919e-05,
      "loss": 1.719,
      "step": 1100
    },
    {
      "epoch": 0.15205479452054796,
      "grad_norm": 2.4192392826080322,
      "learning_rate": 7.943630941045744e-05,
      "loss": 1.7396,
      "step": 1110
    },
    {
      "epoch": 0.15342465753424658,
      "grad_norm": 1.4872899055480957,
      "learning_rate": 7.896405861713394e-05,
      "loss": 1.6986,
      "step": 1120
    },
    {
      "epoch": 0.1547945205479452,
      "grad_norm": 1.2965902090072632,
      "learning_rate": 7.848788655391658e-05,
      "loss": 1.7037,
      "step": 1130
    },
    {
      "epoch": 0.15616438356164383,
      "grad_norm": 2.5743491649627686,
      "learning_rate": 7.800785768688035e-05,
      "loss": 1.7557,
      "step": 1140
    },
    {
      "epoch": 0.15753424657534246,
      "grad_norm": 1.7150269746780396,
      "learning_rate": 7.752403700424979e-05,
      "loss": 1.5882,
      "step": 1150
    },
    {
      "epoch": 0.1589041095890411,
      "grad_norm": 1.951343297958374,
      "learning_rate": 7.703649000760053e-05,
      "loss": 1.6334,
      "step": 1160
    },
    {
      "epoch": 0.16027397260273973,
      "grad_norm": 1.4758542776107788,
      "learning_rate": 7.654528270299154e-05,
      "loss": 1.4411,
      "step": 1170
    },
    {
      "epoch": 0.16164383561643836,
      "grad_norm": 2.057770013809204,
      "learning_rate": 7.605048159202883e-05,
      "loss": 1.4802,
      "step": 1180
    },
    {
      "epoch": 0.16301369863013698,
      "grad_norm": 1.9429600238800049,
      "learning_rate": 7.555215366286227e-05,
      "loss": 1.8208,
      "step": 1190
    },
    {
      "epoch": 0.1643835616438356,
      "grad_norm": 2.5410385131835938,
      "learning_rate": 7.505036638111648e-05,
      "loss": 1.6631,
      "step": 1200
    },
    {
      "epoch": 0.16575342465753426,
      "grad_norm": 2.681525468826294,
      "learning_rate": 7.454518768075704e-05,
      "loss": 1.7817,
      "step": 1210
    },
    {
      "epoch": 0.16712328767123288,
      "grad_norm": 1.9880177974700928,
      "learning_rate": 7.403668595489333e-05,
      "loss": 1.5418,
      "step": 1220
    },
    {
      "epoch": 0.1684931506849315,
      "grad_norm": 1.3528841733932495,
      "learning_rate": 7.352493004651916e-05,
      "loss": 1.4801,
      "step": 1230
    },
    {
      "epoch": 0.16986301369863013,
      "grad_norm": 1.925644040107727,
      "learning_rate": 7.300998923919259e-05,
      "loss": 1.6479,
      "step": 1240
    },
    {
      "epoch": 0.17123287671232876,
      "grad_norm": 1.71919846534729,
      "learning_rate": 7.249193324765599e-05,
      "loss": 1.6604,
      "step": 1250
    },
    {
      "epoch": 0.1726027397260274,
      "grad_norm": 3.4710135459899902,
      "learning_rate": 7.197083220839785e-05,
      "loss": 1.5677,
      "step": 1260
    },
    {
      "epoch": 0.17397260273972603,
      "grad_norm": 2.525416374206543,
      "learning_rate": 7.14467566701573e-05,
      "loss": 1.2976,
      "step": 1270
    },
    {
      "epoch": 0.17534246575342466,
      "grad_norm": 1.8520058393478394,
      "learning_rate": 7.091977758437311e-05,
      "loss": 1.8077,
      "step": 1280
    },
    {
      "epoch": 0.17671232876712328,
      "grad_norm": 2.294551372528076,
      "learning_rate": 7.038996629557783e-05,
      "loss": 1.3442,
      "step": 1290
    },
    {
      "epoch": 0.1780821917808219,
      "grad_norm": 3.1857283115386963,
      "learning_rate": 6.985739453173903e-05,
      "loss": 1.6914,
      "step": 1300
    },
    {
      "epoch": 0.17945205479452056,
      "grad_norm": 3.152991771697998,
      "learning_rate": 6.932213439454837e-05,
      "loss": 1.6722,
      "step": 1310
    },
    {
      "epoch": 0.18082191780821918,
      "grad_norm": 2.886530876159668,
      "learning_rate": 6.87842583496602e-05,
      "loss": 1.6481,
      "step": 1320
    },
    {
      "epoch": 0.1821917808219178,
      "grad_norm": 1.9449256658554077,
      "learning_rate": 6.824383921688098e-05,
      "loss": 1.7951,
      "step": 1330
    },
    {
      "epoch": 0.18356164383561643,
      "grad_norm": 2.6453981399536133,
      "learning_rate": 6.77009501603105e-05,
      "loss": 1.4426,
      "step": 1340
    },
    {
      "epoch": 0.18493150684931506,
      "grad_norm": 1.520572543144226,
      "learning_rate": 6.71556646784367e-05,
      "loss": 1.5972,
      "step": 1350
    },
    {
      "epoch": 0.1863013698630137,
      "grad_norm": 1.8179829120635986,
      "learning_rate": 6.660805659418516e-05,
      "loss": 1.539,
      "step": 1360
    },
    {
      "epoch": 0.18767123287671234,
      "grad_norm": 1.310356616973877,
      "learning_rate": 6.605820004492467e-05,
      "loss": 1.6673,
      "step": 1370
    },
    {
      "epoch": 0.18904109589041096,
      "grad_norm": 1.5523558855056763,
      "learning_rate": 6.550616947243009e-05,
      "loss": 1.7355,
      "step": 1380
    },
    {
      "epoch": 0.19041095890410958,
      "grad_norm": 2.022332191467285,
      "learning_rate": 6.495203961280434e-05,
      "loss": 2.0947,
      "step": 1390
    },
    {
      "epoch": 0.1917808219178082,
      "grad_norm": 1.9009687900543213,
      "learning_rate": 6.439588548636016e-05,
      "loss": 1.7585,
      "step": 1400
    },
    {
      "epoch": 0.19315068493150686,
      "grad_norm": 1.7422246932983398,
      "learning_rate": 6.38377823874636e-05,
      "loss": 1.8785,
      "step": 1410
    },
    {
      "epoch": 0.19452054794520549,
      "grad_norm": 2.1474485397338867,
      "learning_rate": 6.327780587434044e-05,
      "loss": 1.9686,
      "step": 1420
    },
    {
      "epoch": 0.1958904109589041,
      "grad_norm": 2.906446933746338,
      "learning_rate": 6.27160317588467e-05,
      "loss": 1.8049,
      "step": 1430
    },
    {
      "epoch": 0.19726027397260273,
      "grad_norm": 1.4890384674072266,
      "learning_rate": 6.215253609620498e-05,
      "loss": 1.5907,
      "step": 1440
    },
    {
      "epoch": 0.19863013698630136,
      "grad_norm": 2.201432943344116,
      "learning_rate": 6.158739517470786e-05,
      "loss": 1.4835,
      "step": 1450
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.544870138168335,
      "learning_rate": 6.102068550538962e-05,
      "loss": 1.498,
      "step": 1460
    },
    {
      "epoch": 0.20136986301369864,
      "grad_norm": 1.5199240446090698,
      "learning_rate": 6.045248381166783e-05,
      "loss": 1.5222,
      "step": 1470
    },
    {
      "epoch": 0.20273972602739726,
      "grad_norm": 2.430156707763672,
      "learning_rate": 5.988286701895631e-05,
      "loss": 1.7141,
      "step": 1480
    },
    {
      "epoch": 0.20410958904109588,
      "grad_norm": 3.335169792175293,
      "learning_rate": 5.9311912244250675e-05,
      "loss": 1.4424,
      "step": 1490
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 2.818110227584839,
      "learning_rate": 5.873969678568784e-05,
      "loss": 1.3691,
      "step": 1500
    },
    {
      "epoch": 0.20684931506849316,
      "grad_norm": 2.5193567276000977,
      "learning_rate": 5.816629811208112e-05,
      "loss": 1.9398,
      "step": 1510
    },
    {
      "epoch": 0.20821917808219179,
      "grad_norm": 1.702822208404541,
      "learning_rate": 5.759179385243224e-05,
      "loss": 1.5451,
      "step": 1520
    },
    {
      "epoch": 0.2095890410958904,
      "grad_norm": 2.1284539699554443,
      "learning_rate": 5.701626178542158e-05,
      "loss": 1.8442,
      "step": 1530
    },
    {
      "epoch": 0.21095890410958903,
      "grad_norm": 2.4425137042999268,
      "learning_rate": 5.643977982887815e-05,
      "loss": 1.5843,
      "step": 1540
    },
    {
      "epoch": 0.21232876712328766,
      "grad_norm": 2.1926443576812744,
      "learning_rate": 5.586242602923081e-05,
      "loss": 1.7854,
      "step": 1550
    },
    {
      "epoch": 0.2136986301369863,
      "grad_norm": 2.7360684871673584,
      "learning_rate": 5.528427855094206e-05,
      "loss": 1.6639,
      "step": 1560
    },
    {
      "epoch": 0.21506849315068494,
      "grad_norm": 1.9223748445510864,
      "learning_rate": 5.470541566592573e-05,
      "loss": 1.4212,
      "step": 1570
    },
    {
      "epoch": 0.21643835616438356,
      "grad_norm": 2.355661630630493,
      "learning_rate": 5.4125915742950275e-05,
      "loss": 1.6376,
      "step": 1580
    },
    {
      "epoch": 0.21780821917808219,
      "grad_norm": 3.028079032897949,
      "learning_rate": 5.354585723702893e-05,
      "loss": 1.4013,
      "step": 1590
    },
    {
      "epoch": 0.2191780821917808,
      "grad_norm": 1.155507206916809,
      "learning_rate": 5.296531867879809e-05,
      "loss": 1.3336,
      "step": 1600
    },
    {
      "epoch": 0.22054794520547946,
      "grad_norm": 1.4151854515075684,
      "learning_rate": 5.2384378663885545e-05,
      "loss": 1.4375,
      "step": 1610
    },
    {
      "epoch": 0.2219178082191781,
      "grad_norm": 1.5798234939575195,
      "learning_rate": 5.180311584226991e-05,
      "loss": 1.8073,
      "step": 1620
    },
    {
      "epoch": 0.2232876712328767,
      "grad_norm": 3.0987579822540283,
      "learning_rate": 5.1221608907632665e-05,
      "loss": 1.5686,
      "step": 1630
    },
    {
      "epoch": 0.22465753424657534,
      "grad_norm": 3.5521326065063477,
      "learning_rate": 5.063993658670425e-05,
      "loss": 1.6035,
      "step": 1640
    },
    {
      "epoch": 0.22602739726027396,
      "grad_norm": 2.7165257930755615,
      "learning_rate": 5.0058177628605795e-05,
      "loss": 1.7542,
      "step": 1650
    },
    {
      "epoch": 0.2273972602739726,
      "grad_norm": 2.1085574626922607,
      "learning_rate": 4.947641079418773e-05,
      "loss": 1.6202,
      "step": 1660
    },
    {
      "epoch": 0.22876712328767124,
      "grad_norm": 1.6660608053207397,
      "learning_rate": 4.889471484536672e-05,
      "loss": 1.3809,
      "step": 1670
    },
    {
      "epoch": 0.23013698630136986,
      "grad_norm": 1.4744638204574585,
      "learning_rate": 4.83131685344628e-05,
      "loss": 1.7246,
      "step": 1680
    },
    {
      "epoch": 0.23150684931506849,
      "grad_norm": 1.5128684043884277,
      "learning_rate": 4.773185059353732e-05,
      "loss": 1.5884,
      "step": 1690
    },
    {
      "epoch": 0.2328767123287671,
      "grad_norm": 2.4789443016052246,
      "learning_rate": 4.715083972373401e-05,
      "loss": 1.3332,
      "step": 1700
    },
    {
      "epoch": 0.23424657534246576,
      "grad_norm": 2.1762492656707764,
      "learning_rate": 4.657021458462409e-05,
      "loss": 1.3395,
      "step": 1710
    },
    {
      "epoch": 0.2356164383561644,
      "grad_norm": 2.474536180496216,
      "learning_rate": 4.599005378355706e-05,
      "loss": 1.5043,
      "step": 1720
    },
    {
      "epoch": 0.236986301369863,
      "grad_norm": 1.550284743309021,
      "learning_rate": 4.541043586501842e-05,
      "loss": 1.6574,
      "step": 1730
    },
    {
      "epoch": 0.23835616438356164,
      "grad_norm": 3.5782651901245117,
      "learning_rate": 4.4831439299996084e-05,
      "loss": 1.3442,
      "step": 1740
    },
    {
      "epoch": 0.23972602739726026,
      "grad_norm": 3.107822895050049,
      "learning_rate": 4.425314247535668e-05,
      "loss": 1.5739,
      "step": 1750
    },
    {
      "epoch": 0.2410958904109589,
      "grad_norm": 2.3097240924835205,
      "learning_rate": 4.3675623683233135e-05,
      "loss": 1.639,
      "step": 1760
    },
    {
      "epoch": 0.24246575342465754,
      "grad_norm": 3.2882018089294434,
      "learning_rate": 4.309896111042529e-05,
      "loss": 1.5524,
      "step": 1770
    },
    {
      "epoch": 0.24383561643835616,
      "grad_norm": 2.627248764038086,
      "learning_rate": 4.252323282781453e-05,
      "loss": 1.8598,
      "step": 1780
    },
    {
      "epoch": 0.2452054794520548,
      "grad_norm": 2.3391318321228027,
      "learning_rate": 4.1948516779794364e-05,
      "loss": 1.4296,
      "step": 1790
    },
    {
      "epoch": 0.2465753424657534,
      "grad_norm": 1.7528724670410156,
      "learning_rate": 4.137489077371787e-05,
      "loss": 1.7706,
      "step": 1800
    },
    {
      "epoch": 0.24794520547945206,
      "grad_norm": 2.4802026748657227,
      "learning_rate": 4.080243246936399e-05,
      "loss": 1.7099,
      "step": 1810
    },
    {
      "epoch": 0.2493150684931507,
      "grad_norm": 1.308081865310669,
      "learning_rate": 4.0231219368423466e-05,
      "loss": 1.5738,
      "step": 1820
    },
    {
      "epoch": 0.25068493150684934,
      "grad_norm": 3.3826613426208496,
      "learning_rate": 3.9661328804006475e-05,
      "loss": 1.6931,
      "step": 1830
    },
    {
      "epoch": 0.25205479452054796,
      "grad_norm": 2.954171895980835,
      "learning_rate": 3.9092837930172884e-05,
      "loss": 1.7415,
      "step": 1840
    },
    {
      "epoch": 0.2534246575342466,
      "grad_norm": 2.82603120803833,
      "learning_rate": 3.852582371148687e-05,
      "loss": 1.6954,
      "step": 1850
    },
    {
      "epoch": 0.2547945205479452,
      "grad_norm": 4.872611999511719,
      "learning_rate": 3.796036291259718e-05,
      "loss": 1.6146,
      "step": 1860
    },
    {
      "epoch": 0.25616438356164384,
      "grad_norm": 2.3178672790527344,
      "learning_rate": 3.739653208784432e-05,
      "loss": 1.5219,
      "step": 1870
    },
    {
      "epoch": 0.25753424657534246,
      "grad_norm": 1.4849178791046143,
      "learning_rate": 3.683440757089646e-05,
      "loss": 1.6899,
      "step": 1880
    },
    {
      "epoch": 0.2589041095890411,
      "grad_norm": 2.602975368499756,
      "learning_rate": 3.627406546441494e-05,
      "loss": 1.7254,
      "step": 1890
    },
    {
      "epoch": 0.2602739726027397,
      "grad_norm": 2.0159435272216797,
      "learning_rate": 3.5715581629751326e-05,
      "loss": 1.8248,
      "step": 1900
    },
    {
      "epoch": 0.26164383561643834,
      "grad_norm": 1.9132189750671387,
      "learning_rate": 3.515903167667686e-05,
      "loss": 1.6893,
      "step": 1910
    },
    {
      "epoch": 0.26301369863013696,
      "grad_norm": 1.4500598907470703,
      "learning_rate": 3.460449095314621e-05,
      "loss": 1.6949,
      "step": 1920
    },
    {
      "epoch": 0.26438356164383564,
      "grad_norm": 2.5748395919799805,
      "learning_rate": 3.40520345350965e-05,
      "loss": 1.7592,
      "step": 1930
    },
    {
      "epoch": 0.26575342465753427,
      "grad_norm": 3.0504002571105957,
      "learning_rate": 3.35017372162833e-05,
      "loss": 1.2447,
      "step": 1940
    },
    {
      "epoch": 0.2671232876712329,
      "grad_norm": 2.7209043502807617,
      "learning_rate": 3.295367349815469e-05,
      "loss": 1.5326,
      "step": 1950
    },
    {
      "epoch": 0.2684931506849315,
      "grad_norm": 2.3009421825408936,
      "learning_rate": 3.240791757976491e-05,
      "loss": 1.7541,
      "step": 1960
    },
    {
      "epoch": 0.26986301369863014,
      "grad_norm": 2.9373281002044678,
      "learning_rate": 3.186454334772916e-05,
      "loss": 1.5251,
      "step": 1970
    },
    {
      "epoch": 0.27123287671232876,
      "grad_norm": 1.99565851688385,
      "learning_rate": 3.132362436622035e-05,
      "loss": 1.8553,
      "step": 1980
    },
    {
      "epoch": 0.2726027397260274,
      "grad_norm": 2.116396427154541,
      "learning_rate": 3.078523386700982e-05,
      "loss": 1.7764,
      "step": 1990
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 1.9144208431243896,
      "learning_rate": 3.0249444739552844e-05,
      "loss": 1.5227,
      "step": 2000
    },
    {
      "epoch": 0.27534246575342464,
      "grad_norm": 1.4267282485961914,
      "learning_rate": 2.971632952112066e-05,
      "loss": 1.5648,
      "step": 2010
    },
    {
      "epoch": 0.27671232876712326,
      "grad_norm": 1.9111319780349731,
      "learning_rate": 2.918596038697995e-05,
      "loss": 1.6373,
      "step": 2020
    },
    {
      "epoch": 0.27808219178082194,
      "grad_norm": 1.5937117338180542,
      "learning_rate": 2.86584091406216e-05,
      "loss": 1.5564,
      "step": 2030
    },
    {
      "epoch": 0.27945205479452057,
      "grad_norm": 3.2327396869659424,
      "learning_rate": 2.8133747204039574e-05,
      "loss": 1.4473,
      "step": 2040
    },
    {
      "epoch": 0.2808219178082192,
      "grad_norm": 2.090632677078247,
      "learning_rate": 2.761204560806152e-05,
      "loss": 1.4269,
      "step": 2050
    },
    {
      "epoch": 0.2821917808219178,
      "grad_norm": 1.5311341285705566,
      "learning_rate": 2.709337498273243e-05,
      "loss": 1.6679,
      "step": 2060
    },
    {
      "epoch": 0.28356164383561644,
      "grad_norm": 2.3477697372436523,
      "learning_rate": 2.65778055477523e-05,
      "loss": 1.4417,
      "step": 2070
    },
    {
      "epoch": 0.28493150684931506,
      "grad_norm": 2.742414712905884,
      "learning_rate": 2.6065407102969664e-05,
      "loss": 1.6031,
      "step": 2080
    },
    {
      "epoch": 0.2863013698630137,
      "grad_norm": 1.9928070306777954,
      "learning_rate": 2.555624901893171e-05,
      "loss": 1.7178,
      "step": 2090
    },
    {
      "epoch": 0.2876712328767123,
      "grad_norm": 2.1661367416381836,
      "learning_rate": 2.505040022749265e-05,
      "loss": 1.6949,
      "step": 2100
    },
    {
      "epoch": 0.28904109589041094,
      "grad_norm": 2.8512043952941895,
      "learning_rate": 2.4547929212481435e-05,
      "loss": 1.6156,
      "step": 2110
    },
    {
      "epoch": 0.29041095890410956,
      "grad_norm": 2.101837635040283,
      "learning_rate": 2.404890400043023e-05,
      "loss": 1.5359,
      "step": 2120
    },
    {
      "epoch": 0.29178082191780824,
      "grad_norm": 3.111027479171753,
      "learning_rate": 2.3553392151364536e-05,
      "loss": 1.5937,
      "step": 2130
    },
    {
      "epoch": 0.29315068493150687,
      "grad_norm": 1.679614782333374,
      "learning_rate": 2.3061460749656844e-05,
      "loss": 1.2465,
      "step": 2140
    },
    {
      "epoch": 0.2945205479452055,
      "grad_norm": 1.1103602647781372,
      "learning_rate": 2.2573176394944328e-05,
      "loss": 1.6416,
      "step": 2150
    },
    {
      "epoch": 0.2958904109589041,
      "grad_norm": 2.2801201343536377,
      "learning_rate": 2.2088605193112383e-05,
      "loss": 1.6485,
      "step": 2160
    },
    {
      "epoch": 0.29726027397260274,
      "grad_norm": 3.090172052383423,
      "learning_rate": 2.160781274734495e-05,
      "loss": 1.3705,
      "step": 2170
    },
    {
      "epoch": 0.29863013698630136,
      "grad_norm": 1.51850426197052,
      "learning_rate": 2.1130864149242878e-05,
      "loss": 1.4144,
      "step": 2180
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.7803024053573608,
      "learning_rate": 2.0657823970011618e-05,
      "loss": 1.4928,
      "step": 2190
    },
    {
      "epoch": 0.3013698630136986,
      "grad_norm": 1.7049388885498047,
      "learning_rate": 2.0188756251719203e-05,
      "loss": 1.7443,
      "step": 2200
    },
    {
      "epoch": 0.30273972602739724,
      "grad_norm": 1.9908382892608643,
      "learning_rate": 1.9723724498626105e-05,
      "loss": 1.6456,
      "step": 2210
    },
    {
      "epoch": 0.3041095890410959,
      "grad_norm": 3.0804522037506104,
      "learning_rate": 1.9262791668587676e-05,
      "loss": 1.4869,
      "step": 2220
    },
    {
      "epoch": 0.30547945205479454,
      "grad_norm": 1.7918072938919067,
      "learning_rate": 1.8806020164530702e-05,
      "loss": 1.5418,
      "step": 2230
    },
    {
      "epoch": 0.30684931506849317,
      "grad_norm": 2.762115240097046,
      "learning_rate": 1.8353471826005036e-05,
      "loss": 1.5662,
      "step": 2240
    },
    {
      "epoch": 0.3082191780821918,
      "grad_norm": 2.011409044265747,
      "learning_rate": 1.7905207920811572e-05,
      "loss": 1.6886,
      "step": 2250
    },
    {
      "epoch": 0.3095890410958904,
      "grad_norm": 2.0907769203186035,
      "learning_rate": 1.746128913670746e-05,
      "loss": 1.8852,
      "step": 2260
    },
    {
      "epoch": 0.31095890410958904,
      "grad_norm": 1.4657833576202393,
      "learning_rate": 1.7021775573190013e-05,
      "loss": 1.636,
      "step": 2270
    },
    {
      "epoch": 0.31232876712328766,
      "grad_norm": 2.9756340980529785,
      "learning_rate": 1.6586726733360237e-05,
      "loss": 1.3435,
      "step": 2280
    },
    {
      "epoch": 0.3136986301369863,
      "grad_norm": 2.4570364952087402,
      "learning_rate": 1.615620151586697e-05,
      "loss": 1.885,
      "step": 2290
    },
    {
      "epoch": 0.3150684931506849,
      "grad_norm": 2.5543649196624756,
      "learning_rate": 1.5730258206933025e-05,
      "loss": 1.5222,
      "step": 2300
    },
    {
      "epoch": 0.31643835616438354,
      "grad_norm": 1.7562202215194702,
      "learning_rate": 1.530895447246411e-05,
      "loss": 1.6443,
      "step": 2310
    },
    {
      "epoch": 0.3178082191780822,
      "grad_norm": 2.0639760494232178,
      "learning_rate": 1.4892347350241881e-05,
      "loss": 1.5368,
      "step": 2320
    },
    {
      "epoch": 0.31917808219178084,
      "grad_norm": 1.7844699621200562,
      "learning_rate": 1.448049324220181e-05,
      "loss": 1.5212,
      "step": 2330
    },
    {
      "epoch": 0.32054794520547947,
      "grad_norm": 2.314725637435913,
      "learning_rate": 1.4073447906797376e-05,
      "loss": 1.5756,
      "step": 2340
    },
    {
      "epoch": 0.3219178082191781,
      "grad_norm": 3.013345956802368,
      "learning_rate": 1.367126645145121e-05,
      "loss": 1.4461,
      "step": 2350
    },
    {
      "epoch": 0.3232876712328767,
      "grad_norm": 2.2153244018554688,
      "learning_rate": 1.327400332509442e-05,
      "loss": 1.6265,
      "step": 2360
    },
    {
      "epoch": 0.32465753424657534,
      "grad_norm": 2.4383397102355957,
      "learning_rate": 1.2881712310795118e-05,
      "loss": 1.5656,
      "step": 2370
    },
    {
      "epoch": 0.32602739726027397,
      "grad_norm": 1.7660318613052368,
      "learning_rate": 1.2494446518477022e-05,
      "loss": 1.4731,
      "step": 2380
    },
    {
      "epoch": 0.3273972602739726,
      "grad_norm": 2.3250346183776855,
      "learning_rate": 1.2112258377729274e-05,
      "loss": 1.7248,
      "step": 2390
    },
    {
      "epoch": 0.3287671232876712,
      "grad_norm": 1.7260797023773193,
      "learning_rate": 1.1735199630708222e-05,
      "loss": 1.5978,
      "step": 2400
    },
    {
      "epoch": 0.33013698630136984,
      "grad_norm": 2.4176435470581055,
      "learning_rate": 1.1363321325132447e-05,
      "loss": 1.454,
      "step": 2410
    },
    {
      "epoch": 0.3315068493150685,
      "grad_norm": 2.250056028366089,
      "learning_rate": 1.0996673807371677e-05,
      "loss": 1.2518,
      "step": 2420
    },
    {
      "epoch": 0.33287671232876714,
      "grad_norm": 2.250523328781128,
      "learning_rate": 1.0635306715630682e-05,
      "loss": 1.4483,
      "step": 2430
    },
    {
      "epoch": 0.33424657534246577,
      "grad_norm": 1.8544931411743164,
      "learning_rate": 1.0279268973229089e-05,
      "loss": 1.3334,
      "step": 2440
    },
    {
      "epoch": 0.3356164383561644,
      "grad_norm": 2.062802791595459,
      "learning_rate": 9.928608781977966e-06,
      "loss": 1.3373,
      "step": 2450
    },
    {
      "epoch": 0.336986301369863,
      "grad_norm": 2.6046059131622314,
      "learning_rate": 9.583373615653978e-06,
      "loss": 1.556,
      "step": 2460
    },
    {
      "epoch": 0.33835616438356164,
      "grad_norm": 1.7562941312789917,
      "learning_rate": 9.243610213572285e-06,
      "loss": 1.3916,
      "step": 2470
    },
    {
      "epoch": 0.33972602739726027,
      "grad_norm": 0.9646738767623901,
      "learning_rate": 8.909364574258793e-06,
      "loss": 1.4581,
      "step": 2480
    },
    {
      "epoch": 0.3410958904109589,
      "grad_norm": 1.8336948156356812,
      "learning_rate": 8.580681949222568e-06,
      "loss": 1.5921,
      "step": 2490
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 2.8797600269317627,
      "learning_rate": 8.257606836829678e-06,
      "loss": 1.5602,
      "step": 2500
    },
    {
      "epoch": 0.34383561643835614,
      "grad_norm": 1.4857509136199951,
      "learning_rate": 7.940182976278692e-06,
      "loss": 1.4826,
      "step": 2510
    },
    {
      "epoch": 0.3452054794520548,
      "grad_norm": 2.2163922786712646,
      "learning_rate": 7.6284533416791814e-06,
      "loss": 1.7773,
      "step": 2520
    },
    {
      "epoch": 0.34657534246575344,
      "grad_norm": 1.7907894849777222,
      "learning_rate": 7.322460136233622e-06,
      "loss": 1.4813,
      "step": 2530
    },
    {
      "epoch": 0.34794520547945207,
      "grad_norm": 2.7916197776794434,
      "learning_rate": 7.02224478652388e-06,
      "loss": 1.2967,
      "step": 2540
    },
    {
      "epoch": 0.3493150684931507,
      "grad_norm": 1.6418206691741943,
      "learning_rate": 6.727847936902543e-06,
      "loss": 1.5247,
      "step": 2550
    },
    {
      "epoch": 0.3506849315068493,
      "grad_norm": 2.005744457244873,
      "learning_rate": 6.439309443990532e-06,
      "loss": 1.332,
      "step": 2560
    },
    {
      "epoch": 0.35205479452054794,
      "grad_norm": 2.3452343940734863,
      "learning_rate": 6.1566683712809824e-06,
      "loss": 1.623,
      "step": 2570
    },
    {
      "epoch": 0.35342465753424657,
      "grad_norm": 2.700227975845337,
      "learning_rate": 5.879962983850745e-06,
      "loss": 1.7463,
      "step": 2580
    },
    {
      "epoch": 0.3547945205479452,
      "grad_norm": 1.9577980041503906,
      "learning_rate": 5.6092307431799384e-06,
      "loss": 1.4809,
      "step": 2590
    },
    {
      "epoch": 0.3561643835616438,
      "grad_norm": 2.223386287689209,
      "learning_rate": 5.34450830208017e-06,
      "loss": 1.5018,
      "step": 2600
    },
    {
      "epoch": 0.35753424657534244,
      "grad_norm": 3.3687524795532227,
      "learning_rate": 5.08583149973243e-06,
      "loss": 1.6345,
      "step": 2610
    },
    {
      "epoch": 0.3589041095890411,
      "grad_norm": 2.8207719326019287,
      "learning_rate": 4.833235356834959e-06,
      "loss": 1.7605,
      "step": 2620
    },
    {
      "epoch": 0.36027397260273974,
      "grad_norm": 1.8670095205307007,
      "learning_rate": 4.586754070862099e-06,
      "loss": 1.6041,
      "step": 2630
    },
    {
      "epoch": 0.36164383561643837,
      "grad_norm": 1.9502205848693848,
      "learning_rate": 4.346421011434382e-06,
      "loss": 1.4593,
      "step": 2640
    },
    {
      "epoch": 0.363013698630137,
      "grad_norm": 1.7307766675949097,
      "learning_rate": 4.112268715800943e-06,
      "loss": 1.6873,
      "step": 2650
    },
    {
      "epoch": 0.3643835616438356,
      "grad_norm": 1.3438829183578491,
      "learning_rate": 3.884328884434402e-06,
      "loss": 1.4745,
      "step": 2660
    },
    {
      "epoch": 0.36575342465753424,
      "grad_norm": 1.6506098508834839,
      "learning_rate": 3.6626323767391777e-06,
      "loss": 1.4283,
      "step": 2670
    },
    {
      "epoch": 0.36712328767123287,
      "grad_norm": 1.3745683431625366,
      "learning_rate": 3.4472092068735916e-06,
      "loss": 1.2547,
      "step": 2680
    },
    {
      "epoch": 0.3684931506849315,
      "grad_norm": 2.131854295730591,
      "learning_rate": 3.238088539686451e-06,
      "loss": 1.7309,
      "step": 2690
    },
    {
      "epoch": 0.3698630136986301,
      "grad_norm": 2.2386934757232666,
      "learning_rate": 3.0352986867686007e-06,
      "loss": 1.6385,
      "step": 2700
    },
    {
      "epoch": 0.37123287671232874,
      "grad_norm": 1.1820231676101685,
      "learning_rate": 2.8388671026199522e-06,
      "loss": 1.5724,
      "step": 2710
    },
    {
      "epoch": 0.3726027397260274,
      "grad_norm": 1.3294404745101929,
      "learning_rate": 2.6488203809326207e-06,
      "loss": 1.5353,
      "step": 2720
    },
    {
      "epoch": 0.37397260273972605,
      "grad_norm": 1.6606618165969849,
      "learning_rate": 2.4651842509905487e-06,
      "loss": 1.6569,
      "step": 2730
    },
    {
      "epoch": 0.37534246575342467,
      "grad_norm": 1.345475196838379,
      "learning_rate": 2.2879835741861586e-06,
      "loss": 1.4381,
      "step": 2740
    },
    {
      "epoch": 0.3767123287671233,
      "grad_norm": 2.877779483795166,
      "learning_rate": 2.1172423406545516e-06,
      "loss": 1.5604,
      "step": 2750
    },
    {
      "epoch": 0.3780821917808219,
      "grad_norm": 2.1802237033843994,
      "learning_rate": 1.9529836660256096e-06,
      "loss": 1.5397,
      "step": 2760
    },
    {
      "epoch": 0.37945205479452054,
      "grad_norm": 1.4911959171295166,
      "learning_rate": 1.7952297882945003e-06,
      "loss": 1.7633,
      "step": 2770
    },
    {
      "epoch": 0.38082191780821917,
      "grad_norm": 1.5174319744110107,
      "learning_rate": 1.6440020648110067e-06,
      "loss": 1.6316,
      "step": 2780
    },
    {
      "epoch": 0.3821917808219178,
      "grad_norm": 1.5157442092895508,
      "learning_rate": 1.4993209693881183e-06,
      "loss": 1.2263,
      "step": 2790
    },
    {
      "epoch": 0.3835616438356164,
      "grad_norm": 2.8943581581115723,
      "learning_rate": 1.3612060895301759e-06,
      "loss": 1.5867,
      "step": 2800
    },
    {
      "epoch": 0.38493150684931504,
      "grad_norm": 2.1494810581207275,
      "learning_rate": 1.2296761237810207e-06,
      "loss": 1.3249,
      "step": 2810
    },
    {
      "epoch": 0.3863013698630137,
      "grad_norm": 2.02693247795105,
      "learning_rate": 1.104748879192552e-06,
      "loss": 1.4641,
      "step": 2820
    },
    {
      "epoch": 0.38767123287671235,
      "grad_norm": 2.7323265075683594,
      "learning_rate": 9.864412689139123e-07,
      "loss": 1.4395,
      "step": 2830
    },
    {
      "epoch": 0.38904109589041097,
      "grad_norm": 1.8553956747055054,
      "learning_rate": 8.747693099017129e-07,
      "loss": 1.4012,
      "step": 2840
    },
    {
      "epoch": 0.3904109589041096,
      "grad_norm": 1.5457431077957153,
      "learning_rate": 7.697481207516289e-07,
      "loss": 1.8086,
      "step": 2850
    },
    {
      "epoch": 0.3917808219178082,
      "grad_norm": 1.3996024131774902,
      "learning_rate": 6.713919196515317e-07,
      "loss": 1.7806,
      "step": 2860
    },
    {
      "epoch": 0.39315068493150684,
      "grad_norm": 2.355863332748413,
      "learning_rate": 5.797140224566122e-07,
      "loss": 1.7083,
      "step": 2870
    },
    {
      "epoch": 0.39452054794520547,
      "grad_norm": 2.2099246978759766,
      "learning_rate": 4.947268408866113e-07,
      "loss": 1.3791,
      "step": 2880
    },
    {
      "epoch": 0.3958904109589041,
      "grad_norm": 1.2900251150131226,
      "learning_rate": 4.1644188084548063e-07,
      "loss": 1.4308,
      "step": 2890
    },
    {
      "epoch": 0.3972602739726027,
      "grad_norm": 2.1631548404693604,
      "learning_rate": 3.4486974086366253e-07,
      "loss": 1.3797,
      "step": 2900
    },
    {
      "epoch": 0.39863013698630134,
      "grad_norm": 1.1319299936294556,
      "learning_rate": 2.800201106632205e-07,
      "loss": 1.5695,
      "step": 2910
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.7180445194244385,
      "learning_rate": 2.219017698460002e-07,
      "loss": 1.5461,
      "step": 2920
    },
    {
      "epoch": 0.40136986301369865,
      "grad_norm": 1.7271549701690674,
      "learning_rate": 1.7052258670501308e-07,
      "loss": 1.5524,
      "step": 2930
    },
    {
      "epoch": 0.40273972602739727,
      "grad_norm": 1.233688235282898,
      "learning_rate": 1.2588951715921116e-07,
      "loss": 1.4612,
      "step": 2940
    },
    {
      "epoch": 0.4041095890410959,
      "grad_norm": 2.4479854106903076,
      "learning_rate": 8.800860381173448e-08,
      "loss": 1.4658,
      "step": 2950
    },
    {
      "epoch": 0.4054794520547945,
      "grad_norm": 1.8242405652999878,
      "learning_rate": 5.688497513188229e-08,
      "loss": 1.5335,
      "step": 2960
    },
    {
      "epoch": 0.40684931506849314,
      "grad_norm": 1.8931853771209717,
      "learning_rate": 3.2522844760762836e-08,
      "loss": 1.266,
      "step": 2970
    },
    {
      "epoch": 0.40821917808219177,
      "grad_norm": 2.2524218559265137,
      "learning_rate": 1.4925510940844156e-08,
      "loss": 1.6866,
      "step": 2980
    },
    {
      "epoch": 0.4095890410958904,
      "grad_norm": 1.7584060430526733,
      "learning_rate": 4.095356069439005e-09,
      "loss": 1.4982,
      "step": 2990
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 1.325352430343628,
      "learning_rate": 3.384637615733155e-11,
      "loss": 1.487,
      "step": 3000
    }
  ],
  "logging_steps": 10,
  "max_steps": 3000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 300,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0814314512384e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}