{
  "best_metric": 0.29665234684944153,
  "best_model_checkpoint": "sentiment_classification/checkpoint-1000",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.7555551528930664,
      "learning_rate": 9.99e-05,
      "loss": 0.0054,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 7.883056640625,
      "learning_rate": 9.98e-05,
      "loss": 0.0587,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 12.585994720458984,
      "learning_rate": 9.970000000000001e-05,
      "loss": 0.1564,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 36.28175735473633,
      "learning_rate": 9.960000000000001e-05,
      "loss": 1.0358,
      "step": 4
    },
    {
      "epoch": 0.01,
      "grad_norm": 35.03889083862305,
      "learning_rate": 9.95e-05,
      "loss": 1.0603,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 46.80266189575195,
      "learning_rate": 9.94e-05,
      "loss": 0.8565,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 55.776710510253906,
      "learning_rate": 9.93e-05,
      "loss": 1.0901,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 30.31947898864746,
      "learning_rate": 9.92e-05,
      "loss": 0.3881,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 37.395389556884766,
      "learning_rate": 9.910000000000001e-05,
      "loss": 0.8026,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 46.29269790649414,
      "learning_rate": 9.900000000000001e-05,
      "loss": 1.1394,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 44.72748565673828,
      "learning_rate": 9.89e-05,
      "loss": 1.8889,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 50.34558868408203,
      "learning_rate": 9.88e-05,
      "loss": 1.9163,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 50.587432861328125,
      "learning_rate": 9.87e-05,
      "loss": 1.101,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 40.331085205078125,
      "learning_rate": 9.86e-05,
      "loss": 1.4982,
      "step": 14
    },
    {
      "epoch": 0.01,
      "grad_norm": 42.99494552612305,
      "learning_rate": 9.850000000000001e-05,
      "loss": 1.2145,
      "step": 15
    },
    {
      "epoch": 0.02,
      "grad_norm": 65.54414367675781,
      "learning_rate": 9.84e-05,
      "loss": 1.7914,
      "step": 16
    },
    {
      "epoch": 0.02,
      "grad_norm": 23.524738311767578,
      "learning_rate": 9.83e-05,
      "loss": 0.4952,
      "step": 17
    },
    {
      "epoch": 0.02,
      "grad_norm": 54.68332290649414,
      "learning_rate": 9.82e-05,
      "loss": 0.8135,
      "step": 18
    },
    {
      "epoch": 0.02,
      "grad_norm": 25.295198440551758,
      "learning_rate": 9.81e-05,
      "loss": 0.42,
      "step": 19
    },
    {
      "epoch": 0.02,
      "grad_norm": 64.28269958496094,
      "learning_rate": 9.8e-05,
      "loss": 1.8965,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 59.879634857177734,
      "learning_rate": 9.790000000000001e-05,
      "loss": 1.9614,
      "step": 21
    },
    {
      "epoch": 0.02,
      "grad_norm": 28.60529136657715,
      "learning_rate": 9.78e-05,
      "loss": 1.1231,
      "step": 22
    },
    {
      "epoch": 0.02,
      "grad_norm": 52.29844665527344,
      "learning_rate": 9.77e-05,
      "loss": 1.0977,
      "step": 23
    },
    {
      "epoch": 0.02,
      "grad_norm": 50.24287033081055,
      "learning_rate": 9.76e-05,
      "loss": 2.2355,
      "step": 24
    },
    {
      "epoch": 0.03,
      "grad_norm": 17.639894485473633,
      "learning_rate": 9.75e-05,
      "loss": 0.1371,
      "step": 25
    },
    {
      "epoch": 0.03,
      "grad_norm": 40.8584098815918,
      "learning_rate": 9.74e-05,
      "loss": 1.0995,
      "step": 26
    },
    {
      "epoch": 0.03,
      "grad_norm": 55.463260650634766,
      "learning_rate": 9.730000000000001e-05,
      "loss": 1.0458,
      "step": 27
    },
    {
      "epoch": 0.03,
      "grad_norm": 42.90127944946289,
      "learning_rate": 9.72e-05,
      "loss": 1.0041,
      "step": 28
    },
    {
      "epoch": 0.03,
      "grad_norm": 22.913488388061523,
      "learning_rate": 9.71e-05,
      "loss": 0.4278,
      "step": 29
    },
    {
      "epoch": 0.03,
      "grad_norm": 38.93981170654297,
      "learning_rate": 9.7e-05,
      "loss": 0.5715,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 21.701683044433594,
      "learning_rate": 9.69e-05,
      "loss": 0.3351,
      "step": 31
    },
    {
      "epoch": 0.03,
      "grad_norm": 47.243526458740234,
      "learning_rate": 9.680000000000001e-05,
      "loss": 0.9803,
      "step": 32
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.447603225708008,
      "learning_rate": 9.67e-05,
      "loss": 0.0597,
      "step": 33
    },
    {
      "epoch": 0.03,
      "grad_norm": 51.16413879394531,
      "learning_rate": 9.66e-05,
      "loss": 1.8308,
      "step": 34
    },
    {
      "epoch": 0.04,
      "grad_norm": 21.44324493408203,
      "learning_rate": 9.65e-05,
      "loss": 0.206,
      "step": 35
    },
    {
      "epoch": 0.04,
      "grad_norm": 31.796432495117188,
      "learning_rate": 9.64e-05,
      "loss": 0.8214,
      "step": 36
    },
    {
      "epoch": 0.04,
      "grad_norm": 37.89706802368164,
      "learning_rate": 9.63e-05,
      "loss": 0.7969,
      "step": 37
    },
    {
      "epoch": 0.04,
      "grad_norm": 34.30683898925781,
      "learning_rate": 9.620000000000001e-05,
      "loss": 0.7503,
      "step": 38
    },
    {
      "epoch": 0.04,
      "grad_norm": 44.750328063964844,
      "learning_rate": 9.61e-05,
      "loss": 0.9113,
      "step": 39
    },
    {
      "epoch": 0.04,
      "grad_norm": 29.14406967163086,
      "learning_rate": 9.6e-05,
      "loss": 0.399,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 28.217470169067383,
      "learning_rate": 9.59e-05,
      "loss": 0.4654,
      "step": 41
    },
    {
      "epoch": 0.04,
      "grad_norm": 17.639678955078125,
      "learning_rate": 9.58e-05,
      "loss": 0.3132,
      "step": 42
    },
    {
      "epoch": 0.04,
      "grad_norm": 21.909076690673828,
      "learning_rate": 9.57e-05,
      "loss": 0.448,
      "step": 43
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.7724447846412659,
      "learning_rate": 9.56e-05,
      "loss": 0.0066,
      "step": 44
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.8613312244415283,
      "learning_rate": 9.55e-05,
      "loss": 0.0286,
      "step": 45
    },
    {
      "epoch": 0.05,
      "grad_norm": 34.55177307128906,
      "learning_rate": 9.54e-05,
      "loss": 0.7055,
      "step": 46
    },
    {
      "epoch": 0.05,
      "grad_norm": 31.46555519104004,
      "learning_rate": 9.53e-05,
      "loss": 0.5292,
      "step": 47
    },
    {
      "epoch": 0.05,
      "grad_norm": 28.770160675048828,
      "learning_rate": 9.52e-05,
      "loss": 1.0285,
      "step": 48
    },
    {
      "epoch": 0.05,
      "grad_norm": 21.542871475219727,
      "learning_rate": 9.51e-05,
      "loss": 0.2557,
      "step": 49
    },
    {
      "epoch": 0.05,
      "grad_norm": 44.05634307861328,
      "learning_rate": 9.5e-05,
      "loss": 0.6056,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 28.97549819946289,
      "learning_rate": 9.49e-05,
      "loss": 0.6857,
      "step": 51
    },
    {
      "epoch": 0.05,
      "grad_norm": 9.51518440246582,
      "learning_rate": 9.48e-05,
      "loss": 0.09,
      "step": 52
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.6027193665504456,
      "learning_rate": 9.47e-05,
      "loss": 0.0071,
      "step": 53
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.3073718547821045,
      "learning_rate": 9.46e-05,
      "loss": 0.0252,
      "step": 54
    },
    {
      "epoch": 0.06,
      "grad_norm": 25.46698570251465,
      "learning_rate": 9.449999999999999e-05,
      "loss": 0.5108,
      "step": 55
    },
    {
      "epoch": 0.06,
      "grad_norm": 14.047112464904785,
      "learning_rate": 9.44e-05,
      "loss": 0.1018,
      "step": 56
    },
    {
      "epoch": 0.06,
      "grad_norm": 9.882685661315918,
      "learning_rate": 9.43e-05,
      "loss": 0.0764,
      "step": 57
    },
    {
      "epoch": 0.06,
      "grad_norm": 42.48798751831055,
      "learning_rate": 9.42e-05,
      "loss": 1.0458,
      "step": 58
    },
    {
      "epoch": 0.06,
      "grad_norm": 31.454246520996094,
      "learning_rate": 9.41e-05,
      "loss": 0.5511,
      "step": 59
    },
    {
      "epoch": 0.06,
      "grad_norm": 16.312801361083984,
      "learning_rate": 9.4e-05,
      "loss": 0.1688,
      "step": 60
    },
    {
      "epoch": 0.06,
      "grad_norm": 31.64558219909668,
      "learning_rate": 9.39e-05,
      "loss": 0.4796,
      "step": 61
    },
    {
      "epoch": 0.06,
      "grad_norm": 16.89689826965332,
      "learning_rate": 9.38e-05,
      "loss": 0.1242,
      "step": 62
    },
    {
      "epoch": 0.06,
      "grad_norm": 38.343963623046875,
      "learning_rate": 9.370000000000001e-05,
      "loss": 0.4748,
      "step": 63
    },
    {
      "epoch": 0.06,
      "grad_norm": 30.578535079956055,
      "learning_rate": 9.360000000000001e-05,
      "loss": 0.8823,
      "step": 64
    },
    {
      "epoch": 0.07,
      "grad_norm": 26.075117111206055,
      "learning_rate": 9.350000000000001e-05,
      "loss": 0.3553,
      "step": 65
    },
    {
      "epoch": 0.07,
      "grad_norm": 30.658485412597656,
      "learning_rate": 9.340000000000001e-05,
      "loss": 0.5925,
      "step": 66
    },
    {
      "epoch": 0.07,
      "grad_norm": 16.315160751342773,
      "learning_rate": 9.33e-05,
      "loss": 0.1272,
      "step": 67
    },
    {
      "epoch": 0.07,
      "grad_norm": 29.514232635498047,
      "learning_rate": 9.320000000000002e-05,
      "loss": 0.5304,
      "step": 68
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.8858259320259094,
      "learning_rate": 9.310000000000001e-05,
      "loss": 0.0042,
      "step": 69
    },
    {
      "epoch": 0.07,
      "grad_norm": 34.349971771240234,
      "learning_rate": 9.300000000000001e-05,
      "loss": 1.4723,
      "step": 70
    },
    {
      "epoch": 0.07,
      "grad_norm": 12.096094131469727,
      "learning_rate": 9.290000000000001e-05,
      "loss": 0.0912,
      "step": 71
    },
    {
      "epoch": 0.07,
      "grad_norm": 25.67418098449707,
      "learning_rate": 9.28e-05,
      "loss": 0.2763,
      "step": 72
    },
    {
      "epoch": 0.07,
      "grad_norm": 9.287317276000977,
      "learning_rate": 9.27e-05,
      "loss": 0.0993,
      "step": 73
    },
    {
      "epoch": 0.07,
      "grad_norm": 13.056856155395508,
      "learning_rate": 9.260000000000001e-05,
      "loss": 0.1464,
      "step": 74
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.23573875427246,
      "learning_rate": 9.250000000000001e-05,
      "loss": 0.1995,
      "step": 75
    },
    {
      "epoch": 0.08,
      "grad_norm": 30.34964942932129,
      "learning_rate": 9.240000000000001e-05,
      "loss": 0.331,
      "step": 76
    },
    {
      "epoch": 0.08,
      "grad_norm": 34.08774948120117,
      "learning_rate": 9.230000000000001e-05,
      "loss": 0.5528,
      "step": 77
    },
    {
      "epoch": 0.08,
      "grad_norm": 25.965503692626953,
      "learning_rate": 9.22e-05,
      "loss": 0.4419,
      "step": 78
    },
    {
      "epoch": 0.08,
      "grad_norm": 27.7506160736084,
      "learning_rate": 9.21e-05,
      "loss": 0.3613,
      "step": 79
    },
    {
      "epoch": 0.08,
      "grad_norm": 64.53148651123047,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.8911,
      "step": 80
    },
    {
      "epoch": 0.08,
      "grad_norm": 14.74899959564209,
      "learning_rate": 9.190000000000001e-05,
      "loss": 0.2018,
      "step": 81
    },
    {
      "epoch": 0.08,
      "grad_norm": 29.797199249267578,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.7492,
      "step": 82
    },
    {
      "epoch": 0.08,
      "grad_norm": 24.35639762878418,
      "learning_rate": 9.17e-05,
      "loss": 0.2577,
      "step": 83
    },
    {
      "epoch": 0.08,
      "grad_norm": 39.93398666381836,
      "learning_rate": 9.16e-05,
      "loss": 0.7685,
      "step": 84
    },
    {
      "epoch": 0.09,
      "grad_norm": 17.78274154663086,
      "learning_rate": 9.15e-05,
      "loss": 0.2235,
      "step": 85
    },
    {
      "epoch": 0.09,
      "grad_norm": 21.428129196166992,
      "learning_rate": 9.140000000000001e-05,
      "loss": 0.236,
      "step": 86
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.9476423263549805,
      "learning_rate": 9.130000000000001e-05,
      "loss": 0.0323,
      "step": 87
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.092775344848633,
      "learning_rate": 9.120000000000001e-05,
      "loss": 0.0189,
      "step": 88
    },
    {
      "epoch": 0.09,
      "grad_norm": 38.60344314575195,
      "learning_rate": 9.11e-05,
      "loss": 0.9759,
      "step": 89
    },
    {
      "epoch": 0.09,
      "grad_norm": 10.748154640197754,
      "learning_rate": 9.1e-05,
      "loss": 0.088,
      "step": 90
    },
    {
      "epoch": 0.09,
      "grad_norm": 31.464191436767578,
      "learning_rate": 9.090000000000001e-05,
      "loss": 0.5222,
      "step": 91
    },
    {
      "epoch": 0.09,
      "grad_norm": 27.113365173339844,
      "learning_rate": 9.080000000000001e-05,
      "loss": 0.2638,
      "step": 92
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.8783392906188965,
      "learning_rate": 9.070000000000001e-05,
      "loss": 0.0234,
      "step": 93
    },
    {
      "epoch": 0.09,
      "grad_norm": 51.70059585571289,
      "learning_rate": 9.06e-05,
      "loss": 1.9132,
      "step": 94
    },
    {
      "epoch": 0.1,
      "grad_norm": 35.822940826416016,
      "learning_rate": 9.05e-05,
      "loss": 0.7936,
      "step": 95
    },
    {
      "epoch": 0.1,
      "grad_norm": 41.59641647338867,
      "learning_rate": 9.04e-05,
      "loss": 1.2705,
      "step": 96
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.7700830698013306,
      "learning_rate": 9.030000000000001e-05,
      "loss": 0.0097,
      "step": 97
    },
    {
      "epoch": 0.1,
      "grad_norm": 78.31722259521484,
      "learning_rate": 9.020000000000001e-05,
      "loss": 2.2656,
      "step": 98
    },
    {
      "epoch": 0.1,
      "grad_norm": 35.84953689575195,
      "learning_rate": 9.010000000000001e-05,
      "loss": 0.4499,
      "step": 99
    },
    {
      "epoch": 0.1,
      "grad_norm": 32.15023422241211,
      "learning_rate": 9e-05,
      "loss": 1.3309,
      "step": 100
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.091931343078613,
      "learning_rate": 8.99e-05,
      "loss": 0.0577,
      "step": 101
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.4728281497955322,
      "learning_rate": 8.98e-05,
      "loss": 0.0151,
      "step": 102
    },
    {
      "epoch": 0.1,
      "grad_norm": 22.939411163330078,
      "learning_rate": 8.970000000000001e-05,
      "loss": 0.2908,
      "step": 103
    },
    {
      "epoch": 0.1,
      "grad_norm": 33.68719482421875,
      "learning_rate": 8.960000000000001e-05,
      "loss": 1.0016,
      "step": 104
    },
    {
      "epoch": 0.1,
      "grad_norm": 15.805360794067383,
      "learning_rate": 8.950000000000001e-05,
      "loss": 0.1251,
      "step": 105
    },
    {
      "epoch": 0.11,
      "grad_norm": 28.76931381225586,
      "learning_rate": 8.94e-05,
      "loss": 0.486,
      "step": 106
    },
    {
      "epoch": 0.11,
      "grad_norm": 63.06796646118164,
      "learning_rate": 8.93e-05,
      "loss": 1.705,
      "step": 107
    },
    {
      "epoch": 0.11,
      "grad_norm": 35.362327575683594,
      "learning_rate": 8.92e-05,
      "loss": 2.3002,
      "step": 108
    },
    {
      "epoch": 0.11,
      "grad_norm": 30.09642791748047,
      "learning_rate": 8.910000000000001e-05,
      "loss": 0.7912,
      "step": 109
    },
    {
      "epoch": 0.11,
      "grad_norm": 31.83744239807129,
      "learning_rate": 8.900000000000001e-05,
      "loss": 0.652,
      "step": 110
    },
    {
      "epoch": 0.11,
      "grad_norm": 26.239160537719727,
      "learning_rate": 8.89e-05,
      "loss": 0.4104,
      "step": 111
    },
    {
      "epoch": 0.11,
      "grad_norm": 17.760883331298828,
      "learning_rate": 8.88e-05,
      "loss": 0.2015,
      "step": 112
    },
    {
      "epoch": 0.11,
      "grad_norm": 27.477123260498047,
      "learning_rate": 8.87e-05,
      "loss": 0.4919,
      "step": 113
    },
    {
      "epoch": 0.11,
      "grad_norm": 51.267391204833984,
      "learning_rate": 8.86e-05,
      "loss": 0.6284,
      "step": 114
    },
    {
      "epoch": 0.12,
      "grad_norm": 29.60396957397461,
      "learning_rate": 8.850000000000001e-05,
      "loss": 0.2356,
      "step": 115
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.065886497497559,
      "learning_rate": 8.840000000000001e-05,
      "loss": 0.034,
      "step": 116
    },
    {
      "epoch": 0.12,
      "grad_norm": 28.858583450317383,
      "learning_rate": 8.83e-05,
      "loss": 0.2402,
      "step": 117
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.959737777709961,
      "learning_rate": 8.82e-05,
      "loss": 0.0418,
      "step": 118
    },
    {
      "epoch": 0.12,
      "grad_norm": 7.274974346160889,
      "learning_rate": 8.81e-05,
      "loss": 0.0469,
      "step": 119
    },
    {
      "epoch": 0.12,
      "grad_norm": 49.039154052734375,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.8188,
      "step": 120
    },
    {
      "epoch": 0.12,
      "grad_norm": 26.21894645690918,
      "learning_rate": 8.790000000000001e-05,
      "loss": 0.4633,
      "step": 121
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.200336933135986,
      "learning_rate": 8.78e-05,
      "loss": 0.0348,
      "step": 122
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.5993304252624512,
      "learning_rate": 8.77e-05,
      "loss": 0.0085,
      "step": 123
    },
    {
      "epoch": 0.12,
      "grad_norm": 9.848234176635742,
      "learning_rate": 8.76e-05,
      "loss": 0.0598,
      "step": 124
    },
    {
      "epoch": 0.12,
      "grad_norm": 31.79520034790039,
      "learning_rate": 8.75e-05,
      "loss": 0.9076,
      "step": 125
    },
    {
      "epoch": 0.13,
      "grad_norm": 28.447763442993164,
      "learning_rate": 8.740000000000001e-05,
      "loss": 0.8934,
      "step": 126
    },
    {
      "epoch": 0.13,
      "grad_norm": 27.019044876098633,
      "learning_rate": 8.730000000000001e-05,
      "loss": 0.6513,
      "step": 127
    },
    {
      "epoch": 0.13,
      "grad_norm": 48.12791061401367,
      "learning_rate": 8.72e-05,
      "loss": 2.4184,
      "step": 128
    },
    {
      "epoch": 0.13,
      "grad_norm": 39.648468017578125,
      "learning_rate": 8.71e-05,
      "loss": 0.9709,
      "step": 129
    },
    {
      "epoch": 0.13,
      "grad_norm": 19.506731033325195,
      "learning_rate": 8.7e-05,
      "loss": 0.1482,
      "step": 130
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.624091148376465,
      "learning_rate": 8.69e-05,
      "loss": 0.0289,
      "step": 131
    },
    {
      "epoch": 0.13,
      "grad_norm": 33.03536605834961,
      "learning_rate": 8.680000000000001e-05,
      "loss": 0.523,
      "step": 132
    },
    {
      "epoch": 0.13,
      "grad_norm": 26.916067123413086,
      "learning_rate": 8.67e-05,
      "loss": 0.4418,
      "step": 133
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.7219924926757812,
      "learning_rate": 8.66e-05,
      "loss": 0.0268,
      "step": 134
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.1921474933624268,
      "learning_rate": 8.65e-05,
      "loss": 0.0063,
      "step": 135
    },
    {
      "epoch": 0.14,
      "grad_norm": 32.356571197509766,
      "learning_rate": 8.64e-05,
      "loss": 0.4848,
      "step": 136
    },
    {
      "epoch": 0.14,
      "grad_norm": 8.539983749389648,
      "learning_rate": 8.63e-05,
      "loss": 0.0491,
      "step": 137
    },
    {
      "epoch": 0.14,
      "grad_norm": 28.862144470214844,
      "learning_rate": 8.620000000000001e-05,
      "loss": 0.7974,
      "step": 138
    },
    {
      "epoch": 0.14,
      "grad_norm": 15.244535446166992,
      "learning_rate": 8.61e-05,
      "loss": 0.1273,
      "step": 139
    },
    {
      "epoch": 0.14,
      "grad_norm": 27.476613998413086,
      "learning_rate": 8.6e-05,
      "loss": 0.6099,
      "step": 140
    },
    {
      "epoch": 0.14,
      "grad_norm": 25.30959129333496,
      "learning_rate": 8.59e-05,
      "loss": 0.5524,
      "step": 141
    },
    {
      "epoch": 0.14,
      "grad_norm": 14.604026794433594,
      "learning_rate": 8.58e-05,
      "loss": 0.1036,
      "step": 142
    },
    {
      "epoch": 0.14,
      "grad_norm": 25.0202579498291,
      "learning_rate": 8.57e-05,
      "loss": 0.4411,
      "step": 143
    },
    {
      "epoch": 0.14,
      "grad_norm": 35.9989128112793,
      "learning_rate": 8.560000000000001e-05,
      "loss": 0.7138,
      "step": 144
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.4863531291484833,
      "learning_rate": 8.55e-05,
      "loss": 0.0038,
      "step": 145
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.6537394523620605,
      "learning_rate": 8.54e-05,
      "loss": 0.0223,
      "step": 146
    },
    {
      "epoch": 0.15,
      "grad_norm": 21.37197494506836,
      "learning_rate": 8.53e-05,
      "loss": 0.3206,
      "step": 147
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.8339968919754028,
      "learning_rate": 8.52e-05,
      "loss": 0.0056,
      "step": 148
    },
    {
      "epoch": 0.15,
      "grad_norm": 33.149024963378906,
      "learning_rate": 8.510000000000001e-05,
      "loss": 0.3708,
      "step": 149
    },
    {
      "epoch": 0.15,
      "grad_norm": 18.96430778503418,
      "learning_rate": 8.5e-05,
      "loss": 0.2126,
      "step": 150
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.3087029457092285,
      "learning_rate": 8.49e-05,
      "loss": 0.0343,
      "step": 151
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4322396516799927,
      "learning_rate": 8.48e-05,
      "loss": 0.0099,
      "step": 152
    },
    {
      "epoch": 0.15,
      "grad_norm": 27.87354850769043,
      "learning_rate": 8.47e-05,
      "loss": 0.4536,
      "step": 153
    },
    {
      "epoch": 0.15,
      "grad_norm": 24.29855728149414,
      "learning_rate": 8.46e-05,
      "loss": 0.2568,
      "step": 154
    },
    {
      "epoch": 0.15,
      "grad_norm": 32.701412200927734,
      "learning_rate": 8.450000000000001e-05,
      "loss": 0.6025,
      "step": 155
    },
    {
      "epoch": 0.16,
      "grad_norm": 29.71053123474121,
      "learning_rate": 8.44e-05,
      "loss": 0.3161,
      "step": 156
    },
    {
      "epoch": 0.16,
      "grad_norm": 24.117475509643555,
      "learning_rate": 8.43e-05,
      "loss": 0.3355,
      "step": 157
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.675106406211853,
      "learning_rate": 8.42e-05,
      "loss": 0.0131,
      "step": 158
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.6950457096099854,
      "learning_rate": 8.41e-05,
      "loss": 0.0139,
      "step": 159
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.965531349182129,
      "learning_rate": 8.4e-05,
      "loss": 0.0381,
      "step": 160
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.490444660186768,
      "learning_rate": 8.39e-05,
      "loss": 0.0311,
      "step": 161
    },
    {
      "epoch": 0.16,
      "grad_norm": 48.338592529296875,
      "learning_rate": 8.38e-05,
      "loss": 2.2775,
      "step": 162
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.9811841249465942,
      "learning_rate": 8.37e-05,
      "loss": 0.0054,
      "step": 163
    },
    {
      "epoch": 0.16,
      "grad_norm": 28.258628845214844,
      "learning_rate": 8.36e-05,
      "loss": 1.0993,
      "step": 164
    },
    {
      "epoch": 0.17,
      "grad_norm": 31.349571228027344,
      "learning_rate": 8.35e-05,
      "loss": 0.4425,
      "step": 165
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.468545913696289,
      "learning_rate": 8.34e-05,
      "loss": 0.0119,
      "step": 166
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.3878319263458252,
      "learning_rate": 8.33e-05,
      "loss": 0.0027,
      "step": 167
    },
    {
      "epoch": 0.17,
      "grad_norm": 17.001144409179688,
      "learning_rate": 8.32e-05,
      "loss": 0.1208,
      "step": 168
    },
    {
      "epoch": 0.17,
      "grad_norm": 11.259538650512695,
      "learning_rate": 8.31e-05,
      "loss": 0.0934,
      "step": 169
    },
    {
      "epoch": 0.17,
      "grad_norm": 28.391857147216797,
      "learning_rate": 8.3e-05,
      "loss": 0.6119,
      "step": 170
    },
    {
      "epoch": 0.17,
      "grad_norm": 12.239534378051758,
      "learning_rate": 8.29e-05,
      "loss": 0.1011,
      "step": 171
    },
    {
      "epoch": 0.17,
      "grad_norm": 19.20558738708496,
      "learning_rate": 8.28e-05,
      "loss": 0.1845,
      "step": 172
    },
    {
      "epoch": 0.17,
      "grad_norm": 28.89300537109375,
      "learning_rate": 8.27e-05,
      "loss": 0.2756,
      "step": 173
    },
    {
      "epoch": 0.17,
      "grad_norm": 34.91083908081055,
      "learning_rate": 8.26e-05,
      "loss": 0.7058,
      "step": 174
    },
    {
      "epoch": 0.17,
      "grad_norm": 7.6595845222473145,
      "learning_rate": 8.25e-05,
      "loss": 0.0658,
      "step": 175
    },
    {
      "epoch": 0.18,
      "grad_norm": 35.96283721923828,
      "learning_rate": 8.24e-05,
      "loss": 0.5961,
      "step": 176
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.20796895027160645,
      "learning_rate": 8.23e-05,
      "loss": 0.0018,
      "step": 177
    },
    {
      "epoch": 0.18,
      "grad_norm": 11.105677604675293,
      "learning_rate": 8.22e-05,
      "loss": 0.0699,
      "step": 178
    },
    {
      "epoch": 0.18,
      "grad_norm": 45.597267150878906,
      "learning_rate": 8.21e-05,
      "loss": 0.7625,
      "step": 179
    },
    {
      "epoch": 0.18,
      "grad_norm": 14.93590259552002,
      "learning_rate": 8.2e-05,
      "loss": 0.1476,
      "step": 180
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5446606874465942,
      "learning_rate": 8.19e-05,
      "loss": 0.0101,
      "step": 181
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.4029202461242676,
      "learning_rate": 8.18e-05,
      "loss": 0.0188,
      "step": 182
    },
    {
      "epoch": 0.18,
      "grad_norm": 26.945552825927734,
      "learning_rate": 8.17e-05,
      "loss": 1.0054,
      "step": 183
    },
    {
      "epoch": 0.18,
      "grad_norm": 17.689714431762695,
      "learning_rate": 8.16e-05,
      "loss": 0.1296,
      "step": 184
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.4410698413848877,
      "learning_rate": 8.15e-05,
      "loss": 0.0168,
      "step": 185
    },
    {
      "epoch": 0.19,
      "grad_norm": 54.830902099609375,
      "learning_rate": 8.14e-05,
      "loss": 3.1109,
      "step": 186
    },
    {
      "epoch": 0.19,
      "grad_norm": 38.1295166015625,
      "learning_rate": 8.13e-05,
      "loss": 0.9705,
      "step": 187
    },
    {
      "epoch": 0.19,
      "grad_norm": 6.8395466804504395,
      "learning_rate": 8.120000000000001e-05,
      "loss": 0.0493,
      "step": 188
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.1371989250183105,
      "learning_rate": 8.11e-05,
      "loss": 0.0165,
      "step": 189
    },
    {
      "epoch": 0.19,
      "grad_norm": 23.55173683166504,
      "learning_rate": 8.1e-05,
      "loss": 0.2548,
      "step": 190
    },
    {
      "epoch": 0.19,
      "grad_norm": 8.50500202178955,
      "learning_rate": 8.090000000000001e-05,
      "loss": 0.0517,
      "step": 191
    },
    {
      "epoch": 0.19,
      "grad_norm": 32.87669372558594,
      "learning_rate": 8.080000000000001e-05,
      "loss": 0.3359,
      "step": 192
    },
    {
      "epoch": 0.19,
      "grad_norm": 5.529865741729736,
      "learning_rate": 8.070000000000001e-05,
      "loss": 0.0391,
      "step": 193
    },
    {
      "epoch": 0.19,
      "grad_norm": 24.900270462036133,
      "learning_rate": 8.060000000000001e-05,
      "loss": 0.2896,
      "step": 194
    },
    {
      "epoch": 0.2,
      "grad_norm": 17.59227752685547,
      "learning_rate": 8.05e-05,
      "loss": 0.1772,
      "step": 195
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2958277463912964,
      "learning_rate": 8.04e-05,
      "loss": 0.0067,
      "step": 196
    },
    {
      "epoch": 0.2,
      "grad_norm": 16.34239959716797,
      "learning_rate": 8.030000000000001e-05,
      "loss": 0.1426,
      "step": 197
    },
    {
      "epoch": 0.2,
      "grad_norm": 27.314735412597656,
      "learning_rate": 8.020000000000001e-05,
      "loss": 1.4348,
      "step": 198
    },
    {
      "epoch": 0.2,
      "grad_norm": 19.829275131225586,
      "learning_rate": 8.010000000000001e-05,
      "loss": 0.5608,
      "step": 199
    },
    {
      "epoch": 0.2,
      "grad_norm": 28.39422607421875,
      "learning_rate": 8e-05,
      "loss": 1.4606,
      "step": 200
    },
    {
      "epoch": 0.2,
      "grad_norm": 31.092449188232422,
      "learning_rate": 7.99e-05,
      "loss": 0.2932,
      "step": 201
    },
    {
      "epoch": 0.2,
      "grad_norm": 51.73237991333008,
      "learning_rate": 7.98e-05,
      "loss": 1.8982,
      "step": 202
    },
    {
      "epoch": 0.2,
      "grad_norm": 8.397103309631348,
      "learning_rate": 7.970000000000001e-05,
      "loss": 0.0508,
      "step": 203
    },
    {
      "epoch": 0.2,
      "grad_norm": 22.450349807739258,
      "learning_rate": 7.960000000000001e-05,
      "loss": 0.1964,
      "step": 204
    },
    {
      "epoch": 0.2,
      "grad_norm": 38.33378982543945,
      "learning_rate": 7.950000000000001e-05,
      "loss": 0.9211,
      "step": 205
    },
    {
      "epoch": 0.21,
      "grad_norm": 53.80275344848633,
      "learning_rate": 7.94e-05,
      "loss": 1.2505,
      "step": 206
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.610823154449463,
      "learning_rate": 7.93e-05,
      "loss": 0.0086,
      "step": 207
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.0060709137469530106,
      "learning_rate": 7.920000000000001e-05,
      "loss": 0.0,
      "step": 208
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.0124675035476685,
      "learning_rate": 7.910000000000001e-05,
      "loss": 0.0066,
      "step": 209
    },
    {
      "epoch": 0.21,
      "grad_norm": 27.3523006439209,
      "learning_rate": 7.900000000000001e-05,
      "loss": 0.3906,
      "step": 210
    },
    {
      "epoch": 0.21,
      "grad_norm": 23.502004623413086,
      "learning_rate": 7.890000000000001e-05,
      "loss": 0.3856,
      "step": 211
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.276196241378784,
      "learning_rate": 7.88e-05,
      "loss": 0.0203,
      "step": 212
    },
    {
      "epoch": 0.21,
      "grad_norm": 20.402345657348633,
      "learning_rate": 7.87e-05,
      "loss": 0.1852,
      "step": 213
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.2247250080108643,
      "learning_rate": 7.860000000000001e-05,
      "loss": 0.0105,
      "step": 214
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.4587538540363312,
      "learning_rate": 7.850000000000001e-05,
      "loss": 0.0025,
      "step": 215
    },
    {
      "epoch": 0.22,
      "grad_norm": 25.925575256347656,
      "learning_rate": 7.840000000000001e-05,
      "loss": 0.3174,
      "step": 216
    },
    {
      "epoch": 0.22,
      "grad_norm": 23.675764083862305,
      "learning_rate": 7.83e-05,
      "loss": 0.433,
      "step": 217
    },
    {
      "epoch": 0.22,
      "grad_norm": 20.47338104248047,
      "learning_rate": 7.82e-05,
      "loss": 0.1766,
      "step": 218
    },
    {
      "epoch": 0.22,
      "grad_norm": 27.251806259155273,
      "learning_rate": 7.81e-05,
      "loss": 0.9316,
      "step": 219
    },
    {
      "epoch": 0.22,
      "grad_norm": 40.24387741088867,
      "learning_rate": 7.800000000000001e-05,
      "loss": 0.6914,
      "step": 220
    },
    {
      "epoch": 0.22,
      "grad_norm": 34.86661911010742,
      "learning_rate": 7.790000000000001e-05,
      "loss": 0.705,
      "step": 221
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.656433343887329,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.015,
      "step": 222
    },
    {
      "epoch": 0.22,
      "grad_norm": 4.383270740509033,
      "learning_rate": 7.77e-05,
      "loss": 0.0246,
      "step": 223
    },
    {
      "epoch": 0.22,
      "grad_norm": 25.344125747680664,
      "learning_rate": 7.76e-05,
      "loss": 0.6604,
      "step": 224
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.3705878257751465,
      "learning_rate": 7.75e-05,
      "loss": 0.0108,
      "step": 225
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.18879830837249756,
      "learning_rate": 7.740000000000001e-05,
      "loss": 0.0013,
      "step": 226
    },
    {
      "epoch": 0.23,
      "grad_norm": 38.107086181640625,
      "learning_rate": 7.730000000000001e-05,
      "loss": 1.5827,
      "step": 227
    },
    {
      "epoch": 0.23,
      "grad_norm": 8.14284610748291,
      "learning_rate": 7.72e-05,
      "loss": 0.0449,
      "step": 228
    },
    {
      "epoch": 0.23,
      "grad_norm": 10.337870597839355,
      "learning_rate": 7.71e-05,
      "loss": 0.06,
      "step": 229
    },
    {
      "epoch": 0.23,
      "grad_norm": 18.004863739013672,
      "learning_rate": 7.7e-05,
      "loss": 0.1319,
      "step": 230
    },
    {
      "epoch": 0.23,
      "grad_norm": 34.32221984863281,
      "learning_rate": 7.69e-05,
      "loss": 0.2753,
      "step": 231
    },
    {
      "epoch": 0.23,
      "grad_norm": 43.25252914428711,
      "learning_rate": 7.680000000000001e-05,
      "loss": 1.961,
      "step": 232
    },
    {
      "epoch": 0.23,
      "grad_norm": 18.302125930786133,
      "learning_rate": 7.670000000000001e-05,
      "loss": 0.1674,
      "step": 233
    },
    {
      "epoch": 0.23,
      "grad_norm": 7.26860237121582,
      "learning_rate": 7.66e-05,
      "loss": 0.0528,
      "step": 234
    },
    {
      "epoch": 0.23,
      "grad_norm": 17.053834915161133,
      "learning_rate": 7.65e-05,
      "loss": 0.1233,
      "step": 235
    },
    {
      "epoch": 0.24,
      "grad_norm": 27.578536987304688,
      "learning_rate": 7.64e-05,
      "loss": 0.4018,
      "step": 236
    },
    {
      "epoch": 0.24,
      "grad_norm": 31.73558807373047,
      "learning_rate": 7.630000000000001e-05,
      "loss": 1.2531,
      "step": 237
    },
    {
      "epoch": 0.24,
      "grad_norm": 27.72031593322754,
      "learning_rate": 7.620000000000001e-05,
      "loss": 1.6853,
      "step": 238
    },
    {
      "epoch": 0.24,
      "grad_norm": 9.15406608581543,
      "learning_rate": 7.61e-05,
      "loss": 0.0539,
      "step": 239
    },
    {
      "epoch": 0.24,
      "grad_norm": 12.243758201599121,
      "learning_rate": 7.6e-05,
      "loss": 0.0753,
      "step": 240
    },
    {
      "epoch": 0.24,
      "grad_norm": 4.238646030426025,
      "learning_rate": 7.59e-05,
      "loss": 0.0304,
      "step": 241
    },
    {
      "epoch": 0.24,
      "grad_norm": 12.011713981628418,
      "learning_rate": 7.58e-05,
      "loss": 0.0777,
      "step": 242
    },
    {
      "epoch": 0.24,
      "grad_norm": 25.589540481567383,
      "learning_rate": 7.570000000000001e-05,
      "loss": 0.7472,
      "step": 243
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.752915859222412,
      "learning_rate": 7.560000000000001e-05,
      "loss": 0.0184,
      "step": 244
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.3850865960121155,
      "learning_rate": 7.55e-05,
      "loss": 0.0023,
      "step": 245
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.0657126903533936,
      "learning_rate": 7.54e-05,
      "loss": 0.0139,
      "step": 246
    },
    {
      "epoch": 0.25,
      "grad_norm": 5.209764003753662,
      "learning_rate": 7.53e-05,
      "loss": 0.0278,
      "step": 247
    },
    {
      "epoch": 0.25,
      "grad_norm": 26.664770126342773,
      "learning_rate": 7.52e-05,
      "loss": 0.4915,
      "step": 248
    },
    {
      "epoch": 0.25,
      "grad_norm": 22.27918815612793,
      "learning_rate": 7.510000000000001e-05,
      "loss": 0.2126,
      "step": 249
    },
    {
      "epoch": 0.25,
      "grad_norm": 13.991991996765137,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.0893,
      "step": 250
    },
    {
      "epoch": 0.25,
      "grad_norm": 28.546430587768555,
      "learning_rate": 7.49e-05,
      "loss": 0.7366,
      "step": 251
    },
    {
      "epoch": 0.25,
      "grad_norm": 40.69116973876953,
      "learning_rate": 7.48e-05,
      "loss": 0.825,
      "step": 252
    },
    {
      "epoch": 0.25,
      "grad_norm": 10.466350555419922,
      "learning_rate": 7.47e-05,
      "loss": 0.0609,
      "step": 253
    },
    {
      "epoch": 0.25,
      "grad_norm": 41.81674575805664,
      "learning_rate": 7.46e-05,
      "loss": 0.7904,
      "step": 254
    },
    {
      "epoch": 0.26,
      "grad_norm": 24.998729705810547,
      "learning_rate": 7.450000000000001e-05,
      "loss": 0.3079,
      "step": 255
    },
    {
      "epoch": 0.26,
      "grad_norm": 29.997278213500977,
      "learning_rate": 7.44e-05,
      "loss": 0.2313,
      "step": 256
    },
    {
      "epoch": 0.26,
      "grad_norm": 24.29697608947754,
      "learning_rate": 7.43e-05,
      "loss": 0.2985,
      "step": 257
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.0109050273895264,
      "learning_rate": 7.42e-05,
      "loss": 0.008,
      "step": 258
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.4110793173313141,
      "learning_rate": 7.41e-05,
      "loss": 0.0022,
      "step": 259
    },
    {
      "epoch": 0.26,
      "grad_norm": 22.185529708862305,
      "learning_rate": 7.4e-05,
      "loss": 0.2034,
      "step": 260
    },
    {
      "epoch": 0.26,
      "grad_norm": 38.921875,
      "learning_rate": 7.390000000000001e-05,
      "loss": 0.7375,
      "step": 261
    },
    {
      "epoch": 0.26,
      "grad_norm": 21.488807678222656,
      "learning_rate": 7.38e-05,
      "loss": 0.194,
      "step": 262
    },
    {
      "epoch": 0.26,
      "grad_norm": 10.119009017944336,
      "learning_rate": 7.37e-05,
      "loss": 0.0688,
      "step": 263
    },
    {
      "epoch": 0.26,
      "grad_norm": 23.405567169189453,
      "learning_rate": 7.36e-05,
      "loss": 0.3999,
      "step": 264
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.5199978351593018,
      "learning_rate": 7.35e-05,
      "loss": 0.0152,
      "step": 265
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.482628583908081,
      "learning_rate": 7.340000000000001e-05,
      "loss": 0.0127,
      "step": 266
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.1299862265586853,
      "learning_rate": 7.33e-05,
      "loss": 0.0012,
      "step": 267
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.690964460372925,
      "learning_rate": 7.32e-05,
      "loss": 0.0133,
      "step": 268
    },
    {
      "epoch": 0.27,
      "grad_norm": 6.540533065795898,
      "learning_rate": 7.31e-05,
      "loss": 0.0407,
      "step": 269
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.2752537727355957,
      "learning_rate": 7.3e-05,
      "loss": 0.0121,
      "step": 270
    },
    {
      "epoch": 0.27,
      "grad_norm": 13.641573905944824,
      "learning_rate": 7.29e-05,
      "loss": 0.0854,
      "step": 271
    },
    {
      "epoch": 0.27,
      "grad_norm": 28.05483055114746,
      "learning_rate": 7.280000000000001e-05,
      "loss": 0.5136,
      "step": 272
    },
    {
      "epoch": 0.27,
      "grad_norm": 23.613855361938477,
      "learning_rate": 7.27e-05,
      "loss": 0.3377,
      "step": 273
    },
    {
      "epoch": 0.27,
      "grad_norm": 18.591848373413086,
      "learning_rate": 7.26e-05,
      "loss": 0.2001,
      "step": 274
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.20727111399173737,
      "learning_rate": 7.25e-05,
      "loss": 0.0017,
      "step": 275
    },
    {
      "epoch": 0.28,
      "grad_norm": 10.257818222045898,
      "learning_rate": 7.24e-05,
      "loss": 0.0567,
      "step": 276
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.06034255027771,
      "learning_rate": 7.23e-05,
      "loss": 0.017,
      "step": 277
    },
    {
      "epoch": 0.28,
      "grad_norm": 26.740501403808594,
      "learning_rate": 7.22e-05,
      "loss": 1.564,
      "step": 278
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.23033320903778076,
      "learning_rate": 7.21e-05,
      "loss": 0.0013,
      "step": 279
    },
    {
      "epoch": 0.28,
      "grad_norm": 18.267688751220703,
      "learning_rate": 7.2e-05,
      "loss": 0.1606,
      "step": 280
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.9552924633026123,
      "learning_rate": 7.19e-05,
      "loss": 0.0119,
      "step": 281
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.3651779294013977,
      "learning_rate": 7.18e-05,
      "loss": 0.0032,
      "step": 282
    },
    {
      "epoch": 0.28,
      "grad_norm": 9.202610969543457,
      "learning_rate": 7.17e-05,
      "loss": 0.0506,
      "step": 283
    },
    {
      "epoch": 0.28,
      "grad_norm": 36.84799575805664,
      "learning_rate": 7.16e-05,
      "loss": 1.1391,
      "step": 284
    },
    {
      "epoch": 0.28,
      "grad_norm": 36.70115661621094,
      "learning_rate": 7.15e-05,
      "loss": 1.0974,
      "step": 285
    },
    {
      "epoch": 0.29,
      "grad_norm": 24.444910049438477,
      "learning_rate": 7.14e-05,
      "loss": 0.2421,
      "step": 286
    },
    {
      "epoch": 0.29,
      "grad_norm": 20.180049896240234,
      "learning_rate": 7.13e-05,
      "loss": 0.2202,
      "step": 287
    },
    {
      "epoch": 0.29,
      "grad_norm": 23.31342124938965,
      "learning_rate": 7.12e-05,
      "loss": 0.1958,
      "step": 288
    },
    {
      "epoch": 0.29,
      "grad_norm": 15.620962142944336,
      "learning_rate": 7.11e-05,
      "loss": 0.1034,
      "step": 289
    },
    {
      "epoch": 0.29,
      "grad_norm": 27.025894165039062,
      "learning_rate": 7.1e-05,
      "loss": 0.5174,
      "step": 290
    },
    {
      "epoch": 0.29,
      "grad_norm": 25.132476806640625,
      "learning_rate": 7.09e-05,
      "loss": 0.423,
      "step": 291
    },
    {
      "epoch": 0.29,
      "grad_norm": 5.431723117828369,
      "learning_rate": 7.08e-05,
      "loss": 0.0509,
      "step": 292
    },
    {
      "epoch": 0.29,
      "grad_norm": 16.48543357849121,
      "learning_rate": 7.07e-05,
      "loss": 0.1131,
      "step": 293
    },
    {
      "epoch": 0.29,
      "grad_norm": 29.093488693237305,
      "learning_rate": 7.06e-05,
      "loss": 0.5578,
      "step": 294
    },
    {
      "epoch": 0.29,
      "grad_norm": 11.249940872192383,
      "learning_rate": 7.05e-05,
      "loss": 0.0791,
      "step": 295
    },
    {
      "epoch": 0.3,
      "grad_norm": 9.518302917480469,
      "learning_rate": 7.04e-05,
      "loss": 0.0549,
      "step": 296
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.03421126306056976,
      "learning_rate": 7.03e-05,
      "loss": 0.0003,
      "step": 297
    },
    {
      "epoch": 0.3,
      "grad_norm": 30.86328125,
      "learning_rate": 7.02e-05,
      "loss": 0.8866,
      "step": 298
    },
    {
      "epoch": 0.3,
      "grad_norm": 25.39554214477539,
      "learning_rate": 7.01e-05,
      "loss": 0.2173,
      "step": 299
    },
    {
      "epoch": 0.3,
      "grad_norm": 27.889158248901367,
      "learning_rate": 7e-05,
      "loss": 0.9736,
      "step": 300
    },
    {
      "epoch": 0.3,
      "grad_norm": 11.48615837097168,
      "learning_rate": 6.99e-05,
      "loss": 0.0671,
      "step": 301
    },
    {
      "epoch": 0.3,
      "grad_norm": 17.868688583374023,
      "learning_rate": 6.98e-05,
      "loss": 0.1292,
      "step": 302
    },
    {
      "epoch": 0.3,
      "grad_norm": 11.254850387573242,
      "learning_rate": 6.97e-05,
      "loss": 0.0739,
      "step": 303
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.120415687561035,
      "learning_rate": 6.96e-05,
      "loss": 0.0103,
      "step": 304
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.4435783922672272,
      "learning_rate": 6.95e-05,
      "loss": 0.0029,
      "step": 305
    },
    {
      "epoch": 0.31,
      "grad_norm": 7.28007173538208,
      "learning_rate": 6.939999999999999e-05,
      "loss": 0.0388,
      "step": 306
    },
    {
      "epoch": 0.31,
      "grad_norm": 10.124960899353027,
      "learning_rate": 6.93e-05,
      "loss": 0.0583,
      "step": 307
    },
    {
      "epoch": 0.31,
      "grad_norm": 25.847856521606445,
      "learning_rate": 6.92e-05,
      "loss": 0.7839,
      "step": 308
    },
    {
      "epoch": 0.31,
      "grad_norm": 24.378753662109375,
      "learning_rate": 6.91e-05,
      "loss": 0.4693,
      "step": 309
    },
    {
      "epoch": 0.31,
      "grad_norm": 23.50893783569336,
      "learning_rate": 6.9e-05,
      "loss": 0.3675,
      "step": 310
    },
    {
      "epoch": 0.31,
      "grad_norm": 23.423568725585938,
      "learning_rate": 6.89e-05,
      "loss": 0.4385,
      "step": 311
    },
    {
      "epoch": 0.31,
      "grad_norm": 22.86809539794922,
      "learning_rate": 6.879999999999999e-05,
      "loss": 0.2332,
      "step": 312
    },
    {
      "epoch": 0.31,
      "grad_norm": 23.574260711669922,
      "learning_rate": 6.87e-05,
      "loss": 0.2794,
      "step": 313
    },
    {
      "epoch": 0.31,
      "grad_norm": 11.08251667022705,
      "learning_rate": 6.860000000000001e-05,
      "loss": 0.0687,
      "step": 314
    },
    {
      "epoch": 0.32,
      "grad_norm": 22.305559158325195,
      "learning_rate": 6.850000000000001e-05,
      "loss": 0.2469,
      "step": 315
    },
    {
      "epoch": 0.32,
      "grad_norm": 27.41745376586914,
      "learning_rate": 6.840000000000001e-05,
      "loss": 0.3026,
      "step": 316
    },
    {
      "epoch": 0.32,
      "grad_norm": 11.78591251373291,
      "learning_rate": 6.83e-05,
      "loss": 0.0773,
      "step": 317
    },
    {
      "epoch": 0.32,
      "grad_norm": 28.363525390625,
      "learning_rate": 6.82e-05,
      "loss": 0.3241,
      "step": 318
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.026371236890554428,
      "learning_rate": 6.81e-05,
      "loss": 0.0002,
      "step": 319
    },
    {
      "epoch": 0.32,
      "grad_norm": 26.836273193359375,
      "learning_rate": 6.800000000000001e-05,
      "loss": 0.6225,
      "step": 320
    },
    {
      "epoch": 0.32,
      "grad_norm": 20.1362361907959,
      "learning_rate": 6.790000000000001e-05,
      "loss": 0.1332,
      "step": 321
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.151141881942749,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0161,
      "step": 322
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.3739681243896484,
      "learning_rate": 6.77e-05,
      "loss": 0.02,
      "step": 323
    },
    {
      "epoch": 0.32,
      "grad_norm": 11.643929481506348,
      "learning_rate": 6.76e-05,
      "loss": 0.0814,
      "step": 324
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.02318856306374073,
      "learning_rate": 6.750000000000001e-05,
      "loss": 0.0002,
      "step": 325
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.5685155987739563,
      "learning_rate": 6.740000000000001e-05,
      "loss": 0.0027,
      "step": 326
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9630302786827087,
      "learning_rate": 6.730000000000001e-05,
      "loss": 0.0129,
      "step": 327
    },
    {
      "epoch": 0.33,
      "grad_norm": 27.39959144592285,
      "learning_rate": 6.720000000000001e-05,
      "loss": 0.9036,
      "step": 328
    },
    {
      "epoch": 0.33,
      "grad_norm": 27.959325790405273,
      "learning_rate": 6.71e-05,
      "loss": 0.3344,
      "step": 329
    },
    {
      "epoch": 0.33,
      "grad_norm": 25.573692321777344,
      "learning_rate": 6.7e-05,
      "loss": 0.5308,
      "step": 330
    },
    {
      "epoch": 0.33,
      "grad_norm": 8.346728324890137,
      "learning_rate": 6.690000000000001e-05,
      "loss": 0.1063,
      "step": 331
    },
    {
      "epoch": 0.33,
      "grad_norm": 36.26382064819336,
      "learning_rate": 6.680000000000001e-05,
      "loss": 0.8096,
      "step": 332
    },
    {
      "epoch": 0.33,
      "grad_norm": 20.98823356628418,
      "learning_rate": 6.670000000000001e-05,
      "loss": 0.1938,
      "step": 333
    },
    {
      "epoch": 0.33,
      "grad_norm": 11.60432243347168,
      "learning_rate": 6.66e-05,
      "loss": 0.0702,
      "step": 334
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.3954720497131348,
      "learning_rate": 6.65e-05,
      "loss": 0.0085,
      "step": 335
    },
    {
      "epoch": 0.34,
      "grad_norm": 25.829086303710938,
      "learning_rate": 6.64e-05,
      "loss": 0.5444,
      "step": 336
    },
    {
      "epoch": 0.34,
      "grad_norm": 25.731420516967773,
      "learning_rate": 6.630000000000001e-05,
      "loss": 0.7691,
      "step": 337
    },
    {
      "epoch": 0.34,
      "grad_norm": 17.625537872314453,
      "learning_rate": 6.620000000000001e-05,
      "loss": 0.1764,
      "step": 338
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.233189344406128,
      "learning_rate": 6.610000000000001e-05,
      "loss": 0.0057,
      "step": 339
    },
    {
      "epoch": 0.34,
      "grad_norm": 38.987022399902344,
      "learning_rate": 6.6e-05,
      "loss": 1.1134,
      "step": 340
    },
    {
      "epoch": 0.34,
      "grad_norm": 28.655746459960938,
      "learning_rate": 6.59e-05,
      "loss": 0.7567,
      "step": 341
    },
    {
      "epoch": 0.34,
      "grad_norm": 24.218215942382812,
      "learning_rate": 6.58e-05,
      "loss": 0.2627,
      "step": 342
    },
    {
      "epoch": 0.34,
      "grad_norm": 27.85608673095703,
      "learning_rate": 6.570000000000001e-05,
      "loss": 1.3316,
      "step": 343
    },
    {
      "epoch": 0.34,
      "grad_norm": 16.30738067626953,
      "learning_rate": 6.560000000000001e-05,
      "loss": 0.1128,
      "step": 344
    },
    {
      "epoch": 0.34,
      "grad_norm": 27.84078598022461,
      "learning_rate": 6.55e-05,
      "loss": 1.3145,
      "step": 345
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.4055051803588867,
      "learning_rate": 6.54e-05,
      "loss": 0.0163,
      "step": 346
    },
    {
      "epoch": 0.35,
      "grad_norm": 36.960227966308594,
      "learning_rate": 6.53e-05,
      "loss": 1.1153,
      "step": 347
    },
    {
      "epoch": 0.35,
      "grad_norm": 27.966150283813477,
      "learning_rate": 6.52e-05,
      "loss": 0.8653,
      "step": 348
    },
    {
      "epoch": 0.35,
      "grad_norm": 16.647764205932617,
      "learning_rate": 6.510000000000001e-05,
      "loss": 0.1341,
      "step": 349
    },
    {
      "epoch": 0.35,
      "grad_norm": 27.271018981933594,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.8342,
      "step": 350
    },
    {
      "epoch": 0.35,
      "grad_norm": 4.560828685760498,
      "learning_rate": 6.49e-05,
      "loss": 0.0271,
      "step": 351
    },
    {
      "epoch": 0.35,
      "grad_norm": 27.721872329711914,
      "learning_rate": 6.48e-05,
      "loss": 0.5398,
      "step": 352
    },
    {
      "epoch": 0.35,
      "grad_norm": 27.782337188720703,
      "learning_rate": 6.47e-05,
      "loss": 0.8772,
      "step": 353
    },
    {
      "epoch": 0.35,
      "grad_norm": 17.337244033813477,
      "learning_rate": 6.460000000000001e-05,
      "loss": 0.1268,
      "step": 354
    },
    {
      "epoch": 0.35,
      "grad_norm": 48.405452728271484,
      "learning_rate": 6.450000000000001e-05,
      "loss": 0.8516,
      "step": 355
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.5789971351623535,
      "learning_rate": 6.440000000000001e-05,
      "loss": 0.0035,
      "step": 356
    },
    {
      "epoch": 0.36,
      "grad_norm": 43.46677017211914,
      "learning_rate": 6.43e-05,
      "loss": 1.6386,
      "step": 357
    },
    {
      "epoch": 0.36,
      "grad_norm": 20.956693649291992,
      "learning_rate": 6.42e-05,
      "loss": 0.2639,
      "step": 358
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.5366334319114685,
      "learning_rate": 6.41e-05,
      "loss": 0.003,
      "step": 359
    },
    {
      "epoch": 0.36,
      "grad_norm": 14.680846214294434,
      "learning_rate": 6.400000000000001e-05,
      "loss": 0.1709,
      "step": 360
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.22275477647781372,
      "learning_rate": 6.390000000000001e-05,
      "loss": 0.0019,
      "step": 361
    },
    {
      "epoch": 0.36,
      "grad_norm": 20.443256378173828,
      "learning_rate": 6.38e-05,
      "loss": 0.1746,
      "step": 362
    },
    {
      "epoch": 0.36,
      "grad_norm": 23.81313705444336,
      "learning_rate": 6.37e-05,
      "loss": 0.402,
      "step": 363
    },
    {
      "epoch": 0.36,
      "grad_norm": 15.194694519042969,
      "learning_rate": 6.36e-05,
      "loss": 0.1142,
      "step": 364
    },
    {
      "epoch": 0.36,
      "grad_norm": 17.877086639404297,
      "learning_rate": 6.35e-05,
      "loss": 0.1906,
      "step": 365
    },
    {
      "epoch": 0.37,
      "grad_norm": 13.516918182373047,
      "learning_rate": 6.340000000000001e-05,
      "loss": 0.1061,
      "step": 366
    },
    {
      "epoch": 0.37,
      "grad_norm": 42.48619079589844,
      "learning_rate": 6.330000000000001e-05,
      "loss": 2.2565,
      "step": 367
    },
    {
      "epoch": 0.37,
      "grad_norm": 26.311281204223633,
      "learning_rate": 6.32e-05,
      "loss": 0.7296,
      "step": 368
    },
    {
      "epoch": 0.37,
      "grad_norm": 34.43406677246094,
      "learning_rate": 6.31e-05,
      "loss": 1.9544,
      "step": 369
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.6160056591033936,
      "learning_rate": 6.3e-05,
      "loss": 0.003,
      "step": 370
    },
    {
      "epoch": 0.37,
      "grad_norm": 8.770819664001465,
      "learning_rate": 6.29e-05,
      "loss": 0.0579,
      "step": 371
    },
    {
      "epoch": 0.37,
      "grad_norm": 45.1691780090332,
      "learning_rate": 6.280000000000001e-05,
      "loss": 0.924,
      "step": 372
    },
    {
      "epoch": 0.37,
      "grad_norm": 33.87689208984375,
      "learning_rate": 6.27e-05,
      "loss": 1.5384,
      "step": 373
    },
    {
      "epoch": 0.37,
      "grad_norm": 5.7516889572143555,
      "learning_rate": 6.26e-05,
      "loss": 0.0374,
      "step": 374
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.6744739413261414,
      "learning_rate": 6.25e-05,
      "loss": 0.0035,
      "step": 375
    },
    {
      "epoch": 0.38,
      "grad_norm": 29.63498306274414,
      "learning_rate": 6.24e-05,
      "loss": 0.398,
      "step": 376
    },
    {
      "epoch": 0.38,
      "grad_norm": 23.468154907226562,
      "learning_rate": 6.23e-05,
      "loss": 0.4509,
      "step": 377
    },
    {
      "epoch": 0.38,
      "grad_norm": 46.63499069213867,
      "learning_rate": 6.220000000000001e-05,
      "loss": 1.2205,
      "step": 378
    },
    {
      "epoch": 0.38,
      "grad_norm": 5.985834121704102,
      "learning_rate": 6.21e-05,
      "loss": 0.0551,
      "step": 379
    },
    {
      "epoch": 0.38,
      "grad_norm": 20.038787841796875,
      "learning_rate": 6.2e-05,
      "loss": 1.2499,
      "step": 380
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.5343722701072693,
      "learning_rate": 6.19e-05,
      "loss": 0.0043,
      "step": 381
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8306647539138794,
      "learning_rate": 6.18e-05,
      "loss": 0.0041,
      "step": 382
    },
    {
      "epoch": 0.38,
      "grad_norm": 5.906933784484863,
      "learning_rate": 6.170000000000001e-05,
      "loss": 0.0328,
      "step": 383
    },
    {
      "epoch": 0.38,
      "grad_norm": 26.180572509765625,
      "learning_rate": 6.16e-05,
      "loss": 0.4335,
      "step": 384
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.458011656999588,
      "learning_rate": 6.15e-05,
      "loss": 0.0032,
      "step": 385
    },
    {
      "epoch": 0.39,
      "grad_norm": 24.316070556640625,
      "learning_rate": 6.14e-05,
      "loss": 0.2748,
      "step": 386
    },
    {
      "epoch": 0.39,
      "grad_norm": 23.521617889404297,
      "learning_rate": 6.13e-05,
      "loss": 0.2207,
      "step": 387
    },
    {
      "epoch": 0.39,
      "grad_norm": 32.9349365234375,
      "learning_rate": 6.12e-05,
      "loss": 0.4958,
      "step": 388
    },
    {
      "epoch": 0.39,
      "grad_norm": 5.459490776062012,
      "learning_rate": 6.110000000000001e-05,
      "loss": 0.03,
      "step": 389
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.2316316366195679,
      "learning_rate": 6.1e-05,
      "loss": 0.0072,
      "step": 390
    },
    {
      "epoch": 0.39,
      "grad_norm": 29.919815063476562,
      "learning_rate": 6.09e-05,
      "loss": 0.3748,
      "step": 391
    },
    {
      "epoch": 0.39,
      "grad_norm": 3.5657413005828857,
      "learning_rate": 6.08e-05,
      "loss": 0.0287,
      "step": 392
    },
    {
      "epoch": 0.39,
      "grad_norm": 4.026643753051758,
      "learning_rate": 6.07e-05,
      "loss": 0.0368,
      "step": 393
    },
    {
      "epoch": 0.39,
      "grad_norm": 27.373517990112305,
| "learning_rate": 6.06e-05, | |
| "loss": 1.2679, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 15.123701095581055, | |
| "learning_rate": 6.05e-05, | |
| "loss": 0.1167, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.5672614574432373, | |
| "learning_rate": 6.04e-05, | |
| "loss": 0.0104, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 27.24859619140625, | |
| "learning_rate": 6.03e-05, | |
| "loss": 0.7966, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.6505637168884277, | |
| "learning_rate": 6.02e-05, | |
| "loss": 0.0084, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 38.031185150146484, | |
| "learning_rate": 6.0100000000000004e-05, | |
| "loss": 1.3056, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 3.0308609008789062, | |
| "learning_rate": 6e-05, | |
| "loss": 0.0268, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.9318943023681641, | |
| "learning_rate": 5.99e-05, | |
| "loss": 0.0068, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 30.836875915527344, | |
| "learning_rate": 5.9800000000000003e-05, | |
| "loss": 0.2937, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 6.094520092010498, | |
| "learning_rate": 5.97e-05, | |
| "loss": 0.037, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 49.60438537597656, | |
| "learning_rate": 5.96e-05, | |
| "loss": 0.7892, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 26.148279190063477, | |
| "learning_rate": 5.95e-05, | |
| "loss": 0.8323, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 13.96594524383545, | |
| "learning_rate": 5.94e-05, | |
| "loss": 0.0942, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 7.893899917602539, | |
| "learning_rate": 5.93e-05, | |
| "loss": 0.0486, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.9445253610610962, | |
| "learning_rate": 5.92e-05, | |
| "loss": 0.0047, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 19.779830932617188, | |
| "learning_rate": 5.91e-05, | |
| "loss": 0.1503, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 44.186283111572266, | |
| "learning_rate": 5.9e-05, | |
| "loss": 1.0128, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 26.87023162841797, | |
| "learning_rate": 5.89e-05, | |
| "loss": 0.6065, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 28.162328720092773, | |
| "learning_rate": 5.88e-05, | |
| "loss": 1.2739, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 38.76342010498047, | |
| "learning_rate": 5.87e-05, | |
| "loss": 1.3519, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.24123622477054596, | |
| "learning_rate": 5.86e-05, | |
| "loss": 0.0017, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 26.10549545288086, | |
| "learning_rate": 5.85e-05, | |
| "loss": 0.6323, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 24.87130355834961, | |
| "learning_rate": 5.8399999999999997e-05, | |
| "loss": 0.3113, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 26.145565032958984, | |
| "learning_rate": 5.83e-05, | |
| "loss": 0.4853, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 23.986276626586914, | |
| "learning_rate": 5.82e-05, | |
| "loss": 0.4204, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 25.39900779724121, | |
| "learning_rate": 5.8099999999999996e-05, | |
| "loss": 0.3669, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 24.91488265991211, | |
| "learning_rate": 5.8e-05, | |
| "loss": 0.4622, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 6.05866813659668, | |
| "learning_rate": 5.79e-05, | |
| "loss": 0.0574, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 22.513336181640625, | |
| "learning_rate": 5.7799999999999995e-05, | |
| "loss": 0.7316, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 20.641204833984375, | |
| "learning_rate": 5.77e-05, | |
| "loss": 0.8384, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.3587974309921265, | |
| "learning_rate": 5.76e-05, | |
| "loss": 0.0065, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 21.501081466674805, | |
| "learning_rate": 5.7499999999999995e-05, | |
| "loss": 0.2061, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 42.36915588378906, | |
| "learning_rate": 5.74e-05, | |
| "loss": 1.3581, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 6.361469745635986, | |
| "learning_rate": 5.73e-05, | |
| "loss": 0.052, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 30.654287338256836, | |
| "learning_rate": 5.72e-05, | |
| "loss": 0.3091, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8391109108924866, | |
| "learning_rate": 5.71e-05, | |
| "loss": 0.0061, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.208446979522705, | |
| "learning_rate": 5.6999999999999996e-05, | |
| "loss": 0.0193, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.1669785976409912, | |
| "learning_rate": 5.69e-05, | |
| "loss": 0.0075, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 15.361763000488281, | |
| "learning_rate": 5.68e-05, | |
| "loss": 0.1838, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.7279540300369263, | |
| "learning_rate": 5.6699999999999996e-05, | |
| "loss": 0.0069, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 35.00238800048828, | |
| "learning_rate": 5.66e-05, | |
| "loss": 0.7085, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 27.048877716064453, | |
| "learning_rate": 5.65e-05, | |
| "loss": 0.3643, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.5507223606109619, | |
| "learning_rate": 5.6399999999999995e-05, | |
| "loss": 0.0036, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.34695854783058167, | |
| "learning_rate": 5.63e-05, | |
| "loss": 0.0021, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 27.41823959350586, | |
| "learning_rate": 5.620000000000001e-05, | |
| "loss": 0.6107, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 27.145366668701172, | |
| "learning_rate": 5.610000000000001e-05, | |
| "loss": 0.6633, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 4.264007091522217, | |
| "learning_rate": 5.6000000000000006e-05, | |
| "loss": 0.0222, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 26.830528259277344, | |
| "learning_rate": 5.590000000000001e-05, | |
| "loss": 0.5476, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 40.81327438354492, | |
| "learning_rate": 5.580000000000001e-05, | |
| "loss": 0.7979, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.6571967005729675, | |
| "learning_rate": 5.5700000000000005e-05, | |
| "loss": 0.0043, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 18.67711067199707, | |
| "learning_rate": 5.560000000000001e-05, | |
| "loss": 0.1879, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 21.878129959106445, | |
| "learning_rate": 5.550000000000001e-05, | |
| "loss": 0.2265, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 9.461925506591797, | |
| "learning_rate": 5.5400000000000005e-05, | |
| "loss": 0.056, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.5579652786254883, | |
| "learning_rate": 5.530000000000001e-05, | |
| "loss": 0.0132, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.1434270143508911, | |
| "learning_rate": 5.520000000000001e-05, | |
| "loss": 0.0083, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 8.296957015991211, | |
| "learning_rate": 5.5100000000000004e-05, | |
| "loss": 0.0537, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7849231958389282, | |
| "learning_rate": 5.500000000000001e-05, | |
| "loss": 0.0041, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7314896583557129, | |
| "learning_rate": 5.4900000000000006e-05, | |
| "loss": 0.0057, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 8.517040252685547, | |
| "learning_rate": 5.4800000000000004e-05, | |
| "loss": 0.0533, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 7.680517673492432, | |
| "learning_rate": 5.470000000000001e-05, | |
| "loss": 0.0452, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 50.83726119995117, | |
| "learning_rate": 5.4600000000000006e-05, | |
| "loss": 2.289, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 29.895687103271484, | |
| "learning_rate": 5.45e-05, | |
| "loss": 0.6449, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 30.133241653442383, | |
| "learning_rate": 5.440000000000001e-05, | |
| "loss": 0.4874, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.48309120535850525, | |
| "learning_rate": 5.4300000000000005e-05, | |
| "loss": 0.004, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 25.740129470825195, | |
| "learning_rate": 5.420000000000001e-05, | |
| "loss": 0.3231, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 13.617050170898438, | |
| "learning_rate": 5.410000000000001e-05, | |
| "loss": 0.1009, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 31.53325080871582, | |
| "learning_rate": 5.4000000000000005e-05, | |
| "loss": 0.7426, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 36.63671875, | |
| "learning_rate": 5.390000000000001e-05, | |
| "loss": 1.9978, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 10.66323184967041, | |
| "learning_rate": 5.380000000000001e-05, | |
| "loss": 0.0627, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 8.019021034240723, | |
| "learning_rate": 5.3700000000000004e-05, | |
| "loss": 0.0588, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 26.658863067626953, | |
| "learning_rate": 5.360000000000001e-05, | |
| "loss": 0.4552, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 6.3222432136535645, | |
| "learning_rate": 5.3500000000000006e-05, | |
| "loss": 0.0576, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 25.450960159301758, | |
| "learning_rate": 5.3400000000000004e-05, | |
| "loss": 0.8938, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 21.743581771850586, | |
| "learning_rate": 5.330000000000001e-05, | |
| "loss": 0.1594, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 9.093583106994629, | |
| "learning_rate": 5.3200000000000006e-05, | |
| "loss": 0.0525, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 12.705286979675293, | |
| "learning_rate": 5.31e-05, | |
| "loss": 0.1962, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.836313247680664, | |
| "learning_rate": 5.300000000000001e-05, | |
| "loss": 0.0176, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 30.454456329345703, | |
| "learning_rate": 5.2900000000000005e-05, | |
| "loss": 1.425, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 27.88629150390625, | |
| "learning_rate": 5.28e-05, | |
| "loss": 1.7417, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 28.916534423828125, | |
| "learning_rate": 5.270000000000001e-05, | |
| "loss": 0.759, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.10350897908210754, | |
| "learning_rate": 5.2600000000000005e-05, | |
| "loss": 0.0007, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.06146182492375374, | |
| "learning_rate": 5.25e-05, | |
| "loss": 0.0005, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 23.231124877929688, | |
| "learning_rate": 5.2400000000000007e-05, | |
| "loss": 0.318, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 39.07556915283203, | |
| "learning_rate": 5.2300000000000004e-05, | |
| "loss": 1.574, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 4.361273765563965, | |
| "learning_rate": 5.22e-05, | |
| "loss": 0.0221, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 66.95952606201172, | |
| "learning_rate": 5.2100000000000006e-05, | |
| "loss": 2.6285, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 17.135887145996094, | |
| "learning_rate": 5.2000000000000004e-05, | |
| "loss": 0.9921, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 8.985684394836426, | |
| "learning_rate": 5.19e-05, | |
| "loss": 0.0569, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 9.756717681884766, | |
| "learning_rate": 5.1800000000000005e-05, | |
| "loss": 0.0604, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 13.56096076965332, | |
| "learning_rate": 5.17e-05, | |
| "loss": 0.1157, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 4.52184534072876, | |
| "learning_rate": 5.16e-05, | |
| "loss": 0.0449, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 30.49310874938965, | |
| "learning_rate": 5.1500000000000005e-05, | |
| "loss": 0.781, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 4.6025214195251465, | |
| "learning_rate": 5.14e-05, | |
| "loss": 0.029, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 4.667913436889648, | |
| "learning_rate": 5.130000000000001e-05, | |
| "loss": 0.0281, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 21.860044479370117, | |
| "learning_rate": 5.1200000000000004e-05, | |
| "loss": 0.24, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 3.007333993911743, | |
| "learning_rate": 5.11e-05, | |
| "loss": 0.0184, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 3.179823398590088, | |
| "learning_rate": 5.1000000000000006e-05, | |
| "loss": 0.0204, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.7667524814605713, | |
| "learning_rate": 5.0900000000000004e-05, | |
| "loss": 0.0062, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.3707985877990723, | |
| "learning_rate": 5.08e-05, | |
| "loss": 0.0147, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 22.363510131835938, | |
| "learning_rate": 5.0700000000000006e-05, | |
| "loss": 0.3099, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 17.151803970336914, | |
| "learning_rate": 5.0600000000000003e-05, | |
| "loss": 0.1448, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 26.439014434814453, | |
| "learning_rate": 5.05e-05, | |
| "loss": 0.2519, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 11.708067893981934, | |
| "learning_rate": 5.0400000000000005e-05, | |
| "loss": 0.0841, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.6435901522636414, | |
| "learning_rate": 5.03e-05, | |
| "loss": 0.0045, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 22.941650390625, | |
| "learning_rate": 5.02e-05, | |
| "loss": 0.2113, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.059579696506261826, | |
| "learning_rate": 5.0100000000000005e-05, | |
| "loss": 0.0004, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 3.3625144958496094, | |
| "learning_rate": 5e-05, | |
| "loss": 0.025, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.4082565307617188, | |
| "learning_rate": 4.99e-05, | |
| "loss": 0.013, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 11.377418518066406, | |
| "learning_rate": 4.9800000000000004e-05, | |
| "loss": 0.0745, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 32.57530212402344, | |
| "learning_rate": 4.97e-05, | |
| "loss": 0.6956, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.3960559666156769, | |
| "learning_rate": 4.96e-05, | |
| "loss": 0.0025, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.9463507533073425, | |
| "learning_rate": 4.9500000000000004e-05, | |
| "loss": 0.0047, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 26.72979164123535, | |
| "learning_rate": 4.94e-05, | |
| "loss": 0.5934, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.7482037544250488, | |
| "learning_rate": 4.93e-05, | |
| "loss": 0.0105, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 31.476030349731445, | |
| "learning_rate": 4.92e-05, | |
| "loss": 0.4363, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 17.86579132080078, | |
| "learning_rate": 4.91e-05, | |
| "loss": 0.1511, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 3.154860496520996, | |
| "learning_rate": 4.9e-05, | |
| "loss": 0.0167, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.15494592487812042, | |
| "learning_rate": 4.89e-05, | |
| "loss": 0.001, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 22.509258270263672, | |
| "learning_rate": 4.88e-05, | |
| "loss": 0.2224, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 3.79130220413208, | |
| "learning_rate": 4.87e-05, | |
| "loss": 0.0198, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.28567245602607727, | |
| "learning_rate": 4.86e-05, | |
| "loss": 0.0023, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 6.115846157073975, | |
| "learning_rate": 4.85e-05, | |
| "loss": 0.0433, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 21.192081451416016, | |
| "learning_rate": 4.8400000000000004e-05, | |
| "loss": 0.1827, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 32.70637512207031, | |
| "learning_rate": 4.83e-05, | |
| "loss": 2.368, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 26.6806697845459, | |
| "learning_rate": 4.82e-05, | |
| "loss": 0.8146, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 25.684167861938477, | |
| "learning_rate": 4.8100000000000004e-05, | |
| "loss": 0.7391, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 13.313119888305664, | |
| "learning_rate": 4.8e-05, | |
| "loss": 0.08, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 39.951602935791016, | |
| "learning_rate": 4.79e-05, | |
| "loss": 1.68, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 29.313465118408203, | |
| "learning_rate": 4.78e-05, | |
| "loss": 0.8004, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 3.168497085571289, | |
| "learning_rate": 4.77e-05, | |
| "loss": 0.0208, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 11.69185733795166, | |
| "learning_rate": 4.76e-05, | |
| "loss": 0.0701, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 15.70853328704834, | |
| "learning_rate": 4.75e-05, | |
| "loss": 0.141, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7469470500946045, | |
| "learning_rate": 4.74e-05, | |
| "loss": 0.0057, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.03925801441073418, | |
| "learning_rate": 4.73e-05, | |
| "loss": 0.0002, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 28.931766510009766, | |
| "learning_rate": 4.72e-05, | |
| "loss": 0.4199, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.8265246748924255, | |
| "learning_rate": 4.71e-05, | |
| "loss": 0.0044, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 16.333219528198242, | |
| "learning_rate": 4.7e-05, | |
| "loss": 0.1149, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.08890499919652939, | |
| "learning_rate": 4.69e-05, | |
| "loss": 0.0007, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.24474568665027618, | |
| "learning_rate": 4.6800000000000006e-05, | |
| "loss": 0.0013, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 42.735389709472656, | |
| "learning_rate": 4.6700000000000003e-05, | |
| "loss": 1.2246, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 15.926461219787598, | |
| "learning_rate": 4.660000000000001e-05, | |
| "loss": 0.1089, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 26.88334846496582, | |
| "learning_rate": 4.6500000000000005e-05, | |
| "loss": 0.2097, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 25.04203987121582, | |
| "learning_rate": 4.64e-05, | |
| "loss": 0.5348, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 38.08080291748047, | |
| "learning_rate": 4.630000000000001e-05, | |
| "loss": 2.7062, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.8540865778923035, | |
| "learning_rate": 4.6200000000000005e-05, | |
| "loss": 0.0062, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 26.498910903930664, | |
| "learning_rate": 4.61e-05, | |
| "loss": 0.7067, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 7.919168472290039, | |
| "learning_rate": 4.600000000000001e-05, | |
| "loss": 0.0499, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 3.056781530380249, | |
| "learning_rate": 4.5900000000000004e-05, | |
| "loss": 0.0155, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 26.98896026611328, | |
| "learning_rate": 4.58e-05, | |
| "loss": 1.8109, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.0453416109085083, | |
| "learning_rate": 4.5700000000000006e-05, | |
| "loss": 0.0051, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 26.941593170166016, | |
| "learning_rate": 4.5600000000000004e-05, | |
| "loss": 1.3416, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 7.245401382446289, | |
| "learning_rate": 4.55e-05, | |
| "loss": 0.0806, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 28.137914657592773, | |
| "learning_rate": 4.5400000000000006e-05, | |
| "loss": 1.3048, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 27.121395111083984, | |
| "learning_rate": 4.53e-05, | |
| "loss": 0.9772, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 25.434829711914062, | |
| "learning_rate": 4.52e-05, | |
| "loss": 0.481, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 29.52955436706543, | |
| "learning_rate": 4.5100000000000005e-05, | |
| "loss": 0.7622, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.06173492223024368, | |
| "learning_rate": 4.5e-05, | |
| "loss": 0.0008, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.3727266192436218, | |
| "learning_rate": 4.49e-05, | |
| "loss": 0.0025, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.017284419387578964, | |
| "learning_rate": 4.4800000000000005e-05, | |
| "loss": 0.0001, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.07270216196775436, | |
| "learning_rate": 4.47e-05, | |
| "loss": 0.0009, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 7.1794257164001465, | |
| "learning_rate": 4.46e-05, | |
| "loss": 0.0394, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.5796927213668823, | |
| "learning_rate": 4.4500000000000004e-05, | |
| "loss": 0.0035, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.966355085372925, | |
| "learning_rate": 4.44e-05, | |
| "loss": 0.015, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.8540501594543457, | |
| "learning_rate": 4.43e-05, | |
| "loss": 0.0045, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.8207895755767822, | |
| "learning_rate": 4.4200000000000004e-05, | |
| "loss": 0.0069, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 19.900699615478516, | |
| "learning_rate": 4.41e-05, | |
| "loss": 0.2333, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 3.041948080062866, | |
| "learning_rate": 4.4000000000000006e-05, | |
| "loss": 0.0157, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 40.66261672973633, | |
| "learning_rate": 4.39e-05, | |
| "loss": 0.7749, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 23.92762565612793, | |
| "learning_rate": 4.38e-05, | |
| "loss": 0.2517, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 26.802112579345703, | |
| "learning_rate": 4.3700000000000005e-05, | |
| "loss": 1.1189, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 25.978343963623047, | |
| "learning_rate": 4.36e-05, | |
| "loss": 0.3535, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.6375006437301636, | |
| "learning_rate": 4.35e-05, | |
| "loss": 0.003, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 4.264609336853027, | |
| "learning_rate": 4.3400000000000005e-05, | |
| "loss": 0.0295, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.2793530523777008, | |
| "learning_rate": 4.33e-05, | |
| "loss": 0.0029, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 10.96884822845459, | |
| "learning_rate": 4.32e-05, | |
| "loss": 0.0667, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.39111682772636414, | |
| "learning_rate": 4.3100000000000004e-05, | |
| "loss": 0.0027, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.712225079536438, | |
| "learning_rate": 4.3e-05, | |
| "loss": 0.0106, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 13.446296691894531, | |
| "learning_rate": 4.29e-05, | |
| "loss": 0.0828, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 28.18335723876953, | |
| "learning_rate": 4.2800000000000004e-05, | |
| "loss": 1.3561, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.2135007530450821, | |
| "learning_rate": 4.27e-05, | |
| "loss": 0.0019, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 43.12370300292969, | |
| "learning_rate": 4.26e-05, | |
| "loss": 2.1706, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.703223466873169, | |
| "learning_rate": 4.25e-05, | |
| "loss": 0.0057, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.19304363429546356, | |
| "learning_rate": 4.24e-05, | |
| "loss": 0.0013, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.9636431336402893, | |
| "learning_rate": 4.23e-05, | |
| "loss": 0.0065, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.248726487159729, | |
| "learning_rate": 4.22e-05, | |
| "loss": 0.0092, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 26.916940689086914, | |
| "learning_rate": 4.21e-05, | |
| "loss": 0.4018, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 20.4180965423584, | |
| "learning_rate": 4.2e-05, | |
| "loss": 0.2221, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 24.25115966796875, | |
| "learning_rate": 4.19e-05, | |
| "loss": 0.3603, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 14.176848411560059, | |
| "learning_rate": 4.18e-05, | |
| "loss": 0.096, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 18.621923446655273, | |
| "learning_rate": 4.17e-05, | |
| "loss": 1.9419, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.3514004051685333, | |
| "learning_rate": 4.16e-05, | |
| "loss": 0.002, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 22.820341110229492, | |
| "learning_rate": 4.15e-05, | |
| "loss": 0.1718, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.11256919801235199, | |
| "learning_rate": 4.14e-05, | |
| "loss": 0.0014, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 48.12772750854492, | |
| "learning_rate": 4.13e-05, | |
| "loss": 1.0516, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.6842830181121826, | |
| "learning_rate": 4.12e-05, | |
| "loss": 0.0058, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 27.722042083740234, | |
| "learning_rate": 4.11e-05, | |
| "loss": 0.3808, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 28.378963470458984, | |
| "learning_rate": 4.1e-05, | |
| "loss": 1.2508, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 21.205421447753906, | |
| "learning_rate": 4.09e-05, | |
| "loss": 0.3059, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 11.505128860473633, | |
| "learning_rate": 4.08e-05, | |
| "loss": 0.0675, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.10159604996442795, | |
| "learning_rate": 4.07e-05, | |
| "loss": 0.0006, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.013346759602427483, | |
| "learning_rate": 4.0600000000000004e-05, | |
| "loss": 0.0001, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 28.66274642944336, | |
| "learning_rate": 4.05e-05, | |
| "loss": 0.7989, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.537345290184021, | |
| "learning_rate": 4.0400000000000006e-05, | |
| "loss": 0.0073, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.15267232060432434, | |
| "learning_rate": 4.0300000000000004e-05, | |
| "loss": 0.0011, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.3610873222351074, | |
| "learning_rate": 4.02e-05, | |
| "loss": 0.0154, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7249945402145386, | |
| "learning_rate": 4.0100000000000006e-05, | |
| "loss": 0.006, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 9.683416366577148, | |
| "learning_rate": 4e-05, | |
| "loss": 0.0602, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 22.996389389038086, | |
| "learning_rate": 3.99e-05, | |
| "loss": 0.4727, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 25.594470977783203, | |
| "learning_rate": 3.9800000000000005e-05, | |
| "loss": 0.4286, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 27.750869750976562, | |
| "learning_rate": 3.97e-05, | |
| "loss": 0.7911, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 26.540616989135742, | |
| "learning_rate": 3.960000000000001e-05, | |
| "loss": 0.6582, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 26.802492141723633, | |
| "learning_rate": 3.9500000000000005e-05, | |
| "loss": 0.6329, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 27.42936134338379, | |
| "learning_rate": 3.94e-05, | |
| "loss": 1.0816, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 3.6503002643585205, | |
| "learning_rate": 3.9300000000000007e-05, | |
| "loss": 0.023, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 31.658214569091797, | |
| "learning_rate": 3.9200000000000004e-05, | |
| "loss": 0.4239, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.08632346242666245, | |
| "learning_rate": 3.91e-05, | |
| "loss": 0.0007, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.26137351989746094, | |
| "learning_rate": 3.9000000000000006e-05, | |
| "loss": 0.0048, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 22.23630142211914, | |
| "learning_rate": 3.8900000000000004e-05, | |
| "loss": 0.3182, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 4.530112266540527, | |
| "learning_rate": 3.88e-05, | |
| "loss": 0.0271, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 16.424596786499023, | |
| "learning_rate": 3.8700000000000006e-05, | |
| "loss": 0.1404, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.2123844623565674, | |
| "learning_rate": 3.86e-05, | |
| "loss": 0.0127, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 26.423566818237305, | |
| "learning_rate": 3.85e-05, | |
| "loss": 0.4926, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 24.903274536132812, | |
| "learning_rate": 3.8400000000000005e-05, | |
| "loss": 0.5274, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 28.026695251464844, | |
| "learning_rate": 3.83e-05, | |
| "loss": 0.7997, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.4405864477157593, | |
| "learning_rate": 3.82e-05, | |
| "loss": 0.0245, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.11772224307060242, | |
| "learning_rate": 3.8100000000000005e-05, | |
| "loss": 0.0011, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.7985239028930664, | |
| "learning_rate": 3.8e-05, | |
| "loss": 0.0294, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 22.811920166015625, | |
| "learning_rate": 3.79e-05, | |
| "loss": 0.3031, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 16.438217163085938, | |
| "learning_rate": 3.7800000000000004e-05, | |
| "loss": 0.283, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 16.60034942626953, | |
| "learning_rate": 3.77e-05, | |
| "loss": 0.1261, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 12.6220121383667, | |
| "learning_rate": 3.76e-05, | |
| "loss": 0.0818, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 26.638370513916016, | |
| "learning_rate": 3.7500000000000003e-05, | |
| "loss": 0.8201, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 28.03763771057129, | |
| "learning_rate": 3.74e-05, | |
| "loss": 1.0075, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 45.880584716796875, | |
| "learning_rate": 3.73e-05, | |
| "loss": 0.6867, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 6.816001892089844, | |
| "learning_rate": 3.72e-05, | |
| "loss": 0.0382, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 10.210158348083496, | |
| "learning_rate": 3.71e-05, | |
| "loss": 0.0685, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.01786009781062603, | |
| "learning_rate": 3.7e-05, | |
| "loss": 0.0002, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.49422261118888855, | |
| "learning_rate": 3.69e-05, | |
| "loss": 0.0045, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 19.256742477416992, | |
| "learning_rate": 3.68e-05, | |
| "loss": 0.1697, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 18.655818939208984, | |
| "learning_rate": 3.6700000000000004e-05, | |
| "loss": 0.2467, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 30.253002166748047, | |
| "learning_rate": 3.66e-05, | |
| "loss": 1.181, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 25.4757137298584, | |
| "learning_rate": 3.65e-05, | |
| "loss": 1.4477, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 50.04316329956055, | |
| "learning_rate": 3.6400000000000004e-05, | |
| "loss": 0.7256, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 5.345543384552002, | |
| "learning_rate": 3.63e-05, | |
| "loss": 0.0279, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.8220503926277161, | |
| "learning_rate": 3.62e-05, | |
| "loss": 0.0059, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.3052619993686676, | |
| "learning_rate": 3.61e-05, | |
| "loss": 0.0032, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.11896578967571259, | |
| "learning_rate": 3.6e-05, | |
| "loss": 0.0007, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.0606658011674881, | |
| "learning_rate": 3.59e-05, | |
| "loss": 0.0006, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.5427333116531372, | |
| "learning_rate": 3.58e-05, | |
| "loss": 0.0036, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.271037757396698, | |
| "learning_rate": 3.57e-05, | |
| "loss": 0.0016, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.1393972635269165, | |
| "learning_rate": 3.56e-05, | |
| "loss": 0.007, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.5412964820861816, | |
| "learning_rate": 3.55e-05, | |
| "loss": 0.0131, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.731440782546997, | |
| "learning_rate": 3.54e-05, | |
| "loss": 0.0091, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.20476283133029938, | |
| "learning_rate": 3.53e-05, | |
| "loss": 0.0015, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 7.767834186553955, | |
| "learning_rate": 3.52e-05, | |
| "loss": 0.0518, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 39.933876037597656, | |
| "learning_rate": 3.51e-05, | |
| "loss": 0.7671, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 39.605648040771484, | |
| "learning_rate": 3.5e-05, | |
| "loss": 1.9882, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.08953946828842163, | |
| "learning_rate": 3.49e-05, | |
| "loss": 0.0007, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 11.983209609985352, | |
| "learning_rate": 3.48e-05, | |
| "loss": 0.0733, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.12467926740646362, | |
| "learning_rate": 3.4699999999999996e-05, | |
| "loss": 0.0009, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 28.761707305908203, | |
| "learning_rate": 3.46e-05, | |
| "loss": 1.0135, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.9357807040214539, | |
| "learning_rate": 3.45e-05, | |
| "loss": 0.0098, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 33.30131149291992, | |
| "learning_rate": 3.4399999999999996e-05, | |
| "loss": 0.3859, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.06523477286100388, | |
| "learning_rate": 3.430000000000001e-05, | |
| "loss": 0.0004, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 26.81741714477539, | |
| "learning_rate": 3.4200000000000005e-05, | |
| "loss": 1.4735, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.06154371052980423, | |
| "learning_rate": 3.41e-05, | |
| "loss": 0.0004, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 24.583940505981445, | |
| "learning_rate": 3.4000000000000007e-05, | |
| "loss": 0.4161, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 36.68991470336914, | |
| "learning_rate": 3.3900000000000004e-05, | |
| "loss": 1.0415, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 45.04407501220703, | |
| "learning_rate": 3.38e-05, | |
| "loss": 0.9511, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 9.760159492492676, | |
| "learning_rate": 3.3700000000000006e-05, | |
| "loss": 0.0636, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 37.501834869384766, | |
| "learning_rate": 3.3600000000000004e-05, | |
| "loss": 0.5429, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 7.125507354736328, | |
| "learning_rate": 3.35e-05, | |
| "loss": 0.0742, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.6868292093276978, | |
| "learning_rate": 3.3400000000000005e-05, | |
| "loss": 0.0104, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 44.928775787353516, | |
| "learning_rate": 3.33e-05, | |
| "loss": 0.5577, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 23.767250061035156, | |
| "learning_rate": 3.32e-05, | |
| "loss": 0.2924, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 27.04254722595215, | |
| "learning_rate": 3.3100000000000005e-05, | |
| "loss": 0.4817, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.3137115240097046, | |
| "learning_rate": 3.3e-05, | |
| "loss": 0.0021, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 10.24022388458252, | |
| "learning_rate": 3.29e-05, | |
| "loss": 0.0798, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.1032443046569824, | |
| "learning_rate": 3.2800000000000004e-05, | |
| "loss": 0.0103, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 3.888711452484131, | |
| "learning_rate": 3.27e-05, | |
| "loss": 0.0211, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.06245700269937515, | |
| "learning_rate": 3.26e-05, | |
| "loss": 0.0004, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 17.818830490112305, | |
| "learning_rate": 3.2500000000000004e-05, | |
| "loss": 0.1672, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.842295527458191, | |
| "learning_rate": 3.24e-05, | |
| "loss": 0.0102, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.6570789813995361, | |
| "learning_rate": 3.2300000000000006e-05, | |
| "loss": 0.016, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.5554737448692322, | |
| "learning_rate": 3.2200000000000003e-05, | |
| "loss": 0.0032, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 26.97112464904785, | |
| "learning_rate": 3.21e-05, | |
| "loss": 0.8228, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.09880402684211731, | |
| "learning_rate": 3.2000000000000005e-05, | |
| "loss": 0.0007, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 26.534805297851562, | |
| "learning_rate": 3.19e-05, | |
| "loss": 1.1114, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 13.699009895324707, | |
| "learning_rate": 3.18e-05, | |
| "loss": 0.104, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 27.22563362121582, | |
| "learning_rate": 3.1700000000000005e-05, | |
| "loss": 1.0485, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 13.79177474975586, | |
| "learning_rate": 3.16e-05, | |
| "loss": 0.1139, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.6369801163673401, | |
| "learning_rate": 3.15e-05, | |
| "loss": 0.004, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.0804632157087326, | |
| "learning_rate": 3.1400000000000004e-05, | |
| "loss": 0.0006, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.0806119441986084, | |
| "learning_rate": 3.13e-05, | |
| "loss": 0.0084, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 19.096689224243164, | |
| "learning_rate": 3.12e-05, | |
| "loss": 0.1812, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 11.853796005249023, | |
| "learning_rate": 3.1100000000000004e-05, | |
| "loss": 0.0738, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 13.981377601623535, | |
| "learning_rate": 3.1e-05, | |
| "loss": 0.1197, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 19.2493896484375, | |
| "learning_rate": 3.09e-05, | |
| "loss": 0.1576, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 26.30409812927246, | |
| "learning_rate": 3.08e-05, | |
| "loss": 0.497, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 24.468488693237305, | |
| "learning_rate": 3.07e-05, | |
| "loss": 0.3814, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.004858358763158321, | |
| "learning_rate": 3.06e-05, | |
| "loss": 0.0, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 20.56285858154297, | |
| "learning_rate": 3.05e-05, | |
| "loss": 0.2559, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 10.914053916931152, | |
| "learning_rate": 3.04e-05, | |
| "loss": 0.0581, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 53.04142761230469, | |
| "learning_rate": 3.03e-05, | |
| "loss": 1.5222, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.3830277919769287, | |
| "learning_rate": 3.02e-05, | |
| "loss": 0.016, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 24.649484634399414, | |
| "learning_rate": 3.01e-05, | |
| "loss": 0.8276, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 26.6253604888916, | |
| "learning_rate": 3e-05, | |
| "loss": 0.5528, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.891787052154541, | |
| "learning_rate": 2.9900000000000002e-05, | |
| "loss": 0.005, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 3.0499017238616943, | |
| "learning_rate": 2.98e-05, | |
| "loss": 0.0156, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.6534267663955688, | |
| "learning_rate": 2.97e-05, | |
| "loss": 0.0084, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 7.562388896942139, | |
| "learning_rate": 2.96e-05, | |
| "loss": 0.0455, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 6.963227272033691, | |
| "learning_rate": 2.95e-05, | |
| "loss": 0.0556, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.26806902885437, | |
| "learning_rate": 2.94e-05, | |
| "loss": 0.0181, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 6.621293544769287, | |
| "learning_rate": 2.93e-05, | |
| "loss": 0.0342, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 3.3657660484313965, | |
| "learning_rate": 2.9199999999999998e-05, | |
| "loss": 0.0171, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 8.781088829040527, | |
| "learning_rate": 2.91e-05, | |
| "loss": 0.0495, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 24.759685516357422, | |
| "learning_rate": 2.9e-05, | |
| "loss": 0.3487, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 23.32072639465332, | |
| "learning_rate": 2.8899999999999998e-05, | |
| "loss": 0.2415, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 14.044452667236328, | |
| "learning_rate": 2.88e-05, | |
| "loss": 0.1113, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 3.096346855163574, | |
| "learning_rate": 2.87e-05, | |
| "loss": 0.0154, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.9582540392875671, | |
| "learning_rate": 2.86e-05, | |
| "loss": 0.0084, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.22299572825431824, | |
| "learning_rate": 2.8499999999999998e-05, | |
| "loss": 0.0018, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 23.300783157348633, | |
| "learning_rate": 2.84e-05, | |
| "loss": 0.2524, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 26.694652557373047, | |
| "learning_rate": 2.83e-05, | |
| "loss": 0.3081, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 26.942161560058594, | |
| "learning_rate": 2.8199999999999998e-05, | |
| "loss": 0.2603, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.22826315462589264, | |
| "learning_rate": 2.8100000000000005e-05, | |
| "loss": 0.0013, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.042904481291770935, | |
| "learning_rate": 2.8000000000000003e-05, | |
| "loss": 0.0003, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 5.835173606872559, | |
| "learning_rate": 2.7900000000000004e-05, | |
| "loss": 0.0336, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.2858074903488159, | |
| "learning_rate": 2.7800000000000005e-05, | |
| "loss": 0.0027, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 4.612087249755859, | |
| "learning_rate": 2.7700000000000002e-05, | |
| "loss": 0.0243, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 8.472134590148926, | |
| "learning_rate": 2.7600000000000003e-05, | |
| "loss": 0.0506, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 13.461259841918945, | |
| "learning_rate": 2.7500000000000004e-05, | |
| "loss": 0.1264, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.7797164916992188, | |
| "learning_rate": 2.7400000000000002e-05, | |
| "loss": 0.0093, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 7.120004653930664, | |
| "learning_rate": 2.7300000000000003e-05, | |
| "loss": 0.052, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 27.213024139404297, | |
| "learning_rate": 2.7200000000000004e-05, | |
| "loss": 0.6143, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 23.018705368041992, | |
| "learning_rate": 2.7100000000000005e-05, | |
| "loss": 0.355, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 5.7414655685424805, | |
| "learning_rate": 2.7000000000000002e-05, | |
| "loss": 0.0318, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 23.56236457824707, | |
| "learning_rate": 2.6900000000000003e-05, | |
| "loss": 0.3694, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 7.875092029571533, | |
| "learning_rate": 2.6800000000000004e-05, | |
| "loss": 0.0448, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 26.399948120117188, | |
| "learning_rate": 2.6700000000000002e-05, | |
| "loss": 0.7737, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 28.907119750976562, | |
| "learning_rate": 2.6600000000000003e-05, | |
| "loss": 0.2974, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 2.2717533111572266, | |
| "learning_rate": 2.6500000000000004e-05, | |
| "loss": 0.0148, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.15304788947105408, | |
| "learning_rate": 2.64e-05, | |
| "loss": 0.0009, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 28.949296951293945, | |
| "learning_rate": 2.6300000000000002e-05, | |
| "loss": 0.5319, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7885856628417969, | |
| "learning_rate": 2.6200000000000003e-05, | |
| "loss": 0.0058, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 26.116670608520508, | |
| "learning_rate": 2.61e-05, | |
| "loss": 0.3608, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.3367154598236084, | |
| "learning_rate": 2.6000000000000002e-05, | |
| "loss": 0.0029, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 23.798566818237305, | |
| "learning_rate": 2.5900000000000003e-05, | |
| "loss": 0.2741, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 32.780723571777344, | |
| "learning_rate": 2.58e-05, | |
| "loss": 0.4461, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.2866144180297852, | |
| "learning_rate": 2.57e-05, | |
| "loss": 0.0072, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.4138367176055908, | |
| "learning_rate": 2.5600000000000002e-05, | |
| "loss": 0.0022, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.8757226467132568, | |
| "learning_rate": 2.5500000000000003e-05, | |
| "loss": 0.0049, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 27.533611297607422, | |
| "learning_rate": 2.54e-05, | |
| "loss": 0.4811, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 12.152800559997559, | |
| "learning_rate": 2.5300000000000002e-05, | |
| "loss": 0.1209, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 25.30514144897461, | |
| "learning_rate": 2.5200000000000003e-05, | |
| "loss": 0.3028, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.702767252922058, | |
| "learning_rate": 2.51e-05, | |
| "loss": 0.0094, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 12.766653060913086, | |
| "learning_rate": 2.5e-05, | |
| "loss": 0.0826, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 23.281902313232422, | |
| "learning_rate": 2.4900000000000002e-05, | |
| "loss": 0.8941, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.5401146411895752, | |
| "learning_rate": 2.48e-05, | |
| "loss": 0.008, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 16.982078552246094, | |
| "learning_rate": 2.47e-05, | |
| "loss": 0.1315, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.11115346103906631, | |
| "learning_rate": 2.46e-05, | |
| "loss": 0.0007, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 31.15369987487793, | |
| "learning_rate": 2.45e-05, | |
| "loss": 0.759, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 36.960872650146484, | |
| "learning_rate": 2.44e-05, | |
| "loss": 0.4665, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.038327042013406754, | |
| "learning_rate": 2.43e-05, | |
| "loss": 0.0003, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.14821617305278778, | |
| "learning_rate": 2.4200000000000002e-05, | |
| "loss": 0.0013, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 20.849529266357422, | |
| "learning_rate": 2.41e-05, | |
| "loss": 0.1943, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 21.639812469482422, | |
| "learning_rate": 2.4e-05, | |
| "loss": 0.1947, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 26.269208908081055, | |
| "learning_rate": 2.39e-05, | |
| "loss": 0.8363, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 16.222475051879883, | |
| "learning_rate": 2.38e-05, | |
| "loss": 0.1137, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 31.54730796813965, | |
| "learning_rate": 2.37e-05, | |
| "loss": 1.8999, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 17.911474227905273, | |
| "learning_rate": 2.36e-05, | |
| "loss": 0.1302, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 47.63165283203125, | |
| "learning_rate": 2.35e-05, | |
| "loss": 1.0344, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 26.189237594604492, | |
| "learning_rate": 2.3400000000000003e-05, | |
| "loss": 0.4308, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.35479736328125, | |
| "learning_rate": 2.3300000000000004e-05, | |
| "loss": 0.0129, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 9.399853706359863, | |
| "learning_rate": 2.32e-05, | |
| "loss": 0.066, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.1681160926818848, | |
| "learning_rate": 2.3100000000000002e-05, | |
| "loss": 0.0062, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 29.163421630859375, | |
| "learning_rate": 2.3000000000000003e-05, | |
| "loss": 0.7343, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 4.385603904724121, | |
| "learning_rate": 2.29e-05, | |
| "loss": 0.0237, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.0795365571975708, | |
| "learning_rate": 2.2800000000000002e-05, | |
| "loss": 0.0062, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 22.450462341308594, | |
| "learning_rate": 2.2700000000000003e-05, | |
| "loss": 0.2405, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 4.557497978210449, | |
| "learning_rate": 2.26e-05, | |
| "loss": 0.0423, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 7.581943511962891, | |
| "learning_rate": 2.25e-05, | |
| "loss": 0.0433, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 13.19905948638916, | |
| "learning_rate": 2.2400000000000002e-05, | |
| "loss": 0.1416, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 24.024709701538086, | |
| "learning_rate": 2.23e-05, | |
| "loss": 0.4463, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 8.119006156921387, | |
| "learning_rate": 2.22e-05, | |
| "loss": 0.0608, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 3.916139841079712, | |
| "learning_rate": 2.2100000000000002e-05, | |
| "loss": 0.0216, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.002933024661615491, | |
| "learning_rate": 2.2000000000000003e-05, | |
| "loss": 0.0, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.1204041242599487, | |
| "learning_rate": 2.19e-05, | |
| "loss": 0.0081, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 31.576692581176758, | |
| "learning_rate": 2.18e-05, | |
| "loss": 1.0689, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 31.140485763549805, | |
| "learning_rate": 2.1700000000000002e-05, | |
| "loss": 0.335, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.42327681183815, | |
| "learning_rate": 2.16e-05, | |
| "loss": 0.0046, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.32425838708877563, | |
| "learning_rate": 2.15e-05, | |
| "loss": 0.0017, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 29.009767532348633, | |
| "learning_rate": 2.1400000000000002e-05, | |
| "loss": 1.1913, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 43.424034118652344, | |
| "learning_rate": 2.13e-05, | |
| "loss": 1.1988, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.1210248470306396, | |
| "learning_rate": 2.12e-05, | |
| "loss": 0.0065, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.054579153656959534, | |
| "learning_rate": 2.11e-05, | |
| "loss": 0.0003, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.4357629716396332, | |
| "learning_rate": 2.1e-05, | |
| "loss": 0.0034, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 34.34247589111328, | |
| "learning_rate": 2.09e-05, | |
| "loss": 0.5637, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 36.91853713989258, | |
| "learning_rate": 2.08e-05, | |
| "loss": 0.7341, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 8.309200286865234, | |
| "learning_rate": 2.07e-05, | |
| "loss": 0.0432, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 12.616776466369629, | |
| "learning_rate": 2.06e-05, | |
| "loss": 0.0952, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.600636899471283, | |
| "learning_rate": 2.05e-05, | |
| "loss": 0.0058, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 44.27837371826172, | |
| "learning_rate": 2.04e-05, | |
| "loss": 1.0362, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 26.939998626708984, | |
| "learning_rate": 2.0300000000000002e-05, | |
| "loss": 0.9799, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 45.84077835083008, | |
| "learning_rate": 2.0200000000000003e-05, | |
| "loss": 0.5936, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 5.504016399383545, | |
| "learning_rate": 2.01e-05, | |
| "loss": 0.0332, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 3.4088833332061768, | |
| "learning_rate": 2e-05, | |
| "loss": 0.0165, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.32817769050598145, | |
| "learning_rate": 1.9900000000000003e-05, | |
| "loss": 0.0019, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 19.7551212310791, | |
| "learning_rate": 1.9800000000000004e-05, | |
| "loss": 0.2079, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 27.117473602294922, | |
| "learning_rate": 1.97e-05, | |
| "loss": 1.0161, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.30872708559036255, | |
| "learning_rate": 1.9600000000000002e-05, | |
| "loss": 0.0023, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 32.718196868896484, | |
| "learning_rate": 1.9500000000000003e-05, | |
| "loss": 0.8794, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 26.284860610961914, | |
| "learning_rate": 1.94e-05, | |
| "loss": 0.6486, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.6852843761444092, | |
| "learning_rate": 1.93e-05, | |
| "loss": 0.0036, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 24.214462280273438, | |
| "learning_rate": 1.9200000000000003e-05, | |
| "loss": 0.2703, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 16.324054718017578, | |
| "learning_rate": 1.91e-05, | |
| "loss": 0.1177, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 24.803068161010742, | |
| "learning_rate": 1.9e-05, | |
| "loss": 1.05, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 48.46371841430664, | |
| "learning_rate": 1.8900000000000002e-05, | |
| "loss": 0.7774, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 19.621374130249023, | |
| "learning_rate": 1.88e-05, | |
| "loss": 0.1642, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 32.60996627807617, | |
| "learning_rate": 1.87e-05, | |
| "loss": 0.9919, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 14.111787796020508, | |
| "learning_rate": 1.86e-05, | |
| "loss": 0.094, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.027604341506958, | |
| "learning_rate": 1.85e-05, | |
| "loss": 0.007, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.5485715866088867, | |
| "learning_rate": 1.84e-05, | |
| "loss": 0.0026, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.3733130395412445, | |
| "learning_rate": 1.83e-05, | |
| "loss": 0.0024, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 26.762765884399414, | |
| "learning_rate": 1.8200000000000002e-05, | |
| "loss": 0.5703, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 11.364485740661621, | |
| "learning_rate": 1.81e-05, | |
| "loss": 0.0749, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 16.991313934326172, | |
| "learning_rate": 1.8e-05, | |
| "loss": 0.1318, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.18820422887802124, | |
| "learning_rate": 1.79e-05, | |
| "loss": 0.001, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 27.79010009765625, | |
| "learning_rate": 1.78e-05, | |
| "loss": 1.011, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 37.26203155517578, | |
| "learning_rate": 1.77e-05, | |
| "loss": 0.8081, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.4303054213523865, | |
| "learning_rate": 1.76e-05, | |
| "loss": 0.0037, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.8750838041305542, | |
| "learning_rate": 1.75e-05, | |
| "loss": 0.0047, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 5.441610336303711, | |
| "learning_rate": 1.74e-05, | |
| "loss": 0.0306, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.0719880685210228, | |
| "learning_rate": 1.73e-05, | |
| "loss": 0.0005, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 26.52375030517578, | |
| "learning_rate": 1.7199999999999998e-05, | |
| "loss": 0.8269, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 3.149235963821411, | |
| "learning_rate": 1.7100000000000002e-05, | |
| "loss": 0.0166, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.7330241799354553, | |
| "learning_rate": 1.7000000000000003e-05, | |
| "loss": 0.0038, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 29.252592086791992, | |
| "learning_rate": 1.69e-05, | |
| "loss": 0.5713, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 27.370386123657227, | |
| "learning_rate": 1.6800000000000002e-05, | |
| "loss": 0.8151, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.770989179611206, | |
| "learning_rate": 1.6700000000000003e-05, | |
| "loss": 0.0283, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.7080297470092773, | |
| "learning_rate": 1.66e-05, | |
| "loss": 0.0085, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 11.255769729614258, | |
| "learning_rate": 1.65e-05, | |
| "loss": 0.1052, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 26.47229766845703, | |
| "learning_rate": 1.6400000000000002e-05, | |
| "loss": 1.0482, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 26.88467025756836, | |
| "learning_rate": 1.63e-05, | |
| "loss": 0.5596, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 27.23533058166504, | |
| "learning_rate": 1.62e-05, | |
| "loss": 0.7561, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 30.387807846069336, | |
| "learning_rate": 1.6100000000000002e-05, | |
| "loss": 1.4771, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.03761901333928108, | |
| "learning_rate": 1.6000000000000003e-05, | |
| "loss": 0.0006, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 2.5605359077453613, | |
| "learning_rate": 1.59e-05, | |
| "loss": 0.015, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.10990981757640839, | |
| "learning_rate": 1.58e-05, | |
| "loss": 0.0008, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 11.03862476348877, | |
| "learning_rate": 1.5700000000000002e-05, | |
| "loss": 0.0686, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 20.627206802368164, | |
| "learning_rate": 1.56e-05, | |
| "loss": 0.3718, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.19297750294208527, | |
| "learning_rate": 1.55e-05, | |
| "loss": 0.0013, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.0797531604766846, | |
| "learning_rate": 1.54e-05, | |
| "loss": 0.0102, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.0616090297698975, | |
| "learning_rate": 1.53e-05, | |
| "loss": 0.0124, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 5.628384590148926, | |
| "learning_rate": 1.52e-05, | |
| "loss": 0.0354, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 25.70692253112793, | |
| "learning_rate": 1.51e-05, | |
| "loss": 0.4203, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 21.292953491210938, | |
| "learning_rate": 1.5e-05, | |
| "loss": 1.182, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 8.425320625305176, | |
| "learning_rate": 1.49e-05, | |
| "loss": 0.0462, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 26.563316345214844, | |
| "learning_rate": 1.48e-05, | |
| "loss": 0.4144, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.6242803931236267, | |
| "learning_rate": 1.47e-05, | |
| "loss": 0.0052, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.20703597366809845, | |
| "learning_rate": 1.4599999999999999e-05, | |
| "loss": 0.0013, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 19.229211807250977, | |
| "learning_rate": 1.45e-05, | |
| "loss": 0.1673, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.8394806385040283, | |
| "learning_rate": 1.44e-05, | |
| "loss": 0.0141, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.4133518636226654, | |
| "learning_rate": 1.43e-05, | |
| "loss": 0.0021, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.384779453277588, | |
| "learning_rate": 1.42e-05, | |
| "loss": 0.0126, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 12.650036811828613, | |
| "learning_rate": 1.4099999999999999e-05, | |
| "loss": 0.1045, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.14024153351783752, | |
| "learning_rate": 1.4000000000000001e-05, | |
| "loss": 0.0015, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 25.9268741607666, | |
| "learning_rate": 1.3900000000000002e-05, | |
| "loss": 0.2269, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 22.659929275512695, | |
| "learning_rate": 1.3800000000000002e-05, | |
| "loss": 0.5073, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.20133636891841888, | |
| "learning_rate": 1.3700000000000001e-05, | |
| "loss": 0.0011, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 16.163724899291992, | |
| "learning_rate": 1.3600000000000002e-05, | |
| "loss": 0.1342, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 35.01076126098633, | |
| "learning_rate": 1.3500000000000001e-05, | |
| "loss": 1.3155, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 24.18101692199707, | |
| "learning_rate": 1.3400000000000002e-05, | |
| "loss": 0.2992, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.3129206895828247, | |
| "learning_rate": 1.3300000000000001e-05, | |
| "loss": 0.0019, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 3.156733751296997, | |
| "learning_rate": 1.32e-05, | |
| "loss": 0.0197, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 20.24107551574707, | |
| "learning_rate": 1.3100000000000002e-05, | |
| "loss": 0.2469, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.023805420845746994, | |
| "learning_rate": 1.3000000000000001e-05, | |
| "loss": 0.0002, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 25.843679428100586, | |
| "learning_rate": 1.29e-05, | |
| "loss": 0.4148, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 46.338687896728516, | |
| "learning_rate": 1.2800000000000001e-05, | |
| "loss": 0.7211, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 28.163171768188477, | |
| "learning_rate": 1.27e-05, | |
| "loss": 1.2761, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 5.789135456085205, | |
| "learning_rate": 1.2600000000000001e-05, | |
| "loss": 0.0272, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 8.08776569366455, | |
| "learning_rate": 1.25e-05, | |
| "loss": 0.0441, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.10339970886707306, | |
| "learning_rate": 1.24e-05, | |
| "loss": 0.0014, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 26.45261573791504, | |
| "learning_rate": 1.23e-05, | |
| "loss": 0.8787, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 7.62965202331543, | |
| "learning_rate": 1.22e-05, | |
| "loss": 0.0466, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 10.6347017288208, | |
| "learning_rate": 1.2100000000000001e-05, | |
| "loss": 0.0693, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 26.655921936035156, | |
| "learning_rate": 1.2e-05, | |
| "loss": 0.528, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.786623477935791, | |
| "learning_rate": 1.19e-05, | |
| "loss": 0.0181, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.6593170166015625, | |
| "learning_rate": 1.18e-05, | |
| "loss": 0.005, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 42.40369415283203, | |
| "learning_rate": 1.1700000000000001e-05, | |
| "loss": 0.889, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 27.659780502319336, | |
| "learning_rate": 1.16e-05, | |
| "loss": 1.0498, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 15.126568794250488, | |
| "learning_rate": 1.1500000000000002e-05, | |
| "loss": 0.1185, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 29.32221794128418, | |
| "learning_rate": 1.1400000000000001e-05, | |
| "loss": 0.895, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 5.868095397949219, | |
| "learning_rate": 1.13e-05, | |
| "loss": 0.0429, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 25.551660537719727, | |
| "learning_rate": 1.1200000000000001e-05, | |
| "loss": 0.2916, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 26.83506202697754, | |
| "learning_rate": 1.11e-05, | |
| "loss": 0.5324, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 3.322347640991211, | |
| "learning_rate": 1.1000000000000001e-05, | |
| "loss": 0.0221, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 28.00304412841797, | |
| "learning_rate": 1.09e-05, | |
| "loss": 0.6972, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.1361875534057617, | |
| "learning_rate": 1.08e-05, | |
| "loss": 0.0102, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.4008387327194214, | |
| "learning_rate": 1.0700000000000001e-05, | |
| "loss": 0.0104, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 21.043048858642578, | |
| "learning_rate": 1.06e-05, | |
| "loss": 0.1851, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.06448391079902649, | |
| "learning_rate": 1.05e-05, | |
| "loss": 0.0004, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.15726327896118164, | |
| "learning_rate": 1.04e-05, | |
| "loss": 0.0012, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 44.16018295288086, | |
| "learning_rate": 1.03e-05, | |
| "loss": 1.0449, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.02686498314142227, | |
| "learning_rate": 1.02e-05, | |
| "loss": 0.0002, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.17026451230049133, | |
| "learning_rate": 1.0100000000000002e-05, | |
| "loss": 0.0012, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 44.215797424316406, | |
| "learning_rate": 1e-05, | |
| "loss": 1.2545, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.9567099809646606, | |
| "learning_rate": 9.900000000000002e-06, | |
| "loss": 0.0068, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 4.834523677825928, | |
| "learning_rate": 9.800000000000001e-06, | |
| "loss": 0.027, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.023485347628593445, | |
| "learning_rate": 9.7e-06, | |
| "loss": 0.0001, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 14.029927253723145, | |
| "learning_rate": 9.600000000000001e-06, | |
| "loss": 0.0971, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 16.840566635131836, | |
| "learning_rate": 9.5e-06, | |
| "loss": 0.1671, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.9835864901542664, | |
| "learning_rate": 9.4e-06, | |
| "loss": 0.005, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 3.175522804260254, | |
| "learning_rate": 9.3e-06, | |
| "loss": 0.0173, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 27.09811019897461, | |
| "learning_rate": 9.2e-06, | |
| "loss": 1.2902, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 22.86342430114746, | |
| "learning_rate": 9.100000000000001e-06, | |
| "loss": 0.2469, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.795168399810791, | |
| "learning_rate": 9e-06, | |
| "loss": 0.0045, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.3939276933670044, | |
| "learning_rate": 8.9e-06, | |
| "loss": 0.0021, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 27.596954345703125, | |
| "learning_rate": 8.8e-06, | |
| "loss": 0.9408, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 15.893352508544922, | |
| "learning_rate": 8.7e-06, | |
| "loss": 0.1598, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5591702461242676, | |
| "learning_rate": 8.599999999999999e-06, | |
| "loss": 0.0103, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 26.889225006103516, | |
| "learning_rate": 8.500000000000002e-06, | |
| "loss": 0.5883, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 47.645057678222656, | |
| "learning_rate": 8.400000000000001e-06, | |
| "loss": 2.2247, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 4.39687442779541, | |
| "learning_rate": 8.3e-06, | |
| "loss": 0.0234, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.21854829788208, | |
| "learning_rate": 8.200000000000001e-06, | |
| "loss": 0.0102, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.2136417627334595, | |
| "learning_rate": 8.1e-06, | |
| "loss": 0.0094, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.374004602432251, | |
| "learning_rate": 8.000000000000001e-06, | |
| "loss": 0.0069, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.16840314865112305, | |
| "learning_rate": 7.9e-06, | |
| "loss": 0.0014, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.11105885356664658, | |
| "learning_rate": 7.8e-06, | |
| "loss": 0.0011, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 19.66978645324707, | |
| "learning_rate": 7.7e-06, | |
| "loss": 0.3866, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.2701959609985352, | |
| "learning_rate": 7.6e-06, | |
| "loss": 0.0061, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 27.793781280517578, | |
| "learning_rate": 7.5e-06, | |
| "loss": 0.8543, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 30.1798095703125, | |
| "learning_rate": 7.4e-06, | |
| "loss": 0.5122, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.5201961398124695, | |
| "learning_rate": 7.2999999999999996e-06, | |
| "loss": 0.0027, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 10.465607643127441, | |
| "learning_rate": 7.2e-06, | |
| "loss": 0.0573, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.1310155391693115, | |
| "learning_rate": 7.1e-06, | |
| "loss": 0.0131, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 5.451208114624023, | |
| "learning_rate": 7.000000000000001e-06, | |
| "loss": 0.0284, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 24.128276824951172, | |
| "learning_rate": 6.900000000000001e-06, | |
| "loss": 0.3309, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 25.905508041381836, | |
| "learning_rate": 6.800000000000001e-06, | |
| "loss": 0.389, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 31.932828903198242, | |
| "learning_rate": 6.700000000000001e-06, | |
| "loss": 1.2094, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.16106216609477997, | |
| "learning_rate": 6.6e-06, | |
| "loss": 0.0013, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 28.226865768432617, | |
| "learning_rate": 6.5000000000000004e-06, | |
| "loss": 0.5602, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.7520970702171326, | |
| "learning_rate": 6.4000000000000006e-06, | |
| "loss": 0.005, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.36321502923965454, | |
| "learning_rate": 6.300000000000001e-06, | |
| "loss": 0.003, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.2656748294830322, | |
| "learning_rate": 6.2e-06, | |
| "loss": 0.0064, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 25.317880630493164, | |
| "learning_rate": 6.1e-06, | |
| "loss": 0.5348, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.06496942788362503, | |
| "learning_rate": 6e-06, | |
| "loss": 0.0004, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.31098636984825134, | |
| "learning_rate": 5.9e-06, | |
| "loss": 0.0026, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.3573431968688965, | |
| "learning_rate": 5.8e-06, | |
| "loss": 0.0117, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.8721911907196045, | |
| "learning_rate": 5.7000000000000005e-06, | |
| "loss": 0.0127, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 27.062021255493164, | |
| "learning_rate": 5.600000000000001e-06, | |
| "loss": 0.6997, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.5216832756996155, | |
| "learning_rate": 5.500000000000001e-06, | |
| "loss": 0.003, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 26.380741119384766, | |
| "learning_rate": 5.4e-06, | |
| "loss": 0.273, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 25.798185348510742, | |
| "learning_rate": 5.3e-06, | |
| "loss": 1.0317, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 11.128933906555176, | |
| "learning_rate": 5.2e-06, | |
| "loss": 0.0781, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.875777244567871, | |
| "learning_rate": 5.1e-06, | |
| "loss": 0.0096, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.302217036485672, | |
| "learning_rate": 5e-06, | |
| "loss": 0.0016, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.35395440459251404, | |
| "learning_rate": 4.9000000000000005e-06, | |
| "loss": 0.0021, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 31.090530395507812, | |
| "learning_rate": 4.800000000000001e-06, | |
| "loss": 0.5176, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.993788242340088, | |
| "learning_rate": 4.7e-06, | |
| "loss": 0.0169, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 3.8373305797576904, | |
| "learning_rate": 4.6e-06, | |
| "loss": 0.0227, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.49911898374557495, | |
| "learning_rate": 4.5e-06, | |
| "loss": 0.0037, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 26.027299880981445, | |
| "learning_rate": 4.4e-06, | |
| "loss": 0.9036, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 11.176959037780762, | |
| "learning_rate": 4.2999999999999995e-06, | |
| "loss": 0.0678, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 2.6982274055480957, | |
| "learning_rate": 4.2000000000000004e-06, | |
| "loss": 0.0144, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 37.264869689941406, | |
| "learning_rate": 4.1000000000000006e-06, | |
| "loss": 1.4107, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 40.02341842651367, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 0.3686, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 38.77351760864258, | |
| "learning_rate": 3.9e-06, | |
| "loss": 0.4521, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 4.801632881164551, | |
| "learning_rate": 3.8e-06, | |
| "loss": 0.0299, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 8.612468719482422, | |
| "learning_rate": 3.7e-06, | |
| "loss": 0.0573, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.5201643705368042, | |
| "learning_rate": 3.6e-06, | |
| "loss": 0.0041, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.17031624913215637, | |
| "learning_rate": 3.5000000000000004e-06, | |
| "loss": 0.0025, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.8196523785591125, | |
| "learning_rate": 3.4000000000000005e-06, | |
| "loss": 0.0041, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.013019957579672337, | |
| "learning_rate": 3.3e-06, | |
| "loss": 0.0001, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 27.062700271606445, | |
| "learning_rate": 3.2000000000000003e-06, | |
| "loss": 0.658, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 15.487645149230957, | |
| "learning_rate": 3.1e-06, | |
| "loss": 0.118, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 5.086794853210449, | |
| "learning_rate": 3e-06, | |
| "loss": 0.0259, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 27.120859146118164, | |
| "learning_rate": 2.9e-06, | |
| "loss": 1.1966, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 16.72373390197754, | |
| "learning_rate": 2.8000000000000003e-06, | |
| "loss": 0.151, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.03614487126469612, | |
| "learning_rate": 2.7e-06, | |
| "loss": 0.0004, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.07811087369918823, | |
| "learning_rate": 2.6e-06, | |
| "loss": 0.0004, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.030874168500304222, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.0002, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.020734872668981552, | |
| "learning_rate": 2.4000000000000003e-06, | |
| "loss": 0.0001, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 32.341888427734375, | |
| "learning_rate": 2.3e-06, | |
| "loss": 0.3712, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.11538070440292358, | |
| "learning_rate": 2.2e-06, | |
| "loss": 0.0008, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 24.41085433959961, | |
| "learning_rate": 2.1000000000000002e-06, | |
| "loss": 0.2808, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 3.5011379718780518, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 0.0195, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.7570369839668274, | |
| "learning_rate": 1.9e-06, | |
| "loss": 0.0047, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 14.30388355255127, | |
| "learning_rate": 1.8e-06, | |
| "loss": 0.0972, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.019772866740822792, | |
| "learning_rate": 1.7000000000000002e-06, | |
| "loss": 0.0002, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 25.79269790649414, | |
| "learning_rate": 1.6000000000000001e-06, | |
| "loss": 0.3179, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 4.481821060180664, | |
| "learning_rate": 1.5e-06, | |
| "loss": 0.0273, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.6944172382354736, | |
| "learning_rate": 1.4000000000000001e-06, | |
| "loss": 0.0168, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.0176031589508057, | |
| "learning_rate": 1.3e-06, | |
| "loss": 0.005, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 8.57127857208252, | |
| "learning_rate": 1.2000000000000002e-06, | |
| "loss": 0.05, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 27.0198917388916, | |
| "learning_rate": 1.1e-06, | |
| "loss": 1.6885, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 16.693689346313477, | |
| "learning_rate": 1.0000000000000002e-06, | |
| "loss": 0.1505, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 26.509174346923828, | |
| "learning_rate": 9e-07, | |
| "loss": 0.7264, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 7.112478256225586, | |
| "learning_rate": 8.000000000000001e-07, | |
| "loss": 0.0374, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 31.123777389526367, | |
| "learning_rate": 7.000000000000001e-07, | |
| "loss": 0.9894, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 8.979686737060547, | |
| "learning_rate": 6.000000000000001e-07, | |
| "loss": 0.0554, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.1432526707649231, | |
| "learning_rate": 5.000000000000001e-07, | |
| "loss": 0.0007, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 27.64889907836914, | |
| "learning_rate": 4.0000000000000003e-07, | |
| "loss": 1.3425, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.3497440218925476, | |
| "learning_rate": 3.0000000000000004e-07, | |
| "loss": 0.0017, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.7565171122550964, | |
| "learning_rate": 2.0000000000000002e-07, | |
| "loss": 0.0036, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 18.668054580688477, | |
| "learning_rate": 1.0000000000000001e-07, | |
| "loss": 1.0386, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 33.273231506347656, | |
| "learning_rate": 0.0, | |
| "loss": 1.1228, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_accuracy": 0.9305, | |
| "eval_balanced_accuracy": 0.9307943873632947, | |
| "eval_loss": 0.29665234684944153, | |
| "eval_runtime": 737.3091, | |
| "eval_samples_per_second": 2.713, | |
| "eval_steps_per_second": 0.339, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1000, | |
| "total_flos": 2.636836569631949e+16, | |
| "train_loss": 0.3728995402829132, | |
| "train_runtime": 9397.7049, | |
| "train_samples_per_second": 0.851, | |
| "train_steps_per_second": 0.106 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "total_flos": 2.636836569631949e+16, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
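
The file above is a complete `trainer_state.json` as written by the Hugging Face `transformers` `Trainer`: one `log_history` entry per optimizer step (`logging_steps: 1`), a single evaluation record at step 1000, and a final training summary. Because the loss is logged every step at batch size 8, the raw curve is extremely noisy (individual steps range from roughly 0.0001 to 2.2 even late in training), so convergence is easier to judge from a smoothed series. The following is a minimal sketch, not part of the log itself; it assumes the file is saved locally as `trainer_state.json` (that path is an assumption):

```python
# Sketch: summarize the loss curve recorded in a trainer_state.json.
# Assumes the file sits at ./trainer_state.json -- adjust the path as needed.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training entries carry a "loss" key; the evaluation record and
# the final summary use "eval_loss" / "train_loss" instead, so filtering
# on "loss" keeps only the step-level logs.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]

# Trailing moving average to tame the step-to-step noise.
window = 25
smoothed = []
for i in range(len(losses)):
    lo = max(0, i - window + 1)
    smoothed.append(sum(losses[lo:i + 1]) / (i + 1 - lo))

print(f"logged steps: {steps[0]}..{steps[-1]} ({len(steps)} entries)")
print(f"smoothed train loss at step {steps[-1]}: {smoothed[-1]:.4f}")
for e in eval_logs:
    print(f"eval @ step {e['step']}: "
          f"loss={e['eval_loss']:.4f}, accuracy={e['eval_accuracy']:.4f}")
```

Run against this log, the script would report the step-1000 evaluation (loss 0.2967, accuracy 0.9305) alongside the smoothed tail of the training curve; the summary's mean `train_loss` of 0.3729 is consistent with the heavy step-to-step variance visible in the entries above.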