{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 2e-05, "loss": 3.2435, "step": 1 },
    { "epoch": 0.06, "learning_rate": 4e-05, "loss": 2.5079, "step": 2 },
    { "epoch": 0.09, "learning_rate": 6e-05, "loss": 2.8803, "step": 3 },
    { "epoch": 0.12, "learning_rate": 8e-05, "loss": 2.4441, "step": 4 },
    { "epoch": 0.16, "learning_rate": 0.0001, "loss": 3.1358, "step": 5 },
    { "epoch": 0.19, "learning_rate": 9.984251968503937e-05, "loss": 2.5019, "step": 6 },
    { "epoch": 0.22, "learning_rate": 9.968503937007875e-05, "loss": 2.3411, "step": 7 },
    { "epoch": 0.25, "learning_rate": 9.952755905511811e-05, "loss": 2.703, "step": 8 },
    { "epoch": 0.28, "learning_rate": 9.937007874015748e-05, "loss": 2.4668, "step": 9 },
    { "epoch": 0.31, "learning_rate": 9.921259842519686e-05, "loss": 2.389, "step": 10 },
    { "epoch": 0.34, "learning_rate": 9.905511811023622e-05, "loss": 2.0764, "step": 11 },
    { "epoch": 0.38, "learning_rate": 9.88976377952756e-05, "loss": 2.2588, "step": 12 },
    { "epoch": 0.41, "learning_rate": 9.874015748031497e-05, "loss": 1.9051, "step": 13 },
    { "epoch": 0.44, "learning_rate": 9.858267716535433e-05, "loss": 1.8373, "step": 14 },
    { "epoch": 0.47, "learning_rate": 9.842519685039371e-05, "loss": 1.9495, "step": 15 },
    { "epoch": 0.5, "learning_rate": 9.826771653543308e-05, "loss": 1.6529, "step": 16 },
    { "epoch": 0.53, "learning_rate": 9.811023622047244e-05, "loss": 1.5345, "step": 17 },
    { "epoch": 0.56, "learning_rate": 9.795275590551182e-05, "loss": 1.7246, "step": 18 },
    { "epoch": 0.59, "learning_rate": 9.779527559055119e-05, "loss": 1.7377, "step": 19 },
    { "epoch": 0.62, "learning_rate": 9.763779527559055e-05, "loss": 1.9529, "step": 20 },
    { "epoch": 0.66, "learning_rate": 9.748031496062993e-05, "loss": 1.6535, "step": 21 },
    { "epoch": 0.69, "learning_rate": 9.73228346456693e-05, "loss": 1.6496, "step": 22 },
    { "epoch": 0.72, "learning_rate": 9.716535433070866e-05, "loss": 1.826, "step": 23 },
    { "epoch": 0.75, "learning_rate": 9.700787401574803e-05, "loss": 1.6899, "step": 24 },
    { "epoch": 0.78, "learning_rate": 9.68503937007874e-05, "loss": 1.67, "step": 25 },
    { "epoch": 0.81, "learning_rate": 9.669291338582677e-05, "loss": 1.3442, "step": 26 },
    { "epoch": 0.84, "learning_rate": 9.653543307086614e-05, "loss": 1.7548, "step": 27 },
    { "epoch": 0.88, "learning_rate": 9.637795275590552e-05, "loss": 1.8599, "step": 28 },
    { "epoch": 0.91, "learning_rate": 9.622047244094488e-05, "loss": 1.3488, "step": 29 },
    { "epoch": 0.94, "learning_rate": 9.606299212598425e-05, "loss": 1.7824, "step": 30 },
    { "epoch": 0.97, "learning_rate": 9.590551181102363e-05, "loss": 1.6088, "step": 31 },
    { "epoch": 1.0, "learning_rate": 9.574803149606299e-05, "loss": 1.6443, "step": 32 },
    { "epoch": 1.03, "learning_rate": 9.559055118110236e-05, "loss": 1.4576, "step": 33 },
    { "epoch": 1.06, "learning_rate": 9.543307086614174e-05, "loss": 1.5045, "step": 34 },
    { "epoch": 1.09, "learning_rate": 9.52755905511811e-05, "loss": 1.4806, "step": 35 },
    { "epoch": 1.12, "learning_rate": 9.511811023622048e-05, "loss": 1.4179, "step": 36 },
    { "epoch": 1.16, "learning_rate": 9.496062992125985e-05, "loss": 1.2557, "step": 37 },
    { "epoch": 1.19, "learning_rate": 9.480314960629921e-05, "loss": 1.3804, "step": 38 },
    { "epoch": 1.22, "learning_rate": 9.464566929133859e-05, "loss": 1.3365, "step": 39 },
    { "epoch": 1.25, "learning_rate": 9.448818897637796e-05, "loss": 1.3253, "step": 40 },
    { "epoch": 1.28, "learning_rate": 9.433070866141732e-05, "loss": 1.2595, "step": 41 },
    { "epoch": 1.31, "learning_rate": 9.41732283464567e-05, "loss": 1.3044, "step": 42 },
    { "epoch": 1.34, "learning_rate": 9.401574803149607e-05, "loss": 1.2742, "step": 43 },
    { "epoch": 1.38, "learning_rate": 9.385826771653545e-05, "loss": 1.3766, "step": 44 },
    { "epoch": 1.41, "learning_rate": 9.370078740157481e-05, "loss": 1.3081, "step": 45 },
    { "epoch": 1.44, "learning_rate": 9.354330708661418e-05, "loss": 1.3413, "step": 46 },
    { "epoch": 1.47, "learning_rate": 9.338582677165355e-05, "loss": 1.5895, "step": 47 },
    { "epoch": 1.5, "learning_rate": 9.322834645669292e-05, "loss": 1.3584, "step": 48 },
    { "epoch": 1.53, "learning_rate": 9.307086614173229e-05, "loss": 1.2959, "step": 49 },
    { "epoch": 1.56, "learning_rate": 9.291338582677166e-05, "loss": 1.3723, "step": 50 },
    { "epoch": 1.59, "learning_rate": 9.275590551181103e-05, "loss": 1.2594, "step": 51 },
    { "epoch": 1.62, "learning_rate": 9.259842519685041e-05, "loss": 1.5367, "step": 52 },
    { "epoch": 1.66, "learning_rate": 9.244094488188977e-05, "loss": 1.4205, "step": 53 },
    { "epoch": 1.69, "learning_rate": 9.228346456692914e-05, "loss": 1.1466, "step": 54 },
    { "epoch": 1.72, "learning_rate": 9.21259842519685e-05, "loss": 1.3667, "step": 55 },
    { "epoch": 1.75, "learning_rate": 9.196850393700787e-05, "loss": 1.5853, "step": 56 },
    { "epoch": 1.78, "learning_rate": 9.181102362204725e-05, "loss": 1.561, "step": 57 },
    { "epoch": 1.81, "learning_rate": 9.165354330708661e-05, "loss": 1.2315, "step": 58 },
    { "epoch": 1.84, "learning_rate": 9.149606299212598e-05, "loss": 1.2154, "step": 59 },
    { "epoch": 1.88, "learning_rate": 9.133858267716536e-05, "loss": 1.5472, "step": 60 },
    { "epoch": 1.91, "learning_rate": 9.118110236220472e-05, "loss": 1.4889, "step": 61 },
    { "epoch": 1.94, "learning_rate": 9.102362204724409e-05, "loss": 1.3901, "step": 62 },
    { "epoch": 1.97, "learning_rate": 9.086614173228347e-05, "loss": 1.2139, "step": 63 },
    { "epoch": 2.0, "learning_rate": 9.070866141732283e-05, "loss": 1.4502, "step": 64 },
    { "epoch": 2.03, "learning_rate": 9.05511811023622e-05, "loss": 1.0525, "step": 65 },
    { "epoch": 2.06, "learning_rate": 9.039370078740158e-05, "loss": 1.0538, "step": 66 },
    { "epoch": 2.09, "learning_rate": 9.023622047244094e-05, "loss": 0.9679, "step": 67 },
    { "epoch": 2.12, "learning_rate": 9.007874015748032e-05, "loss": 0.9987, "step": 68 },
    { "epoch": 2.16, "learning_rate": 8.992125984251969e-05, "loss": 1.1906, "step": 69 },
    { "epoch": 2.19, "learning_rate": 8.976377952755905e-05, "loss": 1.0873, "step": 70 },
    { "epoch": 2.22, "learning_rate": 8.960629921259843e-05, "loss": 0.7372, "step": 71 },
    { "epoch": 2.25, "learning_rate": 8.94488188976378e-05, "loss": 0.8038, "step": 72 },
    { "epoch": 2.28, "learning_rate": 8.929133858267716e-05, "loss": 1.0152, "step": 73 },
    { "epoch": 2.31, "learning_rate": 8.913385826771654e-05, "loss": 1.1118, "step": 74 },
    { "epoch": 2.34, "learning_rate": 8.897637795275591e-05, "loss": 1.0263, "step": 75 },
    { "epoch": 2.38, "learning_rate": 8.881889763779529e-05, "loss": 1.2717, "step": 76 },
    { "epoch": 2.41, "learning_rate": 8.866141732283465e-05, "loss": 0.7874, "step": 77 },
    { "epoch": 2.44, "learning_rate": 8.850393700787402e-05, "loss": 0.8601, "step": 78 },
    { "epoch": 2.47, "learning_rate": 8.83464566929134e-05, "loss": 0.9128, "step": 79 },
    { "epoch": 2.5, "learning_rate": 8.818897637795276e-05, "loss": 0.9197, "step": 80 },
    { "epoch": 2.53, "learning_rate": 8.803149606299213e-05, "loss": 0.9825, "step": 81 },
    { "epoch": 2.56, "learning_rate": 8.78740157480315e-05, "loss": 0.9957, "step": 82 },
    { "epoch": 2.59, "learning_rate": 8.771653543307087e-05, "loss": 0.8343, "step": 83 },
    { "epoch": 2.62, "learning_rate": 8.755905511811025e-05, "loss": 0.9326, "step": 84 },
    { "epoch": 2.66, "learning_rate": 8.740157480314962e-05, "loss": 0.9682, "step": 85 },
    { "epoch": 2.69, "learning_rate": 8.724409448818898e-05, "loss": 0.8522, "step": 86 },
    { "epoch": 2.72, "learning_rate": 8.708661417322835e-05, "loss": 1.1413, "step": 87 },
    { "epoch": 2.75, "learning_rate": 8.692913385826773e-05, "loss": 0.8049, "step": 88 },
    { "epoch": 2.78, "learning_rate": 8.677165354330709e-05, "loss": 1.0667, "step": 89 },
    { "epoch": 2.81, "learning_rate": 8.661417322834646e-05, "loss": 0.9101, "step": 90 },
    { "epoch": 2.84, "learning_rate": 8.645669291338582e-05, "loss": 0.9469, "step": 91 },
    { "epoch": 2.88, "learning_rate": 8.62992125984252e-05, "loss": 0.7665, "step": 92 },
    { "epoch": 2.91, "learning_rate": 8.614173228346457e-05, "loss": 0.7717, "step": 93 },
    { "epoch": 2.94, "learning_rate": 8.598425196850393e-05, "loss": 1.111, "step": 94 },
    { "epoch": 2.97, "learning_rate": 8.582677165354331e-05, "loss": 0.8728, "step": 95 },
    { "epoch": 3.0, "learning_rate": 8.566929133858268e-05, "loss": 0.9571, "step": 96 },
    { "epoch": 3.03, "learning_rate": 8.551181102362204e-05, "loss": 0.5283, "step": 97 },
    { "epoch": 3.06, "learning_rate": 8.535433070866142e-05, "loss": 0.542, "step": 98 },
    { "epoch": 3.09, "learning_rate": 8.519685039370079e-05, "loss": 0.4845, "step": 99 },
    { "epoch": 3.12, "learning_rate": 8.503937007874016e-05, "loss": 0.6648, "step": 100 },
    { "epoch": 3.16, "learning_rate": 8.488188976377953e-05, "loss": 0.4547, "step": 101 },
    { "epoch": 3.19, "learning_rate": 8.47244094488189e-05, "loss": 0.3885, "step": 102 },
    { "epoch": 3.22, "learning_rate": 8.456692913385827e-05, "loss": 0.4994, "step": 103 },
    { "epoch": 3.25, "learning_rate": 8.440944881889764e-05, "loss": 0.6041, "step": 104 },
    { "epoch": 3.28, "learning_rate": 8.4251968503937e-05, "loss": 0.3183, "step": 105 },
    { "epoch": 3.31, "learning_rate": 8.409448818897638e-05, "loss": 0.4306, "step": 106 },
    { "epoch": 3.34, "learning_rate": 8.393700787401575e-05, "loss": 0.5632, "step": 107 },
    { "epoch": 3.38, "learning_rate": 8.377952755905513e-05, "loss": 0.3134, "step": 108 },
    { "epoch": 3.41, "learning_rate": 8.36220472440945e-05, "loss": 0.5191, "step": 109 },
    { "epoch": 3.44, "learning_rate": 8.346456692913386e-05, "loss": 0.4469, "step": 110 },
    { "epoch": 3.47, "learning_rate": 8.330708661417324e-05, "loss": 0.4227, "step": 111 },
    { "epoch": 3.5, "learning_rate": 8.31496062992126e-05, "loss": 0.5219, "step": 112 },
    { "epoch": 3.53, "learning_rate": 8.299212598425197e-05, "loss": 0.404, "step": 113 },
    { "epoch": 3.56, "learning_rate": 8.283464566929135e-05, "loss": 0.4667, "step": 114 },
    { "epoch": 3.59, "learning_rate": 8.267716535433071e-05, "loss": 0.5265, "step": 115 },
    { "epoch": 3.62, "learning_rate": 8.251968503937009e-05, "loss": 0.506, "step": 116 },
    { "epoch": 3.66, "learning_rate": 8.236220472440946e-05, "loss": 0.5201, "step": 117 },
    { "epoch": 3.69, "learning_rate": 8.220472440944882e-05, "loss": 0.3932, "step": 118 },
    { "epoch": 3.72, "learning_rate": 8.20472440944882e-05, "loss": 0.3454, "step": 119 },
    { "epoch": 3.75, "learning_rate": 8.188976377952757e-05, "loss": 0.3017, "step": 120 },
    { "epoch": 3.78, "learning_rate": 8.173228346456693e-05, "loss": 0.4062, "step": 121 },
    { "epoch": 3.81, "learning_rate": 8.15748031496063e-05, "loss": 0.4778, "step": 122 },
    { "epoch": 3.84, "learning_rate": 8.141732283464568e-05, "loss": 0.4259, "step": 123 },
    { "epoch": 3.88, "learning_rate": 8.125984251968504e-05, "loss": 0.4804, "step": 124 },
    { "epoch": 3.91, "learning_rate": 8.110236220472441e-05, "loss": 0.4438, "step": 125 },
    { "epoch": 3.94, "learning_rate": 8.094488188976377e-05, "loss": 0.4618, "step": 126 },
    { "epoch": 3.97, "learning_rate": 8.078740157480315e-05, "loss": 0.3725, "step": 127 },
    { "epoch": 4.0, "learning_rate": 8.062992125984252e-05, "loss": 0.3634, "step": 128 },
    { "epoch": 4.03, "learning_rate": 8.047244094488188e-05, "loss": 0.2317, "step": 129 },
    { "epoch": 4.06, "learning_rate": 8.031496062992126e-05, "loss": 0.2488, "step": 130 },
    { "epoch": 4.09, "learning_rate": 8.015748031496063e-05, "loss": 0.2222, "step": 131 },
    { "epoch": 4.12, "learning_rate": 8e-05, "loss": 0.2099, "step": 132 },
    { "epoch": 4.16, "learning_rate": 7.984251968503937e-05, "loss": 0.2021, "step": 133 },
    { "epoch": 4.19, "learning_rate": 7.968503937007874e-05, "loss": 0.2598, "step": 134 },
    { "epoch": 4.22, "learning_rate": 7.952755905511812e-05, "loss": 0.1909, "step": 135 },
    { "epoch": 4.25, "learning_rate": 7.937007874015748e-05, "loss": 0.248, "step": 136 },
    { "epoch": 4.28, "learning_rate": 7.921259842519685e-05, "loss": 0.2964, "step": 137 },
    { "epoch": 4.31, "learning_rate": 7.905511811023623e-05, "loss": 0.2046, "step": 138 },
    { "epoch": 4.34, "learning_rate": 7.889763779527559e-05, "loss": 0.253, "step": 139 },
    { "epoch": 4.38, "learning_rate": 7.874015748031497e-05, "loss": 0.2616, "step": 140 },
    { "epoch": 4.41, "learning_rate": 7.858267716535434e-05, "loss": 0.2008, "step": 141 },
    { "epoch": 4.44, "learning_rate": 7.84251968503937e-05, "loss": 0.3549, "step": 142 },
    { "epoch": 4.47, "learning_rate": 7.826771653543308e-05, "loss": 0.3231, "step": 143 },
    { "epoch": 4.5, "learning_rate": 7.811023622047245e-05, "loss": 0.1917, "step": 144 },
    { "epoch": 4.53, "learning_rate": 7.795275590551181e-05, "loss": 0.1663, "step": 145 },
    { "epoch": 4.56, "learning_rate": 7.779527559055119e-05, "loss": 0.1634, "step": 146 },
    { "epoch": 4.59, "learning_rate": 7.763779527559056e-05, "loss": 0.2186, "step": 147 },
    { "epoch": 4.62, "learning_rate": 7.748031496062993e-05, "loss": 0.1778, "step": 148 },
    { "epoch": 4.66, "learning_rate": 7.73228346456693e-05, "loss": 0.199, "step": 149 },
    { "epoch": 4.69, "learning_rate": 7.716535433070867e-05, "loss": 0.2056, "step": 150 },
    { "epoch": 4.72, "learning_rate": 7.700787401574804e-05, "loss": 0.182, "step": 151 },
    { "epoch": 4.75, "learning_rate": 7.685039370078741e-05, "loss": 0.1552, "step": 152 },
    { "epoch": 4.78, "learning_rate": 7.669291338582677e-05, "loss": 0.208, "step": 153 },
    { "epoch": 4.81, "learning_rate": 7.653543307086615e-05, "loss": 0.2413, "step": 154 },
    { "epoch": 4.84, "learning_rate": 7.637795275590552e-05, "loss": 0.2548, "step": 155 },
    { "epoch": 4.88, "learning_rate": 7.622047244094488e-05, "loss": 0.1677, "step": 156 },
    { "epoch": 4.91, "learning_rate": 7.606299212598425e-05, "loss": 0.2614, "step": 157 },
    { "epoch": 4.94, "learning_rate": 7.590551181102362e-05, "loss": 0.2415, "step": 158 },
    { "epoch": 4.97, "learning_rate": 7.5748031496063e-05, "loss": 0.1556, "step": 159 },
    { "epoch": 5.0, "learning_rate": 7.559055118110236e-05, "loss": 0.2314, "step": 160 },
    { "epoch": 5.03, "learning_rate": 7.543307086614173e-05, "loss": 0.1328, "step": 161 },
    { "epoch": 5.06, "learning_rate": 7.52755905511811e-05, "loss": 0.1171, "step": 162 },
    { "epoch": 5.09, "learning_rate": 7.511811023622047e-05, "loss": 0.1226, "step": 163 },
    { "epoch": 5.12, "learning_rate": 7.496062992125985e-05, "loss": 0.1238, "step": 164 },
    { "epoch": 5.16, "learning_rate": 7.480314960629921e-05, "loss": 0.1808, "step": 165 },
    { "epoch": 5.19, "learning_rate": 7.464566929133858e-05, "loss": 0.1593, "step": 166 },
    { "epoch": 5.22, "learning_rate": 7.448818897637796e-05, "loss": 0.1392, "step": 167 },
    { "epoch": 5.25, "learning_rate": 7.433070866141732e-05, "loss": 0.1402, "step": 168 },
    { "epoch": 5.28, "learning_rate": 7.417322834645669e-05, "loss": 0.1625, "step": 169 },
    { "epoch": 5.31, "learning_rate": 7.401574803149607e-05, "loss": 0.1338, "step": 170 },
    { "epoch": 5.34, "learning_rate": 7.385826771653543e-05, "loss": 0.1063, "step": 171 },
    { "epoch": 5.38, "learning_rate": 7.370078740157481e-05, "loss": 0.2282, "step": 172 },
    { "epoch": 5.41, "learning_rate": 7.354330708661418e-05, "loss": 0.2297, "step": 173 },
    { "epoch": 5.44, "learning_rate": 7.338582677165354e-05, "loss": 0.1311, "step": 174 },
    { "epoch": 5.47, "learning_rate": 7.322834645669292e-05, "loss": 0.0911, "step": 175 },
    { "epoch": 5.5, "learning_rate": 7.307086614173229e-05, "loss": 0.2233, "step": 176 },
    { "epoch": 5.53, "learning_rate": 7.291338582677165e-05, "loss": 0.1267, "step": 177 },
    { "epoch": 5.56, "learning_rate": 7.275590551181103e-05, "loss": 0.208, "step": 178 },
    { "epoch": 5.59, "learning_rate": 7.25984251968504e-05, "loss": 0.1367, "step": 179 },
    { "epoch": 5.62, "learning_rate": 7.244094488188978e-05, "loss": 0.106, "step": 180 },
    { "epoch": 5.66, "learning_rate": 7.228346456692914e-05, "loss": 0.1259, "step": 181 },
    { "epoch": 5.69, "learning_rate": 7.212598425196851e-05, "loss": 0.1345, "step": 182 },
    { "epoch": 5.72, "learning_rate": 7.196850393700789e-05, "loss": 0.177, "step": 183 },
    { "epoch": 5.75, "learning_rate": 7.181102362204725e-05, "loss": 0.136, "step": 184 },
    { "epoch": 5.78, "learning_rate": 7.165354330708662e-05, "loss": 0.1767, "step": 185 },
    { "epoch": 5.81, "learning_rate": 7.1496062992126e-05, "loss": 0.1523, "step": 186 },
    { "epoch": 5.84, "learning_rate": 7.133858267716536e-05, "loss": 0.1811, "step": 187 },
    { "epoch": 5.88, "learning_rate": 7.118110236220473e-05, "loss": 0.1298, "step": 188 },
    { "epoch": 5.91, "learning_rate": 7.102362204724409e-05, "loss": 0.1527, "step": 189 },
    { "epoch": 5.94, "learning_rate": 7.086614173228347e-05, "loss": 0.2018, "step": 190 },
    { "epoch": 5.97, "learning_rate": 7.070866141732284e-05, "loss": 0.2402, "step": 191 },
    { "epoch": 6.0, "learning_rate": 7.05511811023622e-05, "loss": 0.1759, "step": 192 },
    { "epoch": 6.03, "learning_rate": 7.039370078740157e-05, "loss": 0.0808, "step": 193 },
    { "epoch": 6.06, "learning_rate": 7.023622047244095e-05, "loss": 0.091, "step": 194 },
    { "epoch": 6.09, "learning_rate": 7.007874015748031e-05, "loss": 0.1475, "step": 195 },
    { "epoch": 6.12, "learning_rate": 6.992125984251969e-05, "loss": 0.0999, "step": 196 },
    { "epoch": 6.16, "learning_rate": 6.976377952755906e-05, "loss": 0.123, "step": 197 },
    { "epoch": 6.19, "learning_rate": 6.960629921259842e-05, "loss": 0.1316, "step": 198 },
    { "epoch": 6.22, "learning_rate": 6.94488188976378e-05, "loss": 0.1903, "step": 199 },
    { "epoch": 6.25, "learning_rate": 6.929133858267717e-05, "loss": 0.0752, "step": 200 },
    { "epoch": 6.28, "learning_rate": 6.913385826771653e-05, "loss": 0.1283, "step": 201 },
    { "epoch": 6.31, "learning_rate": 6.897637795275591e-05, "loss": 0.0925, "step": 202 },
    { "epoch": 6.34, "learning_rate": 6.881889763779528e-05, "loss": 0.1191, "step": 203 },
    { "epoch": 6.38, "learning_rate": 6.866141732283465e-05, "loss": 0.1196, "step": 204 },
    { "epoch": 6.41, "learning_rate": 6.850393700787402e-05, "loss": 0.1133, "step": 205 },
    { "epoch": 6.44, "learning_rate": 6.834645669291338e-05, "loss": 0.1635, "step": 206 },
    { "epoch": 6.47, "learning_rate": 6.818897637795276e-05, "loss": 0.1425, "step": 207 },
    { "epoch": 6.5, "learning_rate": 6.803149606299213e-05, "loss": 0.1083, "step": 208 },
    { "epoch": 6.53, "learning_rate": 6.78740157480315e-05, "loss": 0.1112, "step": 209 },
    { "epoch": 6.56, "learning_rate": 6.771653543307087e-05, "loss": 0.116, "step": 210 },
    { "epoch": 6.59, "learning_rate": 6.755905511811024e-05, "loss": 0.1103, "step": 211 },
    { "epoch": 6.62, "learning_rate": 6.740157480314962e-05, "loss": 0.121, "step": 212 },
    { "epoch": 6.66, "learning_rate": 6.724409448818898e-05, "loss": 0.1161, "step": 213 },
    { "epoch": 6.69, "learning_rate": 6.708661417322835e-05, "loss": 0.1548, "step": 214 },
    { "epoch": 6.72, "learning_rate": 6.692913385826773e-05, "loss": 0.1528, "step": 215 },
    { "epoch": 6.75, "learning_rate": 6.677165354330709e-05, "loss": 0.11, "step": 216 },
    { "epoch": 6.78, "learning_rate": 6.661417322834646e-05, "loss": 0.1095, "step": 217 },
    { "epoch": 6.81, "learning_rate": 6.645669291338584e-05, "loss": 0.0872, "step": 218 },
    { "epoch": 6.84, "learning_rate": 6.62992125984252e-05, "loss": 0.136, "step": 219 },
    { "epoch": 6.88, "learning_rate": 6.614173228346457e-05, "loss": 0.139, "step": 220 },
    { "epoch": 6.91, "learning_rate": 6.598425196850395e-05, "loss": 0.1387, "step": 221 },
    { "epoch": 6.94, "learning_rate": 6.582677165354331e-05, "loss": 0.1127, "step": 222 },
    { "epoch": 6.97, "learning_rate": 6.566929133858268e-05, "loss": 0.1444, "step": 223 },
    { "epoch": 7.0, "learning_rate": 6.551181102362204e-05, "loss": 0.1196, "step": 224 },
    { "epoch": 7.03, "learning_rate": 6.535433070866141e-05, "loss": 0.0833, "step": 225 },
    { "epoch": 7.06, "learning_rate": 6.519685039370079e-05, "loss": 0.09, "step": 226 },
    { "epoch": 7.09, "learning_rate": 6.503937007874015e-05, "loss": 0.1063, "step": 227 },
    { "epoch": 7.12, "learning_rate": 6.488188976377953e-05, "loss": 0.1178, "step": 228 },
    { "epoch": 7.16, "learning_rate": 6.47244094488189e-05, "loss": 0.1121, "step": 229 },
    { "epoch": 7.19, "learning_rate": 6.456692913385826e-05, "loss": 0.0885, "step": 230 },
    { "epoch": 7.22, "learning_rate": 6.440944881889764e-05, "loss": 0.0719, "step": 231 },
    { "epoch": 7.25, "learning_rate": 6.425196850393701e-05, "loss": 0.1252, "step": 232 },
    { "epoch": 7.28, "learning_rate": 6.409448818897637e-05, "loss": 0.1204, "step": 233 },
    { "epoch": 7.31, "learning_rate": 6.393700787401575e-05, "loss": 0.0966, "step": 234 },
    { "epoch": 7.34, "learning_rate": 6.377952755905512e-05, "loss": 0.1109, "step": 235 },
    { "epoch": 7.38, "learning_rate": 6.36220472440945e-05, "loss": 0.0621, "step": 236 },
    { "epoch": 7.41, "learning_rate": 6.346456692913386e-05, "loss": 0.0652, "step": 237 },
    { "epoch": 7.44, "learning_rate": 6.330708661417323e-05, "loss": 0.1615, "step": 238 },
    { "epoch": 7.47, "learning_rate": 6.31496062992126e-05, "loss": 0.1411, "step": 239 },
    { "epoch": 7.5, "learning_rate": 6.299212598425197e-05, "loss": 0.0497, "step": 240 },
    { "epoch": 7.53, "learning_rate": 6.283464566929134e-05, "loss": 0.065, "step": 241 },
    { "epoch": 7.56, "learning_rate": 6.267716535433072e-05, "loss": 0.1816, "step": 242 },
    { "epoch": 7.59, "learning_rate": 6.251968503937008e-05, "loss": 0.0696, "step": 243 },
    { "epoch": 7.62, "learning_rate": 6.236220472440946e-05, "loss": 0.1113, "step": 244 },
    { "epoch": 7.66, "learning_rate": 6.220472440944882e-05, "loss": 0.0783, "step": 245 },
    { "epoch": 7.69, "learning_rate": 6.204724409448819e-05, "loss": 0.1495, "step": 246 },
    { "epoch": 7.72, "learning_rate": 6.188976377952757e-05, "loss": 0.1088, "step": 247 },
    { "epoch": 7.75, "learning_rate": 6.173228346456693e-05, "loss": 0.0582, "step": 248 },
    { "epoch": 7.78, "learning_rate": 6.15748031496063e-05, "loss": 0.1302, "step": 249 },
    { "epoch": 7.81, "learning_rate": 6.141732283464568e-05, "loss": 0.1083, "step": 250 },
    { "epoch": 7.84, "learning_rate": 6.125984251968504e-05, "loss": 0.1105, "step": 251 },
    { "epoch": 7.88, "learning_rate": 6.110236220472442e-05, "loss": 0.111, "step": 252 },
    { "epoch": 7.91, "learning_rate": 6.094488188976378e-05, "loss": 0.085, "step": 253 },
    { "epoch": 7.94, "learning_rate": 6.078740157480315e-05, "loss": 0.1052, "step": 254 },
    { "epoch": 7.97, "learning_rate": 6.0629921259842526e-05, "loss": 0.0935, "step": 255 },
    { "epoch": 8.0, "learning_rate": 6.047244094488189e-05, "loss": 0.1675, "step": 256 },
    { "epoch": 8.03, "learning_rate": 6.031496062992126e-05, "loss": 0.0578, "step": 257 },
    { "epoch": 8.06, "learning_rate": 6.0157480314960636e-05, "loss": 0.0734, "step": 258 },
    { "epoch": 8.09, "learning_rate": 6e-05, "loss": 0.0752, "step": 259 },
    { "epoch": 8.12, "learning_rate": 5.984251968503938e-05, "loss": 0.1179, "step": 260 },
    { "epoch": 8.16, "learning_rate": 5.9685039370078746e-05, "loss": 0.0667, "step": 261 },
    { "epoch": 8.19, "learning_rate": 5.952755905511811e-05, "loss": 0.0733, "step": 262 },
    { "epoch": 8.22, "learning_rate": 5.9370078740157483e-05, "loss": 0.0743, "step": 263 },
    { "epoch": 8.25, "learning_rate": 5.9212598425196856e-05, "loss": 0.1124, "step": 264 },
    { "epoch": 8.28, "learning_rate": 5.905511811023622e-05, "loss": 0.1117, "step": 265 },
    { "epoch": 8.31, "learning_rate": 5.889763779527559e-05, "loss": 0.0866, "step": 266 },
    { "epoch": 8.34, "learning_rate": 5.874015748031496e-05, "loss": 0.063, "step": 267 },
    { "epoch": 8.38, "learning_rate": 5.858267716535434e-05, "loss": 0.0715, "step": 268 },
    { "epoch": 8.41, "learning_rate": 5.84251968503937e-05, "loss": 0.0844, "step": 269 },
    { "epoch": 8.44, "learning_rate": 5.826771653543307e-05, "loss": 0.0913, "step": 270 },
    { "epoch": 8.47, "learning_rate": 5.811023622047245e-05, "loss": 0.0761, "step": 271 },
    { "epoch": 8.5, "learning_rate": 5.795275590551181e-05, "loss": 0.0783, "step": 272 },
    { "epoch": 8.53, "learning_rate": 5.779527559055118e-05, "loss": 0.1038, "step": 273 },
    { "epoch": 8.56, "learning_rate": 5.763779527559056e-05, "loss": 0.1449, "step": 274 },
    { "epoch": 8.59, "learning_rate": 5.748031496062992e-05, "loss": 0.0565, "step": 275 },
    { "epoch": 8.62, "learning_rate": 5.73228346456693e-05, "loss": 0.0736, "step": 276 },
    { "epoch": 8.66, "learning_rate": 5.716535433070867e-05, "loss": 0.1085, "step": 277 },
    { "epoch": 8.69, "learning_rate": 5.700787401574803e-05, "loss": 0.0951, "step": 278 },
    { "epoch": 8.72, "learning_rate": 5.6850393700787404e-05, "loss": 0.0892, "step": 279 },
    { "epoch": 8.75, "learning_rate": 5.6692913385826777e-05, "loss": 0.1237, "step": 280 },
    { "epoch": 8.78, "learning_rate": 5.653543307086614e-05, "loss": 0.121, "step": 281 },
    { "epoch": 8.81, "learning_rate": 5.6377952755905514e-05, "loss": 0.0998, "step": 282 },
    { "epoch": 8.84, "learning_rate": 5.622047244094488e-05, "loss": 0.1469, "step": 283 },
    { "epoch": 8.88, "learning_rate": 5.606299212598426e-05, "loss": 0.1022, "step": 284 },
    { "epoch": 8.91, "learning_rate": 5.5905511811023624e-05, "loss": 0.061, "step": 285 },
    { "epoch": 8.94, "learning_rate": 5.574803149606299e-05, "loss": 0.0503, "step": 286 },
    { "epoch": 8.97, "learning_rate": 5.559055118110237e-05, "loss": 0.0864, "step": 287 },
    { "epoch": 9.0, "learning_rate": 5.5433070866141734e-05, "loss": 0.0922, "step": 288 },
    { "epoch": 9.03, "learning_rate": 5.52755905511811e-05, "loss": 0.1066, "step": 289 },
    { "epoch": 9.06, "learning_rate": 5.511811023622048e-05, "loss": 0.0673, "step": 290 },
    { "epoch": 9.09, "learning_rate": 5.496062992125984e-05, "loss": 0.0758, "step": 291 },
    { "epoch": 9.12, "learning_rate": 5.480314960629922e-05, "loss": 0.0556, "step": 292 },
    { "epoch": 9.16, "learning_rate": 5.464566929133859e-05, "loss": 0.1262, "step": 293 },
    { "epoch": 9.19, "learning_rate": 5.448818897637795e-05, "loss": 0.0578, "step": 294 },
    { "epoch": 9.22, "learning_rate": 5.433070866141733e-05, "loss": 0.1064, "step": 295 },
    { "epoch": 9.25, "learning_rate": 5.41732283464567e-05, "loss": 0.0624, "step": 296 },
    { "epoch": 9.28, "learning_rate": 5.401574803149606e-05, "loss": 0.082, "step": 297 },
    { "epoch": 9.31, "learning_rate": 5.3858267716535435e-05, "loss": 0.0793, "step": 298 },
    { "epoch": 9.34, "learning_rate": 5.37007874015748e-05, "loss": 0.1122, "step": 299 },
    { "epoch": 9.38, "learning_rate": 5.354330708661418e-05, "loss": 0.0617, "step": 300 },
    { "epoch": 9.41, "learning_rate": 5.3385826771653545e-05, "loss": 0.0803, "step": 301 },
    { "epoch": 9.44, "learning_rate": 5.322834645669291e-05, "loss": 0.0774, "step": 302 },
    { "epoch": 9.47, "learning_rate": 5.307086614173229e-05, "loss": 0.0814, "step": 303 },
    { "epoch": 9.5, "learning_rate": 5.2913385826771654e-05, "loss": 0.0577, "step": 304 },
    { "epoch": 9.53, "learning_rate": 5.275590551181102e-05, "loss": 0.0578, "step": 305 },
    { "epoch": 9.56, "learning_rate": 5.25984251968504e-05, "loss": 0.061, "step": 306 },
    { "epoch": 9.59, "learning_rate": 5.2440944881889764e-05, "loss": 0.0871, "step": 307 },
    { "epoch": 9.62, "learning_rate": 5.228346456692914e-05, "loss": 0.0683, "step": 308 },
    { "epoch": 9.66, "learning_rate": 5.212598425196851e-05, "loss": 0.1222, "step": 309 },
    { "epoch": 9.69, "learning_rate": 5.1968503937007874e-05, "loss": 0.0745, "step": 310 },
    { "epoch": 9.72, "learning_rate": 5.181102362204725e-05, "loss": 0.1451, "step": 311 },
    { "epoch": 9.75, "learning_rate": 5.165354330708662e-05, "loss": 0.063, "step": 312 },
    { "epoch": 9.78, "learning_rate": 5.1496062992125984e-05, "loss": 0.0583, "step": 313 },
    { "epoch": 9.81, "learning_rate": 5.1338582677165356e-05, "loss": 0.0615, "step": 314 },
    { "epoch": 9.84, "learning_rate": 5.118110236220473e-05, "loss": 0.0839, "step": 315 },
    { "epoch": 9.88, "learning_rate": 5.10236220472441e-05, "loss": 0.0714, "step": 316 },
    { "epoch": 9.91, "learning_rate": 5.0866141732283466e-05, "loss": 0.0957, "step": 317 },
    { "epoch": 9.94, "learning_rate": 5.070866141732283e-05, "loss": 0.0895, "step": 318 },
    { "epoch": 9.97, "learning_rate": 5.055118110236221e-05, "loss": 0.0773, "step": 319 },
    { "epoch": 10.0, "learning_rate": 5.0393700787401575e-05, "loss": 0.0937, "step": 320 },
    { "epoch": 10.03, "learning_rate": 5.023622047244094e-05, "loss": 0.0558, "step": 321 },
    { "epoch": 10.06, "learning_rate": 5.007874015748032e-05, "loss": 0.0686, "step": 322 },
    { "epoch": 10.09, "learning_rate": 4.9921259842519685e-05, "loss": 0.0517, "step": 323 },
    { "epoch": 10.12, "learning_rate": 4.976377952755906e-05, "loss": 0.0542, "step": 324 },
    { "epoch": 10.16, "learning_rate": 4.960629921259843e-05, "loss": 0.1144, "step": 325 },
    { "epoch": 10.19, "learning_rate": 4.94488188976378e-05, "loss": 0.0854, "step": 326 },
    { "epoch": 10.22, "learning_rate": 4.929133858267717e-05, "loss": 0.0714, "step": 327 },
    { "epoch": 10.25, "learning_rate": 4.913385826771654e-05, "loss": 0.0884, "step": 328 },
    { "epoch": 10.28, "learning_rate": 4.897637795275591e-05, "loss": 0.0674, "step": 329 },
    { "epoch": 10.31, "learning_rate": 4.881889763779528e-05, "loss": 0.0485, "step": 330 },
    { "epoch": 10.34, "learning_rate": 4.866141732283465e-05, "loss": 0.0636, "step": 331 },
    { "epoch": 10.38, "learning_rate": 4.8503937007874014e-05, "loss": 0.0612, "step": 332 },
    { "epoch": 10.41, "learning_rate": 4.8346456692913387e-05, "loss": 0.0482, "step": 333 },
    { "epoch": 10.44, "learning_rate": 4.818897637795276e-05, "loss": 0.0629, "step": 334 },
    { "epoch": 10.47, "learning_rate": 4.8031496062992124e-05, "loss": 0.0612, "step": 335 },
    { "epoch": 10.5, "learning_rate": 4.7874015748031496e-05, "loss": 0.0608, "step": 336 },
    { "epoch": 10.53, "learning_rate": 4.771653543307087e-05, "loss": 0.0525, "step": 337 },
    { "epoch": 10.56, "learning_rate": 4.755905511811024e-05, "loss": 0.0674, "step": 338 },
    { "epoch": 10.59, "learning_rate": 4.7401574803149606e-05, "loss": 0.0563, "step": 339 },
    { "epoch": 10.62, "learning_rate": 4.724409448818898e-05, "loss": 0.0625, "step": 340 },
    { "epoch": 10.66, "learning_rate": 4.708661417322835e-05, "loss": 0.0812, "step": 341 },
    { "epoch": 10.69, "learning_rate": 4.692913385826772e-05, "loss": 0.1092, "step": 342 },
    { "epoch": 10.72, "learning_rate": 4.677165354330709e-05, "loss": 0.0722, "step": 343 },
    { "epoch": 10.75, "learning_rate": 4.661417322834646e-05, "loss": 0.0876, "step": 344 },
    { "epoch": 10.78, "learning_rate": 4.645669291338583e-05, "loss": 0.0831, "step": 345 },
    { "epoch": 10.81, "learning_rate": 4.6299212598425204e-05, "loss": 0.0546, "step": 346 },
    { "epoch": 10.84, "learning_rate": 4.614173228346457e-05, "loss": 0.093, "step": 347 },
    { "epoch": 10.88, "learning_rate": 4.5984251968503935e-05, "loss": 0.1168, "step": 348 },
    { "epoch": 10.91, "learning_rate": 4.582677165354331e-05, "loss": 0.098, "step": 349 },
    { "epoch": 10.94, "learning_rate": 4.566929133858268e-05, "loss": 0.0887, "step": 350 },
    { "epoch": 10.97, "learning_rate": 4.5511811023622045e-05, "loss": 0.0678, "step": 351 },
    { "epoch": 11.0, "learning_rate": 4.535433070866142e-05, "loss": 0.0939, "step": 352 },
    { "epoch": 11.03, "learning_rate": 4.519685039370079e-05, "loss": 0.0565, "step": 353 },
    { "epoch": 11.06, "learning_rate": 4.503937007874016e-05, "loss": 0.0688, "step": 354 },
    { "epoch": 11.09, "learning_rate": 4.488188976377953e-05, "loss": 0.0815, "step": 355 },
    { "epoch": 11.12, "learning_rate": 4.47244094488189e-05, "loss": 0.0516, "step": 356 },
    { "epoch": 11.16, "learning_rate": 4.456692913385827e-05, "loss": 0.048, "step": 357 },
    { "epoch": 11.19, "learning_rate": 4.4409448818897643e-05, "loss": 0.0676, "step": 358 },
    { "epoch": 11.22, "learning_rate": 4.425196850393701e-05, "loss": 0.0519, "step": 359 },
    { "epoch": 11.25, "learning_rate": 4.409448818897638e-05, "loss": 0.1012, "step": 360 },
    { "epoch": 11.28, "learning_rate": 4.393700787401575e-05, "loss": 0.0465, "step": 361 },
    { "epoch": 11.31, "learning_rate": 4.3779527559055125e-05, "loss": 0.0511, "step": 362 },
    { "epoch": 11.34, "learning_rate": 4.362204724409449e-05, "loss": 0.0702, "step": 363 },
    { "epoch": 11.38, "learning_rate": 4.346456692913386e-05, "loss": 0.099, "step": 364 },
    { "epoch": 11.41, "learning_rate": 4.330708661417323e-05, "loss": 0.049, "step": 365 },
    { "epoch": 11.44, "learning_rate": 4.31496062992126e-05, "loss": 0.0622, "step": 366 },
    { "epoch": 11.47, "learning_rate": 4.2992125984251966e-05, "loss": 0.1113, "step": 367 },
    { "epoch": 11.5, "learning_rate": 4.283464566929134e-05, "loss": 0.1067, "step": 368 },
    { "epoch": 11.53, "learning_rate": 4.267716535433071e-05, "loss": 0.0444, "step": 369 },
    { "epoch": 11.56, "learning_rate": 4.251968503937008e-05, "loss": 0.0605, "step": 370 },
    { "epoch": 11.59, "learning_rate": 4.236220472440945e-05, "loss": 0.0582, "step": 371 },
    { "epoch": 11.62, "learning_rate": 4.220472440944882e-05, "loss": 0.0866, "step": 372 },
    { "epoch": 11.66, "learning_rate": 4.204724409448819e-05, "loss": 0.0668, "step": 373 },
    { "epoch": 11.69, "learning_rate": 4.1889763779527564e-05, "loss": 0.0444, "step": 374 },
    { "epoch": 11.72, "learning_rate": 4.173228346456693e-05, "loss": 0.0754, "step": 375 },
    { "epoch": 11.75, "learning_rate": 4.15748031496063e-05, "loss": 0.0784, "step": 376 },
    { "epoch": 11.78, "learning_rate": 4.1417322834645674e-05, "loss": 0.1329, "step": 377 },
    { "epoch": 11.81, "learning_rate": 4.1259842519685046e-05, "loss": 0.0889, "step": 378 },
    { "epoch": 11.84, "learning_rate": 4.110236220472441e-05, "loss": 0.0446, "step": 379 },
    { "epoch": 11.88, "learning_rate": 4.0944881889763784e-05, "loss": 0.0787, "step": 380 },
    { "epoch": 11.91, "learning_rate": 4.078740157480315e-05, "loss": 0.0862, "step": 381 },
    { "epoch": 11.94, "learning_rate": 4.062992125984252e-05, "loss": 0.116, "step": 382 },
    { "epoch": 11.97, "learning_rate": 4.047244094488189e-05, "loss": 0.0619, "step": 383 },
    { "epoch": 12.0, "learning_rate": 4.031496062992126e-05, "loss": 0.0485, "step": 384 },
    { "epoch": 12.03, "learning_rate": 4.015748031496063e-05, "loss": 0.0744, "step": 385 },
    { "epoch": 12.06, "learning_rate": 4e-05, "loss": 0.0514, "step": 386 },
    { "epoch": 12.09, "learning_rate": 3.984251968503937e-05, "loss": 0.0918, "step": 387 },
    { "epoch": 12.12, "learning_rate": 3.968503937007874e-05, "loss": 0.0496, "step": 388 },
    { "epoch": 12.16, "learning_rate": 3.952755905511811e-05, "loss": 0.0415, "step": 389 },
    { "epoch": 12.19, "learning_rate": 3.9370078740157485e-05, "loss": 0.0774, "step": 390 },
    { "epoch": 12.22, "learning_rate": 3.921259842519685e-05, "loss": 0.0816, "step": 391 },
    { "epoch": 12.25, "learning_rate": 3.905511811023622e-05, "loss": 0.0815, "step": 392 },
    { "epoch": 12.28, "learning_rate": 3.8897637795275595e-05, "loss": 0.0495, "step": 393 },
    { "epoch": 12.31, "learning_rate": 3.874015748031497e-05, "loss": 0.0631, "step": 394 },
    { "epoch": 12.34, "learning_rate": 3.858267716535433e-05, "loss": 0.0651, "step": 395 },
    { "epoch": 12.38, "learning_rate": 3.8425196850393705e-05, "loss": 0.2812, "step": 396 },
    { "epoch": 12.41, "learning_rate": 3.826771653543308e-05, "loss": 0.1322, "step": 397 },
    { "epoch": 12.44, "learning_rate": 3.811023622047244e-05, "loss": 0.0579, "step": 398 },
    { "epoch": 12.47, "learning_rate": 3.795275590551181e-05, "loss": 0.0711, "step": 399 },
    { "epoch": 12.5, "learning_rate": 3.779527559055118e-05, "loss": 0.0625, "step": 400 },
    { "epoch": 12.53, "learning_rate": 3.763779527559055e-05, "loss": 0.0522, "step": 401 },
    { "epoch": 12.56, "learning_rate": 3.7480314960629924e-05, "loss": 0.0537, "step": 402 },
    { "epoch": 12.59, "learning_rate": 3.732283464566929e-05, "loss": 0.0675, "step": 403 },
    { "epoch": 12.62, "learning_rate": 3.716535433070866e-05, "loss": 0.076, "step": 404 },
    { "epoch": 12.66, "learning_rate": 3.7007874015748034e-05, "loss": 0.0461, "step": 405 },
    { "epoch": 12.69, "learning_rate": 3.6850393700787406e-05, "loss": 0.0888, "step": 406 },
    { "epoch": 12.72, "learning_rate": 3.669291338582677e-05, "loss": 0.0389, "step": 407 },
    { "epoch": 12.75, "learning_rate": 3.6535433070866144e-05, "loss": 0.1455, "step": 408 },
    { "epoch": 12.78, "learning_rate": 3.6377952755905516e-05, "loss": 0.0486, "step": 409 },
    { "epoch": 12.81, "learning_rate": 3.622047244094489e-05, "loss": 0.0461, "step": 410 },
    { "epoch": 12.84, "learning_rate": 3.6062992125984253e-05, "loss": 0.0601, "step": 411 },
    { "epoch": 12.88, "learning_rate": 3.5905511811023626e-05, "loss": 0.0746, "step": 412 },
    { "epoch": 12.91, "learning_rate": 3.5748031496063e-05, "loss": 0.0824, "step": 413 },
    { "epoch": 12.94, "learning_rate": 3.559055118110236e-05, "loss": 0.0683, "step": 414 },
    { "epoch": 12.97, "learning_rate": 3.5433070866141735e-05, "loss": 0.0585, "step": 415 },
    { "epoch": 13.0, "learning_rate": 3.52755905511811e-05, "loss": 0.0508, "step": 416 },
    { "epoch": 13.03, "learning_rate": 3.511811023622047e-05, "loss": 0.0546, "step": 417 },
    { "epoch": 13.06, "learning_rate": 3.4960629921259845e-05, "loss": 0.0618, "step": 418 },
    { "epoch": 13.09, "learning_rate": 3.480314960629921e-05, "loss": 0.0404, "step": 419 },
    { "epoch": 13.12, "learning_rate": 3.464566929133858e-05, "loss": 0.0646, "step": 420 },
    { "epoch": 13.16, "learning_rate": 3.4488188976377955e-05, "loss": 0.0428, "step": 421 },
    { "epoch": 13.19, "learning_rate": 3.433070866141733e-05, "loss": 0.0447, "step": 422 },
    { "epoch": 13.22, "learning_rate": 3.417322834645669e-05, "loss": 0.1266, "step": 423 },
    { "epoch": 13.25, "learning_rate": 3.4015748031496065e-05, "loss": 0.0574, "step": 424 },
    { "epoch": 13.28, "learning_rate": 3.385826771653544e-05, "loss": 0.1348, "step": 425 },
    { "epoch": 13.31, "learning_rate": 3.370078740157481e-05, "loss": 0.0839, "step": 426 },
    { "epoch": 13.34, "learning_rate": 3.3543307086614174e-05, "loss": 0.0447, "step": 427 },
    { "epoch": 13.38, "learning_rate": 3.3385826771653546e-05, "loss": 0.0651, "step": 428 },
    { "epoch": 13.41, "learning_rate": 3.322834645669292e-05, "loss": 0.0561, "step": 429 },
    { "epoch": 13.44, "learning_rate": 3.3070866141732284e-05, "loss": 0.0699, "step": 430 },
    { "epoch": 13.47, "learning_rate": 3.2913385826771656e-05, "loss": 0.1614, "step": 431 },
    { "epoch": 13.5, "learning_rate": 3.275590551181102e-05, "loss": 0.0557, "step": 432 },
    { "epoch": 13.53, "learning_rate": 3.2598425196850394e-05, "loss": 0.047, "step": 433 },
    { "epoch": 13.56, "learning_rate": 3.2440944881889766e-05, "loss": 0.0537, "step": 434 },
    { "epoch": 13.59, "learning_rate": 3.228346456692913e-05, "loss": 0.0689, "step": 435 },
    { "epoch": 13.62, "learning_rate": 3.2125984251968504e-05, "loss": 0.0541, "step": 436 },
    { "epoch": 13.66, "learning_rate": 3.1968503937007876e-05, "loss": 0.0477, "step": 437 },
    { "epoch": 13.69, "learning_rate": 3.181102362204725e-05, "loss": 0.0526, "step": 438 },
    { "epoch": 13.72, "learning_rate": 3.165354330708661e-05, "loss": 0.1067, "step": 439 },
    { "epoch": 13.75, "learning_rate": 3.1496062992125985e-05, "loss": 0.061, "step": 440 },
    { "epoch": 13.78, "learning_rate": 3.133858267716536e-05, "loss": 0.0808, "step": 441 },
    { "epoch": 13.81, "learning_rate": 3.118110236220473e-05, "loss": 0.0593, "step": 442 },
    { "epoch": 13.84, "learning_rate": 3.1023622047244095e-05, "loss": 0.0681, "step": 443 },
    { "epoch": 13.88, "learning_rate": 3.086614173228347e-05, "loss": 0.0921, "step": 444 },
    { "epoch": 13.91, "learning_rate": 3.070866141732284e-05, "loss": 0.0665, "step": 445 },
    { "epoch": 13.94, "learning_rate": 3.055118110236221e-05, "loss": 0.0453, "step": 446 },
    { "epoch": 13.97, "learning_rate": 3.0393700787401574e-05, "loss": 0.0435, "step": 447 },
    { "epoch": 14.0, "learning_rate": 3.0236220472440946e-05, "loss": 0.0611, "step": 448 },
    { "epoch": 14.03, "learning_rate": 3.0078740157480318e-05, "loss": 0.0755, "step": 449 },
    { "epoch": 14.06, "learning_rate": 2.992125984251969e-05, "loss": 0.0466, "step": 450 },
    { "epoch": 14.09, "learning_rate": 2.9763779527559056e-05, "loss": 0.0823, "step": 451 },
    { "epoch": 14.12, "learning_rate": 2.9606299212598428e-05, "loss": 0.0492, "step": 452 },
    { "epoch": 14.16, "learning_rate": 2.9448818897637797e-05, "loss": 0.0708, "step": 453 },
    { "epoch": 14.19, "learning_rate": 2.929133858267717e-05, "loss": 0.0693, "step": 454 },
    { "epoch": 14.22, "learning_rate": 2.9133858267716534e-05, "loss": 0.0509, "step": 455 },
    { "epoch": 14.25, "learning_rate": 2.8976377952755906e-05, "loss": 0.0427, "step": 456 },
    { "epoch": 14.28, "learning_rate": 2.881889763779528e-05, "loss": 0.0418, "step": 457 },
    { "epoch": 14.31, "learning_rate": 2.866141732283465e-05, "loss": 0.039, "step": 458 },
    { "epoch": 14.34, "learning_rate": 2.8503937007874016e-05, "loss": 0.0535, "step": 459 },
    { "epoch": 14.38, "learning_rate": 2.8346456692913388e-05, "loss": 0.0514, "step": 460 },
    { "epoch": 14.41, "learning_rate": 2.8188976377952757e-05, "loss": 0.0798, "step": 461 },
    { "epoch": 14.44, "learning_rate": 2.803149606299213e-05, "loss": 0.1023, "step": 462 },
    { "epoch": 14.47, "learning_rate": 2.7874015748031495e-05, "loss": 0.0419, "step": 463 },
    { "epoch": 14.5, "learning_rate": 2.7716535433070867e-05, "loss": 0.0879, "step": 464 },
    { "epoch": 14.53, "learning_rate": 2.755905511811024e-05, "loss": 0.0379, "step": 465 },
    { "epoch": 14.56, "learning_rate": 2.740157480314961e-05, "loss": 0.0552, "step": 466 },
    { "epoch": 14.59, "learning_rate": 2.7244094488188977e-05, "loss": 0.0768, "step": 467 },
    { "epoch": 14.62, "learning_rate": 2.708661417322835e-05, "loss": 0.0638, "step": 468 },
    { "epoch": 14.66, "learning_rate": 2.6929133858267717e-05, "loss": 0.0582, "step": 469 },
    { "epoch": 14.69, "learning_rate": 2.677165354330709e-05, "loss": 0.0611, "step": 470 },
    { "epoch": 14.72, "learning_rate": 2.6614173228346455e-05, "loss": 0.049, "step": 471 },
    { "epoch": 14.75, "learning_rate": 2.6456692913385827e-05, "loss": 0.0845, "step": 472 },
    { "epoch": 14.78, "learning_rate": 2.62992125984252e-05, "loss": 0.0509, "step": 473 },
    { "epoch": 14.81, "learning_rate": 2.614173228346457e-05, "loss": 0.0804, "step": 474 },
    { "epoch": 14.84, "learning_rate": 2.5984251968503937e-05, "loss": 0.0849, "step": 475 },
    { "epoch": 14.88, "learning_rate": 2.582677165354331e-05, "loss": 0.1303, "step": 476 },
    { "epoch": 14.91, "learning_rate": 2.5669291338582678e-05, "loss": 0.0495, "step": 477 },
    { "epoch": 14.94, "learning_rate": 2.551181102362205e-05, "loss": 0.118, "step": 478 },
    { "epoch": 14.97, "learning_rate": 2.5354330708661416e-05, "loss": 0.0637, "step": 479 },
    { "epoch": 15.0, "learning_rate": 2.5196850393700788e-05, "loss": 0.0568, "step": 480 },
    { "epoch": 15.03, "learning_rate": 2.503937007874016e-05, "loss": 0.0379, "step": 481 },
    { "epoch": 15.06, "learning_rate": 2.488188976377953e-05, "loss": 0.0522, "step": 482 },
    { "epoch": 15.09, "learning_rate": 2.47244094488189e-05, "loss": 0.0653, "step": 483 },
    { "epoch": 15.12, "learning_rate": 2.456692913385827e-05, "loss": 0.0537, "step": 484 },
    { "epoch": 15.16, "learning_rate": 2.440944881889764e-05, "loss": 0.053, "step": 485 },
    { "epoch": 15.19, "learning_rate": 2.4251968503937007e-05, "loss": 0.0583, "step": 486 },
    { "epoch": 15.22,
| "learning_rate": 2.409448818897638e-05, | |
| "loss": 0.0951, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 15.25, | |
| "learning_rate": 2.3937007874015748e-05, | |
| "loss": 0.0691, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 15.28, | |
| "learning_rate": 2.377952755905512e-05, | |
| "loss": 0.0479, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 15.31, | |
| "learning_rate": 2.362204724409449e-05, | |
| "loss": 0.0594, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 15.34, | |
| "learning_rate": 2.346456692913386e-05, | |
| "loss": 0.0835, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 15.38, | |
| "learning_rate": 2.330708661417323e-05, | |
| "loss": 0.0571, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 15.41, | |
| "learning_rate": 2.3149606299212602e-05, | |
| "loss": 0.0463, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 15.44, | |
| "learning_rate": 2.2992125984251968e-05, | |
| "loss": 0.0665, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 15.47, | |
| "learning_rate": 2.283464566929134e-05, | |
| "loss": 0.0599, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 15.5, | |
| "learning_rate": 2.267716535433071e-05, | |
| "loss": 0.0414, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 15.53, | |
| "learning_rate": 2.251968503937008e-05, | |
| "loss": 0.0759, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 15.56, | |
| "learning_rate": 2.236220472440945e-05, | |
| "loss": 0.0454, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 15.59, | |
| "learning_rate": 2.2204724409448822e-05, | |
| "loss": 0.0685, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 15.62, | |
| "learning_rate": 2.204724409448819e-05, | |
| "loss": 0.0787, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 15.66, | |
| "learning_rate": 2.1889763779527563e-05, | |
| "loss": 0.0757, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 15.69, | |
| "learning_rate": 2.173228346456693e-05, | |
| "loss": 0.0582, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 15.72, | |
| "learning_rate": 2.15748031496063e-05, | |
| "loss": 0.054, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 15.75, | |
| "learning_rate": 2.141732283464567e-05, | |
| "loss": 0.0343, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 15.78, | |
| "learning_rate": 2.125984251968504e-05, | |
| "loss": 0.0632, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 15.81, | |
| "learning_rate": 2.110236220472441e-05, | |
| "loss": 0.0844, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 15.84, | |
| "learning_rate": 2.0944881889763782e-05, | |
| "loss": 0.0659, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 15.88, | |
| "learning_rate": 2.078740157480315e-05, | |
| "loss": 0.0599, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 15.91, | |
| "learning_rate": 2.0629921259842523e-05, | |
| "loss": 0.0552, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 15.94, | |
| "learning_rate": 2.0472440944881892e-05, | |
| "loss": 0.0558, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 15.97, | |
| "learning_rate": 2.031496062992126e-05, | |
| "loss": 0.0985, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "learning_rate": 2.015748031496063e-05, | |
| "loss": 0.0552, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 16.03, | |
| "learning_rate": 2e-05, | |
| "loss": 0.0576, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 16.06, | |
| "learning_rate": 1.984251968503937e-05, | |
| "loss": 0.0493, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 16.09, | |
| "learning_rate": 1.9685039370078743e-05, | |
| "loss": 0.0635, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 16.12, | |
| "learning_rate": 1.952755905511811e-05, | |
| "loss": 0.0667, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 16.16, | |
| "learning_rate": 1.9370078740157484e-05, | |
| "loss": 0.0626, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 16.19, | |
| "learning_rate": 1.9212598425196852e-05, | |
| "loss": 0.0392, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 16.22, | |
| "learning_rate": 1.905511811023622e-05, | |
| "loss": 0.0555, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 16.25, | |
| "learning_rate": 1.889763779527559e-05, | |
| "loss": 0.0496, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 16.28, | |
| "learning_rate": 1.8740157480314962e-05, | |
| "loss": 0.0639, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 16.31, | |
| "learning_rate": 1.858267716535433e-05, | |
| "loss": 0.0659, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 16.34, | |
| "learning_rate": 1.8425196850393703e-05, | |
| "loss": 0.0492, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 16.38, | |
| "learning_rate": 1.8267716535433072e-05, | |
| "loss": 0.0505, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 16.41, | |
| "learning_rate": 1.8110236220472444e-05, | |
| "loss": 0.0368, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 16.44, | |
| "learning_rate": 1.7952755905511813e-05, | |
| "loss": 0.0542, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 16.47, | |
| "learning_rate": 1.779527559055118e-05, | |
| "loss": 0.0362, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 16.5, | |
| "learning_rate": 1.763779527559055e-05, | |
| "loss": 0.0603, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 16.53, | |
| "learning_rate": 1.7480314960629923e-05, | |
| "loss": 0.0733, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 16.56, | |
| "learning_rate": 1.732283464566929e-05, | |
| "loss": 0.044, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 16.59, | |
| "learning_rate": 1.7165354330708663e-05, | |
| "loss": 0.1038, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 16.62, | |
| "learning_rate": 1.7007874015748032e-05, | |
| "loss": 0.0492, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 16.66, | |
| "learning_rate": 1.6850393700787404e-05, | |
| "loss": 0.0592, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 16.69, | |
| "learning_rate": 1.6692913385826773e-05, | |
| "loss": 0.0987, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 16.72, | |
| "learning_rate": 1.6535433070866142e-05, | |
| "loss": 0.0589, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 16.75, | |
| "learning_rate": 1.637795275590551e-05, | |
| "loss": 0.063, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 16.78, | |
| "learning_rate": 1.6220472440944883e-05, | |
| "loss": 0.0659, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 16.81, | |
| "learning_rate": 1.6062992125984252e-05, | |
| "loss": 0.0507, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 16.84, | |
| "learning_rate": 1.5905511811023624e-05, | |
| "loss": 0.067, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 16.88, | |
| "learning_rate": 1.5748031496062993e-05, | |
| "loss": 0.0767, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 16.91, | |
| "learning_rate": 1.5590551181102365e-05, | |
| "loss": 0.0657, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 16.94, | |
| "learning_rate": 1.5433070866141734e-05, | |
| "loss": 0.0415, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 16.97, | |
| "learning_rate": 1.5275590551181106e-05, | |
| "loss": 0.0629, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 17.0, | |
| "learning_rate": 1.5118110236220473e-05, | |
| "loss": 0.081, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 17.03, | |
| "learning_rate": 1.4960629921259845e-05, | |
| "loss": 0.0579, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 17.06, | |
| "learning_rate": 1.4803149606299214e-05, | |
| "loss": 0.0556, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 17.09, | |
| "learning_rate": 1.4645669291338584e-05, | |
| "loss": 0.0445, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 17.12, | |
| "learning_rate": 1.4488188976377953e-05, | |
| "loss": 0.0457, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 17.16, | |
| "learning_rate": 1.4330708661417325e-05, | |
| "loss": 0.0882, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 17.19, | |
| "learning_rate": 1.4173228346456694e-05, | |
| "loss": 0.0306, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 17.22, | |
| "learning_rate": 1.4015748031496065e-05, | |
| "loss": 0.0509, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 17.25, | |
| "learning_rate": 1.3858267716535433e-05, | |
| "loss": 0.0464, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 17.28, | |
| "learning_rate": 1.3700787401574806e-05, | |
| "loss": 0.0792, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 17.31, | |
| "learning_rate": 1.3543307086614174e-05, | |
| "loss": 0.0576, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 17.34, | |
| "learning_rate": 1.3385826771653545e-05, | |
| "loss": 0.0573, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 17.38, | |
| "learning_rate": 1.3228346456692914e-05, | |
| "loss": 0.0428, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 17.41, | |
| "learning_rate": 1.3070866141732286e-05, | |
| "loss": 0.0456, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 17.44, | |
| "learning_rate": 1.2913385826771655e-05, | |
| "loss": 0.0593, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 17.47, | |
| "learning_rate": 1.2755905511811025e-05, | |
| "loss": 0.0763, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 17.5, | |
| "learning_rate": 1.2598425196850394e-05, | |
| "loss": 0.0596, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 17.53, | |
| "learning_rate": 1.2440944881889764e-05, | |
| "loss": 0.0384, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 17.56, | |
| "learning_rate": 1.2283464566929135e-05, | |
| "loss": 0.0748, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 17.59, | |
| "learning_rate": 1.2125984251968504e-05, | |
| "loss": 0.0599, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 17.62, | |
| "learning_rate": 1.1968503937007874e-05, | |
| "loss": 0.041, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 17.66, | |
| "learning_rate": 1.1811023622047245e-05, | |
| "loss": 0.067, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 17.69, | |
| "learning_rate": 1.1653543307086615e-05, | |
| "loss": 0.0471, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 17.72, | |
| "learning_rate": 1.1496062992125984e-05, | |
| "loss": 0.0638, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 17.75, | |
| "learning_rate": 1.1338582677165354e-05, | |
| "loss": 0.1006, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 17.78, | |
| "learning_rate": 1.1181102362204725e-05, | |
| "loss": 0.1003, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 17.81, | |
| "learning_rate": 1.1023622047244095e-05, | |
| "loss": 0.051, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 17.84, | |
| "learning_rate": 1.0866141732283466e-05, | |
| "loss": 0.0758, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 17.88, | |
| "learning_rate": 1.0708661417322835e-05, | |
| "loss": 0.0521, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 17.91, | |
| "learning_rate": 1.0551181102362205e-05, | |
| "loss": 0.0472, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 17.94, | |
| "learning_rate": 1.0393700787401575e-05, | |
| "loss": 0.0653, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 17.97, | |
| "learning_rate": 1.0236220472440946e-05, | |
| "loss": 0.0708, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 18.0, | |
| "learning_rate": 1.0078740157480315e-05, | |
| "loss": 0.0776, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 18.03, | |
| "learning_rate": 9.921259842519685e-06, | |
| "loss": 0.0401, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 18.06, | |
| "learning_rate": 9.763779527559056e-06, | |
| "loss": 0.058, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 18.09, | |
| "learning_rate": 9.606299212598426e-06, | |
| "loss": 0.0532, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 18.12, | |
| "learning_rate": 9.448818897637795e-06, | |
| "loss": 0.0378, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 18.16, | |
| "learning_rate": 9.291338582677165e-06, | |
| "loss": 0.0596, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 18.19, | |
| "learning_rate": 9.133858267716536e-06, | |
| "loss": 0.0503, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 18.22, | |
| "learning_rate": 8.976377952755906e-06, | |
| "loss": 0.0894, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 18.25, | |
| "learning_rate": 8.818897637795275e-06, | |
| "loss": 0.0611, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 18.28, | |
| "learning_rate": 8.661417322834646e-06, | |
| "loss": 0.0472, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 18.31, | |
| "learning_rate": 8.503937007874016e-06, | |
| "loss": 0.1272, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 18.34, | |
| "learning_rate": 8.346456692913387e-06, | |
| "loss": 0.0344, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 18.38, | |
| "learning_rate": 8.188976377952755e-06, | |
| "loss": 0.0701, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 18.41, | |
| "learning_rate": 8.031496062992126e-06, | |
| "loss": 0.0469, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 18.44, | |
| "learning_rate": 7.874015748031496e-06, | |
| "loss": 0.0416, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 18.47, | |
| "learning_rate": 7.716535433070867e-06, | |
| "loss": 0.1222, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 18.5, | |
| "learning_rate": 7.5590551181102365e-06, | |
| "loss": 0.0518, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 18.53, | |
| "learning_rate": 7.401574803149607e-06, | |
| "loss": 0.0404, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 18.56, | |
| "learning_rate": 7.244094488188977e-06, | |
| "loss": 0.0547, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 18.59, | |
| "learning_rate": 7.086614173228347e-06, | |
| "loss": 0.0749, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 18.62, | |
| "learning_rate": 6.929133858267717e-06, | |
| "loss": 0.0622, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 18.66, | |
| "learning_rate": 6.771653543307087e-06, | |
| "loss": 0.0486, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 18.69, | |
| "learning_rate": 6.614173228346457e-06, | |
| "loss": 0.0669, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 18.72, | |
| "learning_rate": 6.456692913385827e-06, | |
| "loss": 0.0613, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 18.75, | |
| "learning_rate": 6.299212598425197e-06, | |
| "loss": 0.0621, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 18.78, | |
| "learning_rate": 6.141732283464567e-06, | |
| "loss": 0.0616, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 18.81, | |
| "learning_rate": 5.984251968503937e-06, | |
| "loss": 0.1249, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 18.84, | |
| "learning_rate": 5.8267716535433075e-06, | |
| "loss": 0.0364, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 18.88, | |
| "learning_rate": 5.669291338582677e-06, | |
| "loss": 0.0683, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 18.91, | |
| "learning_rate": 5.511811023622048e-06, | |
| "loss": 0.0467, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 18.94, | |
| "learning_rate": 5.354330708661417e-06, | |
| "loss": 0.0689, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 18.97, | |
| "learning_rate": 5.196850393700788e-06, | |
| "loss": 0.0512, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 19.0, | |
| "learning_rate": 5.039370078740157e-06, | |
| "loss": 0.0577, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 19.03, | |
| "learning_rate": 4.881889763779528e-06, | |
| "loss": 0.0615, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 19.06, | |
| "learning_rate": 4.7244094488188975e-06, | |
| "loss": 0.1133, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 19.09, | |
| "learning_rate": 4.566929133858268e-06, | |
| "loss": 0.053, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 19.12, | |
| "learning_rate": 4.409448818897638e-06, | |
| "loss": 0.047, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 19.16, | |
| "learning_rate": 4.251968503937008e-06, | |
| "loss": 0.0513, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 19.19, | |
| "learning_rate": 4.094488188976378e-06, | |
| "loss": 0.0546, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 19.22, | |
| "learning_rate": 3.937007874015748e-06, | |
| "loss": 0.051, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 19.25, | |
| "learning_rate": 3.7795275590551182e-06, | |
| "loss": 0.0445, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 19.28, | |
| "learning_rate": 3.6220472440944883e-06, | |
| "loss": 0.0524, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 19.31, | |
| "learning_rate": 3.4645669291338583e-06, | |
| "loss": 0.0706, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 19.34, | |
| "learning_rate": 3.3070866141732284e-06, | |
| "loss": 0.0438, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 19.38, | |
| "learning_rate": 3.1496062992125985e-06, | |
| "loss": 0.055, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 19.41, | |
| "learning_rate": 2.9921259842519685e-06, | |
| "loss": 0.0432, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 19.44, | |
| "learning_rate": 2.8346456692913386e-06, | |
| "loss": 0.0523, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 19.47, | |
| "learning_rate": 2.6771653543307086e-06, | |
| "loss": 0.0872, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 19.5, | |
| "learning_rate": 2.5196850393700787e-06, | |
| "loss": 0.0491, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 19.53, | |
| "learning_rate": 2.3622047244094487e-06, | |
| "loss": 0.051, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 19.56, | |
| "learning_rate": 2.204724409448819e-06, | |
| "loss": 0.0503, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 19.59, | |
| "learning_rate": 2.047244094488189e-06, | |
| "loss": 0.0512, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 19.62, | |
| "learning_rate": 1.8897637795275591e-06, | |
| "loss": 0.0936, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 19.66, | |
| "learning_rate": 1.7322834645669292e-06, | |
| "loss": 0.0519, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 19.69, | |
| "learning_rate": 1.5748031496062992e-06, | |
| "loss": 0.0828, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 19.72, | |
| "learning_rate": 1.4173228346456693e-06, | |
| "loss": 0.0482, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 19.75, | |
| "learning_rate": 1.2598425196850393e-06, | |
| "loss": 0.0922, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 19.78, | |
| "learning_rate": 1.1023622047244094e-06, | |
| "loss": 0.0368, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 19.81, | |
| "learning_rate": 9.448818897637796e-07, | |
| "loss": 0.0563, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 19.84, | |
| "learning_rate": 7.874015748031496e-07, | |
| "loss": 0.053, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 19.88, | |
| "learning_rate": 6.299212598425197e-07, | |
| "loss": 0.0479, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 19.91, | |
| "learning_rate": 4.724409448818898e-07, | |
| "loss": 0.047, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 19.94, | |
| "learning_rate": 3.1496062992125984e-07, | |
| "loss": 0.0394, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 19.97, | |
| "learning_rate": 1.5748031496062992e-07, | |
| "loss": 0.0832, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.0538, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "step": 640, | |
| "total_flos": 2.981403046182912e+16, | |
| "train_loss": 0.3128850045846775, | |
| "train_runtime": 1040.957, | |
| "train_samples_per_second": 2.459, | |
| "train_steps_per_second": 0.615 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 640, | |
| "num_train_epochs": 20, | |
| "save_steps": 500, | |
| "total_flos": 2.981403046182912e+16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
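
The closing summary entry above records only aggregate run statistics (mean training loss, runtime, throughput), while the per-step entries carry `loss` and `learning_rate`. For readers who want to inspect the loss curve from this record, below is a minimal sketch of one common way to parse such a file; it assumes the log has been saved as plain JSON under the illustrative filename `trainer_state.json`, and it uses only keys that appear in the record above.

```python
# Minimal sketch: summarize the loss curve from a Trainer state log.
# Assumes the record above is saved as plain JSON in "trainer_state.json"
# (the filename is illustrative, not taken from this document).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries in log_history carry "loss"; the trailing summary
# entry does not, so filtering on the key separates the two cleanly.
steps = [e["step"] for e in state["log_history"] if "loss" in e]
losses = [e["loss"] for e in state["log_history"] if "loss" in e]

print(f"{len(steps)} logged steps over "
      f"{state['num_train_epochs']} epochs ({state['max_steps']} max steps)")
print(f"first logged loss: {losses[0]:.4f}, last: {losses[-1]:.4f}")

# The final log_history entry holds the aggregate run statistics.
summary = state["log_history"][-1]
print(f"mean training loss: {summary['train_loss']:.4f}")
print(f"runtime: {summary['train_runtime']:.1f}s")
```

From there, plotting `losses` against `steps` with any charting library gives the training curve; on this run it would show the loss settling from the ~0.05–0.13 range of the late epochs down to the final logged value of 0.0538 at step 640.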