{
  "best_metric": 0.9558704453441296,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-hotel_images_classifier_v5_10epocs/checkpoint-6940",
  "epoch": 9.989204749910039,
  "eval_steps": 500,
  "global_step": 6940,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 5.388793468475342,
      "learning_rate": 3.602305475504323e-07,
      "loss": 2.0168,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.493525981903076,
      "learning_rate": 7.204610951008646e-07,
      "loss": 1.9925,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.990756034851074,
      "learning_rate": 1.080691642651297e-06,
      "loss": 1.9873,
      "step": 15
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.94097900390625,
      "learning_rate": 1.4409221902017292e-06,
      "loss": 1.9782,
      "step": 20
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.246731281280518,
      "learning_rate": 1.8011527377521614e-06,
      "loss": 1.9525,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.839789867401123,
      "learning_rate": 2.161383285302594e-06,
      "loss": 1.9125,
      "step": 30
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.326338291168213,
      "learning_rate": 2.521613832853026e-06,
      "loss": 1.9043,
      "step": 35
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.707699298858643,
      "learning_rate": 2.8818443804034585e-06,
      "loss": 1.8724,
      "step": 40
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.742429256439209,
      "learning_rate": 3.2420749279538904e-06,
      "loss": 1.8261,
      "step": 45
    },
    {
      "epoch": 0.07,
      "grad_norm": 7.061827659606934,
      "learning_rate": 3.602305475504323e-06,
      "loss": 1.783,
      "step": 50
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.174450397491455,
      "learning_rate": 3.962536023054755e-06,
      "loss": 1.7679,
      "step": 55
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.2161641120910645,
      "learning_rate": 4.322766570605188e-06,
      "loss": 1.7214,
      "step": 60
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.96381950378418,
      "learning_rate": 4.68299711815562e-06,
      "loss": 1.6884,
      "step": 65
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.790141582489014,
      "learning_rate": 5.043227665706052e-06,
      "loss": 1.5961,
      "step": 70
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.271084308624268,
      "learning_rate": 5.403458213256484e-06,
      "loss": 1.5396,
      "step": 75
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.163028717041016,
      "learning_rate": 5.763688760806917e-06,
      "loss": 1.4907,
      "step": 80
    },
    {
      "epoch": 0.12,
      "grad_norm": 4.985466003417969,
      "learning_rate": 6.123919308357349e-06,
      "loss": 1.3996,
      "step": 85
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.5068745613098145,
      "learning_rate": 6.484149855907781e-06,
      "loss": 1.3632,
      "step": 90
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.985295295715332,
      "learning_rate": 6.844380403458213e-06,
      "loss": 1.2596,
      "step": 95
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.087879657745361,
      "learning_rate": 7.204610951008646e-06,
      "loss": 1.1991,
      "step": 100
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.104519844055176,
      "learning_rate": 7.564841498559078e-06,
      "loss": 1.1213,
      "step": 105
    },
    {
      "epoch": 0.16,
      "grad_norm": 7.174159049987793,
      "learning_rate": 7.92507204610951e-06,
      "loss": 1.0379,
      "step": 110
    },
    {
      "epoch": 0.17,
      "grad_norm": 7.0040788650512695,
      "learning_rate": 8.285302593659942e-06,
      "loss": 0.976,
      "step": 115
    },
    {
      "epoch": 0.17,
      "grad_norm": 6.505568981170654,
      "learning_rate": 8.645533141210376e-06,
      "loss": 0.9049,
      "step": 120
    },
    {
      "epoch": 0.18,
      "grad_norm": 5.980679988861084,
      "learning_rate": 9.005763688760807e-06,
      "loss": 0.8347,
      "step": 125
    },
    {
      "epoch": 0.19,
      "grad_norm": 7.585896968841553,
      "learning_rate": 9.36599423631124e-06,
      "loss": 0.7498,
      "step": 130
    },
    {
      "epoch": 0.19,
      "grad_norm": 10.884958267211914,
      "learning_rate": 9.726224783861671e-06,
      "loss": 0.7642,
      "step": 135
    },
    {
      "epoch": 0.2,
      "grad_norm": 6.63291072845459,
      "learning_rate": 1.0086455331412104e-05,
      "loss": 0.7189,
      "step": 140
    },
    {
      "epoch": 0.21,
      "grad_norm": 7.328841209411621,
      "learning_rate": 1.0446685878962537e-05,
      "loss": 0.6749,
      "step": 145
    },
    {
      "epoch": 0.22,
      "grad_norm": 6.733003616333008,
      "learning_rate": 1.0806916426512968e-05,
      "loss": 0.6278,
      "step": 150
    },
    {
      "epoch": 0.22,
      "grad_norm": 5.942975044250488,
      "learning_rate": 1.1167146974063401e-05,
      "loss": 0.6223,
      "step": 155
    },
    {
      "epoch": 0.23,
      "grad_norm": 7.776107311248779,
      "learning_rate": 1.1527377521613834e-05,
      "loss": 0.6424,
      "step": 160
    },
    {
      "epoch": 0.24,
      "grad_norm": 9.42983341217041,
      "learning_rate": 1.1887608069164267e-05,
      "loss": 0.6153,
      "step": 165
    },
    {
      "epoch": 0.24,
      "grad_norm": 7.377403259277344,
      "learning_rate": 1.2247838616714698e-05,
      "loss": 0.5434,
      "step": 170
    },
    {
      "epoch": 0.25,
      "grad_norm": 8.021675109863281,
      "learning_rate": 1.2608069164265129e-05,
      "loss": 0.5843,
      "step": 175
    },
    {
      "epoch": 0.26,
      "grad_norm": 9.733675003051758,
      "learning_rate": 1.2968299711815562e-05,
      "loss": 0.5932,
      "step": 180
    },
    {
      "epoch": 0.27,
      "grad_norm": 6.97348690032959,
      "learning_rate": 1.3328530259365996e-05,
      "loss": 0.5906,
      "step": 185
    },
    {
      "epoch": 0.27,
      "grad_norm": 7.889408111572266,
      "learning_rate": 1.3688760806916426e-05,
      "loss": 0.5389,
      "step": 190
    },
    {
      "epoch": 0.28,
      "grad_norm": 8.591092109680176,
      "learning_rate": 1.4048991354466859e-05,
      "loss": 0.5179,
      "step": 195
    },
    {
      "epoch": 0.29,
      "grad_norm": 11.209196090698242,
      "learning_rate": 1.4409221902017291e-05,
      "loss": 0.5233,
      "step": 200
    },
    {
      "epoch": 0.3,
      "grad_norm": 9.045553207397461,
      "learning_rate": 1.4769452449567724e-05,
      "loss": 0.5049,
      "step": 205
    },
    {
      "epoch": 0.3,
      "grad_norm": 6.595673561096191,
      "learning_rate": 1.5129682997118155e-05,
      "loss": 0.4618,
      "step": 210
    },
    {
      "epoch": 0.31,
      "grad_norm": 6.817091941833496,
      "learning_rate": 1.548991354466859e-05,
      "loss": 0.4885,
      "step": 215
    },
    {
      "epoch": 0.32,
      "grad_norm": 20.116256713867188,
      "learning_rate": 1.585014409221902e-05,
      "loss": 0.4788,
      "step": 220
    },
    {
      "epoch": 0.32,
      "grad_norm": 5.532088279724121,
      "learning_rate": 1.6210374639769456e-05,
      "loss": 0.5049,
      "step": 225
    },
    {
      "epoch": 0.33,
      "grad_norm": 8.998950004577637,
      "learning_rate": 1.6570605187319883e-05,
      "loss": 0.469,
      "step": 230
    },
    {
      "epoch": 0.34,
      "grad_norm": 8.597137451171875,
      "learning_rate": 1.6930835734870318e-05,
      "loss": 0.5467,
      "step": 235
    },
    {
      "epoch": 0.35,
      "grad_norm": 6.714593887329102,
      "learning_rate": 1.7291066282420752e-05,
      "loss": 0.4635,
      "step": 240
    },
    {
      "epoch": 0.35,
      "grad_norm": 7.317856788635254,
      "learning_rate": 1.765129682997118e-05,
      "loss": 0.4495,
      "step": 245
    },
    {
      "epoch": 0.36,
      "grad_norm": 7.8448591232299805,
      "learning_rate": 1.8011527377521615e-05,
      "loss": 0.4652,
      "step": 250
    },
    {
      "epoch": 0.37,
      "grad_norm": 7.0624213218688965,
      "learning_rate": 1.8371757925072046e-05,
      "loss": 0.4267,
      "step": 255
    },
    {
      "epoch": 0.37,
      "grad_norm": 8.130319595336914,
      "learning_rate": 1.873198847262248e-05,
      "loss": 0.5095,
      "step": 260
    },
    {
      "epoch": 0.38,
      "grad_norm": 12.399904251098633,
      "learning_rate": 1.909221902017291e-05,
      "loss": 0.4315,
      "step": 265
    },
    {
      "epoch": 0.39,
      "grad_norm": 8.656498908996582,
      "learning_rate": 1.9452449567723343e-05,
      "loss": 0.442,
      "step": 270
    },
    {
      "epoch": 0.4,
      "grad_norm": 9.993114471435547,
      "learning_rate": 1.9812680115273777e-05,
      "loss": 0.4471,
      "step": 275
    },
    {
      "epoch": 0.4,
      "grad_norm": 8.428305625915527,
      "learning_rate": 2.017291066282421e-05,
      "loss": 0.4121,
      "step": 280
    },
    {
      "epoch": 0.41,
      "grad_norm": 11.432868003845215,
      "learning_rate": 2.053314121037464e-05,
      "loss": 0.5125,
      "step": 285
    },
    {
      "epoch": 0.42,
      "grad_norm": 6.569263935089111,
      "learning_rate": 2.0893371757925074e-05,
      "loss": 0.4177,
      "step": 290
    },
    {
      "epoch": 0.42,
      "grad_norm": 6.448131084442139,
      "learning_rate": 2.1253602305475505e-05,
      "loss": 0.4183,
      "step": 295
    },
    {
      "epoch": 0.43,
      "grad_norm": 6.2162628173828125,
      "learning_rate": 2.1613832853025936e-05,
      "loss": 0.378,
      "step": 300
    },
    {
      "epoch": 0.44,
      "grad_norm": 15.349369049072266,
      "learning_rate": 2.197406340057637e-05,
      "loss": 0.3972,
      "step": 305
    },
    {
      "epoch": 0.45,
      "grad_norm": 11.309712409973145,
      "learning_rate": 2.2334293948126802e-05,
      "loss": 0.3721,
      "step": 310
    },
    {
      "epoch": 0.45,
      "grad_norm": 11.087018966674805,
      "learning_rate": 2.2694524495677236e-05,
      "loss": 0.4345,
      "step": 315
    },
    {
      "epoch": 0.46,
      "grad_norm": 8.34060001373291,
      "learning_rate": 2.3054755043227668e-05,
      "loss": 0.4085,
      "step": 320
    },
    {
      "epoch": 0.47,
      "grad_norm": 8.439334869384766,
      "learning_rate": 2.34149855907781e-05,
      "loss": 0.3855,
      "step": 325
    },
    {
      "epoch": 0.47,
      "grad_norm": 7.37805700302124,
      "learning_rate": 2.3775216138328533e-05,
      "loss": 0.4104,
      "step": 330
    },
    {
      "epoch": 0.48,
      "grad_norm": 10.912667274475098,
      "learning_rate": 2.413544668587896e-05,
      "loss": 0.4226,
      "step": 335
    },
    {
      "epoch": 0.49,
      "grad_norm": 8.15848159790039,
      "learning_rate": 2.4495677233429396e-05,
      "loss": 0.4045,
      "step": 340
    },
    {
      "epoch": 0.5,
      "grad_norm": 9.146815299987793,
      "learning_rate": 2.485590778097983e-05,
      "loss": 0.3754,
      "step": 345
    },
    {
      "epoch": 0.5,
      "grad_norm": 24.85068130493164,
      "learning_rate": 2.5216138328530258e-05,
      "loss": 0.3406,
      "step": 350
    },
    {
      "epoch": 0.51,
      "grad_norm": 9.068620681762695,
      "learning_rate": 2.5576368876080692e-05,
      "loss": 0.3673,
      "step": 355
    },
    {
      "epoch": 0.52,
      "grad_norm": 9.011393547058105,
      "learning_rate": 2.5936599423631124e-05,
      "loss": 0.3766,
      "step": 360
    },
    {
      "epoch": 0.53,
      "grad_norm": 7.080930233001709,
      "learning_rate": 2.6296829971181558e-05,
      "loss": 0.4243,
      "step": 365
    },
    {
      "epoch": 0.53,
      "grad_norm": 8.521806716918945,
      "learning_rate": 2.6657060518731993e-05,
      "loss": 0.3984,
      "step": 370
    },
    {
      "epoch": 0.54,
      "grad_norm": 8.133712768554688,
      "learning_rate": 2.7017291066282424e-05,
      "loss": 0.3957,
      "step": 375
    },
    {
      "epoch": 0.55,
      "grad_norm": 8.477057456970215,
      "learning_rate": 2.737752161383285e-05,
      "loss": 0.4256,
      "step": 380
    },
    {
      "epoch": 0.55,
      "grad_norm": 7.391529083251953,
      "learning_rate": 2.7737752161383286e-05,
      "loss": 0.3621,
      "step": 385
    },
    {
      "epoch": 0.56,
      "grad_norm": 7.98112154006958,
      "learning_rate": 2.8097982708933717e-05,
      "loss": 0.4113,
      "step": 390
    },
    {
      "epoch": 0.57,
      "grad_norm": 6.7378249168396,
      "learning_rate": 2.845821325648415e-05,
      "loss": 0.3808,
      "step": 395
    },
    {
      "epoch": 0.58,
      "grad_norm": 8.315476417541504,
      "learning_rate": 2.8818443804034583e-05,
      "loss": 0.4359,
      "step": 400
    },
    {
      "epoch": 0.58,
      "grad_norm": 4.960958480834961,
      "learning_rate": 2.9178674351585017e-05,
      "loss": 0.3533,
      "step": 405
    },
    {
      "epoch": 0.59,
      "grad_norm": 7.491067886352539,
      "learning_rate": 2.953890489913545e-05,
      "loss": 0.3839,
      "step": 410
    },
    {
      "epoch": 0.6,
      "grad_norm": 8.103771209716797,
      "learning_rate": 2.9899135446685883e-05,
      "loss": 0.3777,
      "step": 415
    },
    {
      "epoch": 0.6,
      "grad_norm": 7.958097457885742,
      "learning_rate": 3.025936599423631e-05,
      "loss": 0.3984,
      "step": 420
    },
    {
      "epoch": 0.61,
      "grad_norm": 9.805606842041016,
      "learning_rate": 3.0619596541786745e-05,
      "loss": 0.3952,
      "step": 425
    },
    {
      "epoch": 0.62,
      "grad_norm": 10.124835014343262,
      "learning_rate": 3.097982708933718e-05,
      "loss": 0.3892,
      "step": 430
    },
    {
      "epoch": 0.63,
      "grad_norm": 6.68670654296875,
      "learning_rate": 3.134005763688761e-05,
      "loss": 0.4251,
      "step": 435
    },
    {
      "epoch": 0.63,
      "grad_norm": 6.3270440101623535,
      "learning_rate": 3.170028818443804e-05,
      "loss": 0.4049,
      "step": 440
    },
    {
      "epoch": 0.64,
      "grad_norm": 5.105160236358643,
      "learning_rate": 3.206051873198848e-05,
      "loss": 0.3664,
      "step": 445
    },
    {
      "epoch": 0.65,
      "grad_norm": 6.3673601150512695,
      "learning_rate": 3.242074927953891e-05,
      "loss": 0.3747,
      "step": 450
    },
    {
      "epoch": 0.65,
      "grad_norm": 10.308377265930176,
      "learning_rate": 3.278097982708934e-05,
      "loss": 0.4098,
      "step": 455
    },
    {
      "epoch": 0.66,
      "grad_norm": 5.274810791015625,
      "learning_rate": 3.314121037463977e-05,
      "loss": 0.3722,
      "step": 460
    },
    {
      "epoch": 0.67,
      "grad_norm": 4.596935272216797,
      "learning_rate": 3.35014409221902e-05,
      "loss": 0.3155,
      "step": 465
    },
    {
      "epoch": 0.68,
      "grad_norm": 13.104801177978516,
      "learning_rate": 3.3861671469740636e-05,
      "loss": 0.3442,
      "step": 470
    },
    {
      "epoch": 0.68,
      "grad_norm": 7.388278484344482,
      "learning_rate": 3.422190201729107e-05,
      "loss": 0.3528,
      "step": 475
    },
    {
      "epoch": 0.69,
      "grad_norm": 5.649425029754639,
      "learning_rate": 3.4582132564841505e-05,
      "loss": 0.3161,
      "step": 480
    },
    {
      "epoch": 0.7,
      "grad_norm": 5.531047344207764,
      "learning_rate": 3.494236311239193e-05,
      "loss": 0.3465,
      "step": 485
    },
    {
      "epoch": 0.71,
      "grad_norm": 5.138969421386719,
      "learning_rate": 3.530259365994236e-05,
      "loss": 0.3395,
      "step": 490
    },
    {
      "epoch": 0.71,
      "grad_norm": 12.942193984985352,
      "learning_rate": 3.5662824207492795e-05,
      "loss": 0.3457,
      "step": 495
    },
    {
      "epoch": 0.72,
      "grad_norm": 5.338680744171143,
      "learning_rate": 3.602305475504323e-05,
      "loss": 0.3203,
      "step": 500
    },
    {
      "epoch": 0.73,
      "grad_norm": 6.598853588104248,
      "learning_rate": 3.6383285302593664e-05,
      "loss": 0.3691,
      "step": 505
    },
    {
      "epoch": 0.73,
      "grad_norm": 6.757806777954102,
      "learning_rate": 3.674351585014409e-05,
      "loss": 0.354,
      "step": 510
    },
    {
      "epoch": 0.74,
      "grad_norm": 6.43842077255249,
      "learning_rate": 3.7103746397694526e-05,
      "loss": 0.3492,
      "step": 515
    },
    {
      "epoch": 0.75,
      "grad_norm": 6.269893169403076,
      "learning_rate": 3.746397694524496e-05,
      "loss": 0.3861,
      "step": 520
    },
    {
      "epoch": 0.76,
      "grad_norm": 6.979896545410156,
      "learning_rate": 3.782420749279539e-05,
      "loss": 0.3423,
      "step": 525
    },
    {
      "epoch": 0.76,
      "grad_norm": 7.62592077255249,
      "learning_rate": 3.818443804034582e-05,
      "loss": 0.413,
      "step": 530
    },
    {
      "epoch": 0.77,
      "grad_norm": 6.566605567932129,
      "learning_rate": 3.854466858789626e-05,
      "loss": 0.414,
      "step": 535
    },
    {
      "epoch": 0.78,
      "grad_norm": 5.802229881286621,
      "learning_rate": 3.8904899135446685e-05,
      "loss": 0.336,
      "step": 540
    },
    {
      "epoch": 0.78,
      "grad_norm": 6.355484962463379,
      "learning_rate": 3.926512968299712e-05,
      "loss": 0.3425,
      "step": 545
    },
    {
      "epoch": 0.79,
      "grad_norm": 6.845495700836182,
      "learning_rate": 3.9625360230547554e-05,
      "loss": 0.3603,
      "step": 550
    },
    {
      "epoch": 0.8,
      "grad_norm": 5.654047966003418,
      "learning_rate": 3.998559077809799e-05,
      "loss": 0.3408,
      "step": 555
    },
    {
      "epoch": 0.81,
      "grad_norm": 6.1373467445373535,
      "learning_rate": 4.034582132564842e-05,
      "loss": 0.3588,
      "step": 560
    },
    {
      "epoch": 0.81,
      "grad_norm": 7.257894039154053,
      "learning_rate": 4.0706051873198844e-05,
      "loss": 0.3414,
      "step": 565
    },
    {
      "epoch": 0.82,
      "grad_norm": 9.20009994506836,
      "learning_rate": 4.106628242074928e-05,
      "loss": 0.3749,
      "step": 570
    },
    {
      "epoch": 0.83,
      "grad_norm": 8.215997695922852,
      "learning_rate": 4.1426512968299713e-05,
      "loss": 0.3673,
      "step": 575
    },
    {
      "epoch": 0.83,
      "grad_norm": 8.917034149169922,
      "learning_rate": 4.178674351585015e-05,
      "loss": 0.3804,
      "step": 580
    },
    {
      "epoch": 0.84,
      "grad_norm": 4.663518905639648,
      "learning_rate": 4.214697406340058e-05,
      "loss": 0.3454,
      "step": 585
    },
    {
      "epoch": 0.85,
      "grad_norm": 5.577089786529541,
      "learning_rate": 4.250720461095101e-05,
      "loss": 0.3176,
      "step": 590
    },
    {
      "epoch": 0.86,
      "grad_norm": 8.387089729309082,
      "learning_rate": 4.2867435158501445e-05,
      "loss": 0.3112,
      "step": 595
    },
    {
      "epoch": 0.86,
      "grad_norm": 5.613365650177002,
      "learning_rate": 4.322766570605187e-05,
      "loss": 0.3491,
      "step": 600
    },
    {
      "epoch": 0.87,
      "grad_norm": 6.961580753326416,
      "learning_rate": 4.358789625360231e-05,
      "loss": 0.3005,
      "step": 605
    },
    {
      "epoch": 0.88,
      "grad_norm": 5.606225967407227,
      "learning_rate": 4.394812680115274e-05,
      "loss": 0.2919,
      "step": 610
    },
    {
      "epoch": 0.89,
      "grad_norm": 5.59161376953125,
      "learning_rate": 4.430835734870317e-05,
      "loss": 0.3206,
      "step": 615
    },
    {
      "epoch": 0.89,
      "grad_norm": 5.032812595367432,
      "learning_rate": 4.4668587896253604e-05,
      "loss": 0.3447,
      "step": 620
    },
    {
      "epoch": 0.9,
      "grad_norm": 6.477130889892578,
      "learning_rate": 4.502881844380404e-05,
      "loss": 0.2768,
      "step": 625
    },
    {
      "epoch": 0.91,
      "grad_norm": 6.052380084991455,
      "learning_rate": 4.538904899135447e-05,
      "loss": 0.3161,
      "step": 630
    },
    {
      "epoch": 0.91,
      "grad_norm": 6.129565238952637,
      "learning_rate": 4.57492795389049e-05,
      "loss": 0.2634,
      "step": 635
    },
    {
      "epoch": 0.92,
      "grad_norm": 7.382906436920166,
      "learning_rate": 4.6109510086455335e-05,
      "loss": 0.3402,
      "step": 640
    },
    {
      "epoch": 0.93,
      "grad_norm": 6.6154375076293945,
      "learning_rate": 4.646974063400576e-05,
      "loss": 0.2461,
      "step": 645
    },
    {
      "epoch": 0.94,
      "grad_norm": 4.32603645324707,
      "learning_rate": 4.68299711815562e-05,
      "loss": 0.2847,
      "step": 650
    },
    {
      "epoch": 0.94,
      "grad_norm": 5.073220252990723,
      "learning_rate": 4.719020172910663e-05,
      "loss": 0.3306,
      "step": 655
    },
    {
      "epoch": 0.95,
      "grad_norm": 6.942915916442871,
      "learning_rate": 4.7550432276657067e-05,
      "loss": 0.3361,
      "step": 660
    },
    {
      "epoch": 0.96,
      "grad_norm": 5.637122631072998,
      "learning_rate": 4.7910662824207494e-05,
      "loss": 0.3489,
      "step": 665
    },
    {
      "epoch": 0.96,
      "grad_norm": 5.205631256103516,
      "learning_rate": 4.827089337175792e-05,
      "loss": 0.3484,
      "step": 670
    },
    {
      "epoch": 0.97,
      "grad_norm": 8.950785636901855,
      "learning_rate": 4.8631123919308357e-05,
      "loss": 0.3296,
      "step": 675
    },
    {
      "epoch": 0.98,
      "grad_norm": 5.128692150115967,
      "learning_rate": 4.899135446685879e-05,
      "loss": 0.3583,
      "step": 680
    },
    {
      "epoch": 0.99,
      "grad_norm": 3.9243364334106445,
      "learning_rate": 4.9351585014409226e-05,
      "loss": 0.346,
      "step": 685
    },
    {
      "epoch": 0.99,
      "grad_norm": 6.407800197601318,
      "learning_rate": 4.971181556195966e-05,
      "loss": 0.3795,
      "step": 690
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.932591093117409,
      "eval_loss": 0.19223400950431824,
      "eval_runtime": 27.08,
      "eval_samples_per_second": 364.844,
      "eval_steps_per_second": 11.411,
      "step": 694
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.62927770614624,
      "learning_rate": 4.99919948767211e-05,
      "loss": 0.3346,
      "step": 695
    },
    {
      "epoch": 1.01,
      "grad_norm": 4.929562568664551,
      "learning_rate": 4.995196926032661e-05,
      "loss": 0.3193,
      "step": 700
    },
    {
      "epoch": 1.01,
      "grad_norm": 5.248744964599609,
      "learning_rate": 4.9911943643932116e-05,
      "loss": 0.3038,
      "step": 705
    },
    {
      "epoch": 1.02,
      "grad_norm": 5.355862140655518,
      "learning_rate": 4.9871918027537626e-05,
      "loss": 0.3633,
      "step": 710
    },
    {
      "epoch": 1.03,
      "grad_norm": 6.854907989501953,
      "learning_rate": 4.9831892411143136e-05,
      "loss": 0.3343,
      "step": 715
    },
    {
      "epoch": 1.04,
      "grad_norm": 4.099529266357422,
      "learning_rate": 4.979186679474864e-05,
      "loss": 0.3242,
      "step": 720
    },
    {
      "epoch": 1.04,
      "grad_norm": 4.553044319152832,
      "learning_rate": 4.975184117835415e-05,
      "loss": 0.3036,
      "step": 725
    },
    {
      "epoch": 1.05,
      "grad_norm": 5.2062602043151855,
      "learning_rate": 4.971181556195966e-05,
      "loss": 0.3119,
      "step": 730
    },
    {
      "epoch": 1.06,
      "grad_norm": 6.551631450653076,
      "learning_rate": 4.9671789945565164e-05,
      "loss": 0.3169,
      "step": 735
    },
    {
      "epoch": 1.07,
      "grad_norm": 4.4297637939453125,
      "learning_rate": 4.9631764329170674e-05,
      "loss": 0.3358,
      "step": 740
    },
    {
      "epoch": 1.07,
      "grad_norm": 4.483184814453125,
      "learning_rate": 4.9591738712776184e-05,
      "loss": 0.2771,
      "step": 745
    },
    {
      "epoch": 1.08,
      "grad_norm": 4.935523986816406,
      "learning_rate": 4.955171309638169e-05,
      "loss": 0.2786,
      "step": 750
    },
    {
      "epoch": 1.09,
      "grad_norm": 8.67397403717041,
      "learning_rate": 4.95116874799872e-05,
      "loss": 0.302,
      "step": 755
    },
    {
      "epoch": 1.09,
      "grad_norm": 5.298563480377197,
      "learning_rate": 4.94716618635927e-05,
      "loss": 0.317,
      "step": 760
    },
    {
      "epoch": 1.1,
      "grad_norm": 4.321622371673584,
      "learning_rate": 4.943163624719821e-05,
      "loss": 0.3213,
      "step": 765
    },
    {
      "epoch": 1.11,
      "grad_norm": 5.290216445922852,
      "learning_rate": 4.9391610630803715e-05,
      "loss": 0.3334,
      "step": 770
    },
    {
      "epoch": 1.12,
      "grad_norm": 5.450775146484375,
      "learning_rate": 4.9351585014409226e-05,
      "loss": 0.319,
      "step": 775
    },
    {
      "epoch": 1.12,
      "grad_norm": 5.571349620819092,
      "learning_rate": 4.9311559398014736e-05,
      "loss": 0.27,
      "step": 780
    },
    {
      "epoch": 1.13,
      "grad_norm": 5.267562389373779,
      "learning_rate": 4.927153378162024e-05,
      "loss": 0.3218,
      "step": 785
    },
    {
      "epoch": 1.14,
      "grad_norm": 3.8232898712158203,
      "learning_rate": 4.923150816522575e-05,
      "loss": 0.2955,
      "step": 790
    },
    {
      "epoch": 1.14,
      "grad_norm": 5.099688529968262,
      "learning_rate": 4.919148254883125e-05,
      "loss": 0.3217,
      "step": 795
    },
    {
      "epoch": 1.15,
      "grad_norm": 3.9266700744628906,
      "learning_rate": 4.9151456932436764e-05,
      "loss": 0.3177,
      "step": 800
    },
    {
      "epoch": 1.16,
      "grad_norm": 5.140632629394531,
      "learning_rate": 4.911143131604227e-05,
      "loss": 0.3068,
      "step": 805
    },
    {
      "epoch": 1.17,
      "grad_norm": 4.642924785614014,
      "learning_rate": 4.907140569964778e-05,
      "loss": 0.2502,
      "step": 810
    },
    {
      "epoch": 1.17,
      "grad_norm": 3.127683401107788,
      "learning_rate": 4.903138008325329e-05,
      "loss": 0.3102,
      "step": 815
    },
    {
      "epoch": 1.18,
      "grad_norm": 5.013187408447266,
      "learning_rate": 4.899135446685879e-05,
      "loss": 0.312,
      "step": 820
    },
    {
      "epoch": 1.19,
      "grad_norm": 5.893740177154541,
      "learning_rate": 4.89513288504643e-05,
      "loss": 0.3123,
      "step": 825
    },
    {
      "epoch": 1.19,
      "grad_norm": 4.198624610900879,
      "learning_rate": 4.8911303234069805e-05,
      "loss": 0.2843,
      "step": 830
    },
    {
      "epoch": 1.2,
      "grad_norm": 5.550350666046143,
      "learning_rate": 4.8871277617675315e-05,
      "loss": 0.3515,
      "step": 835
    },
    {
      "epoch": 1.21,
      "grad_norm": 4.0659966468811035,
      "learning_rate": 4.883125200128082e-05,
      "loss": 0.2849,
      "step": 840
    },
    {
      "epoch": 1.22,
      "grad_norm": 4.937682628631592,
      "learning_rate": 4.879122638488633e-05,
      "loss": 0.3533,
      "step": 845
    },
    {
      "epoch": 1.22,
      "grad_norm": 4.22144079208374,
      "learning_rate": 4.875120076849184e-05,
      "loss": 0.3136,
      "step": 850
    },
    {
      "epoch": 1.23,
      "grad_norm": 5.999542236328125,
      "learning_rate": 4.871117515209734e-05,
      "loss": 0.2703,
      "step": 855
    },
    {
      "epoch": 1.24,
      "grad_norm": 6.5856828689575195,
      "learning_rate": 4.867114953570285e-05,
      "loss": 0.2894,
      "step": 860
    },
    {
      "epoch": 1.25,
      "grad_norm": 3.6321206092834473,
      "learning_rate": 4.8631123919308357e-05,
      "loss": 0.2804,
      "step": 865
    },
    {
      "epoch": 1.25,
      "grad_norm": 4.104198455810547,
      "learning_rate": 4.859109830291387e-05,
      "loss": 0.2767,
      "step": 870
    },
    {
      "epoch": 1.26,
      "grad_norm": 5.859384059906006,
      "learning_rate": 4.855107268651937e-05,
      "loss": 0.2994,
      "step": 875
    },
    {
      "epoch": 1.27,
      "grad_norm": 6.007699966430664,
      "learning_rate": 4.851104707012488e-05,
      "loss": 0.3492,
      "step": 880
    },
    {
      "epoch": 1.27,
      "grad_norm": 4.246580123901367,
      "learning_rate": 4.847102145373039e-05,
      "loss": 0.2911,
      "step": 885
    },
    {
      "epoch": 1.28,
      "grad_norm": 4.882290840148926,
      "learning_rate": 4.8430995837335894e-05,
      "loss": 0.3154,
      "step": 890
    },
    {
      "epoch": 1.29,
      "grad_norm": 4.810091018676758,
      "learning_rate": 4.8390970220941405e-05,
      "loss": 0.2723,
      "step": 895
    },
    {
      "epoch": 1.3,
      "grad_norm": 5.89586877822876,
      "learning_rate": 4.835094460454691e-05,
      "loss": 0.3139,
      "step": 900
    },
    {
      "epoch": 1.3,
      "grad_norm": 4.357601642608643,
      "learning_rate": 4.831091898815242e-05,
      "loss": 0.2872,
      "step": 905
    },
    {
      "epoch": 1.31,
      "grad_norm": 4.09877347946167,
      "learning_rate": 4.827089337175792e-05,
      "loss": 0.2555,
      "step": 910
    },
    {
      "epoch": 1.32,
      "grad_norm": 4.7353925704956055,
      "learning_rate": 4.823086775536343e-05,
      "loss": 0.35,
      "step": 915
    },
    {
      "epoch": 1.32,
      "grad_norm": 5.126601219177246,
      "learning_rate": 4.819084213896894e-05,
      "loss": 0.2799,
      "step": 920
    },
    {
      "epoch": 1.33,
      "grad_norm": 5.320415496826172,
      "learning_rate": 4.8150816522574446e-05,
      "loss": 0.2905,
      "step": 925
    },
    {
      "epoch": 1.34,
      "grad_norm": 5.826961040496826,
      "learning_rate": 4.8110790906179956e-05,
      "loss": 0.2916,
      "step": 930
    },
    {
      "epoch": 1.35,
      "grad_norm": 4.833921432495117,
      "learning_rate": 4.807076528978546e-05,
      "loss": 0.324,
      "step": 935
    },
    {
      "epoch": 1.35,
      "grad_norm": 5.386748313903809,
      "learning_rate": 4.803073967339097e-05,
      "loss": 0.3005,
      "step": 940
    },
    {
      "epoch": 1.36,
      "grad_norm": 4.9491424560546875,
      "learning_rate": 4.799071405699648e-05,
      "loss": 0.2901,
      "step": 945
    },
    {
      "epoch": 1.37,
      "grad_norm": 4.132954120635986,
      "learning_rate": 4.7950688440601984e-05,
      "loss": 0.278,
      "step": 950
    },
    {
      "epoch": 1.37,
      "grad_norm": 5.6594319343566895,
      "learning_rate": 4.7910662824207494e-05,
      "loss": 0.3134,
      "step": 955
    },
    {
      "epoch": 1.38,
      "grad_norm": 5.512797832489014,
      "learning_rate": 4.7870637207813005e-05,
      "loss": 0.2781,
      "step": 960
    },
    {
      "epoch": 1.39,
      "grad_norm": 5.356082916259766,
      "learning_rate": 4.7830611591418515e-05,
      "loss": 0.2965,
      "step": 965
    },
    {
      "epoch": 1.4,
      "grad_norm": 4.4097113609313965,
      "learning_rate": 4.779058597502402e-05,
      "loss": 0.2756,
      "step": 970
    },
    {
      "epoch": 1.4,
      "grad_norm": 5.423006534576416,
      "learning_rate": 4.775056035862953e-05,
      "loss": 0.3404,
      "step": 975
    },
    {
      "epoch": 1.41,
      "grad_norm": 4.108374118804932,
      "learning_rate": 4.771053474223503e-05,
      "loss": 0.2822,
      "step": 980
    },
    {
      "epoch": 1.42,
      "grad_norm": 5.638019561767578,
      "learning_rate": 4.767050912584054e-05,
      "loss": 0.2729,
      "step": 985
    },
    {
      "epoch": 1.42,
      "grad_norm": 4.037389278411865,
      "learning_rate": 4.763048350944605e-05,
      "loss": 0.2832,
      "step": 990
    },
    {
      "epoch": 1.43,
      "grad_norm": 5.815566539764404,
      "learning_rate": 4.7590457893051556e-05,
      "loss": 0.2859,
      "step": 995
    },
    {
      "epoch": 1.44,
      "grad_norm": 4.1045732498168945,
      "learning_rate": 4.7550432276657067e-05,
      "loss": 0.2734,
      "step": 1000
    },
    {
      "epoch": 1.45,
      "grad_norm": 7.296690464019775,
      "learning_rate": 4.751040666026257e-05,
      "loss": 0.3211,
      "step": 1005
    },
    {
      "epoch": 1.45,
      "grad_norm": 8.276504516601562,
      "learning_rate": 4.747038104386808e-05,
      "loss": 0.2964,
      "step": 1010
    },
    {
      "epoch": 1.46,
      "grad_norm": 5.901710510253906,
      "learning_rate": 4.7430355427473584e-05,
      "loss": 0.318,
      "step": 1015
    },
    {
      "epoch": 1.47,
      "grad_norm": 5.140340328216553,
      "learning_rate": 4.7390329811079094e-05,
      "loss": 0.3337,
      "step": 1020
    },
    {
      "epoch": 1.48,
      "grad_norm": 5.367193222045898,
      "learning_rate": 4.7350304194684604e-05,
      "loss": 0.2844,
      "step": 1025
    },
    {
      "epoch": 1.48,
      "grad_norm": 5.234673500061035,
      "learning_rate": 4.731027857829011e-05,
      "loss": 0.2676,
      "step": 1030
    },
    {
      "epoch": 1.49,
      "grad_norm": 4.406520366668701,
      "learning_rate": 4.727025296189562e-05,
      "loss": 0.2394,
      "step": 1035
    },
    {
      "epoch": 1.5,
      "grad_norm": 5.445221900939941,
      "learning_rate": 4.723022734550112e-05,
      "loss": 0.277,
      "step": 1040
    },
    {
      "epoch": 1.5,
      "grad_norm": 5.237115383148193,
      "learning_rate": 4.719020172910663e-05,
      "loss": 0.3117,
      "step": 1045
    },
    {
      "epoch": 1.51,
      "grad_norm": 4.234028339385986,
      "learning_rate": 4.7150176112712136e-05,
      "loss": 0.2931,
      "step": 1050
    },
    {
      "epoch": 1.52,
      "grad_norm": 4.2522430419921875,
      "learning_rate": 4.7110150496317646e-05,
      "loss": 0.3156,
      "step": 1055
    },
    {
      "epoch": 1.53,
      "grad_norm": 6.036770820617676,
      "learning_rate": 4.7070124879923156e-05,
      "loss": 0.2222,
      "step": 1060
    },
    {
      "epoch": 1.53,
      "grad_norm": 5.12794303894043,
      "learning_rate": 4.703009926352866e-05,
      "loss": 0.3077,
      "step": 1065
    },
    {
      "epoch": 1.54,
      "grad_norm": 4.9581756591796875,
      "learning_rate": 4.699007364713417e-05,
      "loss": 0.2537,
      "step": 1070
    },
    {
      "epoch": 1.55,
      "grad_norm": 4.344903945922852,
      "learning_rate": 4.6950048030739673e-05,
      "loss": 0.3009,
      "step": 1075
    },
    {
      "epoch": 1.55,
      "grad_norm": 3.9201972484588623,
      "learning_rate": 4.6910022414345184e-05,
      "loss": 0.2605,
      "step": 1080
    },
    {
      "epoch": 1.56,
      "grad_norm": 4.709476947784424,
      "learning_rate": 4.686999679795069e-05,
      "loss": 0.3037,
      "step": 1085
    },
    {
      "epoch": 1.57,
      "grad_norm": 5.150713920593262,
      "learning_rate": 4.68299711815562e-05,
      "loss": 0.258,
      "step": 1090
    },
    {
      "epoch": 1.58,
      "grad_norm": 2.914226531982422,
      "learning_rate": 4.678994556516171e-05,
      "loss": 0.3184,
      "step": 1095
    },
    {
      "epoch": 1.58,
      "grad_norm": 3.4970335960388184,
      "learning_rate": 4.674991994876721e-05,
      "loss": 0.2623,
      "step": 1100
    },
    {
      "epoch": 1.59,
      "grad_norm": 5.648881912231445,
      "learning_rate": 4.670989433237272e-05,
      "loss": 0.2897,
      "step": 1105
    },
    {
      "epoch": 1.6,
      "grad_norm": 5.417716026306152,
      "learning_rate": 4.6669868715978225e-05,
      "loss": 0.2765,
      "step": 1110
    },
    {
      "epoch": 1.6,
      "grad_norm": 5.414010524749756,
      "learning_rate": 4.6629843099583735e-05,
      "loss": 0.2596,
      "step": 1115
    },
    {
      "epoch": 1.61,
      "grad_norm": 6.044466972351074,
      "learning_rate": 4.658981748318924e-05,
      "loss": 0.2846,
      "step": 1120
    },
    {
      "epoch": 1.62,
      "grad_norm": 4.162852764129639,
      "learning_rate": 4.654979186679475e-05,
      "loss": 0.2756,
      "step": 1125
    },
    {
      "epoch": 1.63,
      "grad_norm": 4.842267036437988,
      "learning_rate": 4.650976625040026e-05,
      "loss": 0.2836,
      "step": 1130
    },
    {
      "epoch": 1.63,
      "grad_norm": 5.485855579376221,
      "learning_rate": 4.646974063400576e-05,
      "loss": 0.3205,
      "step": 1135
    },
    {
      "epoch": 1.64,
      "grad_norm": 3.7354228496551514,
      "learning_rate": 4.642971501761127e-05,
      "loss": 0.2127,
      "step": 1140
    },
    {
      "epoch": 1.65,
      "grad_norm": 5.490901470184326,
      "learning_rate": 4.638968940121678e-05,
      "loss": 0.3052,
      "step": 1145
    },
    {
      "epoch": 1.66,
      "grad_norm": 5.564297676086426,
      "learning_rate": 4.634966378482229e-05,
      "loss": 0.2564,
      "step": 1150
    },
    {
      "epoch": 1.66,
      "grad_norm": 8.057859420776367,
      "learning_rate": 4.630963816842779e-05,
      "loss": 0.2581,
      "step": 1155
    },
    {
      "epoch": 1.67,
      "grad_norm": 7.346789836883545,
      "learning_rate": 4.62696125520333e-05,
      "loss": 0.2802,
      "step": 1160
    },
    {
      "epoch": 1.68,
      "grad_norm": 6.667150020599365,
      "learning_rate": 4.622958693563881e-05,
      "loss": 0.2845,
      "step": 1165
    },
    {
      "epoch": 1.68,
      "grad_norm": 5.178923606872559,
      "learning_rate": 4.6189561319244315e-05,
      "loss": 0.2948,
      "step": 1170
    },
    {
      "epoch": 1.69,
      "grad_norm": 4.015057563781738,
      "learning_rate": 4.6149535702849825e-05,
      "loss": 0.3022,
      "step": 1175
    },
    {
      "epoch": 1.7,
      "grad_norm": 3.952420234680176,
      "learning_rate": 4.6109510086455335e-05,
      "loss": 0.2521,
      "step": 1180
    },
    {
      "epoch": 1.71,
      "grad_norm": 4.373567581176758,
      "learning_rate": 4.606948447006084e-05,
      "loss": 0.2505,
      "step": 1185
    },
    {
      "epoch": 1.71,
      "grad_norm": 3.232614040374756,
      "learning_rate": 4.602945885366635e-05,
      "loss": 0.2713,
      "step": 1190
    },
    {
      "epoch": 1.72,
      "grad_norm": 3.8914895057678223,
      "learning_rate": 4.598943323727186e-05,
      "loss": 0.2547,
      "step": 1195
    },
    {
      "epoch": 1.73,
      "grad_norm": 5.603899002075195,
      "learning_rate": 4.594940762087736e-05,
      "loss": 0.282,
      "step": 1200
    },
    {
      "epoch": 1.73,
      "grad_norm": 3.351109743118286,
      "learning_rate": 4.590938200448287e-05,
      "loss": 0.2214,
      "step": 1205
    },
    {
      "epoch": 1.74,
      "grad_norm": 5.184418201446533,
      "learning_rate": 4.586935638808838e-05,
      "loss": 0.298,
      "step": 1210
    },
    {
      "epoch": 1.75,
      "grad_norm": 5.310287952423096,
      "learning_rate": 4.582933077169389e-05,
      "loss": 0.2825,
      "step": 1215
    },
    {
      "epoch": 1.76,
      "grad_norm": 5.883835792541504,
      "learning_rate": 4.57893051552994e-05,
      "loss": 0.3257,
      "step": 1220
    },
    {
      "epoch": 1.76,
      "grad_norm": 3.4902472496032715,
      "learning_rate": 4.57492795389049e-05,
      "loss": 0.2691,
      "step": 1225
    },
    {
      "epoch": 1.77,
      "grad_norm": 3.718014717102051,
      "learning_rate": 4.570925392251041e-05,
      "loss": 0.2918,
      "step": 1230
    },
    {
      "epoch": 1.78,
      "grad_norm": 5.416603088378906,
      "learning_rate": 4.566922830611592e-05,
      "loss": 0.2776,
      "step": 1235
    },
    {
      "epoch": 1.78,
      "grad_norm": 5.364027500152588,
      "learning_rate": 4.5629202689721425e-05,
      "loss": 0.2804,
      "step": 1240
    },
    {
      "epoch": 1.79,
      "grad_norm": 5.734582424163818,
      "learning_rate": 4.5589177073326935e-05,
      "loss": 0.265,
      "step": 1245
    },
    {
      "epoch": 1.8,
      "grad_norm": 3.5217783451080322,
      "learning_rate": 4.554915145693244e-05,
      "loss": 0.2499,
      "step": 1250
    },
    {
      "epoch": 1.81,
      "grad_norm": 4.218166351318359,
      "learning_rate": 4.550912584053795e-05,
      "loss": 0.292,
      "step": 1255
    },
    {
      "epoch": 1.81,
      "grad_norm": 4.511210918426514,
      "learning_rate": 4.546910022414345e-05,
      "loss": 0.2325,
      "step": 1260
    },
    {
      "epoch": 1.82,
      "grad_norm": 4.017195224761963,
      "learning_rate": 4.542907460774896e-05,
      "loss": 0.2584,
      "step": 1265
    },
    {
      "epoch": 1.83,
      "grad_norm": 3.4999618530273438,
      "learning_rate": 4.538904899135447e-05,
      "loss": 0.3007,
      "step": 1270
    },
    {
      "epoch": 1.84,
      "grad_norm": 5.2072248458862305,
      "learning_rate": 4.5349023374959976e-05,
      "loss": 0.2659,
      "step": 1275
    },
    {
      "epoch": 1.84,
      "grad_norm": 4.940614223480225,
      "learning_rate": 4.530899775856549e-05,
      "loss": 0.282,
      "step": 1280
    },
    {
      "epoch": 1.85,
      "grad_norm": 3.722916603088379,
      "learning_rate": 4.526897214217099e-05,
      "loss": 0.2555,
      "step": 1285
    },
    {
      "epoch": 1.86,
      "grad_norm": 4.483854293823242,
      "learning_rate": 4.52289465257765e-05,
      "loss": 0.2594,
      "step": 1290
    },
    {
      "epoch": 1.86,
      "grad_norm": 4.568425178527832,
      "learning_rate": 4.5188920909382004e-05,
      "loss": 0.262,
      "step": 1295
    },
    {
      "epoch": 1.87,
      "grad_norm": 4.76126766204834,
      "learning_rate": 4.5148895292987514e-05,
      "loss": 0.2645,
      "step": 1300
    },
    {
      "epoch": 1.88,
      "grad_norm": 4.418682098388672,
      "learning_rate": 4.5108869676593025e-05,
      "loss": 0.268,
      "step": 1305
    },
    {
      "epoch": 1.89,
      "grad_norm": 3.255800485610962,
      "learning_rate": 4.506884406019853e-05,
      "loss": 0.2665,
      "step": 1310
    },
    {
      "epoch": 1.89,
      "grad_norm": 4.66806697845459,
      "learning_rate": 4.502881844380404e-05,
      "loss": 0.2965,
      "step": 1315
    },
    {
      "epoch": 1.9,
      "grad_norm": 5.117862701416016,
      "learning_rate": 4.498879282740954e-05,
      "loss": 0.2423,
      "step": 1320
    },
    {
      "epoch": 1.91,
      "grad_norm": 3.500521421432495,
      "learning_rate": 4.494876721101505e-05,
      "loss": 0.2682,
      "step": 1325
    },
    {
      "epoch": 1.91,
      "grad_norm": 3.541145086288452,
      "learning_rate": 4.4908741594620556e-05,
      "loss": 0.2676,
      "step": 1330
    },
    {
      "epoch": 1.92,
      "grad_norm": 5.114723205566406,
      "learning_rate": 4.4868715978226066e-05,
      "loss": 0.214,
      "step": 1335
    },
    {
      "epoch": 1.93,
      "grad_norm": 3.2529759407043457,
      "learning_rate": 4.4828690361831576e-05,
      "loss": 0.2757,
      "step": 1340
    },
    {
      "epoch": 1.94,
      "grad_norm": 4.737575531005859,
      "learning_rate": 4.478866474543708e-05,
      "loss": 0.2541,
      "step": 1345
    },
    {
      "epoch": 1.94,
      "grad_norm": 3.7248144149780273,
      "learning_rate": 4.474863912904259e-05,
      "loss": 0.2837,
      "step": 1350
    },
    {
      "epoch": 1.95,
      "grad_norm": 6.532453536987305,
      "learning_rate": 4.4708613512648094e-05,
      "loss": 0.2667,
      "step": 1355
    },
    {
      "epoch": 1.96,
      "grad_norm": 4.162599563598633,
      "learning_rate": 4.4668587896253604e-05,
      "loss": 0.3142,
      "step": 1360
    },
    {
      "epoch": 1.96,
      "grad_norm": 4.6284027099609375,
      "learning_rate": 4.462856227985911e-05,
      "loss": 0.2566,
      "step": 1365
    },
    {
      "epoch": 1.97,
      "grad_norm": 3.6103484630584717,
      "learning_rate": 4.458853666346462e-05,
      "loss": 0.2723,
      "step": 1370
    },
    {
      "epoch": 1.98,
      "grad_norm": 3.301093816757202,
      "learning_rate": 4.454851104707013e-05,
      "loss": 0.2213,
      "step": 1375
    },
    {
      "epoch": 1.99,
      "grad_norm": 4.462352752685547,
      "learning_rate": 4.450848543067563e-05,
      "loss": 0.2655,
      "step": 1380
    },
    {
      "epoch": 1.99,
      "grad_norm": 3.400181770324707,
      "learning_rate": 4.446845981428114e-05,
      "loss": 0.261,
      "step": 1385
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.933502024291498,
      "eval_loss": 0.18496161699295044,
      "eval_runtime": 26.9431,
      "eval_samples_per_second": 366.698,
      "eval_steps_per_second": 11.469,
      "step": 1389
    },
    {
      "epoch": 2.0,
      "grad_norm": 4.32210111618042,
      "learning_rate": 4.4428434197886645e-05,
      "loss": 0.2947,
      "step": 1390
    },
    {
      "epoch": 2.01,
      "grad_norm": 3.8557567596435547,
      "learning_rate": 4.4388408581492156e-05,
      "loss": 0.2373,
      "step": 1395
    },
    {
      "epoch": 2.02,
      "grad_norm": 4.735769748687744,
      "learning_rate": 4.434838296509766e-05,
      "loss": 0.2476,
      "step": 1400
    },
    {
      "epoch": 2.02,
      "grad_norm": 4.38801383972168,
      "learning_rate": 4.430835734870317e-05,
      "loss": 0.2279,
      "step": 1405
    },
    {
      "epoch": 2.03,
      "grad_norm": 4.940765857696533,
      "learning_rate": 4.426833173230868e-05,
      "loss": 0.2529,
      "step": 1410
    },
    {
      "epoch": 2.04,
      "grad_norm": 4.5427045822143555,
      "learning_rate": 4.422830611591418e-05,
      "loss": 0.2499,
      "step": 1415
    },
    {
      "epoch": 2.04,
      "grad_norm": 4.766185760498047,
      "learning_rate": 4.4188280499519693e-05,
      "loss": 0.2052,
      "step": 1420
    },
    {
      "epoch": 2.05,
      "grad_norm": 4.980799674987793,
      "learning_rate": 4.4148254883125204e-05,
      "loss": 0.2676,
      "step": 1425
    },
    {
      "epoch": 2.06,
      "grad_norm": 3.4634597301483154,
      "learning_rate": 4.4108229266730714e-05,
      "loss": 0.2417,
      "step": 1430
    },
    {
      "epoch": 2.07,
      "grad_norm": 3.6540205478668213,
      "learning_rate": 4.406820365033622e-05,
      "loss": 0.2876,
      "step": 1435
    },
    {
      "epoch": 2.07,
      "grad_norm": 3.4874844551086426,
      "learning_rate": 4.402817803394173e-05,
      "loss": 0.2635,
      "step": 1440
    },
    {
      "epoch": 2.08,
      "grad_norm": 4.6576457023620605,
      "learning_rate": 4.398815241754724e-05,
      "loss": 0.3051,
      "step": 1445
    },
    {
      "epoch": 2.09,
      "grad_norm": 4.110597610473633,
      "learning_rate": 4.394812680115274e-05,
      "loss": 0.2595,
      "step": 1450
    },
    {
      "epoch": 2.09,
      "grad_norm": 4.409259796142578,
      "learning_rate": 4.390810118475825e-05,
      "loss": 0.2099,
      "step": 1455
    },
    {
      "epoch": 2.1,
      "grad_norm": 3.2214550971984863,
      "learning_rate": 4.3868075568363755e-05,
      "loss": 0.2627,
      "step": 1460
    },
    {
      "epoch": 2.11,
      "grad_norm": 4.47964334487915,
      "learning_rate": 4.3828049951969266e-05,
      "loss": 0.3545,
      "step": 1465
    },
    {
      "epoch": 2.12,
      "grad_norm": 4.1163105964660645,
      "learning_rate": 4.378802433557477e-05,
      "loss": 0.2459,
      "step": 1470
    },
    {
      "epoch": 2.12,
      "grad_norm": 3.891965866088867,
      "learning_rate": 4.374799871918028e-05,
      "loss": 0.2431,
      "step": 1475
    },
    {
      "epoch": 2.13,
      "grad_norm": 3.4245457649230957,
      "learning_rate": 4.370797310278579e-05,
      "loss": 0.2511,
      "step": 1480
    },
    {
      "epoch": 2.14,
      "grad_norm": 6.8035807609558105,
      "learning_rate": 4.366794748639129e-05,
      "loss": 0.2619,
      "step": 1485
    },
    {
      "epoch": 2.14,
      "grad_norm": 5.102532386779785,
      "learning_rate": 4.3627921869996804e-05,
      "loss": 0.2707,
      "step": 1490
    },
    {
      "epoch": 2.15,
      "grad_norm": 3.1594481468200684,
      "learning_rate": 4.358789625360231e-05,
      "loss": 0.2273,
      "step": 1495
    },
    {
      "epoch": 2.16,
      "grad_norm": 3.9931840896606445,
      "learning_rate": 4.354787063720782e-05,
      "loss": 0.2537,
      "step": 1500
    },
    {
      "epoch": 2.17,
      "grad_norm": 5.2231221199035645,
      "learning_rate": 4.350784502081332e-05,
      "loss": 0.2431,
      "step": 1505
    },
    {
      "epoch": 2.17,
      "grad_norm": 4.415401458740234,
      "learning_rate": 4.346781940441883e-05,
      "loss": 0.2431,
      "step": 1510
    },
    {
      "epoch": 2.18,
      "grad_norm": 4.261172294616699,
      "learning_rate": 4.342779378802434e-05,
      "loss": 0.2779,
      "step": 1515
    },
    {
      "epoch": 2.19,
      "grad_norm": 3.6781246662139893,
      "learning_rate": 4.3387768171629845e-05,
      "loss": 0.2052,
      "step": 1520
    },
    {
      "epoch": 2.2,
      "grad_norm": 4.412373065948486,
      "learning_rate": 4.3347742555235355e-05,
      "loss": 0.2797,
      "step": 1525
    },
    {
      "epoch": 2.2,
      "grad_norm": 5.768673419952393,
      "learning_rate": 4.330771693884086e-05,
      "loss": 0.2637,
      "step": 1530
    },
    {
      "epoch": 2.21,
      "grad_norm": 7.167098045349121,
      "learning_rate": 4.326769132244637e-05,
      "loss": 0.2486,
      "step": 1535
    },
    {
      "epoch": 2.22,
      "grad_norm": 3.754503011703491,
      "learning_rate": 4.322766570605187e-05,
      "loss": 0.2315,
      "step": 1540
    },
    {
      "epoch": 2.22,
      "grad_norm": 3.3107941150665283,
      "learning_rate": 4.318764008965738e-05,
      "loss": 0.2175,
      "step": 1545
    },
    {
      "epoch": 2.23,
      "grad_norm": 3.4948480129241943,
      "learning_rate": 4.314761447326289e-05,
      "loss": 0.2228,
      "step": 1550
    },
    {
      "epoch": 2.24,
      "grad_norm": 3.740145444869995,
      "learning_rate": 4.31075888568684e-05,
      "loss": 0.2475,
      "step": 1555
    },
    {
      "epoch": 2.25,
      "grad_norm": 5.239324569702148,
      "learning_rate": 4.306756324047391e-05,
      "loss": 0.2427,
      "step": 1560
    },
    {
      "epoch": 2.25,
      "grad_norm": 5.795839309692383,
      "learning_rate": 4.302753762407941e-05,
      "loss": 0.2177,
      "step": 1565
    },
    {
      "epoch": 2.26,
      "grad_norm": 4.080209255218506,
      "learning_rate": 4.298751200768492e-05,
      "loss": 0.236,
      "step": 1570
    },
    {
      "epoch": 2.27,
      "grad_norm": 6.359370708465576,
      "learning_rate": 4.2947486391290424e-05,
      "loss": 0.2796,
      "step": 1575
    },
    {
      "epoch": 2.27,
      "grad_norm": 4.7210235595703125,
      "learning_rate": 4.2907460774895934e-05,
      "loss": 0.2409,
      "step": 1580
    },
    {
      "epoch": 2.28,
      "grad_norm": 4.770945072174072,
      "learning_rate": 4.2867435158501445e-05,
      "loss": 0.2774,
      "step": 1585
    },
    {
      "epoch": 2.29,
      "grad_norm": 5.309744834899902,
      "learning_rate": 4.282740954210695e-05,
      "loss": 0.2317,
      "step": 1590
    },
    {
      "epoch": 2.3,
      "grad_norm": 4.85918664932251,
      "learning_rate": 4.278738392571246e-05,
      "loss": 0.2666,
      "step": 1595
    },
    {
      "epoch": 2.3,
      "grad_norm": 3.067629337310791,
      "learning_rate": 4.274735830931796e-05,
      "loss": 0.2635,
      "step": 1600
    },
    {
      "epoch": 2.31,
      "grad_norm": 3.3474929332733154,
      "learning_rate": 4.270733269292347e-05,
      "loss": 0.2271,
      "step": 1605
    },
    {
      "epoch": 2.32,
      "grad_norm": 3.961737871170044,
      "learning_rate": 4.2667307076528976e-05,
      "loss": 0.2403,
      "step": 1610
    },
    {
      "epoch": 2.32,
      "grad_norm": 2.509068489074707,
      "learning_rate": 4.2627281460134486e-05,
      "loss": 0.2283,
      "step": 1615
    },
    {
      "epoch": 2.33,
      "grad_norm": 5.405132293701172,
      "learning_rate": 4.2587255843739996e-05,
      "loss": 0.1761,
      "step": 1620
    },
    {
      "epoch": 2.34,
      "grad_norm": 4.707294940948486,
      "learning_rate": 4.25472302273455e-05,
      "loss": 0.2561,
      "step": 1625
    },
    {
      "epoch": 2.35,
      "grad_norm": 4.00047492980957,
      "learning_rate": 4.250720461095101e-05,
      "loss": 0.2367,
      "step": 1630
    },
    {
      "epoch": 2.35,
      "grad_norm": 5.079905986785889,
      "learning_rate": 4.2467178994556514e-05,
      "loss": 0.2878,
      "step": 1635
    },
    {
      "epoch": 2.36,
      "grad_norm": 5.759792804718018,
      "learning_rate": 4.2427153378162024e-05,
      "loss": 0.2989,
      "step": 1640
    },
    {
      "epoch": 2.37,
      "grad_norm": 2.7700154781341553,
      "learning_rate": 4.2387127761767534e-05,
      "loss": 0.2531,
      "step": 1645
    },
    {
      "epoch": 2.37,
      "grad_norm": 3.7735087871551514,
      "learning_rate": 4.234710214537304e-05,
      "loss": 0.2304,
      "step": 1650
    },
    {
      "epoch": 2.38,
      "grad_norm": 4.496865749359131,
      "learning_rate": 4.230707652897855e-05,
      "loss": 0.263,
      "step": 1655
    },
    {
      "epoch": 2.39,
      "grad_norm": 4.752154350280762,
      "learning_rate": 4.226705091258406e-05,
      "loss": 0.2565,
      "step": 1660
    },
    {
      "epoch": 2.4,
      "grad_norm": 3.979544162750244,
      "learning_rate": 4.222702529618956e-05,
      "loss": 0.2166,
      "step": 1665
    },
    {
      "epoch": 2.4,
      "grad_norm": 3.6765263080596924,
      "learning_rate": 4.218699967979507e-05,
      "loss": 0.2438,
      "step": 1670
    },
    {
      "epoch": 2.41,
      "grad_norm": 4.9830322265625,
      "learning_rate": 4.214697406340058e-05,
      "loss": 0.2551,
      "step": 1675
    },
    {
      "epoch": 2.42,
      "grad_norm": 3.655550956726074,
      "learning_rate": 4.2106948447006086e-05,
      "loss": 0.2367,
      "step": 1680
    },
    {
      "epoch": 2.43,
      "grad_norm": 7.952022075653076,
      "learning_rate": 4.2066922830611596e-05,
      "loss": 0.2355,
      "step": 1685
    },
    {
      "epoch": 2.43,
      "grad_norm": 5.706752777099609,
      "learning_rate": 4.2026897214217107e-05,
      "loss": 0.2127,
      "step": 1690
    },
    {
      "epoch": 2.44,
      "grad_norm": 5.1583099365234375,
      "learning_rate": 4.198687159782261e-05,
      "loss": 0.2496,
      "step": 1695
    },
    {
      "epoch": 2.45,
      "grad_norm": 5.349088191986084,
      "learning_rate": 4.194684598142812e-05,
      "loss": 0.2593,
      "step": 1700
    },
    {
      "epoch": 2.45,
      "grad_norm": 4.123900890350342,
      "learning_rate": 4.1906820365033624e-05,
      "loss": 0.2203,
      "step": 1705
    },
    {
      "epoch": 2.46,
      "grad_norm": 6.052340030670166,
      "learning_rate": 4.1866794748639134e-05,
      "loss": 0.2613,
      "step": 1710
    },
    {
      "epoch": 2.47,
      "grad_norm": 5.388720989227295,
      "learning_rate": 4.182676913224464e-05,
      "loss": 0.2277,
      "step": 1715
    },
    {
      "epoch": 2.48,
      "grad_norm": 17.35828971862793,
      "learning_rate": 4.178674351585015e-05,
      "loss": 0.2445,
      "step": 1720
    },
    {
      "epoch": 2.48,
      "grad_norm": 5.437941074371338,
      "learning_rate": 4.174671789945566e-05,
      "loss": 0.226,
      "step": 1725
    },
    {
      "epoch": 2.49,
      "grad_norm": 4.627963066101074,
      "learning_rate": 4.170669228306116e-05,
      "loss": 0.2684,
      "step": 1730
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.598243474960327,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.2242,
      "step": 1735
    },
    {
      "epoch": 2.5,
      "grad_norm": 3.801103353500366,
      "learning_rate": 4.1626641050272176e-05,
      "loss": 0.2603,
      "step": 1740
    },
    {
      "epoch": 2.51,
      "grad_norm": 3.749072313308716,
      "learning_rate": 4.1586615433877686e-05,
      "loss": 0.2382,
      "step": 1745
    },
    {
      "epoch": 2.52,
      "grad_norm": 5.224049091339111,
      "learning_rate": 4.154658981748319e-05,
      "loss": 0.2103,
      "step": 1750
    },
    {
      "epoch": 2.53,
      "grad_norm": 6.435360431671143,
      "learning_rate": 4.15065642010887e-05,
      "loss": 0.244,
      "step": 1755
    },
    {
      "epoch": 2.53,
      "grad_norm": 3.173849105834961,
      "learning_rate": 4.146653858469421e-05,
      "loss": 0.2413,
      "step": 1760
    },
    {
      "epoch": 2.54,
      "grad_norm": 4.47773551940918,
      "learning_rate": 4.1426512968299713e-05,
      "loss": 0.2762,
      "step": 1765
    },
    {
      "epoch": 2.55,
      "grad_norm": 3.3660101890563965,
      "learning_rate": 4.1386487351905224e-05,
      "loss": 0.1729,
      "step": 1770
    },
    {
      "epoch": 2.55,
      "grad_norm": 3.466449737548828,
      "learning_rate": 4.134646173551073e-05,
      "loss": 0.2526,
      "step": 1775
    },
    {
      "epoch": 2.56,
      "grad_norm": 4.319614887237549,
      "learning_rate": 4.130643611911624e-05,
      "loss": 0.2421,
      "step": 1780
    },
    {
      "epoch": 2.57,
      "grad_norm": 3.959930181503296,
      "learning_rate": 4.126641050272174e-05,
      "loss": 0.252,
      "step": 1785
    },
    {
      "epoch": 2.58,
      "grad_norm": 5.325150012969971,
      "learning_rate": 4.122638488632725e-05,
      "loss": 0.2186,
      "step": 1790
    },
    {
      "epoch": 2.58,
      "grad_norm": 4.520905494689941,
      "learning_rate": 4.118635926993276e-05,
      "loss": 0.212,
      "step": 1795
    },
    {
      "epoch": 2.59,
      "grad_norm": 4.795076847076416,
      "learning_rate": 4.1146333653538265e-05,
      "loss": 0.2496,
      "step": 1800
    },
    {
      "epoch": 2.6,
      "grad_norm": 4.291173934936523,
      "learning_rate": 4.1106308037143775e-05,
      "loss": 0.2587,
      "step": 1805
    },
    {
      "epoch": 2.61,
      "grad_norm": 4.245218753814697,
      "learning_rate": 4.106628242074928e-05,
      "loss": 0.2725,
      "step": 1810
    },
    {
      "epoch": 2.61,
      "grad_norm": 3.0920708179473877,
      "learning_rate": 4.102625680435479e-05,
      "loss": 0.2218,
      "step": 1815
    },
    {
      "epoch": 2.62,
      "grad_norm": 2.9690959453582764,
      "learning_rate": 4.098623118796029e-05,
      "loss": 0.251,
      "step": 1820
    },
    {
      "epoch": 2.63,
      "grad_norm": 4.769535064697266,
      "learning_rate": 4.09462055715658e-05,
      "loss": 0.2301,
      "step": 1825
    },
    {
      "epoch": 2.63,
      "grad_norm": 2.9699225425720215,
      "learning_rate": 4.090617995517131e-05,
      "loss": 0.1879,
      "step": 1830
    },
    {
      "epoch": 2.64,
      "grad_norm": 5.330379962921143,
| "learning_rate": 4.086615433877682e-05, | |
| "loss": 0.2343, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "grad_norm": 5.215531826019287, | |
| "learning_rate": 4.082612872238233e-05, | |
| "loss": 0.2879, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 5.113295555114746, | |
| "learning_rate": 4.078610310598783e-05, | |
| "loss": 0.2139, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 3.597294807434082, | |
| "learning_rate": 4.074607748959334e-05, | |
| "loss": 0.2314, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 4.293632507324219, | |
| "learning_rate": 4.0706051873198844e-05, | |
| "loss": 0.2201, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 5.499499797821045, | |
| "learning_rate": 4.0666026256804355e-05, | |
| "loss": 0.2463, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 4.88955020904541, | |
| "learning_rate": 4.0626000640409865e-05, | |
| "loss": 0.1911, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 3.8911263942718506, | |
| "learning_rate": 4.058597502401537e-05, | |
| "loss": 0.2021, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 4.9449920654296875, | |
| "learning_rate": 4.054594940762088e-05, | |
| "loss": 0.2542, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 5.476675033569336, | |
| "learning_rate": 4.050592379122638e-05, | |
| "loss": 0.2391, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 4.407501697540283, | |
| "learning_rate": 4.046589817483189e-05, | |
| "loss": 0.2695, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 2.3330883979797363, | |
| "learning_rate": 4.04258725584374e-05, | |
| "loss": 0.2438, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 5.288588523864746, | |
| "learning_rate": 4.038584694204291e-05, | |
| "loss": 0.2618, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 5.602869510650635, | |
| "learning_rate": 4.034582132564842e-05, | |
| "loss": 0.2402, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 5.500049591064453, | |
| "learning_rate": 4.030579570925393e-05, | |
| "loss": 0.294, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 4.733332633972168, | |
| "learning_rate": 4.026577009285944e-05, | |
| "loss": 0.2065, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 4.457444190979004, | |
| "learning_rate": 4.022574447646494e-05, | |
| "loss": 0.2276, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 2.527444362640381, | |
| "learning_rate": 4.018571886007045e-05, | |
| "loss": 0.2209, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "grad_norm": 3.968388795852661, | |
| "learning_rate": 4.0145693243675955e-05, | |
| "loss": 0.2292, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 3.083031415939331, | |
| "learning_rate": 4.0105667627281465e-05, | |
| "loss": 0.2072, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 6.950841903686523, | |
| "learning_rate": 4.0065642010886975e-05, | |
| "loss": 0.2179, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 4.184908390045166, | |
| "learning_rate": 4.002561639449248e-05, | |
| "loss": 0.2541, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 3.3590152263641357, | |
| "learning_rate": 3.998559077809799e-05, | |
| "loss": 0.2091, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 3.5402350425720215, | |
| "learning_rate": 3.994556516170349e-05, | |
| "loss": 0.2279, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 3.7153353691101074, | |
| "learning_rate": 3.9905539545309e-05, | |
| "loss": 0.2814, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 4.787295818328857, | |
| "learning_rate": 3.9865513928914506e-05, | |
| "loss": 0.222, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "grad_norm": 3.247901201248169, | |
| "learning_rate": 3.9825488312520016e-05, | |
| "loss": 0.2274, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 3.51587176322937, | |
| "learning_rate": 3.978546269612553e-05, | |
| "loss": 0.2329, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 3.9080677032470703, | |
| "learning_rate": 3.974543707973103e-05, | |
| "loss": 0.2533, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 5.821875095367432, | |
| "learning_rate": 3.970541146333654e-05, | |
| "loss": 0.2954, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 3.9195449352264404, | |
| "learning_rate": 3.9665385846942044e-05, | |
| "loss": 0.2096, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 4.23248815536499, | |
| "learning_rate": 3.9625360230547554e-05, | |
| "loss": 0.2369, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 5.370137691497803, | |
| "learning_rate": 3.958533461415306e-05, | |
| "loss": 0.2059, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 7.493764400482178, | |
| "learning_rate": 3.954530899775857e-05, | |
| "loss": 0.2312, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 5.190505504608154, | |
| "learning_rate": 3.950528338136407e-05, | |
| "loss": 0.1946, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 3.261181116104126, | |
| "learning_rate": 3.946525776496958e-05, | |
| "loss": 0.2326, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 5.753521919250488, | |
| "learning_rate": 3.942523214857509e-05, | |
| "loss": 0.264, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 6.831991195678711, | |
| "learning_rate": 3.9385206532180596e-05, | |
| "loss": 0.2595, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 3.9617631435394287, | |
| "learning_rate": 3.9345180915786106e-05, | |
| "loss": 0.2754, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 4.283369541168213, | |
| "learning_rate": 3.930515529939161e-05, | |
| "loss": 0.2605, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 2.7431774139404297, | |
| "learning_rate": 3.926512968299712e-05, | |
| "loss": 0.2512, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 5.476824760437012, | |
| "learning_rate": 3.922510406660262e-05, | |
| "loss": 0.2505, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 3.6471610069274902, | |
| "learning_rate": 3.9185078450208134e-05, | |
| "loss": 0.2352, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "grad_norm": 5.501186847686768, | |
| "learning_rate": 3.9145052833813644e-05, | |
| "loss": 0.2589, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 5.927517414093018, | |
| "learning_rate": 3.910502721741915e-05, | |
| "loss": 0.2323, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 2.320692539215088, | |
| "learning_rate": 3.906500160102466e-05, | |
| "loss": 0.2417, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 4.741787910461426, | |
| "learning_rate": 3.902497598463016e-05, | |
| "loss": 0.2565, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 4.28122091293335, | |
| "learning_rate": 3.898495036823567e-05, | |
| "loss": 0.2298, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 4.997034072875977, | |
| "learning_rate": 3.8944924751841175e-05, | |
| "loss": 0.2254, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 3.0995302200317383, | |
| "learning_rate": 3.8904899135446685e-05, | |
| "loss": 0.2187, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_accuracy": 0.944838056680162, | |
| "eval_loss": 0.15164293348789215, | |
| "eval_runtime": 26.8648, | |
| "eval_samples_per_second": 367.768, | |
| "eval_steps_per_second": 11.502, | |
| "step": 2084 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 3.3711345195770264, | |
| "learning_rate": 3.8864873519052196e-05, | |
| "loss": 0.2469, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "grad_norm": 3.6812057495117188, | |
| "learning_rate": 3.88248479026577e-05, | |
| "loss": 0.2199, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "grad_norm": 3.678959846496582, | |
| "learning_rate": 3.878482228626321e-05, | |
| "loss": 0.2292, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "grad_norm": 4.287693500518799, | |
| "learning_rate": 3.874479666986871e-05, | |
| "loss": 0.2608, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "grad_norm": 3.0512278079986572, | |
| "learning_rate": 3.870477105347422e-05, | |
| "loss": 0.186, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "grad_norm": 7.497838020324707, | |
| "learning_rate": 3.8664745437079733e-05, | |
| "loss": 0.2481, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "grad_norm": 6.538998126983643, | |
| "learning_rate": 3.862471982068524e-05, | |
| "loss": 0.2415, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "grad_norm": 3.499305486679077, | |
| "learning_rate": 3.858469420429075e-05, | |
| "loss": 0.2072, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "grad_norm": 5.734482765197754, | |
| "learning_rate": 3.854466858789626e-05, | |
| "loss": 0.1759, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "grad_norm": 5.562732696533203, | |
| "learning_rate": 3.850464297150176e-05, | |
| "loss": 0.2327, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "grad_norm": 3.5141441822052, | |
| "learning_rate": 3.846461735510727e-05, | |
| "loss": 0.2404, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "grad_norm": 4.6779375076293945, | |
| "learning_rate": 3.842459173871278e-05, | |
| "loss": 0.2097, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "grad_norm": 5.7871994972229, | |
| "learning_rate": 3.8384566122318285e-05, | |
| "loss": 0.1762, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "grad_norm": 3.155024290084839, | |
| "learning_rate": 3.8344540505923795e-05, | |
| "loss": 0.2111, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "grad_norm": 6.282127380371094, | |
| "learning_rate": 3.8304514889529306e-05, | |
| "loss": 0.2194, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "grad_norm": 4.201207160949707, | |
| "learning_rate": 3.826448927313481e-05, | |
| "loss": 0.2647, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "grad_norm": 4.582483291625977, | |
| "learning_rate": 3.822446365674032e-05, | |
| "loss": 0.2315, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "grad_norm": 4.078041076660156, | |
| "learning_rate": 3.818443804034582e-05, | |
| "loss": 0.2249, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "grad_norm": 4.206385135650635, | |
| "learning_rate": 3.814441242395133e-05, | |
| "loss": 0.2542, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "grad_norm": 4.039742946624756, | |
| "learning_rate": 3.810438680755684e-05, | |
| "loss": 0.2278, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "grad_norm": 5.705816745758057, | |
| "learning_rate": 3.806436119116235e-05, | |
| "loss": 0.302, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "grad_norm": 3.5647683143615723, | |
| "learning_rate": 3.802433557476786e-05, | |
| "loss": 0.2076, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "grad_norm": 3.260532855987549, | |
| "learning_rate": 3.798430995837336e-05, | |
| "loss": 0.2568, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "grad_norm": 2.460192918777466, | |
| "learning_rate": 3.794428434197887e-05, | |
| "loss": 0.169, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "grad_norm": 4.057191848754883, | |
| "learning_rate": 3.7904258725584375e-05, | |
| "loss": 0.2131, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "grad_norm": 3.4373152256011963, | |
| "learning_rate": 3.7864233109189885e-05, | |
| "loss": 0.2099, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "grad_norm": 4.2746405601501465, | |
| "learning_rate": 3.782420749279539e-05, | |
| "loss": 0.181, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "grad_norm": 6.235533714294434, | |
| "learning_rate": 3.77841818764009e-05, | |
| "loss": 0.2497, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "grad_norm": 4.6660871505737305, | |
| "learning_rate": 3.774415626000641e-05, | |
| "loss": 0.2269, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "grad_norm": 5.937751293182373, | |
| "learning_rate": 3.770413064361191e-05, | |
| "loss": 0.2385, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "grad_norm": 3.7235584259033203, | |
| "learning_rate": 3.766410502721742e-05, | |
| "loss": 0.1767, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "grad_norm": 3.4258034229278564, | |
| "learning_rate": 3.7624079410822926e-05, | |
| "loss": 0.2111, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "grad_norm": 3.5872249603271484, | |
| "learning_rate": 3.758405379442844e-05, | |
| "loss": 0.1859, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "grad_norm": 10.837464332580566, | |
| "learning_rate": 3.754402817803394e-05, | |
| "loss": 0.2078, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "grad_norm": 4.330519199371338, | |
| "learning_rate": 3.750400256163945e-05, | |
| "loss": 0.2145, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "grad_norm": 2.915856122970581, | |
| "learning_rate": 3.746397694524496e-05, | |
| "loss": 0.2332, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "grad_norm": 4.104254245758057, | |
| "learning_rate": 3.7423951328850464e-05, | |
| "loss": 0.1799, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "grad_norm": 4.224066257476807, | |
| "learning_rate": 3.7383925712455975e-05, | |
| "loss": 0.2486, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "grad_norm": 4.464962005615234, | |
| "learning_rate": 3.734390009606148e-05, | |
| "loss": 0.1884, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "grad_norm": 4.688321590423584, | |
| "learning_rate": 3.730387447966699e-05, | |
| "loss": 0.2268, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "grad_norm": 4.25119686126709, | |
| "learning_rate": 3.726384886327249e-05, | |
| "loss": 0.2202, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "grad_norm": 2.777003765106201, | |
| "learning_rate": 3.7223823246878e-05, | |
| "loss": 0.2411, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "grad_norm": 4.11234188079834, | |
| "learning_rate": 3.718379763048351e-05, | |
| "loss": 0.22, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "grad_norm": 4.097933769226074, | |
| "learning_rate": 3.7143772014089016e-05, | |
| "loss": 0.2586, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "grad_norm": 4.043206691741943, | |
| "learning_rate": 3.7103746397694526e-05, | |
| "loss": 0.2472, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "grad_norm": 5.116567611694336, | |
| "learning_rate": 3.706372078130003e-05, | |
| "loss": 0.235, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "grad_norm": 3.207937717437744, | |
| "learning_rate": 3.702369516490554e-05, | |
| "loss": 0.2311, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "grad_norm": 3.5227158069610596, | |
| "learning_rate": 3.6983669548511043e-05, | |
| "loss": 0.2425, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "grad_norm": 3.8556931018829346, | |
| "learning_rate": 3.6943643932116554e-05, | |
| "loss": 0.2619, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "grad_norm": 3.575005054473877, | |
| "learning_rate": 3.6903618315722064e-05, | |
| "loss": 0.1952, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "grad_norm": 3.004302740097046, | |
| "learning_rate": 3.686359269932757e-05, | |
| "loss": 0.1923, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "grad_norm": 4.4943976402282715, | |
| "learning_rate": 3.682356708293308e-05, | |
| "loss": 0.2607, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "grad_norm": 4.194296360015869, | |
| "learning_rate": 3.678354146653858e-05, | |
| "loss": 0.2363, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "grad_norm": 5.696502208709717, | |
| "learning_rate": 3.674351585014409e-05, | |
| "loss": 0.2458, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "grad_norm": 3.8679604530334473, | |
| "learning_rate": 3.67034902337496e-05, | |
| "loss": 0.2113, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "grad_norm": 4.158144474029541, | |
| "learning_rate": 3.666346461735511e-05, | |
| "loss": 0.2335, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "grad_norm": 3.0606186389923096, | |
| "learning_rate": 3.6623439000960616e-05, | |
| "loss": 0.1956, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "grad_norm": 3.3307764530181885, | |
| "learning_rate": 3.6583413384566126e-05, | |
| "loss": 0.2589, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "grad_norm": 4.276596546173096, | |
| "learning_rate": 3.6543387768171636e-05, | |
| "loss": 0.2316, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "grad_norm": 3.0126774311065674, | |
| "learning_rate": 3.650336215177714e-05, | |
| "loss": 0.185, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "grad_norm": 3.945223331451416, | |
| "learning_rate": 3.646333653538265e-05, | |
| "loss": 0.1646, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "grad_norm": 3.874509334564209, | |
| "learning_rate": 3.6423310918988154e-05, | |
| "loss": 0.1987, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "grad_norm": 4.1151018142700195, | |
| "learning_rate": 3.6383285302593664e-05, | |
| "loss": 0.2219, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "grad_norm": 4.987639427185059, | |
| "learning_rate": 3.6343259686199174e-05, | |
| "loss": 0.1767, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "grad_norm": 3.1510512828826904, | |
| "learning_rate": 3.630323406980468e-05, | |
| "loss": 0.1942, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "grad_norm": 4.838677406311035, | |
| "learning_rate": 3.626320845341019e-05, | |
| "loss": 0.2122, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "grad_norm": 5.568162441253662, | |
| "learning_rate": 3.622318283701569e-05, | |
| "loss": 0.2429, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "grad_norm": 3.110318183898926, | |
| "learning_rate": 3.61831572206212e-05, | |
| "loss": 0.2571, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "grad_norm": 3.7364163398742676, | |
| "learning_rate": 3.6143131604226705e-05, | |
| "loss": 0.2329, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 4.126183986663818, | |
| "learning_rate": 3.6103105987832216e-05, | |
| "loss": 0.2439, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 5.250816345214844, | |
| "learning_rate": 3.6063080371437726e-05, | |
| "loss": 0.1947, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "grad_norm": 4.749518871307373, | |
| "learning_rate": 3.602305475504323e-05, | |
| "loss": 0.2585, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "grad_norm": 4.378204345703125, | |
| "learning_rate": 3.598302913864874e-05, | |
| "loss": 0.2036, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "grad_norm": 4.115077018737793, | |
| "learning_rate": 3.594300352225424e-05, | |
| "loss": 0.1862, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "grad_norm": 3.890913963317871, | |
| "learning_rate": 3.5902977905859753e-05, | |
| "loss": 0.1771, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "grad_norm": 4.176694393157959, | |
| "learning_rate": 3.586295228946526e-05, | |
| "loss": 0.1872, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "grad_norm": 3.7083818912506104, | |
| "learning_rate": 3.582292667307077e-05, | |
| "loss": 0.2303, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "grad_norm": 5.073873996734619, | |
| "learning_rate": 3.578290105667628e-05, | |
| "loss": 0.2079, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "grad_norm": 3.9572906494140625, | |
| "learning_rate": 3.574287544028178e-05, | |
| "loss": 0.2173, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "grad_norm": 3.172136068344116, | |
| "learning_rate": 3.570284982388729e-05, | |
| "loss": 0.2303, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "grad_norm": 3.644803285598755, | |
| "learning_rate": 3.5662824207492795e-05, | |
| "loss": 0.2467, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "grad_norm": 3.4908697605133057, | |
| "learning_rate": 3.5622798591098305e-05, | |
| "loss": 0.2053, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "grad_norm": 3.2498650550842285, | |
| "learning_rate": 3.558277297470381e-05, | |
| "loss": 0.2058, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "grad_norm": 4.260957717895508, | |
| "learning_rate": 3.554274735830932e-05, | |
| "loss": 0.25, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "grad_norm": 4.03501558303833, | |
| "learning_rate": 3.550272174191483e-05, | |
| "loss": 0.2376, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "grad_norm": 3.145692825317383, | |
| "learning_rate": 3.546269612552033e-05, | |
| "loss": 0.219, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "grad_norm": 3.201892614364624, | |
| "learning_rate": 3.542267050912584e-05, | |
| "loss": 0.2202, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "grad_norm": 2.6380460262298584, | |
| "learning_rate": 3.5382644892731347e-05, | |
| "loss": 0.206, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "grad_norm": 3.144564390182495, | |
| "learning_rate": 3.534261927633686e-05, | |
| "loss": 0.2259, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "grad_norm": 7.223779201507568, | |
| "learning_rate": 3.530259365994236e-05, | |
| "loss": 0.2447, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "grad_norm": 4.040417194366455, | |
| "learning_rate": 3.526256804354787e-05, | |
| "loss": 0.2032, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "grad_norm": 3.7327756881713867, | |
| "learning_rate": 3.522254242715338e-05, | |
| "loss": 0.2437, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "grad_norm": 6.253401279449463, | |
| "learning_rate": 3.5182516810758884e-05, | |
| "loss": 0.2113, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "grad_norm": 4.586529731750488, | |
| "learning_rate": 3.5142491194364395e-05, | |
| "loss": 0.2213, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "grad_norm": 4.374157428741455, | |
| "learning_rate": 3.51024655779699e-05, | |
| "loss": 0.2183, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "grad_norm": 4.311252593994141, | |
| "learning_rate": 3.506243996157541e-05, | |
| "loss": 0.2223, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "grad_norm": 5.473079204559326, | |
| "learning_rate": 3.502241434518091e-05, | |
| "loss": 0.2318, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "grad_norm": 3.0324044227600098, | |
| "learning_rate": 3.498238872878642e-05, | |
| "loss": 0.2141, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "grad_norm": 3.7828001976013184, | |
| "learning_rate": 3.494236311239193e-05, | |
| "loss": 0.2164, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "grad_norm": 3.766996383666992, | |
| "learning_rate": 3.4902337495997436e-05, | |
| "loss": 0.2597, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "grad_norm": 5.172212600708008, | |
| "learning_rate": 3.4862311879602946e-05, | |
| "loss": 0.2471, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "grad_norm": 3.6711273193359375, | |
| "learning_rate": 3.482228626320846e-05, | |
| "loss": 0.2387, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "grad_norm": 3.535651445388794, | |
| "learning_rate": 3.478226064681396e-05, | |
| "loss": 0.2029, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "grad_norm": 3.2503421306610107, | |
| "learning_rate": 3.474223503041947e-05, | |
| "loss": 0.2263, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 3.7369303703308105, | |
| "learning_rate": 3.470220941402498e-05, | |
| "loss": 0.2374, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "grad_norm": 3.4637975692749023, | |
| "learning_rate": 3.4662183797630484e-05, | |
| "loss": 0.215, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "grad_norm": 3.3387277126312256, | |
| "learning_rate": 3.4622158181235995e-05, | |
| "loss": 0.2168, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "grad_norm": 4.119049072265625, | |
| "learning_rate": 3.4582132564841505e-05, | |
| "loss": 0.2292, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "grad_norm": 3.3130059242248535, | |
| "learning_rate": 3.454210694844701e-05, | |
| "loss": 0.1932, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "grad_norm": 4.4937424659729, | |
| "learning_rate": 3.450208133205252e-05, | |
| "loss": 0.1795, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "grad_norm": 3.4141628742218018, | |
| "learning_rate": 3.446205571565802e-05, | |
| "loss": 0.2059, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "grad_norm": 3.247016191482544, | |
| "learning_rate": 3.442203009926353e-05, | |
| "loss": 0.2439, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "grad_norm": 4.921724319458008, | |
| "learning_rate": 3.438200448286904e-05, | |
| "loss": 0.2441, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "grad_norm": 9.78645133972168, | |
| "learning_rate": 3.4341978866474546e-05, | |
| "loss": 0.2285, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "grad_norm": 4.2883148193359375, | |
| "learning_rate": 3.4301953250080056e-05, | |
| "loss": 0.1949, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "grad_norm": 4.0928544998168945, | |
| "learning_rate": 3.426192763368556e-05, | |
| "loss": 0.2025, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "grad_norm": 3.5520989894866943, | |
| "learning_rate": 3.422190201729107e-05, | |
| "loss": 0.2041, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "grad_norm": 6.448659896850586, | |
| "learning_rate": 3.4181876400896574e-05, | |
| "loss": 0.2051, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "grad_norm": 3.6344971656799316, | |
| "learning_rate": 3.4141850784502084e-05, | |
| "loss": 0.198, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "grad_norm": 3.866029977798462, | |
| "learning_rate": 3.4101825168107594e-05, | |
| "loss": 0.2465, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "grad_norm": 5.6679840087890625, | |
| "learning_rate": 3.40617995517131e-05, | |
| "loss": 0.2201, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "grad_norm": 4.224870204925537, | |
| "learning_rate": 3.402177393531861e-05, | |
| "loss": 0.1742, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "grad_norm": 3.722661256790161, | |
| "learning_rate": 3.398174831892411e-05, | |
| "loss": 0.2421, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "grad_norm": 3.9871938228607178, | |
| "learning_rate": 3.394172270252962e-05, | |
| "loss": 0.2048, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "grad_norm": 4.0545806884765625, | |
| "learning_rate": 3.3901697086135125e-05, | |
| "loss": 0.1936, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "grad_norm": 3.1876699924468994, | |
| "learning_rate": 3.3861671469740636e-05, | |
| "loss": 0.2055, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "grad_norm": 4.426385879516602, | |
| "learning_rate": 3.3821645853346146e-05, | |
| "loss": 0.2047, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "grad_norm": 3.2227375507354736, | |
| "learning_rate": 3.378162023695165e-05, | |
| "loss": 0.2183, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "grad_norm": 4.088392734527588, | |
| "learning_rate": 3.374159462055716e-05, | |
| "loss": 0.2094, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "grad_norm": 5.6704182624816895, | |
| "learning_rate": 3.370156900416266e-05, | |
| "loss": 0.2646, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "grad_norm": 4.020977973937988, | |
| "learning_rate": 3.3661543387768174e-05, | |
| "loss": 0.2152, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "grad_norm": 3.417588710784912, | |
| "learning_rate": 3.362151777137368e-05, | |
| "loss": 0.2011, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "grad_norm": 4.737537384033203, | |
| "learning_rate": 3.358149215497919e-05, | |
| "loss": 0.2468, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "grad_norm": 5.6779351234436035, | |
| "learning_rate": 3.35414665385847e-05, | |
| "loss": 0.2208, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "grad_norm": 3.3792531490325928, | |
| "learning_rate": 3.35014409221902e-05, | |
| "loss": 0.2221, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "grad_norm": 2.7218449115753174, | |
| "learning_rate": 3.346141530579571e-05, | |
| "loss": 0.2376, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "grad_norm": 5.76757287979126, | |
| "learning_rate": 3.3421389689401215e-05, | |
| "loss": 0.2505, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "grad_norm": 4.965346813201904, | |
| "learning_rate": 3.3381364073006725e-05, | |
| "loss": 0.2339, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "grad_norm": 3.6965980529785156, | |
| "learning_rate": 3.334133845661223e-05, | |
| "loss": 0.1491, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_accuracy": 0.9518218623481781, | |
| "eval_loss": 0.13597902655601501, | |
| "eval_runtime": 26.7799, | |
| "eval_samples_per_second": 368.933, | |
| "eval_steps_per_second": 11.538, | |
| "step": 2779 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 4.749961853027344, | |
| "learning_rate": 3.330131284021774e-05, | |
| "loss": 0.2327, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "grad_norm": 2.841573476791382, | |
| "learning_rate": 3.326128722382325e-05, | |
| "loss": 0.2243, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "grad_norm": 5.107615947723389, | |
| "learning_rate": 3.322126160742875e-05, | |
| "loss": 0.2101, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "grad_norm": 5.680942535400391, | |
| "learning_rate": 3.318123599103426e-05, | |
| "loss": 0.1936, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "grad_norm": 3.2696499824523926, | |
| "learning_rate": 3.314121037463977e-05, | |
| "loss": 0.2145, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "grad_norm": 3.674295425415039, | |
| "learning_rate": 3.310118475824528e-05, | |
| "loss": 0.2228, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "grad_norm": 3.9477553367614746, | |
| "learning_rate": 3.306115914185078e-05, | |
| "loss": 0.2029, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "grad_norm": 2.8797388076782227, | |
| "learning_rate": 3.302113352545629e-05, | |
| "loss": 0.1741, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "grad_norm": 3.3909058570861816, | |
| "learning_rate": 3.29811079090618e-05, | |
| "loss": 0.2246, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "grad_norm": 5.92777156829834, | |
| "learning_rate": 3.2941082292667305e-05, | |
| "loss": 0.2162, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "grad_norm": 9.48619556427002, | |
| "learning_rate": 3.2901056676272815e-05, | |
| "loss": 0.1948, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "grad_norm": 4.641541004180908, | |
| "learning_rate": 3.2861031059878325e-05, | |
| "loss": 0.2002, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "grad_norm": 3.901582956314087, | |
| "learning_rate": 3.2821005443483835e-05, | |
| "loss": 0.2303, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "grad_norm": 3.974966526031494, | |
| "learning_rate": 3.278097982708934e-05, | |
| "loss": 0.17, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "grad_norm": 4.02310037612915, | |
| "learning_rate": 3.274095421069485e-05, | |
| "loss": 0.1959, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "grad_norm": 2.948073148727417, | |
| "learning_rate": 3.270092859430036e-05, | |
| "loss": 0.1819, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "grad_norm": 4.0745391845703125, | |
| "learning_rate": 3.266090297790586e-05, | |
| "loss": 0.2008, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "grad_norm": 4.957521438598633, | |
| "learning_rate": 3.262087736151137e-05, | |
| "loss": 0.2019, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "grad_norm": 3.015536069869995, | |
| "learning_rate": 3.258085174511688e-05, | |
| "loss": 0.2166, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "grad_norm": 4.570614337921143, | |
| "learning_rate": 3.254082612872239e-05, | |
| "loss": 0.2058, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "grad_norm": 4.153733253479004, | |
| "learning_rate": 3.250080051232789e-05, | |
| "loss": 0.2092, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "grad_norm": 5.32897424697876, | |
| "learning_rate": 3.24607748959334e-05, | |
| "loss": 0.2379, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "grad_norm": 4.037844181060791, | |
| "learning_rate": 3.242074927953891e-05, | |
| "loss": 0.2191, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "grad_norm": 4.094261646270752, | |
| "learning_rate": 3.2380723663144415e-05, | |
| "loss": 0.1733, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "grad_norm": 5.6198015213012695, | |
| "learning_rate": 3.2340698046749925e-05, | |
| "loss": 0.2437, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "grad_norm": 2.6774141788482666, | |
| "learning_rate": 3.230067243035543e-05, | |
| "loss": 0.1806, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "grad_norm": 4.591428279876709, | |
| "learning_rate": 3.226064681396094e-05, | |
| "loss": 0.1828, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "grad_norm": 3.5464799404144287, | |
| "learning_rate": 3.222062119756644e-05, | |
| "loss": 0.2177, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "grad_norm": 3.6952743530273438, | |
| "learning_rate": 3.218059558117195e-05, | |
| "loss": 0.1963, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "grad_norm": 2.857217788696289, | |
| "learning_rate": 3.214056996477746e-05, | |
| "loss": 0.1828, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "grad_norm": 3.064542293548584, | |
| "learning_rate": 3.2100544348382966e-05, | |
| "loss": 0.2227, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "grad_norm": 5.486659526824951, | |
| "learning_rate": 3.206051873198848e-05, | |
| "loss": 0.2019, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "grad_norm": 3.4268643856048584, | |
| "learning_rate": 3.202049311559398e-05, | |
| "loss": 0.169, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "grad_norm": 4.867826461791992, | |
| "learning_rate": 3.198046749919949e-05, | |
| "loss": 0.2222, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "grad_norm": 1.942135214805603, | |
| "learning_rate": 3.1940441882804994e-05, | |
| "loss": 0.2087, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "grad_norm": 4.17222261428833, | |
| "learning_rate": 3.1900416266410504e-05, | |
| "loss": 0.1833, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "grad_norm": 3.8982694149017334, | |
| "learning_rate": 3.1860390650016015e-05, | |
| "loss": 0.1877, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "grad_norm": 3.6078577041625977, | |
| "learning_rate": 3.182036503362152e-05, | |
| "loss": 0.1587, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "grad_norm": 4.184298992156982, | |
| "learning_rate": 3.178033941722703e-05, | |
| "loss": 0.1787, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "grad_norm": 4.788322925567627, | |
| "learning_rate": 3.174031380083253e-05, | |
| "loss": 0.2037, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "grad_norm": 4.06505012512207, | |
| "learning_rate": 3.170028818443804e-05, | |
| "loss": 0.2102, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "grad_norm": 4.116428375244141, | |
| "learning_rate": 3.1660262568043546e-05, | |
| "loss": 0.1717, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "grad_norm": 2.654528856277466, | |
| "learning_rate": 3.1620236951649056e-05, | |
| "loss": 0.1519, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "grad_norm": 2.9073519706726074, | |
| "learning_rate": 3.1580211335254566e-05, | |
| "loss": 0.2359, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "grad_norm": 4.120742321014404, | |
| "learning_rate": 3.154018571886007e-05, | |
| "loss": 0.2294, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "grad_norm": 3.727126121520996, | |
| "learning_rate": 3.150016010246558e-05, | |
| "loss": 0.186, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "grad_norm": 3.9322612285614014, | |
| "learning_rate": 3.1460134486071084e-05, | |
| "loss": 0.2311, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "grad_norm": 3.876603841781616, | |
| "learning_rate": 3.1420108869676594e-05, | |
| "loss": 0.2384, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "grad_norm": 3.5300958156585693, | |
| "learning_rate": 3.13800832532821e-05, | |
| "loss": 0.2141, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "grad_norm": 3.2177224159240723, | |
| "learning_rate": 3.134005763688761e-05, | |
| "loss": 0.2157, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "grad_norm": 5.607058048248291, | |
| "learning_rate": 3.130003202049312e-05, | |
| "loss": 0.2121, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "grad_norm": 4.676594257354736, | |
| "learning_rate": 3.126000640409862e-05, | |
| "loss": 0.2647, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "grad_norm": 4.193298816680908, | |
| "learning_rate": 3.121998078770413e-05, | |
| "loss": 0.1926, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "grad_norm": 4.498405933380127, | |
| "learning_rate": 3.1179955171309635e-05, | |
| "loss": 0.2174, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "grad_norm": 2.3342103958129883, | |
| "learning_rate": 3.1139929554915145e-05, | |
| "loss": 0.1952, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "grad_norm": 3.535201072692871, | |
| "learning_rate": 3.1099903938520656e-05, | |
| "loss": 0.2383, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "grad_norm": 3.458115816116333, | |
| "learning_rate": 3.105987832212616e-05, | |
| "loss": 0.251, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "grad_norm": 5.184045314788818, | |
| "learning_rate": 3.101985270573167e-05, | |
| "loss": 0.2367, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "grad_norm": 5.617215156555176, | |
| "learning_rate": 3.097982708933718e-05, | |
| "loss": 0.2113, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "grad_norm": 2.9581499099731445, | |
| "learning_rate": 3.093980147294268e-05, | |
| "loss": 0.1746, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "grad_norm": 4.895233631134033, | |
| "learning_rate": 3.0899775856548194e-05, | |
| "loss": 0.2228, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "grad_norm": 6.0638346672058105, | |
| "learning_rate": 3.0859750240153704e-05, | |
| "loss": 0.2143, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "grad_norm": 3.597208023071289, | |
| "learning_rate": 3.081972462375921e-05, | |
| "loss": 0.1849, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "grad_norm": 4.183586597442627, | |
| "learning_rate": 3.077969900736472e-05, | |
| "loss": 0.1877, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "grad_norm": 4.0433855056762695, | |
| "learning_rate": 3.073967339097023e-05, | |
| "loss": 0.1655, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "grad_norm": 4.732285499572754, | |
| "learning_rate": 3.069964777457573e-05, | |
| "loss": 0.2016, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "grad_norm": 3.617846727371216, | |
| "learning_rate": 3.065962215818124e-05, | |
| "loss": 0.197, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "grad_norm": 4.5917134284973145, | |
| "learning_rate": 3.0619596541786745e-05, | |
| "loss": 0.1746, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "grad_norm": 5.0926032066345215, | |
| "learning_rate": 3.0579570925392256e-05, | |
| "loss": 0.1932, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "grad_norm": 3.462219715118408, | |
| "learning_rate": 3.053954530899776e-05, | |
| "loss": 0.1851, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "grad_norm": 4.013971328735352, | |
| "learning_rate": 3.049951969260327e-05, | |
| "loss": 0.1873, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "grad_norm": 4.1465373039245605, | |
| "learning_rate": 3.0459494076208776e-05, | |
| "loss": 0.2215, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "grad_norm": 3.554340124130249, | |
| "learning_rate": 3.0419468459814283e-05, | |
| "loss": 0.2169, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "grad_norm": 5.059957981109619, | |
| "learning_rate": 3.0379442843419793e-05, | |
| "loss": 0.2123, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "grad_norm": 3.727722406387329, | |
| "learning_rate": 3.0339417227025297e-05, | |
| "loss": 0.1717, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "grad_norm": 3.030172348022461, | |
| "learning_rate": 3.0299391610630807e-05, | |
| "loss": 0.1813, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "grad_norm": 3.366147994995117, | |
| "learning_rate": 3.025936599423631e-05, | |
| "loss": 0.1761, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "grad_norm": 3.159599542617798, | |
| "learning_rate": 3.021934037784182e-05, | |
| "loss": 0.2178, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "grad_norm": 4.538158416748047, | |
| "learning_rate": 3.017931476144733e-05, | |
| "loss": 0.2123, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "grad_norm": 4.336025714874268, | |
| "learning_rate": 3.0139289145052835e-05, | |
| "loss": 0.2083, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "grad_norm": 8.234651565551758, | |
| "learning_rate": 3.0099263528658345e-05, | |
| "loss": 0.1929, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "grad_norm": 3.061892509460449, | |
| "learning_rate": 3.005923791226385e-05, | |
| "loss": 0.1805, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "grad_norm": 3.444242238998413, | |
| "learning_rate": 3.001921229586936e-05, | |
| "loss": 0.1897, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "grad_norm": 3.281337261199951, | |
| "learning_rate": 2.9979186679474862e-05, | |
| "loss": 0.1676, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "grad_norm": 4.178293228149414, | |
| "learning_rate": 2.9939161063080373e-05, | |
| "loss": 0.2371, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "grad_norm": 3.2570972442626953, | |
| "learning_rate": 2.9899135446685883e-05, | |
| "loss": 0.2106, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "grad_norm": 4.102814197540283, | |
| "learning_rate": 2.9859109830291387e-05, | |
| "loss": 0.2123, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "grad_norm": 4.337564945220947, | |
| "learning_rate": 2.9819084213896897e-05, | |
| "loss": 0.2158, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "grad_norm": 4.369962692260742, | |
| "learning_rate": 2.97790585975024e-05, | |
| "loss": 0.1974, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "grad_norm": 3.994830369949341, | |
| "learning_rate": 2.973903298110791e-05, | |
| "loss": 0.1798, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "grad_norm": 5.044273376464844, | |
| "learning_rate": 2.9699007364713418e-05, | |
| "loss": 0.2527, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "grad_norm": 4.7880682945251465, | |
| "learning_rate": 2.9658981748318924e-05, | |
| "loss": 0.2461, | |
| "step": 3235 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "grad_norm": 3.5522334575653076, | |
| "learning_rate": 2.9618956131924435e-05, | |
| "loss": 0.1891, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "grad_norm": 3.3652679920196533, | |
| "learning_rate": 2.957893051552994e-05, | |
| "loss": 0.2123, | |
| "step": 3245 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "grad_norm": 3.255449056625366, | |
| "learning_rate": 2.953890489913545e-05, | |
| "loss": 0.204, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "grad_norm": 3.2218666076660156, | |
| "learning_rate": 2.9498879282740955e-05, | |
| "loss": 0.1947, | |
| "step": 3255 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "grad_norm": 4.256184101104736, | |
| "learning_rate": 2.9458853666346466e-05, | |
| "loss": 0.1948, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "grad_norm": 3.0359115600585938, | |
| "learning_rate": 2.941882804995197e-05, | |
| "loss": 0.2015, | |
| "step": 3265 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "grad_norm": 3.3159563541412354, | |
| "learning_rate": 2.937880243355748e-05, | |
| "loss": 0.223, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "grad_norm": 4.972184658050537, | |
| "learning_rate": 2.933877681716299e-05, | |
| "loss": 0.2057, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "grad_norm": 3.500704526901245, | |
| "learning_rate": 2.9298751200768493e-05, | |
| "loss": 0.1895, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "grad_norm": 2.18817400932312, | |
| "learning_rate": 2.9258725584374004e-05, | |
| "loss": 0.2213, | |
| "step": 3285 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "grad_norm": 3.3040566444396973, | |
| "learning_rate": 2.9218699967979507e-05, | |
| "loss": 0.1929, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "grad_norm": 3.868074417114258, | |
| "learning_rate": 2.9178674351585017e-05, | |
| "loss": 0.2007, | |
| "step": 3295 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "grad_norm": 2.987790584564209, | |
| "learning_rate": 2.913864873519052e-05, | |
| "loss": 0.1843, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "grad_norm": 3.395069122314453, | |
| "learning_rate": 2.909862311879603e-05, | |
| "loss": 0.2006, | |
| "step": 3305 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "grad_norm": 5.1089606285095215, | |
| "learning_rate": 2.905859750240154e-05, | |
| "loss": 0.1996, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "grad_norm": 3.1184518337249756, | |
| "learning_rate": 2.9018571886007045e-05, | |
| "loss": 0.1927, | |
| "step": 3315 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "grad_norm": 3.2215800285339355, | |
| "learning_rate": 2.8978546269612555e-05, | |
| "loss": 0.2127, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "grad_norm": 3.3580570220947266, | |
| "learning_rate": 2.893852065321806e-05, | |
| "loss": 0.2274, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "grad_norm": 4.6123366355896, | |
| "learning_rate": 2.889849503682357e-05, | |
| "loss": 0.1864, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "grad_norm": 3.454392910003662, | |
| "learning_rate": 2.8858469420429073e-05, | |
| "loss": 0.1665, | |
| "step": 3335 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "grad_norm": 3.018871784210205, | |
| "learning_rate": 2.8818443804034583e-05, | |
| "loss": 0.1969, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "grad_norm": 3.789463520050049, | |
| "learning_rate": 2.8778418187640093e-05, | |
| "loss": 0.2156, | |
| "step": 3345 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "grad_norm": 2.578465223312378, | |
| "learning_rate": 2.8738392571245597e-05, | |
| "loss": 0.1985, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "grad_norm": 5.120762825012207, | |
| "learning_rate": 2.8698366954851107e-05, | |
| "loss": 0.1897, | |
| "step": 3355 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "grad_norm": 5.5928425788879395, | |
| "learning_rate": 2.8658341338456614e-05, | |
| "loss": 0.2231, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "grad_norm": 3.3155384063720703, | |
| "learning_rate": 2.861831572206212e-05, | |
| "loss": 0.1672, | |
| "step": 3365 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "grad_norm": 2.947312116622925, | |
| "learning_rate": 2.8578290105667628e-05, | |
| "loss": 0.1473, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "grad_norm": 5.2089314460754395, | |
| "learning_rate": 2.8538264489273138e-05, | |
| "loss": 0.2265, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "grad_norm": 3.4539883136749268, | |
| "learning_rate": 2.8498238872878645e-05, | |
| "loss": 0.1995, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "grad_norm": 4.001387596130371, | |
| "learning_rate": 2.845821325648415e-05, | |
| "loss": 0.1999, | |
| "step": 3385 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "grad_norm": 3.7366297245025635, | |
| "learning_rate": 2.8418187640089662e-05, | |
| "loss": 0.2098, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "grad_norm": 3.679403066635132, | |
| "learning_rate": 2.8378162023695165e-05, | |
| "loss": 0.1831, | |
| "step": 3395 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "grad_norm": 3.4778425693511963, | |
| "learning_rate": 2.8338136407300676e-05, | |
| "loss": 0.1985, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "grad_norm": 3.987553834915161, | |
| "learning_rate": 2.829811079090618e-05, | |
| "loss": 0.2251, | |
| "step": 3405 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "grad_norm": 4.322044372558594, | |
| "learning_rate": 2.825808517451169e-05, | |
| "loss": 0.2069, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "grad_norm": 5.449688911437988, | |
| "learning_rate": 2.8218059558117193e-05, | |
| "loss": 0.1794, | |
| "step": 3415 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "grad_norm": 3.6124987602233887, | |
| "learning_rate": 2.8178033941722703e-05, | |
| "loss": 0.2339, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "grad_norm": 3.3568663597106934, | |
| "learning_rate": 2.8138008325328214e-05, | |
| "loss": 0.1907, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "grad_norm": 4.301671028137207, | |
| "learning_rate": 2.8097982708933717e-05, | |
| "loss": 0.2468, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "grad_norm": 2.871671676635742, | |
| "learning_rate": 2.8057957092539227e-05, | |
| "loss": 0.1939, | |
| "step": 3435 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "grad_norm": 3.9504876136779785, | |
| "learning_rate": 2.801793147614473e-05, | |
| "loss": 0.2247, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "grad_norm": 3.9967737197875977, | |
| "learning_rate": 2.797790585975024e-05, | |
| "loss": 0.2198, | |
| "step": 3445 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "grad_norm": 3.5244204998016357, | |
| "learning_rate": 2.7937880243355745e-05, | |
| "loss": 0.2026, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "grad_norm": 3.6014513969421387, | |
| "learning_rate": 2.7897854626961255e-05, | |
| "loss": 0.2131, | |
| "step": 3455 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "grad_norm": 4.934159278869629, | |
| "learning_rate": 2.7857829010566765e-05, | |
| "loss": 0.2254, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "grad_norm": 3.0008413791656494, | |
| "learning_rate": 2.781780339417227e-05, | |
| "loss": 0.2359, | |
| "step": 3465 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "grad_norm": 4.055464267730713, | |
| "learning_rate": 2.777777777777778e-05, | |
| "loss": 0.2038, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "eval_accuracy": 0.951417004048583, | |
| "eval_loss": 0.1312309056520462, | |
| "eval_runtime": 26.8652, | |
| "eval_samples_per_second": 367.761, | |
| "eval_steps_per_second": 11.502, | |
| "step": 3473 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 3.999800205230713, | |
| "learning_rate": 2.7737752161383286e-05, | |
| "loss": 0.2119, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 5.01, | |
| "grad_norm": 4.014290809631348, | |
| "learning_rate": 2.7697726544988796e-05, | |
| "loss": 0.1888, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "grad_norm": 3.3272271156311035, | |
| "learning_rate": 2.76577009285943e-05, | |
| "loss": 0.2187, | |
| "step": 3485 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "grad_norm": 5.279153347015381, | |
| "learning_rate": 2.761767531219981e-05, | |
| "loss": 0.1644, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 5.03, | |
| "grad_norm": 2.8314247131347656, | |
| "learning_rate": 2.757764969580532e-05, | |
| "loss": 0.1695, | |
| "step": 3495 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "grad_norm": 3.674100637435913, | |
| "learning_rate": 2.7537624079410824e-05, | |
| "loss": 0.1971, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "grad_norm": 4.037543296813965, | |
| "learning_rate": 2.7497598463016334e-05, | |
| "loss": 0.145, | |
| "step": 3505 | |
| }, | |
| { | |
| "epoch": 5.05, | |
| "grad_norm": 2.6359853744506836, | |
| "learning_rate": 2.7457572846621838e-05, | |
| "loss": 0.1859, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "grad_norm": 3.589770555496216, | |
| "learning_rate": 2.7417547230227348e-05, | |
| "loss": 0.2093, | |
| "step": 3515 | |
| }, | |
| { | |
| "epoch": 5.07, | |
| "grad_norm": 2.143383741378784, | |
| "learning_rate": 2.737752161383285e-05, | |
| "loss": 0.1568, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 5.07, | |
| "grad_norm": 2.837106227874756, | |
| "learning_rate": 2.7337495997438362e-05, | |
| "loss": 0.1999, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "grad_norm": 3.294853448867798, | |
| "learning_rate": 2.7297470381043872e-05, | |
| "loss": 0.1655, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "grad_norm": 4.975580215454102, | |
| "learning_rate": 2.7257444764649376e-05, | |
| "loss": 0.1859, | |
| "step": 3535 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "grad_norm": 2.522923231124878, | |
| "learning_rate": 2.7217419148254886e-05, | |
| "loss": 0.1736, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "grad_norm": 4.344380855560303, | |
| "learning_rate": 2.717739353186039e-05, | |
| "loss": 0.1921, | |
| "step": 3545 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "grad_norm": 2.803359270095825, | |
| "learning_rate": 2.71373679154659e-05, | |
| "loss": 0.199, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "grad_norm": 3.1321277618408203, | |
| "learning_rate": 2.7097342299071403e-05, | |
| "loss": 0.2111, | |
| "step": 3555 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "grad_norm": 4.107348442077637, | |
| "learning_rate": 2.7057316682676913e-05, | |
| "loss": 0.1625, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "grad_norm": 3.226501703262329, | |
| "learning_rate": 2.7017291066282424e-05, | |
| "loss": 0.1757, | |
| "step": 3565 | |
| }, | |
| { | |
| "epoch": 5.14, | |
| "grad_norm": 3.1203489303588867, | |
| "learning_rate": 2.6977265449887927e-05, | |
| "loss": 0.1898, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "grad_norm": 4.505608081817627, | |
| "learning_rate": 2.6937239833493438e-05, | |
| "loss": 0.2059, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "grad_norm": 3.804442882537842, | |
| "learning_rate": 2.6897214217098944e-05, | |
| "loss": 0.2217, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 5.16, | |
| "grad_norm": 3.088012218475342, | |
| "learning_rate": 2.685718860070445e-05, | |
| "loss": 0.1662, | |
| "step": 3585 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "grad_norm": 2.7884981632232666, | |
| "learning_rate": 2.6817162984309958e-05, | |
| "loss": 0.2105, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "grad_norm": 3.278127908706665, | |
| "learning_rate": 2.677713736791547e-05, | |
| "loss": 0.1687, | |
| "step": 3595 | |
| }, | |
| { | |
| "epoch": 5.18, | |
| "grad_norm": 7.960609436035156, | |
| "learning_rate": 2.6737111751520975e-05, | |
| "loss": 0.1793, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "grad_norm": 3.4804646968841553, | |
| "learning_rate": 2.6697086135126482e-05, | |
| "loss": 0.175, | |
| "step": 3605 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "grad_norm": 3.5590946674346924, | |
| "learning_rate": 2.6657060518731993e-05, | |
| "loss": 0.1993, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "grad_norm": 3.4853434562683105, | |
| "learning_rate": 2.6617034902337496e-05, | |
| "loss": 0.1666, | |
| "step": 3615 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "grad_norm": 4.569425582885742, | |
| "learning_rate": 2.6577009285943006e-05, | |
| "loss": 0.1932, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 5.22, | |
| "grad_norm": 3.9071567058563232, | |
| "learning_rate": 2.653698366954851e-05, | |
| "loss": 0.1899, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 5.22, | |
| "grad_norm": 2.170576333999634, | |
| "learning_rate": 2.649695805315402e-05, | |
| "loss": 0.1427, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "grad_norm": 7.018930912017822, | |
| "learning_rate": 2.645693243675953e-05, | |
| "loss": 0.2429, | |
| "step": 3635 | |
| }, | |
| { | |
| "epoch": 5.24, | |
| "grad_norm": 4.312514305114746, | |
| "learning_rate": 2.6416906820365034e-05, | |
| "loss": 0.1731, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "grad_norm": 3.58902907371521, | |
| "learning_rate": 2.6376881203970544e-05, | |
| "loss": 0.1826, | |
| "step": 3645 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "grad_norm": 4.274717807769775, | |
| "learning_rate": 2.6336855587576048e-05, | |
| "loss": 0.1643, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "grad_norm": 5.12242317199707, | |
| "learning_rate": 2.6296829971181558e-05, | |
| "loss": 0.2335, | |
| "step": 3655 | |
| }, | |
| { | |
| "epoch": 5.27, | |
| "grad_norm": 3.3458309173583984, | |
| "learning_rate": 2.625680435478706e-05, | |
| "loss": 0.1515, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "grad_norm": 3.759378433227539, | |
| "learning_rate": 2.6216778738392572e-05, | |
| "loss": 0.1848, | |
| "step": 3665 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "grad_norm": 2.559863805770874, | |
| "learning_rate": 2.6176753121998082e-05, | |
| "loss": 0.1769, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 5.29, | |
| "grad_norm": 2.732105016708374, | |
| "learning_rate": 2.6136727505603586e-05, | |
| "loss": 0.1529, | |
| "step": 3675 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "grad_norm": 2.628984212875366, | |
| "learning_rate": 2.6096701889209096e-05, | |
| "loss": 0.1971, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "grad_norm": 4.4776716232299805, | |
| "learning_rate": 2.60566762728146e-05, | |
| "loss": 0.2038, | |
| "step": 3685 | |
| }, | |
| { | |
| "epoch": 5.31, | |
| "grad_norm": 5.578450679779053, | |
| "learning_rate": 2.601665065642011e-05, | |
| "loss": 0.1579, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "grad_norm": 5.5884480476379395, | |
| "learning_rate": 2.5976625040025617e-05, | |
| "loss": 0.1939, | |
| "step": 3695 | |
| }, | |
| { | |
| "epoch": 5.33, | |
| "grad_norm": 5.272316932678223, | |
| "learning_rate": 2.5936599423631124e-05, | |
| "loss": 0.1689, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 5.33, | |
| "grad_norm": 3.2164971828460693, | |
| "learning_rate": 2.5896573807236634e-05, | |
| "loss": 0.1998, | |
| "step": 3705 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "grad_norm": 2.0555613040924072, | |
| "learning_rate": 2.585654819084214e-05, | |
| "loss": 0.1782, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "grad_norm": 4.028440475463867, | |
| "learning_rate": 2.5816522574447648e-05, | |
| "loss": 0.2054, | |
| "step": 3715 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "grad_norm": 2.9159467220306396, | |
| "learning_rate": 2.5776496958053155e-05, | |
| "loss": 0.1815, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "grad_norm": 4.195735454559326, | |
| "learning_rate": 2.5736471341658665e-05, | |
| "loss": 0.2067, | |
| "step": 3725 | |
| }, | |
| { | |
| "epoch": 5.37, | |
| "grad_norm": 3.9031412601470947, | |
| "learning_rate": 2.5696445725264168e-05, | |
| "loss": 0.1815, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "grad_norm": 4.9545087814331055, | |
| "learning_rate": 2.565642010886968e-05, | |
| "loss": 0.1777, | |
| "step": 3735 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "grad_norm": 3.0578715801239014, | |
| "learning_rate": 2.561639449247519e-05, | |
| "loss": 0.1842, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "grad_norm": 4.023972034454346, | |
| "learning_rate": 2.5576368876080692e-05, | |
| "loss": 0.1804, | |
| "step": 3745 | |
| }, | |
| { | |
| "epoch": 5.4, | |
| "grad_norm": 4.069141387939453, | |
| "learning_rate": 2.5536343259686203e-05, | |
| "loss": 0.1774, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 5.4, | |
| "grad_norm": 5.315836429595947, | |
| "learning_rate": 2.5496317643291706e-05, | |
| "loss": 0.2041, | |
| "step": 3755 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "grad_norm": 2.586327075958252, | |
| "learning_rate": 2.5456292026897216e-05, | |
| "loss": 0.236, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "grad_norm": 4.58768367767334, | |
| "learning_rate": 2.541626641050272e-05, | |
| "loss": 0.1555, | |
| "step": 3765 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "grad_norm": 3.5733981132507324, | |
| "learning_rate": 2.537624079410823e-05, | |
| "loss": 0.1699, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "grad_norm": 3.546565055847168, | |
| "learning_rate": 2.533621517771374e-05, | |
| "loss": 0.1761, | |
| "step": 3775 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "grad_norm": 4.250943183898926, | |
| "learning_rate": 2.5296189561319244e-05, | |
| "loss": 0.2104, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "grad_norm": 3.210358142852783, | |
| "learning_rate": 2.5256163944924754e-05, | |
| "loss": 0.1998, | |
| "step": 3785 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "grad_norm": 4.336333274841309, | |
| "learning_rate": 2.5216138328530258e-05, | |
| "loss": 0.2217, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "grad_norm": 3.3414406776428223, | |
| "learning_rate": 2.5176112712135768e-05, | |
| "loss": 0.1878, | |
| "step": 3795 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "grad_norm": 3.6740903854370117, | |
| "learning_rate": 2.513608709574127e-05, | |
| "loss": 0.1686, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "grad_norm": 2.5579757690429688, | |
| "learning_rate": 2.5096061479346782e-05, | |
| "loss": 0.1725, | |
| "step": 3805 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "grad_norm": 4.028023719787598, | |
| "learning_rate": 2.5056035862952292e-05, | |
| "loss": 0.2001, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "grad_norm": 3.197957992553711, | |
| "learning_rate": 2.5016010246557796e-05, | |
| "loss": 0.1868, | |
| "step": 3815 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "grad_norm": 2.4677882194519043, | |
| "learning_rate": 2.4975984630163306e-05, | |
| "loss": 0.2052, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "grad_norm": 3.9607093334198, | |
| "learning_rate": 2.4935959013768813e-05, | |
| "loss": 0.2223, | |
| "step": 3825 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "grad_norm": 3.7235755920410156, | |
| "learning_rate": 2.489593339737432e-05, | |
| "loss": 0.173, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "grad_norm": 4.570876598358154, | |
| "learning_rate": 2.485590778097983e-05, | |
| "loss": 0.195, | |
| "step": 3835 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "grad_norm": 4.305147171020508, | |
| "learning_rate": 2.4815882164585337e-05, | |
| "loss": 0.1984, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "grad_norm": 3.5951242446899414, | |
| "learning_rate": 2.4775856548190844e-05, | |
| "loss": 0.1915, | |
| "step": 3845 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "grad_norm": 4.068524360656738, | |
| "learning_rate": 2.473583093179635e-05, | |
| "loss": 0.2063, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "grad_norm": 4.295510292053223, | |
| "learning_rate": 2.4695805315401858e-05, | |
| "loss": 0.213, | |
| "step": 3855 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "grad_norm": 4.255641937255859, | |
| "learning_rate": 2.4655779699007368e-05, | |
| "loss": 0.2068, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "grad_norm": 3.168423652648926, | |
| "learning_rate": 2.4615754082612875e-05, | |
| "loss": 0.2092, | |
| "step": 3865 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "grad_norm": 2.8869216442108154, | |
| "learning_rate": 2.4575728466218382e-05, | |
| "loss": 0.1763, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "grad_norm": 3.74802565574646, | |
| "learning_rate": 2.453570284982389e-05, | |
| "loss": 0.1539, | |
| "step": 3875 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "grad_norm": 4.2037034034729, | |
| "learning_rate": 2.4495677233429396e-05, | |
| "loss": 0.1764, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "grad_norm": 6.206282615661621, | |
| "learning_rate": 2.4455651617034902e-05, | |
| "loss": 0.2138, | |
| "step": 3885 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "grad_norm": 2.233813524246216, | |
| "learning_rate": 2.441562600064041e-05, | |
| "loss": 0.1478, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "grad_norm": 5.018454074859619, | |
| "learning_rate": 2.437560038424592e-05, | |
| "loss": 0.166, | |
| "step": 3895 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "grad_norm": 4.372547626495361, | |
| "learning_rate": 2.4335574767851427e-05, | |
| "loss": 0.1964, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "grad_norm": 5.0093488693237305, | |
| "learning_rate": 2.4295549151456933e-05, | |
| "loss": 0.208, | |
| "step": 3905 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "grad_norm": 4.177137851715088, | |
| "learning_rate": 2.425552353506244e-05, | |
| "loss": 0.2036, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "grad_norm": 3.151520013809204, | |
| "learning_rate": 2.4215497918667947e-05, | |
| "loss": 0.1867, | |
| "step": 3915 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "grad_norm": 2.9322078227996826, | |
| "learning_rate": 2.4175472302273454e-05, | |
| "loss": 0.1692, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "grad_norm": 3.5823426246643066, | |
| "learning_rate": 2.413544668587896e-05, | |
| "loss": 0.2218, | |
| "step": 3925 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "grad_norm": 3.663548707962036, | |
| "learning_rate": 2.409542106948447e-05, | |
| "loss": 0.1654, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "grad_norm": 3.8358802795410156, | |
| "learning_rate": 2.4055395453089978e-05, | |
| "loss": 0.161, | |
| "step": 3935 | |
| }, | |
| { | |
| "epoch": 5.67, | |
| "grad_norm": 3.567988157272339, | |
| "learning_rate": 2.4015369836695485e-05, | |
| "loss": 0.1757, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "grad_norm": 4.0143046379089355, | |
| "learning_rate": 2.3975344220300992e-05, | |
| "loss": 0.1715, | |
| "step": 3945 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "grad_norm": 6.651194095611572, | |
| "learning_rate": 2.3935318603906502e-05, | |
| "loss": 0.191, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "grad_norm": 3.7898061275482178, | |
| "learning_rate": 2.389529298751201e-05, | |
| "loss": 0.2112, | |
| "step": 3955 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "grad_norm": 2.6467232704162598, | |
| "learning_rate": 2.3855267371117516e-05, | |
| "loss": 0.1807, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "grad_norm": 3.334956407546997, | |
| "learning_rate": 2.3815241754723026e-05, | |
| "loss": 0.1995, | |
| "step": 3965 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "grad_norm": 4.663839817047119, | |
| "learning_rate": 2.3775216138328533e-05, | |
| "loss": 0.1653, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "grad_norm": 3.8154709339141846, | |
| "learning_rate": 2.373519052193404e-05, | |
| "loss": 0.1893, | |
| "step": 3975 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "grad_norm": 3.9679391384124756, | |
| "learning_rate": 2.3695164905539547e-05, | |
| "loss": 0.1995, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "grad_norm": 2.465738296508789, | |
| "learning_rate": 2.3655139289145054e-05, | |
| "loss": 0.1571, | |
| "step": 3985 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "grad_norm": 3.7358932495117188, | |
| "learning_rate": 2.361511367275056e-05, | |
| "loss": 0.1911, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "grad_norm": 2.5874032974243164, | |
| "learning_rate": 2.3575088056356068e-05, | |
| "loss": 0.1535, | |
| "step": 3995 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "grad_norm": 4.26051139831543, | |
| "learning_rate": 2.3535062439961578e-05, | |
| "loss": 0.1562, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "grad_norm": 3.7166473865509033, | |
| "learning_rate": 2.3495036823567085e-05, | |
| "loss": 0.1605, | |
| "step": 4005 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "grad_norm": 2.865448474884033, | |
| "learning_rate": 2.3455011207172592e-05, | |
| "loss": 0.1836, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 5.78, | |
| "grad_norm": 2.9740395545959473, | |
| "learning_rate": 2.34149855907781e-05, | |
| "loss": 0.1539, | |
| "step": 4015 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "grad_norm": 2.9424655437469482, | |
| "learning_rate": 2.3374959974383606e-05, | |
| "loss": 0.1671, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "grad_norm": 2.8022515773773193, | |
| "learning_rate": 2.3334934357989113e-05, | |
| "loss": 0.1841, | |
| "step": 4025 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "grad_norm": 3.5465009212493896, | |
| "learning_rate": 2.329490874159462e-05, | |
| "loss": 0.1789, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 5.81, | |
| "grad_norm": 3.6574504375457764, | |
| "learning_rate": 2.325488312520013e-05, | |
| "loss": 0.2014, | |
| "step": 4035 | |
| }, | |
| { | |
| "epoch": 5.82, | |
| "grad_norm": 3.3290579319000244, | |
| "learning_rate": 2.3214857508805637e-05, | |
| "loss": 0.1704, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 5.82, | |
| "grad_norm": 3.483360767364502, | |
| "learning_rate": 2.3174831892411144e-05, | |
| "loss": 0.2339, | |
| "step": 4045 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "grad_norm": 4.405252933502197, | |
| "learning_rate": 2.313480627601665e-05, | |
| "loss": 0.1826, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "grad_norm": 3.58305025100708, | |
| "learning_rate": 2.3094780659622157e-05, | |
| "loss": 0.1709, | |
| "step": 4055 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "grad_norm": 3.658742904663086, | |
| "learning_rate": 2.3054755043227668e-05, | |
| "loss": 0.1889, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "grad_norm": 3.7958412170410156, | |
| "learning_rate": 2.3014729426833175e-05, | |
| "loss": 0.1402, | |
| "step": 4065 | |
| }, | |
| { | |
| "epoch": 5.86, | |
| "grad_norm": 3.8586859703063965, | |
| "learning_rate": 2.297470381043868e-05, | |
| "loss": 0.1979, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "grad_norm": 3.769714117050171, | |
| "learning_rate": 2.293467819404419e-05, | |
| "loss": 0.2036, | |
| "step": 4075 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "grad_norm": 4.616032600402832, | |
| "learning_rate": 2.28946525776497e-05, | |
| "loss": 0.2101, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 5.88, | |
| "grad_norm": 4.014847755432129, | |
| "learning_rate": 2.2854626961255205e-05, | |
| "loss": 0.2255, | |
| "step": 4085 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "grad_norm": 4.2372002601623535, | |
| "learning_rate": 2.2814601344860712e-05, | |
| "loss": 0.1891, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "grad_norm": 5.569272518157959, | |
| "learning_rate": 2.277457572846622e-05, | |
| "loss": 0.2282, | |
| "step": 4095 | |
| }, | |
| { | |
| "epoch": 5.9, | |
| "grad_norm": 4.124386310577393, | |
| "learning_rate": 2.2734550112071726e-05, | |
| "loss": 0.2061, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "grad_norm": 3.636678457260132, | |
| "learning_rate": 2.2694524495677236e-05, | |
| "loss": 0.1935, | |
| "step": 4105 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "grad_norm": 5.7811808586120605, | |
| "learning_rate": 2.2654498879282743e-05, | |
| "loss": 0.1831, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "grad_norm": 3.2394773960113525, | |
| "learning_rate": 2.261447326288825e-05, | |
| "loss": 0.1831, | |
| "step": 4115 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "grad_norm": 3.8104443550109863, | |
| "learning_rate": 2.2574447646493757e-05, | |
| "loss": 0.2237, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "grad_norm": 3.458548069000244, | |
| "learning_rate": 2.2534422030099264e-05, | |
| "loss": 0.1785, | |
| "step": 4125 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "grad_norm": 2.2402632236480713, | |
| "learning_rate": 2.249439641370477e-05, | |
| "loss": 0.1534, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 5.95, | |
| "grad_norm": 4.9622392654418945, | |
| "learning_rate": 2.2454370797310278e-05, | |
| "loss": 0.2295, | |
| "step": 4135 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "grad_norm": 2.6501126289367676, | |
| "learning_rate": 2.2414345180915788e-05, | |
| "loss": 0.1917, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 5.97, | |
| "grad_norm": 2.041633367538452, | |
| "learning_rate": 2.2374319564521295e-05, | |
| "loss": 0.174, | |
| "step": 4145 | |
| }, | |
| { | |
| "epoch": 5.97, | |
| "grad_norm": 3.905135154724121, | |
| "learning_rate": 2.2334293948126802e-05, | |
| "loss": 0.2369, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "grad_norm": 2.685920238494873, | |
| "learning_rate": 2.229426833173231e-05, | |
| "loss": 0.2065, | |
| "step": 4155 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "grad_norm": 3.8590340614318848, | |
| "learning_rate": 2.2254242715337816e-05, | |
| "loss": 0.1832, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "grad_norm": 3.912101984024048, | |
| "learning_rate": 2.2214217098943323e-05, | |
| "loss": 0.1793, | |
| "step": 4165 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_accuracy": 0.9522267206477733, | |
| "eval_loss": 0.12901896238327026, | |
| "eval_runtime": 26.8108, | |
| "eval_samples_per_second": 368.508, | |
| "eval_steps_per_second": 11.525, | |
| "step": 4168 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 3.0803754329681396, | |
| "learning_rate": 2.217419148254883e-05, | |
| "loss": 0.1826, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 6.01, | |
| "grad_norm": 3.364600896835327, | |
| "learning_rate": 2.213416586615434e-05, | |
| "loss": 0.187, | |
| "step": 4175 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "grad_norm": 3.815800428390503, | |
| "learning_rate": 2.2094140249759847e-05, | |
| "loss": 0.1836, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "grad_norm": 2.7681992053985596, | |
| "learning_rate": 2.2054114633365357e-05, | |
| "loss": 0.154, | |
| "step": 4185 | |
| }, | |
| { | |
| "epoch": 6.03, | |
| "grad_norm": 3.0132570266723633, | |
| "learning_rate": 2.2014089016970864e-05, | |
| "loss": 0.1915, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 6.04, | |
| "grad_norm": 5.308042526245117, | |
| "learning_rate": 2.197406340057637e-05, | |
| "loss": 0.1596, | |
| "step": 4195 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "grad_norm": 4.3803863525390625, | |
| "learning_rate": 2.1934037784181878e-05, | |
| "loss": 0.2031, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "grad_norm": 3.398024082183838, | |
| "learning_rate": 2.1894012167787385e-05, | |
| "loss": 0.1732, | |
| "step": 4205 | |
| }, | |
| { | |
| "epoch": 6.06, | |
| "grad_norm": 2.77665114402771, | |
| "learning_rate": 2.1853986551392895e-05, | |
| "loss": 0.1978, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "grad_norm": 6.4980974197387695, | |
| "learning_rate": 2.1813960934998402e-05, | |
| "loss": 0.1928, | |
| "step": 4215 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "grad_norm": 5.6872100830078125, | |
| "learning_rate": 2.177393531860391e-05, | |
| "loss": 0.2128, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "grad_norm": 3.19380784034729, | |
| "learning_rate": 2.1733909702209416e-05, | |
| "loss": 0.1975, | |
| "step": 4225 | |
| }, | |
| { | |
| "epoch": 6.09, | |
| "grad_norm": 2.491637945175171, | |
| "learning_rate": 2.1693884085814922e-05, | |
| "loss": 0.1662, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 6.1, | |
| "grad_norm": 4.6479716300964355, | |
| "learning_rate": 2.165385846942043e-05, | |
| "loss": 0.1598, | |
| "step": 4235 | |
| }, | |
| { | |
| "epoch": 6.1, | |
| "grad_norm": 3.320585250854492, | |
| "learning_rate": 2.1613832853025936e-05, | |
| "loss": 0.1476, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "grad_norm": 4.611423015594482, | |
| "learning_rate": 2.1573807236631447e-05, | |
| "loss": 0.1897, | |
| "step": 4245 | |
| }, | |
| { | |
| "epoch": 6.12, | |
| "grad_norm": 3.57256817817688, | |
| "learning_rate": 2.1533781620236953e-05, | |
| "loss": 0.1562, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 6.12, | |
| "grad_norm": 5.594003200531006, | |
| "learning_rate": 2.149375600384246e-05, | |
| "loss": 0.2314, | |
| "step": 4255 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "grad_norm": 4.315159797668457, | |
| "learning_rate": 2.1453730387447967e-05, | |
| "loss": 0.1761, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 6.14, | |
| "grad_norm": 4.169220924377441, | |
| "learning_rate": 2.1413704771053474e-05, | |
| "loss": 0.188, | |
| "step": 4265 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "grad_norm": 7.287806510925293, | |
| "learning_rate": 2.137367915465898e-05, | |
| "loss": 0.2059, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "grad_norm": 3.8299801349639893, | |
| "learning_rate": 2.1333653538264488e-05, | |
| "loss": 0.1499, | |
| "step": 4275 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "grad_norm": 3.4622209072113037, | |
| "learning_rate": 2.1293627921869998e-05, | |
| "loss": 0.1913, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "grad_norm": 4.407230377197266, | |
| "learning_rate": 2.1253602305475505e-05, | |
| "loss": 0.1673, | |
| "step": 4285 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "grad_norm": 3.8075199127197266, | |
| "learning_rate": 2.1213576689081012e-05, | |
| "loss": 0.1341, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 6.18, | |
| "grad_norm": 2.8441085815429688, | |
| "learning_rate": 2.117355107268652e-05, | |
| "loss": 0.1937, | |
| "step": 4295 | |
| }, | |
| { | |
| "epoch": 6.19, | |
| "grad_norm": 3.905503034591675, | |
| "learning_rate": 2.113352545629203e-05, | |
| "loss": 0.1853, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "grad_norm": 2.487541675567627, | |
| "learning_rate": 2.1093499839897536e-05, | |
| "loss": 0.1671, | |
| "step": 4305 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "grad_norm": 4.602748870849609, | |
| "learning_rate": 2.1053474223503043e-05, | |
| "loss": 0.1677, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 6.21, | |
| "grad_norm": 3.4200439453125, | |
| "learning_rate": 2.1013448607108553e-05, | |
| "loss": 0.169, | |
| "step": 4315 | |
| }, | |
| { | |
| "epoch": 6.22, | |
| "grad_norm": 3.3340063095092773, | |
| "learning_rate": 2.097342299071406e-05, | |
| "loss": 0.1686, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "grad_norm": 6.540536403656006, | |
| "learning_rate": 2.0933397374319567e-05, | |
| "loss": 0.1712, | |
| "step": 4325 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "grad_norm": 3.8900535106658936, | |
| "learning_rate": 2.0893371757925074e-05, | |
| "loss": 0.1524, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 6.24, | |
| "grad_norm": 3.2148051261901855, | |
| "learning_rate": 2.085334614153058e-05, | |
| "loss": 0.1393, | |
| "step": 4335 | |
| }, | |
| { | |
| "epoch": 6.25, | |
| "grad_norm": 5.575131416320801, | |
| "learning_rate": 2.0813320525136088e-05, | |
| "loss": 0.2153, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 6.25, | |
| "grad_norm": 3.013366222381592, | |
| "learning_rate": 2.0773294908741595e-05, | |
| "loss": 0.1949, | |
| "step": 4345 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "grad_norm": 5.952768325805664, | |
| "learning_rate": 2.0733269292347105e-05, | |
| "loss": 0.2062, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 6.27, | |
| "grad_norm": 2.8933584690093994, | |
| "learning_rate": 2.0693243675952612e-05, | |
| "loss": 0.1815, | |
| "step": 4355 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "grad_norm": 4.637942790985107, | |
| "learning_rate": 2.065321805955812e-05, | |
| "loss": 0.1566, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "grad_norm": 3.233267068862915, | |
| "learning_rate": 2.0613192443163626e-05, | |
| "loss": 0.1657, | |
| "step": 4365 | |
| }, | |
| { | |
| "epoch": 6.29, | |
| "grad_norm": 3.421495199203491, | |
| "learning_rate": 2.0573166826769133e-05, | |
| "loss": 0.1737, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "grad_norm": 3.9024555683135986, | |
| "learning_rate": 2.053314121037464e-05, | |
| "loss": 0.1997, | |
| "step": 4375 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "grad_norm": 3.0292460918426514, | |
| "learning_rate": 2.0493115593980146e-05, | |
| "loss": 0.1685, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 6.31, | |
| "grad_norm": 3.764313220977783, | |
| "learning_rate": 2.0453089977585657e-05, | |
| "loss": 0.1926, | |
| "step": 4385 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "grad_norm": 3.7476236820220947, | |
| "learning_rate": 2.0413064361191164e-05, | |
| "loss": 0.155, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 6.33, | |
| "grad_norm": 3.0338127613067627, | |
| "learning_rate": 2.037303874479667e-05, | |
| "loss": 0.1698, | |
| "step": 4395 | |
| }, | |
| { | |
| "epoch": 6.33, | |
| "grad_norm": 3.0882599353790283, | |
| "learning_rate": 2.0333013128402177e-05, | |
| "loss": 0.1895, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 6.34, | |
| "grad_norm": 3.8048288822174072, | |
| "learning_rate": 2.0292987512007684e-05, | |
| "loss": 0.2199, | |
| "step": 4405 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "grad_norm": 4.483154296875, | |
| "learning_rate": 2.025296189561319e-05, | |
| "loss": 0.1615, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "grad_norm": 2.9506173133850098, | |
| "learning_rate": 2.02129362792187e-05, | |
| "loss": 0.1447, | |
| "step": 4415 | |
| }, | |
| { | |
| "epoch": 6.36, | |
| "grad_norm": 6.518076419830322, | |
| "learning_rate": 2.017291066282421e-05, | |
| "loss": 0.1792, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "grad_norm": 2.7018208503723145, | |
| "learning_rate": 2.013288504642972e-05, | |
| "loss": 0.1908, | |
| "step": 4425 | |
| }, | |
| { | |
| "epoch": 6.38, | |
| "grad_norm": 3.325758457183838, | |
| "learning_rate": 2.0092859430035225e-05, | |
| "loss": 0.1714, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 6.38, | |
| "grad_norm": 3.347137928009033, | |
| "learning_rate": 2.0052833813640732e-05, | |
| "loss": 0.1641, | |
| "step": 4435 | |
| }, | |
| { | |
| "epoch": 6.39, | |
| "grad_norm": 3.994041681289673, | |
| "learning_rate": 2.001280819724624e-05, | |
| "loss": 0.1857, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "grad_norm": 4.458554744720459, | |
| "learning_rate": 1.9972782580851746e-05, | |
| "loss": 0.1762, | |
| "step": 4445 | |
| }, | |
| { | |
| "epoch": 6.41, | |
| "grad_norm": 4.118882656097412, | |
| "learning_rate": 1.9932756964457253e-05, | |
| "loss": 0.2023, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 6.41, | |
| "grad_norm": 3.061014175415039, | |
| "learning_rate": 1.9892731348062763e-05, | |
| "loss": 0.1874, | |
| "step": 4455 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "grad_norm": 3.94010853767395, | |
| "learning_rate": 1.985270573166827e-05, | |
| "loss": 0.1926, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "grad_norm": 3.748819351196289, | |
| "learning_rate": 1.9812680115273777e-05, | |
| "loss": 0.1585, | |
| "step": 4465 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "grad_norm": 4.36990213394165, | |
| "learning_rate": 1.9772654498879284e-05, | |
| "loss": 0.1674, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 6.44, | |
| "grad_norm": 4.165234565734863, | |
| "learning_rate": 1.973262888248479e-05, | |
| "loss": 0.1858, | |
| "step": 4475 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "grad_norm": 5.72090482711792, | |
| "learning_rate": 1.9692603266090298e-05, | |
| "loss": 0.1881, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 6.46, | |
| "grad_norm": 3.430233955383301, | |
| "learning_rate": 1.9652577649695805e-05, | |
| "loss": 0.188, | |
| "step": 4485 | |
| }, | |
| { | |
| "epoch": 6.46, | |
| "grad_norm": 4.649074554443359, | |
| "learning_rate": 1.961255203330131e-05, | |
| "loss": 0.1757, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 6.47, | |
| "grad_norm": 4.929919242858887, | |
| "learning_rate": 1.9572526416906822e-05, | |
| "loss": 0.1574, | |
| "step": 4495 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "grad_norm": 4.103848934173584, | |
| "learning_rate": 1.953250080051233e-05, | |
| "loss": 0.1382, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "grad_norm": 3.4986753463745117, | |
| "learning_rate": 1.9492475184117836e-05, | |
| "loss": 0.1716, | |
| "step": 4505 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "grad_norm": 3.33921217918396, | |
| "learning_rate": 1.9452449567723343e-05, | |
| "loss": 0.1825, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 6.5, | |
| "grad_norm": 3.54250431060791, | |
| "learning_rate": 1.941242395132885e-05, | |
| "loss": 0.2003, | |
| "step": 4515 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "grad_norm": 4.161519527435303, | |
| "learning_rate": 1.9372398334934356e-05, | |
| "loss": 0.1651, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "grad_norm": 3.6889195442199707, | |
| "learning_rate": 1.9332372718539867e-05, | |
| "loss": 0.1684, | |
| "step": 4525 | |
| }, | |
| { | |
| "epoch": 6.52, | |
| "grad_norm": 2.6100666522979736, | |
| "learning_rate": 1.9292347102145374e-05, | |
| "loss": 0.1705, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 6.53, | |
| "grad_norm": 4.2136945724487305, | |
| "learning_rate": 1.925232148575088e-05, | |
| "loss": 0.1724, | |
| "step": 4535 | |
| }, | |
| { | |
| "epoch": 6.53, | |
| "grad_norm": 3.131673812866211, | |
| "learning_rate": 1.921229586935639e-05, | |
| "loss": 0.1764, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 6.54, | |
| "grad_norm": 3.179766893386841, | |
| "learning_rate": 1.9172270252961898e-05, | |
| "loss": 0.1691, | |
| "step": 4545 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "grad_norm": 3.707369089126587, | |
| "learning_rate": 1.9132244636567405e-05, | |
| "loss": 0.1568, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 6.56, | |
| "grad_norm": 4.39201545715332, | |
| "learning_rate": 1.909221902017291e-05, | |
| "loss": 0.1556, | |
| "step": 4555 | |
| }, | |
| { | |
| "epoch": 6.56, | |
| "grad_norm": 4.148761749267578, | |
| "learning_rate": 1.905219340377842e-05, | |
| "loss": 0.1541, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 6.57, | |
| "grad_norm": 3.5763509273529053, | |
| "learning_rate": 1.901216778738393e-05, | |
| "loss": 0.18, | |
| "step": 4565 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "grad_norm": 3.047024726867676, | |
| "learning_rate": 1.8972142170989436e-05, | |
| "loss": 0.1967, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 6.59, | |
| "grad_norm": 4.121464252471924, | |
| "learning_rate": 1.8932116554594942e-05, | |
| "loss": 0.1543, | |
| "step": 4575 | |
| }, | |
| { | |
| "epoch": 6.59, | |
| "grad_norm": 2.1216299533843994, | |
| "learning_rate": 1.889209093820045e-05, | |
| "loss": 0.1663, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "grad_norm": 2.798755645751953, | |
| "learning_rate": 1.8852065321805956e-05, | |
| "loss": 0.1794, | |
| "step": 4585 | |
| }, | |
| { | |
| "epoch": 6.61, | |
| "grad_norm": 3.9779279232025146, | |
| "learning_rate": 1.8812039705411463e-05, | |
| "loss": 0.2191, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 6.61, | |
| "grad_norm": 3.798940658569336, | |
| "learning_rate": 1.877201408901697e-05, | |
| "loss": 0.1791, | |
| "step": 4595 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "grad_norm": 4.1768951416015625, | |
| "learning_rate": 1.873198847262248e-05, | |
| "loss": 0.1853, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 6.63, | |
| "grad_norm": 2.557591199874878, | |
| "learning_rate": 1.8691962856227987e-05, | |
| "loss": 0.1693, | |
| "step": 4605 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "grad_norm": 3.179755687713623, | |
| "learning_rate": 1.8651937239833494e-05, | |
| "loss": 0.1703, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "grad_norm": 2.7072629928588867, | |
| "learning_rate": 1.8611911623439e-05, | |
| "loss": 0.1592, | |
| "step": 4615 | |
| }, | |
| { | |
| "epoch": 6.65, | |
| "grad_norm": 4.008791923522949, | |
| "learning_rate": 1.8571886007044508e-05, | |
| "loss": 0.1781, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "grad_norm": 3.89431095123291, | |
| "learning_rate": 1.8531860390650015e-05, | |
| "loss": 0.1973, | |
| "step": 4625 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "grad_norm": 3.258575201034546, | |
| "learning_rate": 1.8491834774255522e-05, | |
| "loss": 0.143, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "grad_norm": 6.140786647796631, | |
| "learning_rate": 1.8451809157861032e-05, | |
| "loss": 0.1682, | |
| "step": 4635 | |
| }, | |
| { | |
| "epoch": 6.68, | |
| "grad_norm": 2.957918405532837, | |
| "learning_rate": 1.841178354146654e-05, | |
| "loss": 0.1977, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "grad_norm": 4.5869574546813965, | |
| "learning_rate": 1.8371757925072046e-05, | |
| "loss": 0.1443, | |
| "step": 4645 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "grad_norm": 2.5755558013916016, | |
| "learning_rate": 1.8331732308677556e-05, | |
| "loss": 0.2153, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 6.7, | |
| "grad_norm": 3.475339889526367, | |
| "learning_rate": 1.8291706692283063e-05, | |
| "loss": 0.1443, | |
| "step": 4655 | |
| }, | |
| { | |
| "epoch": 6.71, | |
| "grad_norm": 2.79864501953125, | |
| "learning_rate": 1.825168107588857e-05, | |
| "loss": 0.1564, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 6.71, | |
| "grad_norm": 2.523737668991089, | |
| "learning_rate": 1.8211655459494077e-05, | |
| "loss": 0.1609, | |
| "step": 4665 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "grad_norm": 4.166416645050049, | |
| "learning_rate": 1.8171629843099587e-05, | |
| "loss": 0.1926, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 6.73, | |
| "grad_norm": 3.4169209003448486, | |
| "learning_rate": 1.8131604226705094e-05, | |
| "loss": 0.1841, | |
| "step": 4675 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "grad_norm": 4.482958793640137, | |
| "learning_rate": 1.80915786103106e-05, | |
| "loss": 0.1699, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "grad_norm": 3.656352996826172, | |
| "learning_rate": 1.8051552993916108e-05, | |
| "loss": 0.1883, | |
| "step": 4685 | |
| }, | |
| { | |
| "epoch": 6.75, | |
| "grad_norm": 3.9258341789245605, | |
| "learning_rate": 1.8011527377521615e-05, | |
| "loss": 0.1847, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 6.76, | |
| "grad_norm": 3.6343822479248047, | |
| "learning_rate": 1.797150176112712e-05, | |
| "loss": 0.2101, | |
| "step": 4695 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "grad_norm": 4.363142013549805, | |
| "learning_rate": 1.793147614473263e-05, | |
| "loss": 0.1752, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "grad_norm": 3.392268419265747, | |
| "learning_rate": 1.789145052833814e-05, | |
| "loss": 0.1807, | |
| "step": 4705 | |
| }, | |
| { | |
| "epoch": 6.78, | |
| "grad_norm": 2.219773054122925, | |
| "learning_rate": 1.7851424911943646e-05, | |
| "loss": 0.1176, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "grad_norm": 2.391719102859497, | |
| "learning_rate": 1.7811399295549153e-05, | |
| "loss": 0.1475, | |
| "step": 4715 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "grad_norm": 3.7803821563720703, | |
| "learning_rate": 1.777137367915466e-05, | |
| "loss": 0.179, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 6.8, | |
| "grad_norm": 4.069080352783203, | |
| "learning_rate": 1.7731348062760166e-05, | |
| "loss": 0.199, | |
| "step": 4725 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "grad_norm": 2.244948387145996, | |
| "learning_rate": 1.7691322446365673e-05, | |
| "loss": 0.15, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 6.82, | |
| "grad_norm": 2.642184019088745, | |
| "learning_rate": 1.765129682997118e-05, | |
| "loss": 0.1525, | |
| "step": 4735 | |
| }, | |
| { | |
| "epoch": 6.82, | |
| "grad_norm": 3.719548463821411, | |
| "learning_rate": 1.761127121357669e-05, | |
| "loss": 0.1744, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 6.83, | |
| "grad_norm": 4.889650821685791, | |
| "learning_rate": 1.7571245597182197e-05, | |
| "loss": 0.1936, | |
| "step": 4745 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "grad_norm": 4.086753845214844, | |
| "learning_rate": 1.7531219980787704e-05, | |
| "loss": 0.1745, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "grad_norm": 3.039050817489624, | |
| "learning_rate": 1.749119436439321e-05, | |
| "loss": 0.1442, | |
| "step": 4755 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "grad_norm": 3.0932884216308594, | |
| "learning_rate": 1.7451168747998718e-05, | |
| "loss": 0.1575, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 6.86, | |
| "grad_norm": 14.424320220947266, | |
| "learning_rate": 1.741114313160423e-05, | |
| "loss": 0.1479, | |
| "step": 4765 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "grad_norm": 6.001073837280273, | |
| "learning_rate": 1.7371117515209735e-05, | |
| "loss": 0.1448, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "grad_norm": 4.473045349121094, | |
| "learning_rate": 1.7331091898815242e-05, | |
| "loss": 0.1468, | |
| "step": 4775 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "grad_norm": 3.3458259105682373, | |
| "learning_rate": 1.7291066282420752e-05, | |
| "loss": 0.142, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 6.89, | |
| "grad_norm": 4.793277263641357, | |
| "learning_rate": 1.725104066602626e-05, | |
| "loss": 0.1837, | |
| "step": 4785 | |
| }, | |
| { | |
| "epoch": 6.89, | |
| "grad_norm": 3.12072491645813, | |
| "learning_rate": 1.7211015049631766e-05, | |
| "loss": 0.2176, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 6.9, | |
| "grad_norm": 2.7718887329101562, | |
| "learning_rate": 1.7170989433237273e-05, | |
| "loss": 0.1801, | |
| "step": 4795 | |
| }, | |
| { | |
| "epoch": 6.91, | |
| "grad_norm": 4.768100738525391, | |
| "learning_rate": 1.713096381684278e-05, | |
| "loss": 0.2163, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "grad_norm": 3.0787980556488037, | |
| "learning_rate": 1.7090938200448287e-05, | |
| "loss": 0.1444, | |
| "step": 4805 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "grad_norm": 4.640768051147461, | |
| "learning_rate": 1.7050912584053797e-05, | |
| "loss": 0.1869, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 6.93, | |
| "grad_norm": 2.0733461380004883, | |
| "learning_rate": 1.7010886967659304e-05, | |
| "loss": 0.1588, | |
| "step": 4815 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "grad_norm": 4.384278774261475, | |
| "learning_rate": 1.697086135126481e-05, | |
| "loss": 0.195, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "grad_norm": 4.83052396774292, | |
| "learning_rate": 1.6930835734870318e-05, | |
| "loss": 0.1871, | |
| "step": 4825 | |
| }, | |
| { | |
| "epoch": 6.95, | |
| "grad_norm": 3.9045228958129883, | |
| "learning_rate": 1.6890810118475825e-05, | |
| "loss": 0.1829, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "grad_norm": 2.1931328773498535, | |
| "learning_rate": 1.685078450208133e-05, | |
| "loss": 0.1697, | |
| "step": 4835 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "grad_norm": 5.390465259552002, | |
| "learning_rate": 1.681075888568684e-05, | |
| "loss": 0.1787, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "grad_norm": 2.7756080627441406, | |
| "learning_rate": 1.677073326929235e-05, | |
| "loss": 0.1668, | |
| "step": 4845 | |
| }, | |
| { | |
| "epoch": 6.98, | |
| "grad_norm": 3.4794211387634277, | |
| "learning_rate": 1.6730707652897856e-05, | |
| "loss": 0.1443, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "grad_norm": 2.9568474292755127, | |
| "learning_rate": 1.6690682036503363e-05, | |
| "loss": 0.124, | |
| "step": 4855 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "grad_norm": 2.9625253677368164, | |
| "learning_rate": 1.665065642010887e-05, | |
| "loss": 0.19, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "eval_accuracy": 0.9533400809716599, | |
| "eval_loss": 0.13323618471622467, | |
| "eval_runtime": 26.9803, | |
| "eval_samples_per_second": 366.194, | |
| "eval_steps_per_second": 11.453, | |
| "step": 4863 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "grad_norm": 3.4989819526672363, | |
| "learning_rate": 1.6610630803714376e-05, | |
| "loss": 0.1874, | |
| "step": 4865 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "grad_norm": 3.2885279655456543, | |
| "learning_rate": 1.6570605187319883e-05, | |
| "loss": 0.1561, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "grad_norm": 3.600435972213745, | |
| "learning_rate": 1.653057957092539e-05, | |
| "loss": 0.1705, | |
| "step": 4875 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "grad_norm": 4.295845031738281, | |
| "learning_rate": 1.64905539545309e-05, | |
| "loss": 0.1646, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 7.03, | |
| "grad_norm": 2.21591854095459, | |
| "learning_rate": 1.6450528338136407e-05, | |
| "loss": 0.1434, | |
| "step": 4885 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "grad_norm": 3.466616153717041, | |
| "learning_rate": 1.6410502721741918e-05, | |
| "loss": 0.1694, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "grad_norm": 3.914956569671631, | |
| "learning_rate": 1.6370477105347425e-05, | |
| "loss": 0.1535, | |
| "step": 4895 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "grad_norm": 4.452558994293213, | |
| "learning_rate": 1.633045148895293e-05, | |
| "loss": 0.1463, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "grad_norm": 3.224085807800293, | |
| "learning_rate": 1.629042587255844e-05, | |
| "loss": 0.1182, | |
| "step": 4905 | |
| }, | |
| { | |
| "epoch": 7.07, | |
| "grad_norm": 3.1381661891937256, | |
| "learning_rate": 1.6250400256163945e-05, | |
| "loss": 0.1627, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 7.07, | |
| "grad_norm": 3.2462520599365234, | |
| "learning_rate": 1.6210374639769456e-05, | |
| "loss": 0.1611, | |
| "step": 4915 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "grad_norm": 2.3595752716064453, | |
| "learning_rate": 1.6170349023374962e-05, | |
| "loss": 0.1383, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "grad_norm": 2.8666834831237793, | |
| "learning_rate": 1.613032340698047e-05, | |
| "loss": 0.1735, | |
| "step": 4925 | |
| }, | |
| { | |
| "epoch": 7.1, | |
| "grad_norm": 4.893600940704346, | |
| "learning_rate": 1.6090297790585976e-05, | |
| "loss": 0.1909, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 7.1, | |
| "grad_norm": 3.570570230484009, | |
| "learning_rate": 1.6050272174191483e-05, | |
| "loss": 0.1695, | |
| "step": 4935 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "grad_norm": 2.665599822998047, | |
| "learning_rate": 1.601024655779699e-05, | |
| "loss": 0.1534, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "grad_norm": 5.707326412200928, | |
| "learning_rate": 1.5970220941402497e-05, | |
| "loss": 0.1687, | |
| "step": 4945 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "grad_norm": 3.2267582416534424, | |
| "learning_rate": 1.5930195325008007e-05, | |
| "loss": 0.16, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 7.13, | |
| "grad_norm": 5.348135948181152, | |
| "learning_rate": 1.5890169708613514e-05, | |
| "loss": 0.1575, | |
| "step": 4955 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "grad_norm": 2.868098497390747, | |
| "learning_rate": 1.585014409221902e-05, | |
| "loss": 0.1875, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "grad_norm": 4.623507976531982, | |
| "learning_rate": 1.5810118475824528e-05, | |
| "loss": 0.2132, | |
| "step": 4965 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "grad_norm": 3.894582509994507, | |
| "learning_rate": 1.5770092859430035e-05, | |
| "loss": 0.202, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 7.16, | |
| "grad_norm": 4.989294052124023, | |
| "learning_rate": 1.5730067243035542e-05, | |
| "loss": 0.1773, | |
| "step": 4975 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "grad_norm": 2.993002414703369, | |
| "learning_rate": 1.569004162664105e-05, | |
| "loss": 0.1961, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 7.18, | |
| "grad_norm": 3.3114404678344727, | |
| "learning_rate": 1.565001601024656e-05, | |
| "loss": 0.1341, | |
| "step": 4985 | |
| }, | |
| { | |
| "epoch": 7.18, | |
| "grad_norm": 4.713860988616943, | |
| "learning_rate": 1.5609990393852066e-05, | |
| "loss": 0.1367, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 7.19, | |
| "grad_norm": 3.016030788421631, | |
| "learning_rate": 1.5569964777457573e-05, | |
| "loss": 0.1768, | |
| "step": 4995 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "grad_norm": 3.965454578399658, | |
| "learning_rate": 1.552993916106308e-05, | |
| "loss": 0.1454, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "grad_norm": 2.214106798171997, | |
| "learning_rate": 1.548991354466859e-05, | |
| "loss": 0.1392, | |
| "step": 5005 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "grad_norm": 3.117654800415039, | |
| "learning_rate": 1.5449887928274097e-05, | |
| "loss": 0.1716, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 7.22, | |
| "grad_norm": 4.0507493019104, | |
| "learning_rate": 1.5409862311879604e-05, | |
| "loss": 0.175, | |
| "step": 5015 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "grad_norm": 4.791309356689453, | |
| "learning_rate": 1.5369836695485114e-05, | |
| "loss": 0.1885, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "grad_norm": 4.158595561981201, | |
| "learning_rate": 1.532981107909062e-05, | |
| "loss": 0.1712, | |
| "step": 5025 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "grad_norm": 3.812695264816284, | |
| "learning_rate": 1.5289785462696128e-05, | |
| "loss": 0.1444, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "grad_norm": 3.045092821121216, | |
| "learning_rate": 1.5249759846301635e-05, | |
| "loss": 0.183, | |
| "step": 5035 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "grad_norm": 2.478226661682129, | |
| "learning_rate": 1.5209734229907142e-05, | |
| "loss": 0.1099, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "grad_norm": 5.668630599975586, | |
| "learning_rate": 1.5169708613512648e-05, | |
| "loss": 0.1812, | |
| "step": 5045 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "grad_norm": 4.244511604309082, | |
| "learning_rate": 1.5129682997118155e-05, | |
| "loss": 0.1864, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "grad_norm": 2.8045542240142822, | |
| "learning_rate": 1.5089657380723666e-05, | |
| "loss": 0.1713, | |
| "step": 5055 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "grad_norm": 2.768808126449585, | |
| "learning_rate": 1.5049631764329173e-05, | |
| "loss": 0.1761, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "grad_norm": 3.726841449737549, | |
| "learning_rate": 1.500960614793468e-05, | |
| "loss": 0.1605, | |
| "step": 5065 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "grad_norm": 4.635730743408203, | |
| "learning_rate": 1.4969580531540186e-05, | |
| "loss": 0.179, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "grad_norm": 3.83880877494812, | |
| "learning_rate": 1.4929554915145693e-05, | |
| "loss": 0.1506, | |
| "step": 5075 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "grad_norm": 2.271193504333496, | |
| "learning_rate": 1.48895292987512e-05, | |
| "loss": 0.1327, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "grad_norm": 2.6322343349456787, | |
| "learning_rate": 1.4849503682356709e-05, | |
| "loss": 0.1543, | |
| "step": 5085 | |
| }, | |
| { | |
| "epoch": 7.33, | |
| "grad_norm": 4.538561820983887, | |
| "learning_rate": 1.4809478065962217e-05, | |
| "loss": 0.1797, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 7.33, | |
| "grad_norm": 2.668684720993042, | |
| "learning_rate": 1.4769452449567724e-05, | |
| "loss": 0.1601, | |
| "step": 5095 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "grad_norm": 3.4693799018859863, | |
| "learning_rate": 1.4729426833173233e-05, | |
| "loss": 0.1713, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 7.35, | |
| "grad_norm": 3.7429068088531494, | |
| "learning_rate": 1.468940121677874e-05, | |
| "loss": 0.1934, | |
| "step": 5105 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "grad_norm": 2.9476771354675293, | |
| "learning_rate": 1.4649375600384247e-05, | |
| "loss": 0.1566, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "grad_norm": 3.614051103591919, | |
| "learning_rate": 1.4609349983989754e-05, | |
| "loss": 0.1706, | |
| "step": 5115 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "grad_norm": 5.335232734680176, | |
| "learning_rate": 1.456932436759526e-05, | |
| "loss": 0.2092, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "grad_norm": 2.373945474624634, | |
| "learning_rate": 1.452929875120077e-05, | |
| "loss": 0.1533, | |
| "step": 5125 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "grad_norm": 4.9412312507629395, | |
| "learning_rate": 1.4489273134806278e-05, | |
| "loss": 0.1823, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "grad_norm": 4.234364032745361, | |
| "learning_rate": 1.4449247518411785e-05, | |
| "loss": 0.1649, | |
| "step": 5135 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "grad_norm": 3.623694896697998, | |
| "learning_rate": 1.4409221902017291e-05, | |
| "loss": 0.1746, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "grad_norm": 2.8488681316375732, | |
| "learning_rate": 1.4369196285622798e-05, | |
| "loss": 0.1589, | |
| "step": 5145 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "grad_norm": 4.376282691955566, | |
| "learning_rate": 1.4329170669228307e-05, | |
| "loss": 0.1619, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 7.42, | |
| "grad_norm": 4.9292426109313965, | |
| "learning_rate": 1.4289145052833814e-05, | |
| "loss": 0.1638, | |
| "step": 5155 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "grad_norm": 5.3557658195495605, | |
| "learning_rate": 1.4249119436439322e-05, | |
| "loss": 0.1841, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "grad_norm": 5.3310394287109375, | |
| "learning_rate": 1.4209093820044831e-05, | |
| "loss": 0.1746, | |
| "step": 5165 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "grad_norm": 4.013771057128906, | |
| "learning_rate": 1.4169068203650338e-05, | |
| "loss": 0.1853, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "grad_norm": 3.4680042266845703, | |
| "learning_rate": 1.4129042587255845e-05, | |
| "loss": 0.1796, | |
| "step": 5175 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "grad_norm": 3.229072332382202, | |
| "learning_rate": 1.4089016970861352e-05, | |
| "loss": 0.1555, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "grad_norm": 2.8179924488067627, | |
| "learning_rate": 1.4048991354466859e-05, | |
| "loss": 0.1861, | |
| "step": 5185 | |
| }, | |
| { | |
| "epoch": 7.47, | |
| "grad_norm": 2.785851001739502, | |
| "learning_rate": 1.4008965738072365e-05, | |
| "loss": 0.16, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "grad_norm": 2.7952377796173096, | |
| "learning_rate": 1.3968940121677872e-05, | |
| "loss": 0.1587, | |
| "step": 5195 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "grad_norm": 4.441540241241455, | |
| "learning_rate": 1.3928914505283383e-05, | |
| "loss": 0.1741, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 7.49, | |
| "grad_norm": 4.560239791870117, | |
| "learning_rate": 1.388888888888889e-05, | |
| "loss": 0.1657, | |
| "step": 5205 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "grad_norm": 2.9085419178009033, | |
| "learning_rate": 1.3848863272494398e-05, | |
| "loss": 0.1391, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "grad_norm": 2.2866363525390625, | |
| "learning_rate": 1.3808837656099905e-05, | |
| "loss": 0.1297, | |
| "step": 5215 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "grad_norm": 3.377863645553589, | |
| "learning_rate": 1.3768812039705412e-05, | |
| "loss": 0.1747, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "grad_norm": 2.285461187362671, | |
| "learning_rate": 1.3728786423310919e-05, | |
| "loss": 0.1473, | |
| "step": 5225 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "grad_norm": 3.6761443614959717, | |
| "learning_rate": 1.3688760806916426e-05, | |
| "loss": 0.1518, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "grad_norm": 2.0515265464782715, | |
| "learning_rate": 1.3648735190521936e-05, | |
| "loss": 0.1755, | |
| "step": 5235 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "grad_norm": 3.5392673015594482, | |
| "learning_rate": 1.3608709574127443e-05, | |
| "loss": 0.1687, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 7.55, | |
| "grad_norm": 4.235048770904541, | |
| "learning_rate": 1.356868395773295e-05, | |
| "loss": 0.159, | |
| "step": 5245 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "grad_norm": 3.9819908142089844, | |
| "learning_rate": 1.3528658341338457e-05, | |
| "loss": 0.1388, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "grad_norm": 4.2567524909973145, | |
| "learning_rate": 1.3488632724943964e-05, | |
| "loss": 0.1855, | |
| "step": 5255 | |
| }, | |
| { | |
| "epoch": 7.57, | |
| "grad_norm": 3.7046706676483154, | |
| "learning_rate": 1.3448607108549472e-05, | |
| "loss": 0.1705, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "grad_norm": 5.374367713928223, | |
| "learning_rate": 1.3408581492154979e-05, | |
| "loss": 0.1558, | |
| "step": 5265 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "grad_norm": 3.5017898082733154, | |
| "learning_rate": 1.3368555875760488e-05, | |
| "loss": 0.1665, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "grad_norm": 2.907083034515381, | |
| "learning_rate": 1.3328530259365996e-05, | |
| "loss": 0.1407, | |
| "step": 5275 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "grad_norm": 3.715378522872925, | |
| "learning_rate": 1.3288504642971503e-05, | |
| "loss": 0.1519, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "grad_norm": 5.481036186218262, | |
| "learning_rate": 1.324847902657701e-05, | |
| "loss": 0.1559, | |
| "step": 5285 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "grad_norm": 2.949070692062378, | |
| "learning_rate": 1.3208453410182517e-05, | |
| "loss": 0.1673, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 7.62, | |
| "grad_norm": 2.929731845855713, | |
| "learning_rate": 1.3168427793788024e-05, | |
| "loss": 0.1592, | |
| "step": 5295 | |
| }, | |
| { | |
| "epoch": 7.63, | |
| "grad_norm": 4.555692672729492, | |
| "learning_rate": 1.312840217739353e-05, | |
| "loss": 0.2078, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "grad_norm": 3.130526065826416, | |
| "learning_rate": 1.3088376560999041e-05, | |
| "loss": 0.1381, | |
| "step": 5305 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "grad_norm": 4.921693325042725, | |
| "learning_rate": 1.3048350944604548e-05, | |
| "loss": 0.2067, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "grad_norm": 3.4057352542877197, | |
| "learning_rate": 1.3008325328210055e-05, | |
| "loss": 0.1232, | |
| "step": 5315 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "grad_norm": 6.398148536682129, | |
| "learning_rate": 1.2968299711815562e-05, | |
| "loss": 0.1854, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "grad_norm": 4.1279754638671875, | |
| "learning_rate": 1.292827409542107e-05, | |
| "loss": 0.1674, | |
| "step": 5325 | |
| }, | |
| { | |
| "epoch": 7.67, | |
| "grad_norm": 3.3673510551452637, | |
| "learning_rate": 1.2888248479026577e-05, | |
| "loss": 0.144, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "grad_norm": 3.9797935485839844, | |
| "learning_rate": 1.2848222862632084e-05, | |
| "loss": 0.1506, | |
| "step": 5335 | |
| }, | |
| { | |
| "epoch": 7.69, | |
| "grad_norm": 4.889264106750488, | |
| "learning_rate": 1.2808197246237594e-05, | |
| "loss": 0.1409, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 7.69, | |
| "grad_norm": 3.3350377082824707, | |
| "learning_rate": 1.2768171629843101e-05, | |
| "loss": 0.1267, | |
| "step": 5345 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "grad_norm": 3.927999496459961, | |
| "learning_rate": 1.2728146013448608e-05, | |
| "loss": 0.1484, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "grad_norm": 3.547835350036621, | |
| "learning_rate": 1.2688120397054115e-05, | |
| "loss": 0.1444, | |
| "step": 5355 | |
| }, | |
| { | |
| "epoch": 7.72, | |
| "grad_norm": 3.605868101119995, | |
| "learning_rate": 1.2648094780659622e-05, | |
| "loss": 0.1524, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 7.72, | |
| "grad_norm": 4.846922397613525, | |
| "learning_rate": 1.2608069164265129e-05, | |
| "loss": 0.1815, | |
| "step": 5365 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "grad_norm": 3.1063039302825928, | |
| "learning_rate": 1.2568043547870636e-05, | |
| "loss": 0.1288, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "grad_norm": 3.755476236343384, | |
| "learning_rate": 1.2528017931476146e-05, | |
| "loss": 0.1703, | |
| "step": 5375 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "grad_norm": 2.933077096939087, | |
| "learning_rate": 1.2487992315081653e-05, | |
| "loss": 0.168, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "grad_norm": 3.11079478263855, | |
| "learning_rate": 1.244796669868716e-05, | |
| "loss": 0.161, | |
| "step": 5385 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "grad_norm": 3.155428409576416, | |
| "learning_rate": 1.2407941082292668e-05, | |
| "loss": 0.1903, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "grad_norm": 4.022368907928467, | |
| "learning_rate": 1.2367915465898175e-05, | |
| "loss": 0.1941, | |
| "step": 5395 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "grad_norm": 3.187999725341797, | |
| "learning_rate": 1.2327889849503684e-05, | |
| "loss": 0.1445, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 7.78, | |
| "grad_norm": 4.614522457122803, | |
| "learning_rate": 1.2287864233109191e-05, | |
| "loss": 0.145, | |
| "step": 5405 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "grad_norm": 4.4915266036987305, | |
| "learning_rate": 1.2247838616714698e-05, | |
| "loss": 0.1815, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "grad_norm": 3.4047083854675293, | |
| "learning_rate": 1.2207813000320205e-05, | |
| "loss": 0.1781, | |
| "step": 5415 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "grad_norm": 3.843172550201416, | |
| "learning_rate": 1.2167787383925713e-05, | |
| "loss": 0.1601, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 7.81, | |
| "grad_norm": 3.592343330383301, | |
| "learning_rate": 1.212776176753122e-05, | |
| "loss": 0.1948, | |
| "step": 5425 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "grad_norm": 6.0892133712768555, | |
| "learning_rate": 1.2087736151136727e-05, | |
| "loss": 0.1562, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "grad_norm": 2.9761712551116943, | |
| "learning_rate": 1.2047710534742236e-05, | |
| "loss": 0.1584, | |
| "step": 5435 | |
| }, | |
| { | |
| "epoch": 7.83, | |
| "grad_norm": 4.459796905517578, | |
| "learning_rate": 1.2007684918347743e-05, | |
| "loss": 0.1492, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "grad_norm": 3.517129421234131, | |
| "learning_rate": 1.1967659301953251e-05, | |
| "loss": 0.1892, | |
| "step": 5445 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "grad_norm": 3.708636999130249, | |
| "learning_rate": 1.1927633685558758e-05, | |
| "loss": 0.1644, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "grad_norm": 4.543480396270752, | |
| "learning_rate": 1.1887608069164267e-05, | |
| "loss": 0.1459, | |
| "step": 5455 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "grad_norm": 3.9120936393737793, | |
| "learning_rate": 1.1847582452769774e-05, | |
| "loss": 0.1597, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "grad_norm": 4.939571380615234, | |
| "learning_rate": 1.180755683637528e-05, | |
| "loss": 0.2059, | |
| "step": 5465 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "grad_norm": 3.939115047454834, | |
| "learning_rate": 1.1767531219980789e-05, | |
| "loss": 0.16, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 7.88, | |
| "grad_norm": 3.2744674682617188, | |
| "learning_rate": 1.1727505603586296e-05, | |
| "loss": 0.1793, | |
| "step": 5475 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "grad_norm": 4.565367698669434, | |
| "learning_rate": 1.1687479987191803e-05, | |
| "loss": 0.1982, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "grad_norm": 5.799792289733887, | |
| "learning_rate": 1.164745437079731e-05, | |
| "loss": 0.1593, | |
| "step": 5485 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "grad_norm": 2.6292014122009277, | |
| "learning_rate": 1.1607428754402818e-05, | |
| "loss": 0.1484, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 7.91, | |
| "grad_norm": 4.108443260192871, | |
| "learning_rate": 1.1567403138008325e-05, | |
| "loss": 0.1535, | |
| "step": 5495 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "grad_norm": 3.3221068382263184, | |
| "learning_rate": 1.1527377521613834e-05, | |
| "loss": 0.1611, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "grad_norm": 3.2103445529937744, | |
| "learning_rate": 1.148735190521934e-05, | |
| "loss": 0.1159, | |
| "step": 5505 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "grad_norm": 4.046101093292236, | |
| "learning_rate": 1.144732628882485e-05, | |
| "loss": 0.1895, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "grad_norm": 5.981593608856201, | |
| "learning_rate": 1.1407300672430356e-05, | |
| "loss": 0.1748, | |
| "step": 5515 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "grad_norm": 3.193052291870117, | |
| "learning_rate": 1.1367275056035863e-05, | |
| "loss": 0.1463, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "grad_norm": 4.645889759063721, | |
| "learning_rate": 1.1327249439641372e-05, | |
| "loss": 0.162, | |
| "step": 5525 | |
| }, | |
| { | |
| "epoch": 7.96, | |
| "grad_norm": 3.167149543762207, | |
| "learning_rate": 1.1287223823246879e-05, | |
| "loss": 0.1663, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "grad_norm": 5.2093048095703125, | |
| "learning_rate": 1.1247198206852385e-05, | |
| "loss": 0.1256, | |
| "step": 5535 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "grad_norm": 3.816267728805542, | |
| "learning_rate": 1.1207172590457894e-05, | |
| "loss": 0.1713, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "grad_norm": 3.6065261363983154, | |
| "learning_rate": 1.1167146974063401e-05, | |
| "loss": 0.1593, | |
| "step": 5545 | |
| }, | |
| { | |
| "epoch": 7.99, | |
| "grad_norm": 3.608593463897705, | |
| "learning_rate": 1.1127121357668908e-05, | |
| "loss": 0.1727, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 2.9817004203796387, | |
| "learning_rate": 1.1087095741274415e-05, | |
| "loss": 0.1424, | |
| "step": 5555 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "eval_accuracy": 0.9548582995951417, | |
| "eval_loss": 0.1296539306640625, | |
| "eval_runtime": 26.6571, | |
| "eval_samples_per_second": 370.634, | |
| "eval_steps_per_second": 11.592, | |
| "step": 5558 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 7.694788455963135, | |
| "learning_rate": 1.1047070124879923e-05, | |
| "loss": 0.1771, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "grad_norm": 2.9499638080596924, | |
| "learning_rate": 1.1007044508485432e-05, | |
| "loss": 0.1576, | |
| "step": 5565 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "grad_norm": 3.4104106426239014, | |
| "learning_rate": 1.0967018892090939e-05, | |
| "loss": 0.1495, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "grad_norm": 4.150513648986816, | |
| "learning_rate": 1.0926993275696447e-05, | |
| "loss": 0.1456, | |
| "step": 5575 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "grad_norm": 4.284981727600098, | |
| "learning_rate": 1.0886967659301954e-05, | |
| "loss": 0.1184, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "grad_norm": 5.80072546005249, | |
| "learning_rate": 1.0846942042907461e-05, | |
| "loss": 0.188, | |
| "step": 5585 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "grad_norm": 3.601327419281006, | |
| "learning_rate": 1.0806916426512968e-05, | |
| "loss": 0.1397, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "grad_norm": 3.128159999847412, | |
| "learning_rate": 1.0766890810118477e-05, | |
| "loss": 0.1188, | |
| "step": 5595 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "grad_norm": 4.803286552429199, | |
| "learning_rate": 1.0726865193723984e-05, | |
| "loss": 0.199, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "grad_norm": 2.26747727394104, | |
| "learning_rate": 1.068683957732949e-05, | |
| "loss": 0.1105, | |
| "step": 5605 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "grad_norm": 3.7966387271881104, | |
| "learning_rate": 1.0646813960934999e-05, | |
| "loss": 0.1747, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "grad_norm": 2.617194652557373, | |
| "learning_rate": 1.0606788344540506e-05, | |
| "loss": 0.1529, | |
| "step": 5615 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "grad_norm": 3.259854793548584, | |
| "learning_rate": 1.0566762728146015e-05, | |
| "loss": 0.1641, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "grad_norm": 4.257889270782471, | |
| "learning_rate": 1.0526737111751522e-05, | |
| "loss": 0.1633, | |
| "step": 5625 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "grad_norm": 2.608647108078003, | |
| "learning_rate": 1.048671149535703e-05, | |
| "loss": 0.1448, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "grad_norm": 3.398613929748535, | |
| "learning_rate": 1.0446685878962537e-05, | |
| "loss": 0.1447, | |
| "step": 5635 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "grad_norm": 4.047391891479492, | |
| "learning_rate": 1.0406660262568044e-05, | |
| "loss": 0.142, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "grad_norm": 4.146562576293945, | |
| "learning_rate": 1.0366634646173552e-05, | |
| "loss": 0.131, | |
| "step": 5645 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "grad_norm": 3.4250409603118896, | |
| "learning_rate": 1.032660902977906e-05, | |
| "loss": 0.1732, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "grad_norm": 3.730803966522217, | |
| "learning_rate": 1.0286583413384566e-05, | |
| "loss": 0.163, | |
| "step": 5655 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "grad_norm": 5.8135504722595215, | |
| "learning_rate": 1.0246557796990073e-05, | |
| "loss": 0.1422, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "grad_norm": 6.937263488769531, | |
| "learning_rate": 1.0206532180595582e-05, | |
| "loss": 0.1504, | |
| "step": 5665 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "grad_norm": 5.335781097412109, | |
| "learning_rate": 1.0166506564201089e-05, | |
| "loss": 0.1636, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 8.17, | |
| "grad_norm": 5.222043037414551, | |
| "learning_rate": 1.0126480947806596e-05, | |
| "loss": 0.1459, | |
| "step": 5675 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "grad_norm": 7.020112037658691, | |
| "learning_rate": 1.0086455331412104e-05, | |
| "loss": 0.1235, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "grad_norm": 5.291160583496094, | |
| "learning_rate": 1.0046429715017613e-05, | |
| "loss": 0.1816, | |
| "step": 5685 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "grad_norm": 5.9437575340271, | |
| "learning_rate": 1.000640409862312e-05, | |
| "loss": 0.1886, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "grad_norm": 5.075570106506348, | |
| "learning_rate": 9.966378482228627e-06, | |
| "loss": 0.1766, | |
| "step": 5695 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "grad_norm": 4.061030387878418, | |
| "learning_rate": 9.926352865834135e-06, | |
| "loss": 0.1349, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "grad_norm": 3.3227720260620117, | |
| "learning_rate": 9.886327249439642e-06, | |
| "loss": 0.1564, | |
| "step": 5705 | |
| }, | |
| { | |
| "epoch": 8.22, | |
| "grad_norm": 5.788105487823486, | |
| "learning_rate": 9.846301633045149e-06, | |
| "loss": 0.1792, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "grad_norm": 4.576950550079346, | |
| "learning_rate": 9.806276016650656e-06, | |
| "loss": 0.1707, | |
| "step": 5715 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "grad_norm": 1.9181571006774902, | |
| "learning_rate": 9.766250400256164e-06, | |
| "loss": 0.1588, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "grad_norm": 4.4835638999938965, | |
| "learning_rate": 9.726224783861671e-06, | |
| "loss": 0.1474, | |
| "step": 5725 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "grad_norm": 3.0491483211517334, | |
| "learning_rate": 9.686199167467178e-06, | |
| "loss": 0.1391, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "grad_norm": 6.024807929992676, | |
| "learning_rate": 9.646173551072687e-06, | |
| "loss": 0.1819, | |
| "step": 5735 | |
| }, | |
| { | |
| "epoch": 8.26, | |
| "grad_norm": 3.9458465576171875, | |
| "learning_rate": 9.606147934678195e-06, | |
| "loss": 0.1176, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 8.27, | |
| "grad_norm": 5.939620494842529, | |
| "learning_rate": 9.566122318283702e-06, | |
| "loss": 0.1519, | |
| "step": 5745 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "grad_norm": 2.2622225284576416, | |
| "learning_rate": 9.52609670188921e-06, | |
| "loss": 0.152, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "grad_norm": 3.9069314002990723, | |
| "learning_rate": 9.486071085494718e-06, | |
| "loss": 0.1556, | |
| "step": 5755 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "grad_norm": 3.782322645187378, | |
| "learning_rate": 9.446045469100225e-06, | |
| "loss": 0.1863, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "grad_norm": 3.462238311767578, | |
| "learning_rate": 9.406019852705732e-06, | |
| "loss": 0.1217, | |
| "step": 5765 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "grad_norm": 3.4270153045654297, | |
| "learning_rate": 9.36599423631124e-06, | |
| "loss": 0.1562, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "grad_norm": 4.370534420013428, | |
| "learning_rate": 9.325968619916747e-06, | |
| "loss": 0.1868, | |
| "step": 5775 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "grad_norm": 5.722720623016357, | |
| "learning_rate": 9.285943003522254e-06, | |
| "loss": 0.1451, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "grad_norm": 4.850919723510742, | |
| "learning_rate": 9.245917387127761e-06, | |
| "loss": 0.1617, | |
| "step": 5785 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "grad_norm": 3.516965627670288, | |
| "learning_rate": 9.20589177073327e-06, | |
| "loss": 0.1658, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "grad_norm": 3.0313217639923096, | |
| "learning_rate": 9.165866154338778e-06, | |
| "loss": 0.1594, | |
| "step": 5795 | |
| }, | |
| { | |
| "epoch": 8.35, | |
| "grad_norm": 3.095097780227661, | |
| "learning_rate": 9.125840537944285e-06, | |
| "loss": 0.1256, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "grad_norm": 4.62247371673584, | |
| "learning_rate": 9.085814921549794e-06, | |
| "loss": 0.1664, | |
| "step": 5805 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "grad_norm": 4.7111310958862305, | |
| "learning_rate": 9.0457893051553e-06, | |
| "loss": 0.1656, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "grad_norm": 3.2498939037323, | |
| "learning_rate": 9.005763688760807e-06, | |
| "loss": 0.164, | |
| "step": 5815 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "grad_norm": 3.6476857662200928, | |
| "learning_rate": 8.965738072366314e-06, | |
| "loss": 0.1757, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "grad_norm": 6.174407482147217, | |
| "learning_rate": 8.925712455971823e-06, | |
| "loss": 0.1213, | |
| "step": 5825 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "grad_norm": 4.54398775100708, | |
| "learning_rate": 8.88568683957733e-06, | |
| "loss": 0.1786, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "grad_norm": 2.651683807373047, | |
| "learning_rate": 8.845661223182837e-06, | |
| "loss": 0.1537, | |
| "step": 5835 | |
| }, | |
| { | |
| "epoch": 8.41, | |
| "grad_norm": 4.218119144439697, | |
| "learning_rate": 8.805635606788345e-06, | |
| "loss": 0.1579, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 8.41, | |
| "grad_norm": 3.273021936416626, | |
| "learning_rate": 8.765609990393852e-06, | |
| "loss": 0.154, | |
| "step": 5845 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "grad_norm": 7.346030235290527, | |
| "learning_rate": 8.725584373999359e-06, | |
| "loss": 0.1743, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "grad_norm": 4.279568672180176, | |
| "learning_rate": 8.685558757604868e-06, | |
| "loss": 0.1704, | |
| "step": 5855 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "grad_norm": 4.570573329925537, | |
| "learning_rate": 8.645533141210376e-06, | |
| "loss": 0.1697, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "grad_norm": 2.7568442821502686, | |
| "learning_rate": 8.605507524815883e-06, | |
| "loss": 0.1693, | |
| "step": 5865 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "grad_norm": 3.1747210025787354, | |
| "learning_rate": 8.56548190842139e-06, | |
| "loss": 0.141, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "grad_norm": 3.326231002807617, | |
| "learning_rate": 8.525456292026899e-06, | |
| "loss": 0.1191, | |
| "step": 5875 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "grad_norm": 5.524628639221191, | |
| "learning_rate": 8.485430675632405e-06, | |
| "loss": 0.1454, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "grad_norm": 2.5818092823028564, | |
| "learning_rate": 8.445405059237912e-06, | |
| "loss": 0.1409, | |
| "step": 5885 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "grad_norm": 6.412874698638916, | |
| "learning_rate": 8.40537944284342e-06, | |
| "loss": 0.1661, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "grad_norm": 2.9045023918151855, | |
| "learning_rate": 8.365353826448928e-06, | |
| "loss": 0.1251, | |
| "step": 5895 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "grad_norm": 4.160517692565918, | |
| "learning_rate": 8.325328210054435e-06, | |
| "loss": 0.1297, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "grad_norm": 5.732846736907959, | |
| "learning_rate": 8.285302593659942e-06, | |
| "loss": 0.1167, | |
| "step": 5905 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "grad_norm": 2.2596616744995117, | |
| "learning_rate": 8.24527697726545e-06, | |
| "loss": 0.116, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "grad_norm": 4.199057102203369, | |
| "learning_rate": 8.205251360870959e-06, | |
| "loss": 0.167, | |
| "step": 5915 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "grad_norm": 3.995457649230957, | |
| "learning_rate": 8.165225744476466e-06, | |
| "loss": 0.1513, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "grad_norm": 3.3315746784210205, | |
| "learning_rate": 8.125200128081973e-06, | |
| "loss": 0.1438, | |
| "step": 5925 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "grad_norm": 4.279821872711182, | |
| "learning_rate": 8.085174511687481e-06, | |
| "loss": 0.1544, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "grad_norm": 4.037819862365723, | |
| "learning_rate": 8.045148895292988e-06, | |
| "loss": 0.1424, | |
| "step": 5935 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "grad_norm": 2.268348455429077, | |
| "learning_rate": 8.005123278898495e-06, | |
| "loss": 0.136, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 8.56, | |
| "grad_norm": 3.2165260314941406, | |
| "learning_rate": 7.965097662504004e-06, | |
| "loss": 0.1645, | |
| "step": 5945 | |
| }, | |
| { | |
| "epoch": 8.56, | |
| "grad_norm": 2.8319571018218994, | |
| "learning_rate": 7.92507204610951e-06, | |
| "loss": 0.1517, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "grad_norm": 2.6142780780792236, | |
| "learning_rate": 7.885046429715017e-06, | |
| "loss": 0.1181, | |
| "step": 5955 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "grad_norm": 4.016239643096924, | |
| "learning_rate": 7.845020813320524e-06, | |
| "loss": 0.141, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "grad_norm": 2.4863009452819824, | |
| "learning_rate": 7.804995196926033e-06, | |
| "loss": 0.1556, | |
| "step": 5965 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "grad_norm": 7.284638404846191, | |
| "learning_rate": 7.76496958053154e-06, | |
| "loss": 0.1665, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "grad_norm": 3.67448091506958, | |
| "learning_rate": 7.724943964137048e-06, | |
| "loss": 0.1473, | |
| "step": 5975 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "grad_norm": 3.4379279613494873, | |
| "learning_rate": 7.684918347742557e-06, | |
| "loss": 0.1304, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "grad_norm": 3.080416440963745, | |
| "learning_rate": 7.644892731348064e-06, | |
| "loss": 0.1304, | |
| "step": 5985 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "grad_norm": 4.215944290161133, | |
| "learning_rate": 7.604867114953571e-06, | |
| "loss": 0.1884, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "grad_norm": 3.3756349086761475, | |
| "learning_rate": 7.564841498559078e-06, | |
| "loss": 0.1415, | |
| "step": 5995 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "grad_norm": 4.051993370056152, | |
| "learning_rate": 7.524815882164586e-06, | |
| "loss": 0.1318, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "grad_norm": 3.0433032512664795, | |
| "learning_rate": 7.484790265770093e-06, | |
| "loss": 0.1427, | |
| "step": 6005 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "grad_norm": 2.945946216583252, | |
| "learning_rate": 7.4447646493756e-06, | |
| "loss": 0.1374, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "grad_norm": 3.9186439514160156, | |
| "learning_rate": 7.404739032981109e-06, | |
| "loss": 0.167, | |
| "step": 6015 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "grad_norm": 3.6851704120635986, | |
| "learning_rate": 7.364713416586616e-06, | |
| "loss": 0.1658, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "grad_norm": 5.047458171844482, | |
| "learning_rate": 7.324687800192123e-06, | |
| "loss": 0.1568, | |
| "step": 6025 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "grad_norm": 3.574936628341675, | |
| "learning_rate": 7.28466218379763e-06, | |
| "loss": 0.1508, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "grad_norm": 4.4691057205200195, | |
| "learning_rate": 7.244636567403139e-06, | |
| "loss": 0.1467, | |
| "step": 6035 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "grad_norm": 2.8131790161132812, | |
| "learning_rate": 7.204610951008646e-06, | |
| "loss": 0.1948, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "grad_norm": 4.179603576660156, | |
| "learning_rate": 7.1645853346141535e-06, | |
| "loss": 0.1824, | |
| "step": 6045 | |
| }, | |
| { | |
| "epoch": 8.71, | |
| "grad_norm": 3.927391290664673, | |
| "learning_rate": 7.124559718219661e-06, | |
| "loss": 0.1436, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "grad_norm": 4.930620193481445, | |
| "learning_rate": 7.084534101825169e-06, | |
| "loss": 0.1708, | |
| "step": 6055 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "grad_norm": 2.9689087867736816, | |
| "learning_rate": 7.044508485430676e-06, | |
| "loss": 0.1535, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "grad_norm": 4.4355316162109375, | |
| "learning_rate": 7.004482869036183e-06, | |
| "loss": 0.1758, | |
| "step": 6065 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "grad_norm": 4.122786045074463, | |
| "learning_rate": 6.964457252641691e-06, | |
| "loss": 0.1496, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "grad_norm": 6.657648086547852, | |
| "learning_rate": 6.924431636247199e-06, | |
| "loss": 0.1725, | |
| "step": 6075 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "grad_norm": 4.54775333404541, | |
| "learning_rate": 6.884406019852706e-06, | |
| "loss": 0.158, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "grad_norm": 2.5869054794311523, | |
| "learning_rate": 6.844380403458213e-06, | |
| "loss": 0.1509, | |
| "step": 6085 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "grad_norm": 5.620914459228516, | |
| "learning_rate": 6.8043547870637215e-06, | |
| "loss": 0.1647, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "grad_norm": 3.599468946456909, | |
| "learning_rate": 6.764329170669228e-06, | |
| "loss": 0.1253, | |
| "step": 6095 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "grad_norm": 3.4432973861694336, | |
| "learning_rate": 6.724303554274736e-06, | |
| "loss": 0.1452, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "grad_norm": 2.940871477127075, | |
| "learning_rate": 6.684277937880244e-06, | |
| "loss": 0.1382, | |
| "step": 6105 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "grad_norm": 3.9849982261657715, | |
| "learning_rate": 6.644252321485752e-06, | |
| "loss": 0.1394, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "grad_norm": 2.607046604156494, | |
| "learning_rate": 6.6042267050912585e-06, | |
| "loss": 0.1434, | |
| "step": 6115 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "grad_norm": 3.1801774501800537, | |
| "learning_rate": 6.564201088696765e-06, | |
| "loss": 0.126, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "grad_norm": 3.0512826442718506, | |
| "learning_rate": 6.524175472302274e-06, | |
| "loss": 0.1625, | |
| "step": 6125 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "grad_norm": 5.523614883422852, | |
| "learning_rate": 6.484149855907781e-06, | |
| "loss": 0.1591, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "grad_norm": 5.236200332641602, | |
| "learning_rate": 6.444124239513289e-06, | |
| "loss": 0.1653, | |
| "step": 6135 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "grad_norm": 4.800531387329102, | |
| "learning_rate": 6.404098623118797e-06, | |
| "loss": 0.1491, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "grad_norm": 3.7076640129089355, | |
| "learning_rate": 6.364073006724304e-06, | |
| "loss": 0.1459, | |
| "step": 6145 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "grad_norm": 3.248276948928833, | |
| "learning_rate": 6.324047390329811e-06, | |
| "loss": 0.1281, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 8.86, | |
| "grad_norm": 3.0434398651123047, | |
| "learning_rate": 6.284021773935318e-06, | |
| "loss": 0.1532, | |
| "step": 6155 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "grad_norm": 3.669671058654785, | |
| "learning_rate": 6.2439961575408265e-06, | |
| "loss": 0.1308, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "grad_norm": 2.7335450649261475, | |
| "learning_rate": 6.203970541146334e-06, | |
| "loss": 0.1377, | |
| "step": 6165 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "grad_norm": 3.6164846420288086, | |
| "learning_rate": 6.163944924751842e-06, | |
| "loss": 0.175, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "grad_norm": 3.5082285404205322, | |
| "learning_rate": 6.123919308357349e-06, | |
| "loss": 0.1921, | |
| "step": 6175 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "grad_norm": 3.8012921810150146, | |
| "learning_rate": 6.083893691962857e-06, | |
| "loss": 0.1384, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "grad_norm": 2.176058053970337, | |
| "learning_rate": 6.0438680755683635e-06, | |
| "loss": 0.1364, | |
| "step": 6185 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "grad_norm": 3.2312066555023193, | |
| "learning_rate": 6.003842459173871e-06, | |
| "loss": 0.1686, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "grad_norm": 5.053197860717773, | |
| "learning_rate": 5.963816842779379e-06, | |
| "loss": 0.1815, | |
| "step": 6195 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "grad_norm": 3.445341110229492, | |
| "learning_rate": 5.923791226384887e-06, | |
| "loss": 0.1322, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "grad_norm": 4.0553669929504395, | |
| "learning_rate": 5.8837656099903945e-06, | |
| "loss": 0.1456, | |
| "step": 6205 | |
| }, | |
| { | |
| "epoch": 8.94, | |
| "grad_norm": 3.9864542484283447, | |
| "learning_rate": 5.843739993595901e-06, | |
| "loss": 0.1617, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 8.95, | |
| "grad_norm": 3.3293707370758057, | |
| "learning_rate": 5.803714377201409e-06, | |
| "loss": 0.1199, | |
| "step": 6215 | |
| }, | |
| { | |
| "epoch": 8.95, | |
| "grad_norm": 3.733670949935913, | |
| "learning_rate": 5.763688760806917e-06, | |
| "loss": 0.1965, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "grad_norm": 3.420346260070801, | |
| "learning_rate": 5.723663144412425e-06, | |
| "loss": 0.1764, | |
| "step": 6225 | |
| }, | |
| { | |
| "epoch": 8.97, | |
| "grad_norm": 4.500962257385254, | |
| "learning_rate": 5.6836375280179315e-06, | |
| "loss": 0.1632, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 8.97, | |
| "grad_norm": 3.7348039150238037, | |
| "learning_rate": 5.643611911623439e-06, | |
| "loss": 0.1441, | |
| "step": 6235 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "grad_norm": 4.5082502365112305, | |
| "learning_rate": 5.603586295228947e-06, | |
| "loss": 0.1602, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 8.99, | |
| "grad_norm": 7.206320762634277, | |
| "learning_rate": 5.563560678834454e-06, | |
| "loss": 0.2032, | |
| "step": 6245 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "grad_norm": 5.481276035308838, | |
| "learning_rate": 5.523535062439962e-06, | |
| "loss": 0.1555, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "eval_accuracy": 0.9551619433198381, | |
| "eval_loss": 0.13027772307395935, | |
| "eval_runtime": 26.8168, | |
| "eval_samples_per_second": 368.425, | |
| "eval_steps_per_second": 11.523, | |
| "step": 6252 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "grad_norm": 4.276639938354492, | |
| "learning_rate": 5.4835094460454694e-06, | |
| "loss": 0.1455, | |
| "step": 6255 | |
| }, | |
| { | |
| "epoch": 9.01, | |
| "grad_norm": 3.9856693744659424, | |
| "learning_rate": 5.443483829650977e-06, | |
| "loss": 0.1415, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "grad_norm": 3.2395150661468506, | |
| "learning_rate": 5.403458213256484e-06, | |
| "loss": 0.146, | |
| "step": 6265 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "grad_norm": 1.18377685546875, | |
| "learning_rate": 5.363432596861992e-06, | |
| "loss": 0.1048, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 9.03, | |
| "grad_norm": 4.3180928230285645, | |
| "learning_rate": 5.3234069804674996e-06, | |
| "loss": 0.1623, | |
| "step": 6275 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "grad_norm": 2.203953742980957, | |
| "learning_rate": 5.283381364073007e-06, | |
| "loss": 0.1763, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 9.05, | |
| "grad_norm": 4.12826681137085, | |
| "learning_rate": 5.243355747678515e-06, | |
| "loss": 0.155, | |
| "step": 6285 | |
| }, | |
| { | |
| "epoch": 9.05, | |
| "grad_norm": 3.8807485103607178, | |
| "learning_rate": 5.203330131284022e-06, | |
| "loss": 0.1222, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "grad_norm": 3.572359085083008, | |
| "learning_rate": 5.16330451488953e-06, | |
| "loss": 0.1359, | |
| "step": 6295 | |
| }, | |
| { | |
| "epoch": 9.07, | |
| "grad_norm": 2.7034575939178467, | |
| "learning_rate": 5.123278898495037e-06, | |
| "loss": 0.128, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "grad_norm": 4.068083763122559, | |
| "learning_rate": 5.083253282100544e-06, | |
| "loss": 0.1842, | |
| "step": 6305 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "grad_norm": 3.0719077587127686, | |
| "learning_rate": 5.043227665706052e-06, | |
| "loss": 0.1435, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "grad_norm": 2.3211283683776855, | |
| "learning_rate": 5.00320204931156e-06, | |
| "loss": 0.1465, | |
| "step": 6315 | |
| }, | |
| { | |
| "epoch": 9.1, | |
| "grad_norm": 3.4732654094696045, | |
| "learning_rate": 4.9631764329170676e-06, | |
| "loss": 0.1643, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 9.1, | |
| "grad_norm": 3.3369507789611816, | |
| "learning_rate": 4.9231508165225745e-06, | |
| "loss": 0.1434, | |
| "step": 6325 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "grad_norm": 3.845771312713623, | |
| "learning_rate": 4.883125200128082e-06, | |
| "loss": 0.1646, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "grad_norm": 3.8775432109832764, | |
| "learning_rate": 4.843099583733589e-06, | |
| "loss": 0.1551, | |
| "step": 6335 | |
| }, | |
| { | |
| "epoch": 9.13, | |
| "grad_norm": 3.2787814140319824, | |
| "learning_rate": 4.803073967339098e-06, | |
| "loss": 0.1054, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 9.13, | |
| "grad_norm": 4.375431060791016, | |
| "learning_rate": 4.763048350944605e-06, | |
| "loss": 0.1554, | |
| "step": 6345 | |
| }, | |
| { | |
| "epoch": 9.14, | |
| "grad_norm": 4.358311653137207, | |
| "learning_rate": 4.723022734550112e-06, | |
| "loss": 0.1384, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "grad_norm": 3.3018763065338135, | |
| "learning_rate": 4.68299711815562e-06, | |
| "loss": 0.1683, | |
| "step": 6355 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "grad_norm": 3.3858537673950195, | |
| "learning_rate": 4.642971501761127e-06, | |
| "loss": 0.1672, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 9.16, | |
| "grad_norm": 3.9749813079833984, | |
| "learning_rate": 4.602945885366635e-06, | |
| "loss": 0.1638, | |
| "step": 6365 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "grad_norm": 5.917661666870117, | |
| "learning_rate": 4.5629202689721425e-06, | |
| "loss": 0.1417, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 9.18, | |
| "grad_norm": 3.9763858318328857, | |
| "learning_rate": 4.52289465257765e-06, | |
| "loss": 0.0966, | |
| "step": 6375 | |
| }, | |
| { | |
| "epoch": 9.18, | |
| "grad_norm": 3.998041868209839, | |
| "learning_rate": 4.482869036183157e-06, | |
| "loss": 0.1367, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "grad_norm": 3.193472146987915, | |
| "learning_rate": 4.442843419788665e-06, | |
| "loss": 0.1464, | |
| "step": 6385 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "grad_norm": 4.275448322296143, | |
| "learning_rate": 4.402817803394173e-06, | |
| "loss": 0.1398, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "grad_norm": 4.345200538635254, | |
| "learning_rate": 4.3627921869996795e-06, | |
| "loss": 0.1347, | |
| "step": 6395 | |
| }, | |
| { | |
| "epoch": 9.21, | |
| "grad_norm": 3.47943115234375, | |
| "learning_rate": 4.322766570605188e-06, | |
| "loss": 0.1849, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 9.22, | |
| "grad_norm": 3.6434147357940674, | |
| "learning_rate": 4.282740954210695e-06, | |
| "loss": 0.1694, | |
| "step": 6405 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "grad_norm": 3.210238456726074, | |
| "learning_rate": 4.242715337816203e-06, | |
| "loss": 0.1491, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "grad_norm": 2.9370250701904297, | |
| "learning_rate": 4.20268972142171e-06, | |
| "loss": 0.1279, | |
| "step": 6415 | |
| }, | |
| { | |
| "epoch": 9.24, | |
| "grad_norm": 3.1297073364257812, | |
| "learning_rate": 4.162664105027217e-06, | |
| "loss": 0.1501, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "grad_norm": 2.1068637371063232, | |
| "learning_rate": 4.122638488632725e-06, | |
| "loss": 0.1338, | |
| "step": 6425 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "grad_norm": 2.867342472076416, | |
| "learning_rate": 4.082612872238233e-06, | |
| "loss": 0.1261, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "grad_norm": 4.779037952423096, | |
| "learning_rate": 4.042587255843741e-06, | |
| "loss": 0.1451, | |
| "step": 6435 | |
| }, | |
| { | |
| "epoch": 9.27, | |
| "grad_norm": 4.961643218994141, | |
| "learning_rate": 4.0025616394492475e-06, | |
| "loss": 0.1724, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "grad_norm": 3.6226232051849365, | |
| "learning_rate": 3.962536023054755e-06, | |
| "loss": 0.1426, | |
| "step": 6445 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "grad_norm": 2.5623698234558105, | |
| "learning_rate": 3.922510406660262e-06, | |
| "loss": 0.1524, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 9.29, | |
| "grad_norm": 5.580244064331055, | |
| "learning_rate": 3.88248479026577e-06, | |
| "loss": 0.1684, | |
| "step": 6455 | |
| }, | |
| { | |
| "epoch": 9.3, | |
| "grad_norm": 3.1150074005126953, | |
| "learning_rate": 3.8424591738712785e-06, | |
| "loss": 0.145, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "grad_norm": 2.4989027976989746, | |
| "learning_rate": 3.8024335574767854e-06, | |
| "loss": 0.1636, | |
| "step": 6465 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "grad_norm": 2.90736722946167, | |
| "learning_rate": 3.762407941082293e-06, | |
| "loss": 0.1573, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "grad_norm": 2.772012710571289, | |
| "learning_rate": 3.7223823246878e-06, | |
| "loss": 0.1512, | |
| "step": 6475 | |
| }, | |
| { | |
| "epoch": 9.33, | |
| "grad_norm": 2.553661346435547, | |
| "learning_rate": 3.682356708293308e-06, | |
| "loss": 0.1666, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 9.33, | |
| "grad_norm": 3.670997381210327, | |
| "learning_rate": 3.642331091898815e-06, | |
| "loss": 0.2017, | |
| "step": 6485 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "grad_norm": 3.2845664024353027, | |
| "learning_rate": 3.602305475504323e-06, | |
| "loss": 0.1749, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 9.35, | |
| "grad_norm": 3.8596317768096924, | |
| "learning_rate": 3.5622798591098306e-06, | |
| "loss": 0.2147, | |
| "step": 6495 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "grad_norm": 3.070552349090576, | |
| "learning_rate": 3.522254242715338e-06, | |
| "loss": 0.1353, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "grad_norm": 3.2333202362060547, | |
| "learning_rate": 3.4822286263208457e-06, | |
| "loss": 0.1722, | |
| "step": 6505 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "grad_norm": 3.9244325160980225, | |
| "learning_rate": 3.442203009926353e-06, | |
| "loss": 0.1528, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "grad_norm": 4.576772689819336, | |
| "learning_rate": 3.4021773935318607e-06, | |
| "loss": 0.1257, | |
| "step": 6515 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "grad_norm": 2.5687806606292725, | |
| "learning_rate": 3.362151777137368e-06, | |
| "loss": 0.1155, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 9.39, | |
| "grad_norm": 3.3034443855285645, | |
| "learning_rate": 3.322126160742876e-06, | |
| "loss": 0.1803, | |
| "step": 6525 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "grad_norm": 5.908419609069824, | |
| "learning_rate": 3.2821005443483827e-06, | |
| "loss": 0.1669, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 9.41, | |
| "grad_norm": 3.6566712856292725, | |
| "learning_rate": 3.2420749279538904e-06, | |
| "loss": 0.1431, | |
| "step": 6535 | |
| }, | |
| { | |
| "epoch": 9.41, | |
| "grad_norm": 3.199873208999634, | |
| "learning_rate": 3.2020493115593986e-06, | |
| "loss": 0.1709, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 9.42, | |
| "grad_norm": 3.752716302871704, | |
| "learning_rate": 3.1620236951649055e-06, | |
| "loss": 0.146, | |
| "step": 6545 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "grad_norm": 2.432011842727661, | |
| "learning_rate": 3.1219980787704133e-06, | |
| "loss": 0.1525, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "grad_norm": 2.615161895751953, | |
| "learning_rate": 3.081972462375921e-06, | |
| "loss": 0.1491, | |
| "step": 6555 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "grad_norm": 3.136516571044922, | |
| "learning_rate": 3.0419468459814283e-06, | |
| "loss": 0.1221, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 9.45, | |
| "grad_norm": 3.304135322570801, | |
| "learning_rate": 3.0019212295869356e-06, | |
| "loss": 0.1717, | |
| "step": 6565 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "grad_norm": 5.078571796417236, | |
| "learning_rate": 2.9618956131924434e-06, | |
| "loss": 0.1656, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "grad_norm": 3.5032825469970703, | |
| "learning_rate": 2.9218699967979507e-06, | |
| "loss": 0.2004, | |
| "step": 6575 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "grad_norm": 3.3871119022369385, | |
| "learning_rate": 2.8818443804034585e-06, | |
| "loss": 0.122, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 9.48, | |
| "grad_norm": 3.4582040309906006, | |
| "learning_rate": 2.8418187640089658e-06, | |
| "loss": 0.1465, | |
| "step": 6585 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "grad_norm": 2.7074215412139893, | |
| "learning_rate": 2.8017931476144735e-06, | |
| "loss": 0.159, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "grad_norm": 3.0131921768188477, | |
| "learning_rate": 2.761767531219981e-06, | |
| "loss": 0.1573, | |
| "step": 6595 | |
| }, | |
| { | |
| "epoch": 9.5, | |
| "grad_norm": 3.2007336616516113, | |
| "learning_rate": 2.7217419148254886e-06, | |
| "loss": 0.1196, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "grad_norm": 3.2668557167053223, | |
| "learning_rate": 2.681716298430996e-06, | |
| "loss": 0.1251, | |
| "step": 6605 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "grad_norm": 3.1396324634552, | |
| "learning_rate": 2.6416906820365037e-06, | |
| "loss": 0.1482, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 9.52, | |
| "grad_norm": 3.94993257522583, | |
| "learning_rate": 2.601665065642011e-06, | |
| "loss": 0.1568, | |
| "step": 6615 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "grad_norm": 3.8396835327148438, | |
| "learning_rate": 2.5616394492475183e-06, | |
| "loss": 0.1813, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 9.54, | |
| "grad_norm": 6.602664947509766, | |
| "learning_rate": 2.521613832853026e-06, | |
| "loss": 0.1661, | |
| "step": 6625 | |
| }, | |
| { | |
| "epoch": 9.54, | |
| "grad_norm": 4.1188225746154785, | |
| "learning_rate": 2.4815882164585338e-06, | |
| "loss": 0.1111, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "grad_norm": 3.777866840362549, | |
| "learning_rate": 2.441562600064041e-06, | |
| "loss": 0.1788, | |
| "step": 6635 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "grad_norm": 4.2113542556762695, | |
| "learning_rate": 2.401536983669549e-06, | |
| "loss": 0.1694, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "grad_norm": 4.3874831199646, | |
| "learning_rate": 2.361511367275056e-06, | |
| "loss": 0.1448, | |
| "step": 6645 | |
| }, | |
| { | |
| "epoch": 9.57, | |
| "grad_norm": 3.2576651573181152, | |
| "learning_rate": 2.3214857508805635e-06, | |
| "loss": 0.1353, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 9.58, | |
| "grad_norm": 3.4032511711120605, | |
| "learning_rate": 2.2814601344860712e-06, | |
| "loss": 0.1045, | |
| "step": 6655 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "grad_norm": 4.300445079803467, | |
| "learning_rate": 2.2414345180915786e-06, | |
| "loss": 0.1479, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "grad_norm": 3.0734059810638428, | |
| "learning_rate": 2.2014089016970863e-06, | |
| "loss": 0.1494, | |
| "step": 6665 | |
| }, | |
| { | |
| "epoch": 9.6, | |
| "grad_norm": 5.5670952796936035, | |
| "learning_rate": 2.161383285302594e-06, | |
| "loss": 0.1565, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "grad_norm": 4.408038139343262, | |
| "learning_rate": 2.1213576689081014e-06, | |
| "loss": 0.1758, | |
| "step": 6675 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "grad_norm": 4.501611709594727, | |
| "learning_rate": 2.0813320525136087e-06, | |
| "loss": 0.1658, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "grad_norm": 3.2457005977630615, | |
| "learning_rate": 2.0413064361191164e-06, | |
| "loss": 0.1599, | |
| "step": 6685 | |
| }, | |
| { | |
| "epoch": 9.63, | |
| "grad_norm": 4.1629862785339355, | |
| "learning_rate": 2.0012808197246238e-06, | |
| "loss": 0.141, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 9.64, | |
| "grad_norm": 2.0646541118621826, | |
| "learning_rate": 1.961255203330131e-06, | |
| "loss": 0.1197, | |
| "step": 6695 | |
| }, | |
| { | |
| "epoch": 9.64, | |
| "grad_norm": 4.061581134796143, | |
| "learning_rate": 1.9212295869356392e-06, | |
| "loss": 0.1541, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "grad_norm": 4.780194282531738, | |
| "learning_rate": 1.8812039705411466e-06, | |
| "loss": 0.1247, | |
| "step": 6705 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "grad_norm": 3.3698534965515137, | |
| "learning_rate": 1.841178354146654e-06, | |
| "loss": 0.1486, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 9.67, | |
| "grad_norm": 2.801363945007324, | |
| "learning_rate": 1.8011527377521614e-06, | |
| "loss": 0.1459, | |
| "step": 6715 | |
| }, | |
| { | |
| "epoch": 9.67, | |
| "grad_norm": 4.238241672515869, | |
| "learning_rate": 1.761127121357669e-06, | |
| "loss": 0.1772, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "grad_norm": 2.364281415939331, | |
| "learning_rate": 1.7211015049631765e-06, | |
| "loss": 0.1272, | |
| "step": 6725 | |
| }, | |
| { | |
| "epoch": 9.69, | |
| "grad_norm": 4.3810038566589355, | |
| "learning_rate": 1.681075888568684e-06, | |
| "loss": 0.112, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 9.69, | |
| "grad_norm": 2.3395895957946777, | |
| "learning_rate": 1.6410502721741913e-06, | |
| "loss": 0.1298, | |
| "step": 6735 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "grad_norm": 3.7817649841308594, | |
| "learning_rate": 1.6010246557796993e-06, | |
| "loss": 0.1638, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 9.71, | |
| "grad_norm": 2.224323272705078, | |
| "learning_rate": 1.5609990393852066e-06, | |
| "loss": 0.1213, | |
| "step": 6745 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "grad_norm": 3.1445086002349854, | |
| "learning_rate": 1.5209734229907142e-06, | |
| "loss": 0.1252, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "grad_norm": 3.2290494441986084, | |
| "learning_rate": 1.4809478065962217e-06, | |
| "loss": 0.1635, | |
| "step": 6755 | |
| }, | |
| { | |
| "epoch": 9.73, | |
| "grad_norm": 3.6329257488250732, | |
| "learning_rate": 1.4409221902017292e-06, | |
| "loss": 0.13, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "grad_norm": 3.1905136108398438, | |
| "learning_rate": 1.4008965738072368e-06, | |
| "loss": 0.1265, | |
| "step": 6765 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "grad_norm": 5.263790607452393, | |
| "learning_rate": 1.3608709574127443e-06, | |
| "loss": 0.1498, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 9.75, | |
| "grad_norm": 3.3033974170684814, | |
| "learning_rate": 1.3208453410182518e-06, | |
| "loss": 0.1371, | |
| "step": 6775 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "grad_norm": 3.2312657833099365, | |
| "learning_rate": 1.2808197246237591e-06, | |
| "loss": 0.1604, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "grad_norm": 4.103578567504883, | |
| "learning_rate": 1.2407941082292669e-06, | |
| "loss": 0.1532, | |
| "step": 6785 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "grad_norm": 3.236082077026367, | |
| "learning_rate": 1.2007684918347744e-06, | |
| "loss": 0.1199, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 9.78, | |
| "grad_norm": 3.1800456047058105, | |
| "learning_rate": 1.1607428754402817e-06, | |
| "loss": 0.1527, | |
| "step": 6795 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "grad_norm": 2.98972225189209, | |
| "learning_rate": 1.1207172590457893e-06, | |
| "loss": 0.1367, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "grad_norm": 3.6796061992645264, | |
| "learning_rate": 1.080691642651297e-06, | |
| "loss": 0.1639, | |
| "step": 6805 | |
| }, | |
| { | |
| "epoch": 9.8, | |
| "grad_norm": 3.3537867069244385, | |
| "learning_rate": 1.0406660262568043e-06, | |
| "loss": 0.1696, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "grad_norm": 3.785534143447876, | |
| "learning_rate": 1.0006404098623119e-06, | |
| "loss": 0.1267, | |
| "step": 6815 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "grad_norm": 3.4578371047973633, | |
| "learning_rate": 9.606147934678196e-07, | |
| "loss": 0.1542, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "grad_norm": 2.174834966659546, | |
| "learning_rate": 9.20589177073327e-07, | |
| "loss": 0.1908, | |
| "step": 6825 | |
| }, | |
| { | |
| "epoch": 9.83, | |
| "grad_norm": 2.7609782218933105, | |
| "learning_rate": 8.805635606788345e-07, | |
| "loss": 0.152, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 9.84, | |
| "grad_norm": 2.602288246154785, | |
| "learning_rate": 8.40537944284342e-07, | |
| "loss": 0.1785, | |
| "step": 6835 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "grad_norm": 3.4062986373901367, | |
| "learning_rate": 8.005123278898497e-07, | |
| "loss": 0.175, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "grad_norm": 2.9411168098449707, | |
| "learning_rate": 7.604867114953571e-07, | |
| "loss": 0.1402, | |
| "step": 6845 | |
| }, | |
| { | |
| "epoch": 9.86, | |
| "grad_norm": 4.67616605758667, | |
| "learning_rate": 7.204610951008646e-07, | |
| "loss": 0.1281, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "grad_norm": 4.2490763664245605, | |
| "learning_rate": 6.804354787063721e-07, | |
| "loss": 0.1378, | |
| "step": 6855 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "grad_norm": 3.4150235652923584, | |
| "learning_rate": 6.404098623118796e-07, | |
| "loss": 0.15, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 9.88, | |
| "grad_norm": 3.8505630493164062, | |
| "learning_rate": 6.003842459173872e-07, | |
| "loss": 0.1729, | |
| "step": 6865 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "grad_norm": 3.3457560539245605, | |
| "learning_rate": 5.603586295228946e-07, | |
| "loss": 0.1047, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 9.9, | |
| "grad_norm": 4.059729099273682, | |
| "learning_rate": 5.203330131284022e-07, | |
| "loss": 0.1271, | |
| "step": 6875 | |
| }, | |
| { | |
| "epoch": 9.9, | |
| "grad_norm": 2.4905691146850586, | |
| "learning_rate": 4.803073967339098e-07, | |
| "loss": 0.1245, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "grad_norm": 5.17240571975708, | |
| "learning_rate": 4.4028178033941724e-07, | |
| "loss": 0.17, | |
| "step": 6885 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "grad_norm": 3.049504518508911, | |
| "learning_rate": 4.002561639449248e-07, | |
| "loss": 0.161, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "grad_norm": 3.2398414611816406, | |
| "learning_rate": 3.602305475504323e-07, | |
| "loss": 0.1539, | |
| "step": 6895 | |
| }, | |
| { | |
| "epoch": 9.93, | |
| "grad_norm": 3.9823930263519287, | |
| "learning_rate": 3.202049311559398e-07, | |
| "loss": 0.1569, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "grad_norm": 3.368542194366455, | |
| "learning_rate": 2.801793147614473e-07, | |
| "loss": 0.1652, | |
| "step": 6905 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "grad_norm": 2.684319019317627, | |
| "learning_rate": 2.401536983669549e-07, | |
| "loss": 0.1253, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "grad_norm": 3.630676746368408, | |
| "learning_rate": 2.001280819724624e-07, | |
| "loss": 0.1713, | |
| "step": 6915 | |
| }, | |
| { | |
| "epoch": 9.96, | |
| "grad_norm": 5.006500720977783, | |
| "learning_rate": 1.601024655779699e-07, | |
| "loss": 0.1505, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "grad_norm": 5.6927313804626465, | |
| "learning_rate": 1.2007684918347745e-07, | |
| "loss": 0.181, | |
| "step": 6925 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "grad_norm": 2.7989654541015625, | |
| "learning_rate": 8.005123278898495e-08, | |
| "loss": 0.1651, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "grad_norm": 6.005398750305176, | |
| "learning_rate": 4.0025616394492473e-08, | |
| "loss": 0.1509, | |
| "step": 6935 | |
| }, | |
| { | |
| "epoch": 9.99, | |
| "grad_norm": 3.4408304691314697, | |
| "learning_rate": 0.0, | |
| "loss": 0.1238, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 9.99, | |
| "eval_accuracy": 0.9558704453441296, | |
| "eval_loss": 0.1293453723192215, | |
| "eval_runtime": 26.754, | |
| "eval_samples_per_second": 369.291, | |
| "eval_steps_per_second": 11.55, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 9.99, | |
| "step": 6940, | |
| "total_flos": 2.207935349023097e+19, | |
| "train_loss": 0.24048639133100208, | |
| "train_runtime": 4813.8727, | |
| "train_samples_per_second": 184.697, | |
| "train_steps_per_second": 1.442 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 6940, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 500, | |
| "total_flos": 2.207935349023097e+19, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
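
The listing above is the complete `trainer_state.json` written out by the Hugging Face `Trainer` for this run. As a minimal sketch of how the log could be consumed downstream — assuming the JSON is saved to a file named `trainer_state.json` (the file name is an assumption for illustration; only keys that actually appear in the log are used) — one might separate the per-step training entries from the per-epoch evaluation entries like this:

```python
import json

# Load the trainer state (the file name is an assumption for illustration).
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# Training entries carry a "loss" key (one every `logging_steps` steps);
# evaluation entries carry "eval_accuracy"/"eval_loss" (one per epoch).
train_curve = [(e["step"], e["loss"]) for e in history if "loss" in e]
eval_curve = [(e["epoch"], e["eval_accuracy"], e["eval_loss"])
              for e in history if "eval_accuracy" in e]

print(f"{len(train_curve)} training points over "
      f"{state['max_steps']} steps / {state['num_train_epochs']} epochs")
for epoch, acc, loss in eval_curve:
    print(f"epoch {epoch}: eval_accuracy={acc:.4f}  eval_loss={loss:.4f}")
```

The two record types share no key, so membership tests on `"loss"` and `"eval_accuracy"` are enough to split them; the final summary entry (which logs `train_loss` rather than `loss`) is excluded from both curves automatically.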