{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.944416371905003,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 2998113.6,
      "step": 5
    },
    {
      "epoch": 0.08,
      "learning_rate": 8.108108108108109e-06,
      "loss": 2797824.6,
      "step": 10
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.2162162162162164e-05,
      "loss": 2452371.6,
      "step": 15
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.6216216216216218e-05,
      "loss": 1871456.8,
      "step": 20
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.027027027027027e-05,
      "loss": 1119156.9,
      "step": 25
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.4324324324324327e-05,
      "loss": 420413.4,
      "step": 30
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.8378378378378378e-05,
      "loss": 36048.9469,
      "step": 35
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.9924559932942165e-05,
      "loss": 866.2308,
      "step": 40
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.9798826487845768e-05,
      "loss": 6.5443,
      "step": 45
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9673093042749373e-05,
      "loss": 0.5418,
      "step": 50
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.9547359597652975e-05,
      "loss": 0.4141,
      "step": 55
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.942162615255658e-05,
      "loss": 0.3744,
      "step": 60
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.9295892707460186e-05,
      "loss": 0.3621,
      "step": 65
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.917015926236379e-05,
      "loss": 0.3423,
      "step": 70
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.9044425817267394e-05,
      "loss": 0.3333,
      "step": 75
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.8918692372171e-05,
      "loss": 0.3359,
      "step": 80
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.87929589270746e-05,
      "loss": 0.3301,
      "step": 85
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.8667225481978207e-05,
      "loss": 0.3238,
      "step": 90
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.854149203688181e-05,
      "loss": 0.3221,
      "step": 95
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.8415758591785418e-05,
      "loss": 0.3132,
      "step": 100
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.829002514668902e-05,
      "loss": 0.3141,
      "step": 105
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.8164291701592623e-05,
      "loss": 0.3019,
      "step": 110
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.8038558256496228e-05,
      "loss": 0.324,
      "step": 115
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.7912824811399834e-05,
      "loss": 0.3118,
      "step": 120
    },
    {
      "epoch": 0.99,
      "eval_f1": 0.3004154682007031,
      "eval_loss": 0.2937778830528259,
      "eval_runtime": 30.7677,
      "eval_samples_per_second": 32.144,
      "eval_steps_per_second": 8.06,
      "step": 123
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.778709136630344e-05,
      "loss": 0.3159,
      "step": 125
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.766135792120704e-05,
      "loss": 0.2949,
      "step": 130
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.7535624476110643e-05,
      "loss": 0.3043,
      "step": 135
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.7409891031014252e-05,
      "loss": 0.3076,
      "step": 140
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.7284157585917854e-05,
      "loss": 0.301,
      "step": 145
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.715842414082146e-05,
      "loss": 0.3006,
      "step": 150
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.7032690695725062e-05,
      "loss": 0.2929,
      "step": 155
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.6906957250628668e-05,
      "loss": 0.3107,
      "step": 160
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.6781223805532273e-05,
      "loss": 0.2988,
      "step": 165
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.6655490360435875e-05,
      "loss": 0.3097,
      "step": 170
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6529756915339484e-05,
      "loss": 0.2967,
      "step": 175
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.6404023470243086e-05,
      "loss": 0.2918,
      "step": 180
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.627829002514669e-05,
      "loss": 0.287,
      "step": 185
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.6152556580050294e-05,
      "loss": 0.3104,
      "step": 190
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.60268231349539e-05,
      "loss": 0.2859,
      "step": 195
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.59010896898575e-05,
      "loss": 0.2938,
      "step": 200
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.5775356244761107e-05,
      "loss": 0.2884,
      "step": 205
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.564962279966471e-05,
      "loss": 0.2858,
      "step": 210
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.5523889354568318e-05,
      "loss": 0.2956,
      "step": 215
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.539815590947192e-05,
      "loss": 0.2916,
      "step": 220
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.5272422464375523e-05,
      "loss": 0.2956,
      "step": 225
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.5146689019279128e-05,
      "loss": 0.2905,
      "step": 230
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.5020955574182734e-05,
      "loss": 0.2843,
      "step": 235
    },
    {
      "epoch": 1.94,
      "learning_rate": 2.489522212908634e-05,
      "loss": 0.2878,
      "step": 240
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.476948868398994e-05,
      "loss": 0.2732,
      "step": 245
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.46014293567894443,
      "eval_loss": 0.27736958861351013,
      "eval_runtime": 34.3798,
      "eval_samples_per_second": 28.767,
      "eval_steps_per_second": 7.214,
      "step": 247
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.4643755238893543e-05,
      "loss": 0.2978,
      "step": 250
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.4518021793797152e-05,
      "loss": 0.287,
      "step": 255
    },
    {
      "epoch": 2.1,
      "learning_rate": 2.4392288348700754e-05,
      "loss": 0.2841,
      "step": 260
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.426655490360436e-05,
      "loss": 0.2755,
      "step": 265
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.4140821458507962e-05,
      "loss": 0.2786,
      "step": 270
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.4015088013411568e-05,
      "loss": 0.2837,
      "step": 275
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.3889354568315173e-05,
      "loss": 0.2691,
      "step": 280
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.3763621123218775e-05,
      "loss": 0.2785,
      "step": 285
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.363788767812238e-05,
      "loss": 0.2863,
      "step": 290
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.3512154233025986e-05,
      "loss": 0.277,
      "step": 295
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.338642078792959e-05,
      "loss": 0.2755,
      "step": 300
    },
    {
      "epoch": 2.47,
      "learning_rate": 2.3260687342833194e-05,
      "loss": 0.2726,
      "step": 305
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.3134953897736796e-05,
      "loss": 0.2732,
      "step": 310
    },
    {
      "epoch": 2.55,
      "learning_rate": 2.3009220452640405e-05,
      "loss": 0.2772,
      "step": 315
    },
    {
      "epoch": 2.59,
      "learning_rate": 2.2883487007544007e-05,
      "loss": 0.2853,
      "step": 320
    },
    {
      "epoch": 2.63,
      "learning_rate": 2.275775356244761e-05,
      "loss": 0.2803,
      "step": 325
    },
    {
      "epoch": 2.67,
      "learning_rate": 2.263202011735122e-05,
      "loss": 0.2777,
      "step": 330
    },
    {
      "epoch": 2.71,
      "learning_rate": 2.250628667225482e-05,
      "loss": 0.2728,
      "step": 335
    },
    {
      "epoch": 2.75,
      "learning_rate": 2.2380553227158426e-05,
      "loss": 0.2793,
      "step": 340
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.2254819782062028e-05,
      "loss": 0.2574,
      "step": 345
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.2129086336965634e-05,
      "loss": 0.2722,
      "step": 350
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.200335289186924e-05,
      "loss": 0.2847,
      "step": 355
    },
    {
      "epoch": 2.91,
      "learning_rate": 2.187761944677284e-05,
      "loss": 0.2808,
      "step": 360
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.1751886001676447e-05,
      "loss": 0.2837,
      "step": 365
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.1626152556580052e-05,
      "loss": 0.2705,
      "step": 370
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.4702956617850236,
      "eval_loss": 0.2660216987133026,
      "eval_runtime": 30.702,
      "eval_samples_per_second": 32.213,
      "eval_steps_per_second": 8.078,
      "step": 371
    },
    {
      "epoch": 3.03,
      "learning_rate": 2.1500419111483655e-05,
      "loss": 0.2717,
      "step": 375
    },
    {
      "epoch": 3.07,
      "learning_rate": 2.137468566638726e-05,
      "loss": 0.2599,
      "step": 380
    },
    {
      "epoch": 3.11,
      "learning_rate": 2.1248952221290862e-05,
      "loss": 0.2668,
      "step": 385
    },
    {
      "epoch": 3.15,
      "learning_rate": 2.112321877619447e-05,
      "loss": 0.2689,
      "step": 390
    },
    {
      "epoch": 3.19,
      "learning_rate": 2.0997485331098073e-05,
      "loss": 0.2767,
      "step": 395
    },
    {
      "epoch": 3.23,
      "learning_rate": 2.0871751886001675e-05,
      "loss": 0.2755,
      "step": 400
    },
    {
      "epoch": 3.27,
      "learning_rate": 2.074601844090528e-05,
      "loss": 0.2645,
      "step": 405
    },
    {
      "epoch": 3.31,
      "learning_rate": 2.0620284995808886e-05,
      "loss": 0.2656,
      "step": 410
    },
    {
      "epoch": 3.36,
      "learning_rate": 2.0494551550712492e-05,
      "loss": 0.2691,
      "step": 415
    },
    {
      "epoch": 3.4,
      "learning_rate": 2.0368818105616094e-05,
      "loss": 0.2641,
      "step": 420
    },
    {
      "epoch": 3.44,
      "learning_rate": 2.0243084660519696e-05,
      "loss": 0.2576,
      "step": 425
    },
    {
      "epoch": 3.48,
      "learning_rate": 2.0117351215423305e-05,
      "loss": 0.2577,
      "step": 430
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.9991617770326907e-05,
      "loss": 0.2609,
      "step": 435
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.986588432523051e-05,
      "loss": 0.2558,
      "step": 440
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.9740150880134115e-05,
      "loss": 0.2645,
      "step": 445
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.961441743503772e-05,
      "loss": 0.2698,
      "step": 450
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.9488683989941326e-05,
      "loss": 0.26,
      "step": 455
    },
    {
      "epoch": 3.72,
      "learning_rate": 1.9362950544844928e-05,
      "loss": 0.2641,
      "step": 460
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.923721709974853e-05,
      "loss": 0.2672,
      "step": 465
    },
    {
      "epoch": 3.8,
      "learning_rate": 1.911148365465214e-05,
      "loss": 0.2592,
      "step": 470
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.898575020955574e-05,
      "loss": 0.2662,
      "step": 475
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.8860016764459347e-05,
      "loss": 0.26,
      "step": 480
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.8734283319362952e-05,
      "loss": 0.264,
      "step": 485
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.8608549874266555e-05,
      "loss": 0.2671,
      "step": 490
    },
    {
      "epoch": 3.99,
      "eval_f1": 0.4754279403644394,
      "eval_loss": 0.2679407000541687,
      "eval_runtime": 34.3196,
      "eval_samples_per_second": 28.817,
      "eval_steps_per_second": 7.226,
      "step": 494
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.848281642917016e-05,
      "loss": 0.2603,
      "step": 495
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.8357082984073762e-05,
      "loss": 0.2476,
      "step": 500
    },
    {
      "epoch": 4.08,
      "learning_rate": 1.823134953897737e-05,
      "loss": 0.2623,
      "step": 505
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.8105616093880973e-05,
      "loss": 0.2614,
      "step": 510
    },
    {
      "epoch": 4.16,
      "learning_rate": 1.7979882648784575e-05,
      "loss": 0.2405,
      "step": 515
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.785414920368818e-05,
      "loss": 0.2519,
      "step": 520
    },
    {
      "epoch": 4.24,
      "learning_rate": 1.7728415758591787e-05,
      "loss": 0.2499,
      "step": 525
    },
    {
      "epoch": 4.28,
      "learning_rate": 1.7602682313495392e-05,
      "loss": 0.2678,
      "step": 530
    },
    {
      "epoch": 4.33,
      "learning_rate": 1.7476948868398994e-05,
      "loss": 0.2639,
      "step": 535
    },
    {
      "epoch": 4.37,
      "learning_rate": 1.7351215423302596e-05,
      "loss": 0.2554,
      "step": 540
    },
    {
      "epoch": 4.41,
      "learning_rate": 1.7225481978206205e-05,
      "loss": 0.2615,
      "step": 545
    },
    {
      "epoch": 4.45,
      "learning_rate": 1.7099748533109807e-05,
      "loss": 0.2518,
      "step": 550
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.6974015088013413e-05,
      "loss": 0.2612,
      "step": 555
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.6848281642917015e-05,
      "loss": 0.2608,
      "step": 560
    },
    {
      "epoch": 4.57,
      "learning_rate": 1.672254819782062e-05,
      "loss": 0.2446,
      "step": 565
    },
    {
      "epoch": 4.61,
      "learning_rate": 1.6596814752724226e-05,
      "loss": 0.2556,
      "step": 570
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.6471081307627828e-05,
      "loss": 0.2477,
      "step": 575
    },
    {
      "epoch": 4.69,
      "learning_rate": 1.6345347862531434e-05,
      "loss": 0.2649,
      "step": 580
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.621961441743504e-05,
      "loss": 0.2611,
      "step": 585
    },
    {
      "epoch": 4.77,
      "learning_rate": 1.609388097233864e-05,
      "loss": 0.259,
      "step": 590
    },
    {
      "epoch": 4.81,
      "learning_rate": 1.5968147527242247e-05,
      "loss": 0.2618,
      "step": 595
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.584241408214585e-05,
      "loss": 0.2478,
      "step": 600
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.5716680637049458e-05,
      "loss": 0.2542,
      "step": 605
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.559094719195306e-05,
      "loss": 0.2488,
      "step": 610
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.5465213746856662e-05,
      "loss": 0.2529,
      "step": 615
    },
    {
      "epoch": 5.0,
      "eval_f1": 0.5162986330178759,
      "eval_loss": 0.2654779851436615,
      "eval_runtime": 32.5281,
      "eval_samples_per_second": 30.405,
      "eval_steps_per_second": 7.624,
      "step": 618
    },
    {
      "epoch": 5.01,
      "learning_rate": 1.533948030176027e-05,
      "loss": 0.2474,
      "step": 620
    },
    {
      "epoch": 5.05,
      "learning_rate": 1.5213746856663873e-05,
      "loss": 0.2442,
      "step": 625
    },
    {
      "epoch": 5.09,
      "learning_rate": 1.5088013411567477e-05,
      "loss": 0.2494,
      "step": 630
    },
    {
      "epoch": 5.13,
      "learning_rate": 1.4962279966471083e-05,
      "loss": 0.2505,
      "step": 635
    },
    {
      "epoch": 5.17,
      "learning_rate": 1.4836546521374687e-05,
      "loss": 0.2345,
      "step": 640
    },
    {
      "epoch": 5.21,
      "learning_rate": 1.471081307627829e-05,
      "loss": 0.2491,
      "step": 645
    },
    {
      "epoch": 5.26,
      "learning_rate": 1.4585079631181894e-05,
      "loss": 0.2455,
      "step": 650
    },
    {
      "epoch": 5.3,
      "learning_rate": 1.44593461860855e-05,
      "loss": 0.2483,
      "step": 655
    },
    {
      "epoch": 5.34,
      "learning_rate": 1.4333612740989104e-05,
      "loss": 0.2387,
      "step": 660
    },
    {
      "epoch": 5.38,
      "learning_rate": 1.4207879295892709e-05,
      "loss": 0.2568,
      "step": 665
    },
    {
      "epoch": 5.42,
      "learning_rate": 1.4082145850796311e-05,
      "loss": 0.2539,
      "step": 670
    },
    {
      "epoch": 5.46,
      "learning_rate": 1.3956412405699917e-05,
      "loss": 0.2306,
      "step": 675
    },
    {
      "epoch": 5.5,
      "learning_rate": 1.383067896060352e-05,
      "loss": 0.2453,
      "step": 680
    },
    {
      "epoch": 5.54,
      "learning_rate": 1.3704945515507126e-05,
      "loss": 0.2411,
      "step": 685
    },
    {
      "epoch": 5.58,
      "learning_rate": 1.357921207041073e-05,
      "loss": 0.2526,
      "step": 690
    },
    {
      "epoch": 5.62,
      "learning_rate": 1.3453478625314334e-05,
      "loss": 0.2384,
      "step": 695
    },
    {
      "epoch": 5.66,
      "learning_rate": 1.3327745180217938e-05,
      "loss": 0.2486,
      "step": 700
    },
    {
      "epoch": 5.7,
      "learning_rate": 1.3202011735121543e-05,
      "loss": 0.2357,
      "step": 705
    },
    {
      "epoch": 5.74,
      "learning_rate": 1.3076278290025147e-05,
      "loss": 0.2484,
      "step": 710
    },
    {
      "epoch": 5.78,
      "learning_rate": 1.295054484492875e-05,
      "loss": 0.2405,
      "step": 715
    },
    {
      "epoch": 5.82,
      "learning_rate": 1.2824811399832355e-05,
      "loss": 0.2521,
      "step": 720
    },
    {
      "epoch": 5.86,
      "learning_rate": 1.269907795473596e-05,
      "loss": 0.2575,
      "step": 725
    },
    {
      "epoch": 5.9,
      "learning_rate": 1.2573344509639564e-05,
      "loss": 0.2516,
      "step": 730
    },
    {
      "epoch": 5.94,
      "learning_rate": 1.244761106454317e-05,
      "loss": 0.2353,
      "step": 735
    },
    {
      "epoch": 5.98,
      "learning_rate": 1.2321877619446772e-05,
      "loss": 0.2395,
      "step": 740
    },
    {
      "epoch": 6.0,
      "eval_f1": 0.5213319458896981,
      "eval_loss": 0.26373252272605896,
      "eval_runtime": 29.187,
      "eval_samples_per_second": 33.885,
      "eval_steps_per_second": 8.497,
      "step": 742
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.2196144174350377e-05,
      "loss": 0.256,
      "step": 745
    },
    {
      "epoch": 6.06,
      "learning_rate": 1.2070410729253981e-05,
      "loss": 0.2359,
      "step": 750
    },
    {
      "epoch": 6.1,
      "learning_rate": 1.1944677284157587e-05,
      "loss": 0.239,
      "step": 755
    },
    {
      "epoch": 6.14,
      "learning_rate": 1.181894383906119e-05,
      "loss": 0.2485,
      "step": 760
    },
    {
      "epoch": 6.18,
      "learning_rate": 1.1693210393964794e-05,
      "loss": 0.254,
      "step": 765
    },
    {
      "epoch": 6.23,
      "learning_rate": 1.1567476948868398e-05,
      "loss": 0.2459,
      "step": 770
    },
    {
      "epoch": 6.27,
      "learning_rate": 1.1441743503772004e-05,
      "loss": 0.2328,
      "step": 775
    },
    {
      "epoch": 6.31,
      "learning_rate": 1.131601005867561e-05,
      "loss": 0.2422,
      "step": 780
    },
    {
      "epoch": 6.35,
      "learning_rate": 1.1190276613579213e-05,
      "loss": 0.2296,
      "step": 785
    },
    {
      "epoch": 6.39,
      "learning_rate": 1.1064543168482817e-05,
      "loss": 0.2375,
      "step": 790
    },
    {
      "epoch": 6.43,
      "learning_rate": 1.093880972338642e-05,
      "loss": 0.2341,
      "step": 795
    },
    {
      "epoch": 6.47,
      "learning_rate": 1.0813076278290026e-05,
      "loss": 0.2424,
      "step": 800
    },
    {
      "epoch": 6.51,
      "learning_rate": 1.068734283319363e-05,
      "loss": 0.2253,
      "step": 805
    },
    {
      "epoch": 6.55,
      "learning_rate": 1.0561609388097236e-05,
      "loss": 0.2386,
      "step": 810
    },
    {
      "epoch": 6.59,
      "learning_rate": 1.0435875943000838e-05,
      "loss": 0.2546,
      "step": 815
    },
    {
      "epoch": 6.63,
      "learning_rate": 1.0310142497904443e-05,
      "loss": 0.224,
      "step": 820
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.0184409052808047e-05,
      "loss": 0.2337,
      "step": 825
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.0058675607711653e-05,
      "loss": 0.2415,
      "step": 830
    },
    {
      "epoch": 6.75,
      "learning_rate": 9.932942162615255e-06,
      "loss": 0.2309,
      "step": 835
    },
    {
      "epoch": 6.79,
      "learning_rate": 9.80720871751886e-06,
      "loss": 0.2257,
      "step": 840
    },
    {
      "epoch": 6.83,
      "learning_rate": 9.681475272422464e-06,
      "loss": 0.2373,
      "step": 845
    },
    {
      "epoch": 6.87,
      "learning_rate": 9.55574182732607e-06,
      "loss": 0.2483,
      "step": 850
    },
    {
      "epoch": 6.91,
      "learning_rate": 9.430008382229673e-06,
      "loss": 0.2437,
      "step": 855
    },
    {
      "epoch": 6.95,
      "learning_rate": 9.304274937133277e-06,
      "loss": 0.2344,
      "step": 860
    },
    {
      "epoch": 6.99,
      "learning_rate": 9.178541492036881e-06,
      "loss": 0.2469,
      "step": 865
    },
    {
      "epoch": 6.99,
      "eval_f1": 0.5234760051880674,
      "eval_loss": 0.26504993438720703,
      "eval_runtime": 33.4377,
      "eval_samples_per_second": 29.577,
      "eval_steps_per_second": 7.417,
      "step": 865
    },
    {
      "epoch": 7.03,
      "learning_rate": 9.052808046940487e-06,
      "loss": 0.2417,
      "step": 870
    },
    {
      "epoch": 7.07,
      "learning_rate": 8.92707460184409e-06,
      "loss": 0.2271,
      "step": 875
    },
    {
      "epoch": 7.11,
      "learning_rate": 8.801341156747696e-06,
      "loss": 0.2289,
      "step": 880
    },
    {
      "epoch": 7.16,
      "learning_rate": 8.675607711651298e-06,
      "loss": 0.2285,
      "step": 885
    },
    {
      "epoch": 7.2,
      "learning_rate": 8.549874266554904e-06,
      "loss": 0.2377,
      "step": 890
    },
    {
      "epoch": 7.24,
      "learning_rate": 8.424140821458508e-06,
      "loss": 0.2371,
      "step": 895
    },
    {
      "epoch": 7.28,
      "learning_rate": 8.298407376362113e-06,
      "loss": 0.2407,
      "step": 900
    },
    {
      "epoch": 7.32,
      "learning_rate": 8.172673931265717e-06,
      "loss": 0.2325,
      "step": 905
    },
    {
      "epoch": 7.36,
      "learning_rate": 8.04694048616932e-06,
      "loss": 0.2289,
      "step": 910
    },
    {
      "epoch": 7.4,
      "learning_rate": 7.921207041072925e-06,
      "loss": 0.2383,
      "step": 915
    },
    {
      "epoch": 7.44,
      "learning_rate": 7.79547359597653e-06,
      "loss": 0.2312,
      "step": 920
    },
    {
      "epoch": 7.48,
      "learning_rate": 7.669740150880136e-06,
      "loss": 0.2255,
      "step": 925
    },
    {
      "epoch": 7.52,
      "learning_rate": 7.544006705783739e-06,
      "loss": 0.2387,
      "step": 930
    },
    {
      "epoch": 7.56,
      "learning_rate": 7.418273260687343e-06,
      "loss": 0.2328,
      "step": 935
    },
    {
      "epoch": 7.6,
      "learning_rate": 7.292539815590947e-06,
      "loss": 0.2359,
      "step": 940
    },
    {
      "epoch": 7.64,
      "learning_rate": 7.166806370494552e-06,
      "loss": 0.2322,
      "step": 945
    },
    {
      "epoch": 7.68,
      "learning_rate": 7.041072925398156e-06,
      "loss": 0.2372,
      "step": 950
    },
    {
      "epoch": 7.72,
      "learning_rate": 6.91533948030176e-06,
      "loss": 0.2297,
      "step": 955
    },
    {
      "epoch": 7.76,
      "learning_rate": 6.789606035205365e-06,
      "loss": 0.2345,
      "step": 960
    },
    {
      "epoch": 7.8,
      "learning_rate": 6.663872590108969e-06,
      "loss": 0.2229,
      "step": 965
    },
    {
      "epoch": 7.84,
      "learning_rate": 6.5381391450125735e-06,
      "loss": 0.2395,
      "step": 970
    },
    {
      "epoch": 7.88,
      "learning_rate": 6.412405699916177e-06,
      "loss": 0.2395,
      "step": 975
    },
    {
      "epoch": 7.92,
      "learning_rate": 6.286672254819782e-06,
      "loss": 0.2226,
      "step": 980
    },
    {
      "epoch": 7.96,
      "learning_rate": 6.160938809723386e-06,
      "loss": 0.2301,
      "step": 985
    },
    {
      "epoch": 8.0,
      "eval_f1": 0.5250844814140889,
      "eval_loss": 0.2636159062385559,
      "eval_runtime": 39.4887,
      "eval_samples_per_second": 25.045,
      "eval_steps_per_second": 6.28,
      "step": 989
    },
    {
      "epoch": 8.0,
      "learning_rate": 6.0352053646269905e-06,
      "loss": 0.2295,
      "step": 990
    },
    {
      "epoch": 8.04,
      "learning_rate": 5.909471919530595e-06,
      "loss": 0.2384,
      "step": 995
    },
    {
      "epoch": 8.08,
      "learning_rate": 5.783738474434199e-06,
      "loss": 0.2277,
      "step": 1000
    },
    {
      "epoch": 8.13,
      "learning_rate": 5.658005029337805e-06,
      "loss": 0.2295,
      "step": 1005
    },
    {
      "epoch": 8.17,
      "learning_rate": 5.532271584241408e-06,
      "loss": 0.2323,
      "step": 1010
    },
    {
      "epoch": 8.21,
      "learning_rate": 5.406538139145013e-06,
      "loss": 0.2292,
      "step": 1015
    },
    {
      "epoch": 8.25,
      "learning_rate": 5.280804694048618e-06,
      "loss": 0.2265,
      "step": 1020
    },
    {
      "epoch": 8.29,
      "learning_rate": 5.155071248952222e-06,
      "loss": 0.2213,
      "step": 1025
    },
    {
      "epoch": 8.33,
      "learning_rate": 5.029337803855826e-06,
      "loss": 0.219,
      "step": 1030
    },
    {
      "epoch": 8.37,
      "learning_rate": 4.90360435875943e-06,
      "loss": 0.2201,
      "step": 1035
    },
    {
      "epoch": 8.41,
      "learning_rate": 4.777870913663035e-06,
      "loss": 0.2318,
      "step": 1040
    },
    {
      "epoch": 8.45,
      "learning_rate": 4.652137468566639e-06,
      "loss": 0.2316,
      "step": 1045
    },
    {
      "epoch": 8.49,
      "learning_rate": 4.526404023470243e-06,
      "loss": 0.2279,
      "step": 1050
    },
    {
      "epoch": 8.53,
      "learning_rate": 4.400670578373848e-06,
      "loss": 0.2237,
      "step": 1055
    },
    {
      "epoch": 8.57,
      "learning_rate": 4.274937133277452e-06,
      "loss": 0.2469,
      "step": 1060
    },
    {
      "epoch": 8.61,
      "learning_rate": 4.1492036881810565e-06,
      "loss": 0.224,
      "step": 1065
    },
    {
      "epoch": 8.65,
      "learning_rate": 4.02347024308466e-06,
      "loss": 0.2277,
      "step": 1070
    },
    {
      "epoch": 8.69,
      "learning_rate": 3.897736797988265e-06,
      "loss": 0.2334,
      "step": 1075
    },
    {
      "epoch": 8.73,
      "learning_rate": 3.7720033528918693e-06,
      "loss": 0.2291,
      "step": 1080
    },
    {
      "epoch": 8.77,
      "learning_rate": 3.6462699077954736e-06,
      "loss": 0.2333,
      "step": 1085
    },
    {
      "epoch": 8.81,
      "learning_rate": 3.520536462699078e-06,
      "loss": 0.2358,
      "step": 1090
    },
    {
      "epoch": 8.85,
      "learning_rate": 3.3948030176026825e-06,
      "loss": 0.2314,
      "step": 1095
    },
    {
      "epoch": 8.89,
      "learning_rate": 3.2690695725062868e-06,
      "loss": 0.2263,
      "step": 1100
    },
    {
      "epoch": 8.93,
      "learning_rate": 3.143336127409891e-06,
      "loss": 0.2274,
      "step": 1105
    },
    {
      "epoch": 8.97,
      "learning_rate": 3.0176026823134953e-06,
      "loss": 0.2273,
      "step": 1110
    },
    {
      "epoch": 9.0,
      "eval_f1": 0.5394001016776816,
      "eval_loss": 0.26480013132095337,
      "eval_runtime": 32.8402,
      "eval_samples_per_second": 30.116,
      "eval_steps_per_second": 7.552,
      "step": 1113
    },
    {
      "epoch": 9.01,
      "learning_rate": 2.8918692372170995e-06,
      "loss": 0.2256,
      "step": 1115
    },
    {
      "epoch": 9.06,
      "learning_rate": 2.766135792120704e-06,
      "loss": 0.2162,
      "step": 1120
    },
    {
      "epoch": 9.1,
      "learning_rate": 2.640402347024309e-06,
      "loss": 0.231,
      "step": 1125
    },
    {
      "epoch": 9.14,
      "learning_rate": 2.514668901927913e-06,
      "loss": 0.2298,
      "step": 1130
    },
    {
      "epoch": 9.18,
      "learning_rate": 2.3889354568315174e-06,
      "loss": 0.2291,
      "step": 1135
    },
    {
      "epoch": 9.22,
      "learning_rate": 2.2632020117351217e-06,
      "loss": 0.2263,
      "step": 1140
    },
    {
      "epoch": 9.26,
      "learning_rate": 2.137468566638726e-06,
      "loss": 0.2211,
      "step": 1145
    },
    {
      "epoch": 9.3,
      "learning_rate": 2.01173512154233e-06,
      "loss": 0.2302,
      "step": 1150
    },
    {
      "epoch": 9.34,
      "learning_rate": 1.8860016764459346e-06,
      "loss": 0.2218,
      "step": 1155
    },
    {
      "epoch": 9.38,
      "learning_rate": 1.760268231349539e-06,
      "loss": 0.2366,
      "step": 1160
    },
    {
      "epoch": 9.42,
      "learning_rate": 1.6345347862531434e-06,
      "loss": 0.2307,
      "step": 1165
    },
    {
      "epoch": 9.46,
      "learning_rate": 1.5088013411567476e-06,
      "loss": 0.234,
      "step": 1170
    },
    {
      "epoch": 9.5,
      "learning_rate": 1.383067896060352e-06,
      "loss": 0.2296,
      "step": 1175
    },
    {
      "epoch": 9.54,
      "learning_rate": 1.2573344509639566e-06,
      "loss": 0.2307,
      "step": 1180
    },
    {
      "epoch": 9.58,
      "learning_rate": 1.1316010058675608e-06,
      "loss": 0.2234,
      "step": 1185
    },
    {
      "epoch": 9.62,
      "learning_rate": 1.005867560771165e-06,
      "loss": 0.2299,
      "step": 1190
    },
    {
      "epoch": 9.66,
      "learning_rate": 8.801341156747695e-07,
      "loss": 0.2325,
      "step": 1195
    },
    {
      "epoch": 9.7,
      "learning_rate": 7.544006705783738e-07,
      "loss": 0.2238,
      "step": 1200
    },
    {
      "epoch": 9.74,
      "learning_rate": 6.286672254819783e-07,
      "loss": 0.2293,
      "step": 1205
    },
    {
      "epoch": 9.78,
      "learning_rate": 5.029337803855825e-07,
      "loss": 0.2265,
      "step": 1210
    },
    {
      "epoch": 9.82,
      "learning_rate": 3.772003352891869e-07,
      "loss": 0.2245,
      "step": 1215
    },
    {
      "epoch": 9.86,
      "learning_rate": 2.5146689019279127e-07,
      "loss": 0.2133,
      "step": 1220
    },
    {
      "epoch": 9.9,
      "learning_rate": 1.2573344509639564e-07,
      "loss": 0.2202,
      "step": 1225
    },
    {
      "epoch": 9.94,
      "learning_rate": 0.0,
      "loss": 0.2169,
      "step": 1230
    },
    {
      "epoch": 9.94,
      "eval_f1": 0.5395372489193999,
      "eval_loss": 0.2657696008682251,
      "eval_runtime": 31.9099,
      "eval_samples_per_second": 30.993,
      "eval_steps_per_second": 7.772,
      "step": 1230
    },
    {
      "epoch": 9.94,
      "step": 1230,
      "total_flos": 462869108028288.0,
      "train_loss": 47546.01565268776,
      "train_runtime": 9264.1071,
      "train_samples_per_second": 8.543,
      "train_steps_per_second": 0.133
    }
  ],
  "logging_steps": 5,
  "max_steps": 1230,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 462869108028288.0,
  "trial_name": null,
  "trial_params": null
}