{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.022222222222222223,
      "grad_norm": 6.603886604309082,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 1.7454,
      "step": 10
    },
    {
      "epoch": 0.044444444444444446,
      "grad_norm": 4.177036285400391,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 1.5071,
      "step": 20
    },
    {
      "epoch": 0.06666666666666667,
      "grad_norm": 6.5375285148620605,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 1.6012,
      "step": 30
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 2.4201414585113525,
      "learning_rate": 2.8888888888888888e-05,
      "loss": 1.543,
      "step": 40
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 8.420122146606445,
      "learning_rate": 3.62962962962963e-05,
      "loss": 1.6806,
      "step": 50
    },
    {
      "epoch": 0.13333333333333333,
      "grad_norm": 2.9238393306732178,
      "learning_rate": 4.3703703703703705e-05,
      "loss": 1.4376,
      "step": 60
    },
    {
      "epoch": 0.15555555555555556,
      "grad_norm": 4.334264278411865,
      "learning_rate": 5.111111111111111e-05,
      "loss": 1.5629,
      "step": 70
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 4.047328472137451,
      "learning_rate": 5.851851851851852e-05,
      "loss": 1.5502,
      "step": 80
    },
    {
      "epoch": 0.2,
      "grad_norm": 6.695739269256592,
      "learning_rate": 6.592592592592593e-05,
      "loss": 1.5744,
      "step": 90
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.579887628555298,
      "learning_rate": 7.333333333333333e-05,
      "loss": 1.4887,
      "step": 100
    },
    {
      "epoch": 0.24444444444444444,
      "grad_norm": 8.655933380126953,
      "learning_rate": 8.074074074074075e-05,
      "loss": 1.4432,
      "step": 110
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 2.3287596702575684,
      "learning_rate": 8.814814814814815e-05,
      "loss": 1.7349,
      "step": 120
    },
    {
      "epoch": 0.28888888888888886,
      "grad_norm": 3.626464366912842,
      "learning_rate": 9.555555555555557e-05,
      "loss": 1.8419,
      "step": 130
    },
    {
      "epoch": 0.3111111111111111,
      "grad_norm": 13.17669677734375,
      "learning_rate": 9.999732574196451e-05,
      "loss": 1.7434,
      "step": 140
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.938781261444092,
      "learning_rate": 9.996724362426075e-05,
      "loss": 1.7345,
      "step": 150
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 3.3643951416015625,
      "learning_rate": 9.990375674425109e-05,
      "loss": 1.6652,
      "step": 160
    },
    {
      "epoch": 0.37777777777777777,
      "grad_norm": 4.0528178215026855,
      "learning_rate": 9.980690754502393e-05,
      "loss": 1.6247,
      "step": 170
    },
    {
      "epoch": 0.4,
      "grad_norm": 3.2510921955108643,
      "learning_rate": 9.96767607734863e-05,
      "loss": 1.9226,
      "step": 180
    },
    {
      "epoch": 0.4222222222222222,
      "grad_norm": 3.1616647243499756,
      "learning_rate": 9.951340343707852e-05,
      "loss": 1.9507,
      "step": 190
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.816512107849121,
      "learning_rate": 9.931694474560686e-05,
      "loss": 1.5901,
      "step": 200
    },
    {
      "epoch": 0.4666666666666667,
      "grad_norm": 3.0883939266204834,
      "learning_rate": 9.908751603823301e-05,
      "loss": 1.7762,
      "step": 210
    },
    {
      "epoch": 0.4888888888888889,
      "grad_norm": 3.496539354324341,
      "learning_rate": 9.882527069566965e-05,
      "loss": 1.7833,
      "step": 220
    },
    {
      "epoch": 0.5111111111111111,
      "grad_norm": 3.6071531772613525,
      "learning_rate": 9.853038403764021e-05,
      "loss": 1.5891,
      "step": 230
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 3.1297965049743652,
      "learning_rate": 9.820305320567192e-05,
      "loss": 1.7261,
      "step": 240
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 2.707146167755127,
      "learning_rate": 9.784349703130007e-05,
      "loss": 1.6448,
      "step": 250
    },
    {
      "epoch": 0.5777777777777777,
      "grad_norm": 3.1915082931518555,
      "learning_rate": 9.745195588977192e-05,
      "loss": 1.9097,
      "step": 260
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.056610584259033,
      "learning_rate": 9.702869153934782e-05,
      "loss": 1.6173,
      "step": 270
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 3.044048309326172,
      "learning_rate": 9.657398694630712e-05,
      "loss": 1.8324,
      "step": 280
    },
    {
      "epoch": 0.6444444444444445,
      "grad_norm": 3.3219234943389893,
      "learning_rate": 9.608814609577585e-05,
      "loss": 1.818,
      "step": 290
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 2.5632572174072266,
      "learning_rate": 9.557149378850254e-05,
      "loss": 1.618,
      "step": 300
    },
    {
      "epoch": 0.6888888888888889,
      "grad_norm": 2.3387904167175293,
      "learning_rate": 9.502437542371812e-05,
      "loss": 1.6228,
      "step": 310
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 2.6883046627044678,
      "learning_rate": 9.444715676822501e-05,
      "loss": 1.7307,
      "step": 320
    },
    {
      "epoch": 0.7333333333333333,
      "grad_norm": 2.9449591636657715,
      "learning_rate": 9.384022371187003e-05,
      "loss": 1.7338,
      "step": 330
    },
    {
      "epoch": 0.7555555555555555,
      "grad_norm": 2.4884233474731445,
      "learning_rate": 9.320398200956403e-05,
      "loss": 1.4941,
      "step": 340
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 5.446136951446533,
      "learning_rate": 9.253885701002134e-05,
      "loss": 1.9377,
      "step": 350
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.122303009033203,
      "learning_rate": 9.184529337140002e-05,
      "loss": 1.5295,
      "step": 360
    },
    {
      "epoch": 0.8222222222222222,
      "grad_norm": 3.4902145862579346,
      "learning_rate": 9.112375476403312e-05,
      "loss": 1.688,
      "step": 370
    },
    {
      "epoch": 0.8444444444444444,
      "grad_norm": 3.041170597076416,
      "learning_rate": 9.037472356044962e-05,
      "loss": 1.7526,
      "step": 380
    },
    {
      "epoch": 0.8666666666666667,
      "grad_norm": 2.029374837875366,
      "learning_rate": 8.959870051289241e-05,
      "loss": 1.5125,
      "step": 390
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 4.354530334472656,
      "learning_rate": 8.879620441854872e-05,
      "loss": 1.6507,
      "step": 400
    },
    {
      "epoch": 0.9111111111111111,
      "grad_norm": 3.2502028942108154,
      "learning_rate": 8.796777177271708e-05,
      "loss": 1.6118,
      "step": 410
    },
    {
      "epoch": 0.9333333333333333,
      "grad_norm": 2.198453903198242,
      "learning_rate": 8.711395641014228e-05,
      "loss": 1.6589,
      "step": 420
    },
    {
      "epoch": 0.9555555555555556,
      "grad_norm": 3.6842291355133057,
      "learning_rate": 8.623532913475847e-05,
      "loss": 1.6891,
      "step": 430
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 2.3169732093811035,
      "learning_rate": 8.533247733808776e-05,
      "loss": 1.5275,
      "step": 440
    },
    {
      "epoch": 1.0,
      "grad_norm": 5.430471897125244,
      "learning_rate": 8.440600460654958e-05,
      "loss": 1.7984,
      "step": 450
    },
    {
      "epoch": 1.0222222222222221,
      "grad_norm": 4.782519817352295,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.9174,
      "step": 460
    },
    {
      "epoch": 1.0444444444444445,
      "grad_norm": 3.547177314758301,
      "learning_rate": 8.248468922737188e-05,
      "loss": 1.3878,
      "step": 470
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 2.446197748184204,
      "learning_rate": 8.149113104289063e-05,
      "loss": 1.1277,
      "step": 480
    },
    {
      "epoch": 1.0888888888888888,
      "grad_norm": 3.3164918422698975,
      "learning_rate": 8.047651999115217e-05,
      "loss": 1.0124,
      "step": 490
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 7.3411359786987305,
      "learning_rate": 7.944153437335057e-05,
      "loss": 1.0102,
      "step": 500
    },
    {
      "epoch": 1.1111111111111112,
      "eval_loss": 1.7722898721694946,
      "eval_runtime": 4.268,
      "eval_samples_per_second": 23.43,
      "eval_steps_per_second": 23.43,
      "step": 500
    },
    {
      "epoch": 1.1333333333333333,
      "grad_norm": 3.1477370262145996,
      "learning_rate": 7.838686611175421e-05,
      "loss": 1.1386,
      "step": 510
    },
    {
      "epoch": 1.1555555555555554,
      "grad_norm": 3.3324782848358154,
      "learning_rate": 7.73132202871327e-05,
      "loss": 1.0376,
      "step": 520
    },
    {
      "epoch": 1.1777777777777778,
      "grad_norm": 4.7969136238098145,
      "learning_rate": 7.6221314667387e-05,
      "loss": 1.2585,
      "step": 530
    },
    {
      "epoch": 1.2,
      "grad_norm": 3.488189697265625,
      "learning_rate": 7.511187922769768e-05,
      "loss": 1.2203,
      "step": 540
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 3.5396199226379395,
      "learning_rate": 7.398565566251232e-05,
      "loss": 1.1651,
      "step": 550
    },
    {
      "epoch": 1.2444444444444445,
      "grad_norm": 4.660005569458008,
      "learning_rate": 7.284339688969809e-05,
      "loss": 1.1229,
      "step": 560
    },
    {
      "epoch": 1.2666666666666666,
      "grad_norm": 6.269535541534424,
      "learning_rate": 7.168586654719117e-05,
      "loss": 1.1348,
      "step": 570
    },
    {
      "epoch": 1.2888888888888888,
      "grad_norm": 5.864573955535889,
      "learning_rate": 7.051383848247942e-05,
      "loss": 1.1637,
      "step": 580
    },
    {
      "epoch": 1.3111111111111111,
      "grad_norm": 3.3166041374206543,
      "learning_rate": 6.944726507547169e-05,
      "loss": 1.0358,
      "step": 590
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 4.742485523223877,
      "learning_rate": 6.824985757903016e-05,
      "loss": 1.0499,
      "step": 600
    },
    {
      "epoch": 1.3555555555555556,
      "grad_norm": 2.5902740955352783,
      "learning_rate": 6.704024944652537e-05,
      "loss": 0.9614,
      "step": 610
    },
    {
      "epoch": 1.3777777777777778,
      "grad_norm": 4.31684684753418,
      "learning_rate": 6.581924934117782e-05,
      "loss": 1.2387,
      "step": 620
    },
    {
      "epoch": 1.4,
      "grad_norm": 6.753187656402588,
      "learning_rate": 6.458767354212036e-05,
      "loss": 0.9807,
      "step": 630
    },
    {
      "epoch": 1.4222222222222223,
      "grad_norm": 4.625970363616943,
      "learning_rate": 6.334634539868836e-05,
      "loss": 1.047,
      "step": 640
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 4.617323875427246,
      "learning_rate": 6.22214975761865e-05,
      "loss": 1.1423,
      "step": 650
    },
    {
      "epoch": 1.4666666666666668,
      "grad_norm": 5.007408142089844,
      "learning_rate": 6.096393120939516e-05,
      "loss": 1.1181,
      "step": 660
    },
    {
      "epoch": 1.488888888888889,
      "grad_norm": 6.361126899719238,
      "learning_rate": 5.969903509036172e-05,
      "loss": 0.8642,
      "step": 670
    },
    {
      "epoch": 1.511111111111111,
      "grad_norm": 3.4635279178619385,
      "learning_rate": 5.842765484416237e-05,
      "loss": 0.9609,
      "step": 680
    },
    {
      "epoch": 1.5333333333333332,
      "grad_norm": 2.583265781402588,
      "learning_rate": 5.715064043072771e-05,
      "loss": 0.9543,
      "step": 690
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 3.2295000553131104,
      "learning_rate": 5.586884557661638e-05,
      "loss": 1.3545,
      "step": 700
    },
    {
      "epoch": 1.5777777777777777,
      "grad_norm": 3.589311361312866,
      "learning_rate": 5.458312720427037e-05,
      "loss": 1.1898,
      "step": 710
    },
    {
      "epoch": 1.6,
      "grad_norm": 3.749166965484619,
      "learning_rate": 5.329434485913393e-05,
      "loss": 0.9212,
      "step": 720
    },
    {
      "epoch": 1.6222222222222222,
      "grad_norm": 5.566150665283203,
      "learning_rate": 5.200336013501898e-05,
      "loss": 1.0051,
      "step": 730
    },
    {
      "epoch": 1.6444444444444444,
      "grad_norm": 4.666942596435547,
      "learning_rate": 5.07110360981009e-05,
      "loss": 0.9947,
      "step": 740
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 4.7450737953186035,
      "learning_rate": 4.941823670993016e-05,
      "loss": 1.2966,
      "step": 750
    },
    {
      "epoch": 1.6888888888888889,
      "grad_norm": 3.922316551208496,
      "learning_rate": 4.8125826249845375e-05,
      "loss": 0.9482,
      "step": 760
    },
    {
      "epoch": 1.7111111111111112,
      "grad_norm": 2.466024398803711,
      "learning_rate": 4.683466873717379e-05,
      "loss": 1.1203,
      "step": 770
    },
    {
      "epoch": 1.7333333333333334,
      "grad_norm": 3.1226003170013428,
      "learning_rate": 4.55456273536057e-05,
      "loss": 0.9876,
      "step": 780
    },
    {
      "epoch": 1.7555555555555555,
      "grad_norm": 3.574613571166992,
      "learning_rate": 4.4259563866128754e-05,
      "loss": 0.9708,
      "step": 790
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 4.307621002197266,
      "learning_rate": 4.297733805090819e-05,
      "loss": 1.1659,
      "step": 800
    },
    {
      "epoch": 1.8,
      "grad_norm": 3.263989210128784,
      "learning_rate": 4.169980711849781e-05,
      "loss": 1.0767,
      "step": 810
    },
    {
      "epoch": 1.8222222222222222,
      "grad_norm": 4.091586112976074,
      "learning_rate": 4.0427825140766306e-05,
      "loss": 0.9197,
      "step": 820
    },
    {
      "epoch": 1.8444444444444446,
      "grad_norm": 6.540091037750244,
      "learning_rate": 3.916224247992184e-05,
      "loss": 1.0032,
      "step": 830
    },
    {
      "epoch": 1.8666666666666667,
      "grad_norm": 3.0093724727630615,
      "learning_rate": 3.790390522001662e-05,
      "loss": 0.9162,
      "step": 840
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 3.8956823348999023,
      "learning_rate": 3.665365460131165e-05,
      "loss": 0.9894,
      "step": 850
    },
    {
      "epoch": 1.911111111111111,
      "grad_norm": 6.698014736175537,
      "learning_rate": 3.541232645787964e-05,
      "loss": 1.0847,
      "step": 860
    },
    {
      "epoch": 1.9333333333333333,
      "grad_norm": 3.408623218536377,
      "learning_rate": 3.418075065882217e-05,
      "loss": 1.0293,
      "step": 870
    },
    {
      "epoch": 1.9555555555555557,
      "grad_norm": 5.4307780265808105,
      "learning_rate": 3.295975055347464e-05,
      "loss": 1.1167,
      "step": 880
    },
    {
      "epoch": 1.9777777777777779,
      "grad_norm": 3.301551103591919,
      "learning_rate": 3.175014242096985e-05,
      "loss": 0.9419,
      "step": 890
    },
    {
      "epoch": 2.0,
      "grad_norm": 3.311927080154419,
      "learning_rate": 3.0552734924528306e-05,
      "loss": 1.0782,
      "step": 900
    },
    {
      "epoch": 2.022222222222222,
      "grad_norm": 3.594782590866089,
      "learning_rate": 2.936832857084003e-05,
      "loss": 0.7008,
      "step": 910
    },
    {
      "epoch": 2.0444444444444443,
      "grad_norm": 2.127872943878174,
      "learning_rate": 2.8197715174899185e-05,
      "loss": 0.5128,
      "step": 920
    },
    {
      "epoch": 2.066666666666667,
      "grad_norm": 1.7860101461410522,
      "learning_rate": 2.7041677330649407e-05,
      "loss": 0.4185,
      "step": 930
    },
    {
      "epoch": 2.088888888888889,
      "grad_norm": 5.212789058685303,
      "learning_rate": 2.590098788779396e-05,
      "loss": 0.52,
      "step": 940
    },
    {
      "epoch": 2.111111111111111,
      "grad_norm": 2.7228968143463135,
      "learning_rate": 2.4776409435119775e-05,
      "loss": 0.7065,
      "step": 950
    },
    {
      "epoch": 2.1333333333333333,
      "grad_norm": 2.3301429748535156,
      "learning_rate": 2.3668693790681634e-05,
      "loss": 0.3988,
      "step": 960
    },
    {
      "epoch": 2.1555555555555554,
      "grad_norm": 3.8069827556610107,
      "learning_rate": 2.257858149918688e-05,
      "loss": 0.6353,
      "step": 970
    },
    {
      "epoch": 2.1777777777777776,
      "grad_norm": 3.177034854888916,
      "learning_rate": 2.15068013369166e-05,
      "loss": 0.4815,
      "step": 980
    },
    {
      "epoch": 2.2,
      "grad_norm": 2.7649686336517334,
      "learning_rate": 2.0454069824514444e-05,
      "loss": 0.5903,
      "step": 990
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 3.8075079917907715,
      "learning_rate": 1.942109074796888e-05,
      "loss": 0.5375,
      "step": 1000
    },
    {
      "epoch": 2.2222222222222223,
      "eval_loss": 1.9768437147140503,
      "eval_runtime": 4.0891,
      "eval_samples_per_second": 24.455,
      "eval_steps_per_second": 24.455,
      "step": 1000
    },
    {
      "epoch": 2.2444444444444445,
      "grad_norm": 4.101439476013184,
      "learning_rate": 1.8408554688108786e-05,
      "loss": 0.4914,
      "step": 1010
    },
    {
      "epoch": 2.2666666666666666,
      "grad_norm": 4.174548149108887,
      "learning_rate": 1.7417138558927244e-05,
      "loss": 0.6643,
      "step": 1020
    },
    {
      "epoch": 2.2888888888888888,
      "grad_norm": 2.903629779815674,
      "learning_rate": 1.6447505155042088e-05,
      "loss": 0.4727,
      "step": 1030
    },
    {
      "epoch": 2.311111111111111,
      "grad_norm": 5.127641201019287,
      "learning_rate": 1.5500302708595648e-05,
      "loss": 0.5654,
      "step": 1040
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 5.86013126373291,
      "learning_rate": 1.4576164455890013e-05,
      "loss": 0.4691,
      "step": 1050
    },
    {
      "epoch": 2.3555555555555556,
      "grad_norm": 5.229647159576416,
      "learning_rate": 1.3675708214047578e-05,
      "loss": 0.5061,
      "step": 1060
    },
    {
      "epoch": 2.3777777777777778,
      "grad_norm": 5.425754070281982,
      "learning_rate": 1.2799535967979747e-05,
      "loss": 0.4352,
      "step": 1070
    },
    {
      "epoch": 2.4,
      "grad_norm": 5.084753513336182,
      "learning_rate": 1.194823346793998e-05,
      "loss": 0.5483,
      "step": 1080
    },
    {
      "epoch": 2.422222222222222,
      "grad_norm": 4.754981517791748,
      "learning_rate": 1.1122369837930363e-05,
      "loss": 0.4424,
      "step": 1090
    },
    {
      "epoch": 2.4444444444444446,
      "grad_norm": 3.03784441947937,
      "learning_rate": 1.0322497195223285e-05,
      "loss": 0.4717,
      "step": 1100
    },
    {
      "epoch": 2.466666666666667,
      "grad_norm": 9.531596183776855,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.4239,
      "step": 1110
    },
    {
      "epoch": 2.488888888888889,
      "grad_norm": 8.69981575012207,
      "learning_rate": 8.802846104121477e-06,
      "loss": 0.6657,
      "step": 1120
    },
    {
      "epoch": 2.511111111111111,
      "grad_norm": 3.9578542709350586,
      "learning_rate": 8.084083592964942e-06,
      "loss": 0.4603,
      "step": 1130
    },
    {
      "epoch": 2.533333333333333,
      "grad_norm": 2.537245273590088,
      "learning_rate": 7.393343264399438e-06,
      "loss": 0.4783,
      "step": 1140
    },
    {
      "epoch": 2.5555555555555554,
      "grad_norm": 3.6952104568481445,
      "learning_rate": 6.7310869012814585e-06,
      "loss": 0.5769,
      "step": 1150
    },
    {
      "epoch": 2.5777777777777775,
      "grad_norm": 4.507468223571777,
      "learning_rate": 6.097757243990321e-06,
      "loss": 0.6047,
      "step": 1160
    },
    {
      "epoch": 2.6,
      "grad_norm": 4.901561260223389,
      "learning_rate": 5.493777694441521e-06,
      "loss": 0.3204,
      "step": 1170
    },
    {
      "epoch": 2.6222222222222222,
      "grad_norm": 5.785924434661865,
      "learning_rate": 4.919552033028513e-06,
      "loss": 0.4655,
      "step": 1180
    },
    {
      "epoch": 2.6444444444444444,
      "grad_norm": 7.03330659866333,
      "learning_rate": 4.375464148682096e-06,
      "loss": 0.5527,
      "step": 1190
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 3.337351083755493,
      "learning_rate": 3.861877782227885e-06,
      "loss": 0.5065,
      "step": 1200
    },
    {
      "epoch": 2.688888888888889,
      "grad_norm": 3.3279948234558105,
      "learning_rate": 3.3791362832135133e-06,
      "loss": 0.4094,
      "step": 1210
    },
    {
      "epoch": 2.7111111111111112,
      "grad_norm": 3.116320848464966,
      "learning_rate": 2.9275623803680596e-06,
      "loss": 0.6052,
      "step": 1220
    },
    {
      "epoch": 2.7333333333333334,
      "grad_norm": 2.5365102291107178,
      "learning_rate": 2.5074579658471266e-06,
      "loss": 0.491,
      "step": 1230
    },
    {
      "epoch": 2.7555555555555555,
      "grad_norm": 4.173516750335693,
      "learning_rate": 2.1191038934079643e-06,
      "loss": 0.5293,
      "step": 1240
    },
    {
      "epoch": 2.7777777777777777,
      "grad_norm": 4.494065284729004,
      "learning_rate": 1.7627597906493654e-06,
      "loss": 0.5024,
      "step": 1250
    },
    {
      "epoch": 2.8,
      "grad_norm": 2.712306499481201,
      "learning_rate": 1.438663885441982e-06,
      "loss": 0.4411,
      "step": 1260
    },
    {
      "epoch": 2.822222222222222,
      "grad_norm": 1.7787264585494995,
      "learning_rate": 1.1470328466651304e-06,
      "loss": 0.616,
      "step": 1270
    },
    {
      "epoch": 2.8444444444444446,
      "grad_norm": 2.3278346061706543,
      "learning_rate": 8.880616393563967e-07,
      "loss": 0.5948,
      "step": 1280
    },
    {
      "epoch": 2.8666666666666667,
      "grad_norm": 3.4088244438171387,
      "learning_rate": 6.61923394371039e-07,
      "loss": 0.4361,
      "step": 1290
    },
    {
      "epoch": 2.888888888888889,
      "grad_norm": 3.020115613937378,
      "learning_rate": 4.6876929263829915e-07,
      "loss": 0.4553,
      "step": 1300
    },
    {
      "epoch": 2.911111111111111,
      "grad_norm": 2.950218915939331,
      "learning_rate": 3.087284640918786e-07,
      "loss": 0.407,
      "step": 1310
    },
    {
      "epoch": 2.9333333333333336,
      "grad_norm": 4.008513450622559,
      "learning_rate": 1.819079013423153e-07,
      "loss": 0.4567,
      "step": 1320
    },
    {
      "epoch": 2.9555555555555557,
      "grad_norm": 2.7357242107391357,
      "learning_rate": 8.839238814886685e-08,
      "loss": 0.5011,
      "step": 1330
    },
    {
      "epoch": 2.977777777777778,
      "grad_norm": 5.6999006271362305,
      "learning_rate": 2.824444273875071e-08,
      "loss": 0.5416,
      "step": 1340
    },
    {
      "epoch": 3.0,
      "grad_norm": 3.6127123832702637,
      "learning_rate": 1.5042760116212861e-09,
      "loss": 0.4591,
      "step": 1350
    },
    {
      "epoch": 3.0,
      "step": 1350,
      "total_flos": 3120728157462528.0,
      "train_loss": 1.0846350595686172,
      "train_runtime": 255.809,
      "train_samples_per_second": 10.555,
      "train_steps_per_second": 5.277
    }
  ],
  "logging_steps": 10,
  "max_steps": 1350,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3120728157462528.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}