{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 5115,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005865102639296188,
      "grad_norm": 31.542375564575195,
      "learning_rate": 9e-07,
      "loss": 0.5324,
      "step": 10
    },
    {
      "epoch": 0.011730205278592375,
      "grad_norm": 27.081035614013672,
      "learning_rate": 1.9e-06,
      "loss": 0.4886,
      "step": 20
    },
    {
      "epoch": 0.017595307917888565,
      "grad_norm": 9.631855010986328,
      "learning_rate": 2.9e-06,
      "loss": 0.2291,
      "step": 30
    },
    {
      "epoch": 0.02346041055718475,
      "grad_norm": 7.441354751586914,
      "learning_rate": 3.9e-06,
      "loss": 0.1422,
      "step": 40
    },
    {
      "epoch": 0.02932551319648094,
      "grad_norm": 5.272273063659668,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.1189,
      "step": 50
    },
    {
      "epoch": 0.03519061583577713,
      "grad_norm": 4.908827781677246,
      "learning_rate": 5.9e-06,
      "loss": 0.0836,
      "step": 60
    },
    {
      "epoch": 0.04105571847507331,
      "grad_norm": 6.410752296447754,
      "learning_rate": 6.900000000000001e-06,
      "loss": 0.0788,
      "step": 70
    },
    {
      "epoch": 0.0469208211143695,
      "grad_norm": 4.663637638092041,
      "learning_rate": 7.9e-06,
      "loss": 0.0797,
      "step": 80
    },
    {
      "epoch": 0.05278592375366569,
      "grad_norm": 4.668508052825928,
      "learning_rate": 8.9e-06,
      "loss": 0.0432,
      "step": 90
    },
    {
      "epoch": 0.05865102639296188,
      "grad_norm": 4.451730251312256,
      "learning_rate": 9.900000000000002e-06,
      "loss": 0.0447,
      "step": 100
    },
    {
      "epoch": 0.06451612903225806,
      "grad_norm": 2.972118854522705,
      "learning_rate": 1.09e-05,
      "loss": 0.0364,
      "step": 110
    },
    {
      "epoch": 0.07038123167155426,
      "grad_norm": 4.36667013168335,
      "learning_rate": 1.19e-05,
      "loss": 0.0427,
      "step": 120
    },
    {
      "epoch": 0.07624633431085044,
      "grad_norm": 2.4861721992492676,
      "learning_rate": 1.29e-05,
      "loss": 0.0434,
      "step": 130
    },
    {
      "epoch": 0.08211143695014662,
      "grad_norm": 1.6505670547485352,
      "learning_rate": 1.3900000000000002e-05,
      "loss": 0.0497,
      "step": 140
    },
    {
      "epoch": 0.08797653958944282,
      "grad_norm": 4.313763618469238,
      "learning_rate": 1.49e-05,
      "loss": 0.0346,
      "step": 150
    },
    {
      "epoch": 0.093841642228739,
      "grad_norm": 2.6852822303771973,
      "learning_rate": 1.59e-05,
      "loss": 0.0425,
      "step": 160
    },
    {
      "epoch": 0.09970674486803519,
      "grad_norm": 4.099714756011963,
      "learning_rate": 1.69e-05,
      "loss": 0.0356,
      "step": 170
    },
    {
      "epoch": 0.10557184750733138,
      "grad_norm": 5.82313346862793,
      "learning_rate": 1.79e-05,
      "loss": 0.0406,
      "step": 180
    },
    {
      "epoch": 0.11143695014662756,
      "grad_norm": 4.862349510192871,
      "learning_rate": 1.8900000000000002e-05,
      "loss": 0.0442,
      "step": 190
    },
    {
      "epoch": 0.11730205278592376,
      "grad_norm": 4.571470737457275,
      "learning_rate": 1.9900000000000003e-05,
      "loss": 0.0505,
      "step": 200
    },
    {
      "epoch": 0.12316715542521994,
      "grad_norm": 3.140578031539917,
      "learning_rate": 2.09e-05,
      "loss": 0.0352,
      "step": 210
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 1.6524794101715088,
      "learning_rate": 2.19e-05,
      "loss": 0.028,
      "step": 220
    },
    {
      "epoch": 0.1348973607038123,
      "grad_norm": 8.246803283691406,
      "learning_rate": 2.29e-05,
      "loss": 0.0497,
      "step": 230
    },
    {
      "epoch": 0.14076246334310852,
      "grad_norm": 3.31701922416687,
      "learning_rate": 2.39e-05,
      "loss": 0.0415,
      "step": 240
    },
    {
      "epoch": 0.1466275659824047,
      "grad_norm": 6.1794939041137695,
      "learning_rate": 2.4900000000000002e-05,
      "loss": 0.0345,
      "step": 250
    },
    {
      "epoch": 0.15249266862170088,
      "grad_norm": 4.081472396850586,
      "learning_rate": 2.5900000000000003e-05,
      "loss": 0.0347,
      "step": 260
    },
    {
      "epoch": 0.15835777126099707,
      "grad_norm": 2.7104103565216064,
      "learning_rate": 2.6900000000000003e-05,
      "loss": 0.0409,
      "step": 270
    },
    {
      "epoch": 0.16422287390029325,
      "grad_norm": 6.425929546356201,
      "learning_rate": 2.7900000000000004e-05,
      "loss": 0.0402,
      "step": 280
    },
    {
      "epoch": 0.17008797653958943,
      "grad_norm": 1.5873750448226929,
      "learning_rate": 2.8899999999999998e-05,
      "loss": 0.0306,
      "step": 290
    },
    {
      "epoch": 0.17595307917888564,
      "grad_norm": 4.29845666885376,
      "learning_rate": 2.9900000000000002e-05,
      "loss": 0.0329,
      "step": 300
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 1.728096604347229,
      "learning_rate": 3.09e-05,
      "loss": 0.0363,
      "step": 310
    },
    {
      "epoch": 0.187683284457478,
      "grad_norm": 2.5301356315612793,
      "learning_rate": 3.19e-05,
      "loss": 0.0293,
      "step": 320
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 5.745728015899658,
      "learning_rate": 3.29e-05,
      "loss": 0.0352,
      "step": 330
    },
    {
      "epoch": 0.19941348973607037,
      "grad_norm": 3.1042871475219727,
      "learning_rate": 3.3900000000000004e-05,
      "loss": 0.0268,
      "step": 340
    },
    {
      "epoch": 0.20527859237536658,
      "grad_norm": 2.0150339603424072,
      "learning_rate": 3.49e-05,
      "loss": 0.0284,
      "step": 350
    },
    {
      "epoch": 0.21114369501466276,
      "grad_norm": 1.6547868251800537,
      "learning_rate": 3.59e-05,
      "loss": 0.0362,
      "step": 360
    },
    {
      "epoch": 0.21700879765395895,
      "grad_norm": 4.4225754737854,
      "learning_rate": 3.69e-05,
      "loss": 0.0284,
      "step": 370
    },
    {
      "epoch": 0.22287390029325513,
      "grad_norm": 4.417116641998291,
      "learning_rate": 3.79e-05,
      "loss": 0.0317,
      "step": 380
    },
    {
      "epoch": 0.2287390029325513,
      "grad_norm": 3.4028005599975586,
      "learning_rate": 3.8900000000000004e-05,
      "loss": 0.0336,
      "step": 390
    },
    {
      "epoch": 0.23460410557184752,
      "grad_norm": 1.276804804801941,
      "learning_rate": 3.99e-05,
      "loss": 0.0291,
      "step": 400
    },
    {
      "epoch": 0.2404692082111437,
      "grad_norm": 3.228950023651123,
      "learning_rate": 4.09e-05,
      "loss": 0.0404,
      "step": 410
    },
    {
      "epoch": 0.24633431085043989,
      "grad_norm": 2.382463216781616,
      "learning_rate": 4.19e-05,
      "loss": 0.0399,
      "step": 420
    },
    {
      "epoch": 0.25219941348973607,
      "grad_norm": 3.2565174102783203,
      "learning_rate": 4.29e-05,
      "loss": 0.0327,
      "step": 430
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 2.632328510284424,
      "learning_rate": 4.39e-05,
      "loss": 0.0301,
      "step": 440
    },
    {
      "epoch": 0.26392961876832843,
      "grad_norm": 7.466786861419678,
      "learning_rate": 4.49e-05,
      "loss": 0.0361,
      "step": 450
    },
    {
      "epoch": 0.2697947214076246,
      "grad_norm": 1.4194852113723755,
      "learning_rate": 4.5900000000000004e-05,
      "loss": 0.0332,
      "step": 460
    },
    {
      "epoch": 0.2756598240469208,
      "grad_norm": 2.4137372970581055,
      "learning_rate": 4.69e-05,
      "loss": 0.0396,
      "step": 470
    },
    {
      "epoch": 0.28152492668621704,
      "grad_norm": 1.9690659046173096,
      "learning_rate": 4.79e-05,
      "loss": 0.0254,
      "step": 480
    },
    {
      "epoch": 0.2873900293255132,
      "grad_norm": 3.2981226444244385,
      "learning_rate": 4.89e-05,
      "loss": 0.0228,
      "step": 490
    },
    {
      "epoch": 0.2932551319648094,
      "grad_norm": 5.314478397369385,
      "learning_rate": 4.99e-05,
      "loss": 0.0291,
      "step": 500
    },
    {
      "epoch": 0.2991202346041056,
      "grad_norm": 2.0909876823425293,
      "learning_rate": 4.9902491874322866e-05,
      "loss": 0.0205,
      "step": 510
    },
    {
      "epoch": 0.30498533724340177,
      "grad_norm": 5.199825286865234,
      "learning_rate": 4.979414951245938e-05,
      "loss": 0.049,
      "step": 520
    },
    {
      "epoch": 0.31085043988269795,
      "grad_norm": 2.305478096008301,
      "learning_rate": 4.968580715059589e-05,
      "loss": 0.0274,
      "step": 530
    },
    {
      "epoch": 0.31671554252199413,
      "grad_norm": 8.331841468811035,
      "learning_rate": 4.95774647887324e-05,
      "loss": 0.0356,
      "step": 540
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 5.1305437088012695,
      "learning_rate": 4.9469122426868905e-05,
      "loss": 0.0385,
      "step": 550
    },
    {
      "epoch": 0.3284457478005865,
      "grad_norm": 2.931698799133301,
      "learning_rate": 4.936078006500542e-05,
      "loss": 0.0255,
      "step": 560
    },
    {
      "epoch": 0.3343108504398827,
      "grad_norm": 1.6110111474990845,
      "learning_rate": 4.925243770314193e-05,
      "loss": 0.0264,
      "step": 570
    },
    {
      "epoch": 0.34017595307917886,
      "grad_norm": 2.3846256732940674,
      "learning_rate": 4.914409534127844e-05,
      "loss": 0.0252,
      "step": 580
    },
    {
      "epoch": 0.3460410557184751,
      "grad_norm": 2.34324049949646,
      "learning_rate": 4.903575297941496e-05,
      "loss": 0.0336,
      "step": 590
    },
    {
      "epoch": 0.3519061583577713,
      "grad_norm": 2.084900140762329,
      "learning_rate": 4.892741061755147e-05,
      "loss": 0.0475,
      "step": 600
    },
    {
      "epoch": 0.35777126099706746,
      "grad_norm": 0.9921281337738037,
      "learning_rate": 4.8819068255687975e-05,
      "loss": 0.0359,
      "step": 610
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 2.2851696014404297,
      "learning_rate": 4.871072589382449e-05,
      "loss": 0.0212,
      "step": 620
    },
    {
      "epoch": 0.36950146627565983,
      "grad_norm": 2.9308276176452637,
      "learning_rate": 4.8602383531961e-05,
      "loss": 0.0328,
      "step": 630
    },
    {
      "epoch": 0.375366568914956,
      "grad_norm": 6.85349178314209,
      "learning_rate": 4.849404117009751e-05,
      "loss": 0.0396,
      "step": 640
    },
    {
      "epoch": 0.3812316715542522,
      "grad_norm": 2.044726610183716,
      "learning_rate": 4.838569880823402e-05,
      "loss": 0.0292,
      "step": 650
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 1.818989634513855,
      "learning_rate": 4.827735644637053e-05,
      "loss": 0.0251,
      "step": 660
    },
    {
      "epoch": 0.39296187683284456,
      "grad_norm": 3.9918439388275146,
      "learning_rate": 4.8169014084507045e-05,
      "loss": 0.0221,
      "step": 670
    },
    {
      "epoch": 0.39882697947214074,
      "grad_norm": 3.0383472442626953,
      "learning_rate": 4.8060671722643556e-05,
      "loss": 0.0239,
      "step": 680
    },
    {
      "epoch": 0.4046920821114369,
      "grad_norm": 0.7501923441886902,
      "learning_rate": 4.795232936078007e-05,
      "loss": 0.0255,
      "step": 690
    },
    {
      "epoch": 0.41055718475073316,
      "grad_norm": 0.8744826316833496,
      "learning_rate": 4.784398699891658e-05,
      "loss": 0.022,
      "step": 700
    },
    {
      "epoch": 0.41642228739002934,
      "grad_norm": 4.488051891326904,
      "learning_rate": 4.773564463705309e-05,
      "loss": 0.0217,
      "step": 710
    },
    {
      "epoch": 0.4222873900293255,
      "grad_norm": 3.3455092906951904,
      "learning_rate": 4.76273022751896e-05,
      "loss": 0.019,
      "step": 720
    },
    {
      "epoch": 0.4281524926686217,
      "grad_norm": 3.4381587505340576,
      "learning_rate": 4.751895991332611e-05,
      "loss": 0.0231,
      "step": 730
    },
    {
      "epoch": 0.4340175953079179,
      "grad_norm": 2.643965482711792,
      "learning_rate": 4.741061755146262e-05,
      "loss": 0.0249,
      "step": 740
    },
    {
      "epoch": 0.4398826979472141,
      "grad_norm": 3.4764389991760254,
      "learning_rate": 4.730227518959914e-05,
      "loss": 0.0228,
      "step": 750
    },
    {
      "epoch": 0.44574780058651026,
      "grad_norm": 3.8638241291046143,
      "learning_rate": 4.719393282773565e-05,
      "loss": 0.038,
      "step": 760
    },
    {
      "epoch": 0.45161290322580644,
      "grad_norm": 1.6857688426971436,
      "learning_rate": 4.708559046587216e-05,
      "loss": 0.0297,
      "step": 770
    },
    {
      "epoch": 0.4574780058651026,
      "grad_norm": 1.9079605340957642,
      "learning_rate": 4.697724810400867e-05,
      "loss": 0.0346,
      "step": 780
    },
    {
      "epoch": 0.4633431085043988,
      "grad_norm": 2.4099855422973633,
      "learning_rate": 4.686890574214518e-05,
      "loss": 0.0192,
      "step": 790
    },
    {
      "epoch": 0.46920821114369504,
      "grad_norm": 2.6280362606048584,
      "learning_rate": 4.676056338028169e-05,
      "loss": 0.0251,
      "step": 800
    },
    {
      "epoch": 0.4750733137829912,
      "grad_norm": 1.9467144012451172,
      "learning_rate": 4.66522210184182e-05,
      "loss": 0.027,
      "step": 810
    },
    {
      "epoch": 0.4809384164222874,
      "grad_norm": 6.638300895690918,
      "learning_rate": 4.654387865655471e-05,
      "loss": 0.0253,
      "step": 820
    },
    {
      "epoch": 0.4868035190615836,
      "grad_norm": 2.0866501331329346,
      "learning_rate": 4.643553629469123e-05,
      "loss": 0.0205,
      "step": 830
    },
    {
      "epoch": 0.49266862170087977,
      "grad_norm": 0.8170230388641357,
      "learning_rate": 4.632719393282774e-05,
      "loss": 0.018,
      "step": 840
    },
    {
      "epoch": 0.49853372434017595,
      "grad_norm": 2.902468204498291,
      "learning_rate": 4.621885157096425e-05,
      "loss": 0.0306,
      "step": 850
    },
    {
      "epoch": 0.5043988269794721,
      "grad_norm": 1.3711732625961304,
      "learning_rate": 4.611050920910076e-05,
      "loss": 0.0169,
      "step": 860
    },
    {
      "epoch": 0.5102639296187683,
      "grad_norm": 2.804041862487793,
      "learning_rate": 4.600216684723727e-05,
      "loss": 0.0185,
      "step": 870
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 4.9906792640686035,
      "learning_rate": 4.589382448537378e-05,
      "loss": 0.023,
      "step": 880
    },
    {
      "epoch": 0.5219941348973607,
      "grad_norm": 4.329658031463623,
      "learning_rate": 4.5785482123510294e-05,
      "loss": 0.0271,
      "step": 890
    },
    {
      "epoch": 0.5278592375366569,
      "grad_norm": 1.0276806354522705,
      "learning_rate": 4.567713976164681e-05,
      "loss": 0.0174,
      "step": 900
    },
    {
      "epoch": 0.533724340175953,
      "grad_norm": 0.8810903429985046,
      "learning_rate": 4.556879739978332e-05,
      "loss": 0.0235,
      "step": 910
    },
    {
      "epoch": 0.5395894428152492,
      "grad_norm": 4.086939811706543,
      "learning_rate": 4.546045503791983e-05,
      "loss": 0.0272,
      "step": 920
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.9442876577377319,
      "learning_rate": 4.535211267605634e-05,
      "loss": 0.0231,
      "step": 930
    },
    {
      "epoch": 0.5513196480938416,
      "grad_norm": 2.153064727783203,
      "learning_rate": 4.524377031419285e-05,
      "loss": 0.0228,
      "step": 940
    },
    {
      "epoch": 0.5571847507331378,
      "grad_norm": 1.1959385871887207,
      "learning_rate": 4.513542795232936e-05,
      "loss": 0.0313,
      "step": 950
    },
    {
      "epoch": 0.5630498533724341,
      "grad_norm": 2.5583062171936035,
      "learning_rate": 4.5027085590465875e-05,
      "loss": 0.0303,
      "step": 960
    },
    {
      "epoch": 0.5689149560117303,
      "grad_norm": 2.264181137084961,
      "learning_rate": 4.4918743228602387e-05,
      "loss": 0.0338,
      "step": 970
    },
    {
      "epoch": 0.5747800586510264,
      "grad_norm": 1.718117594718933,
      "learning_rate": 4.48104008667389e-05,
      "loss": 0.0252,
      "step": 980
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.4819270670413971,
      "learning_rate": 4.470205850487541e-05,
      "loss": 0.0227,
      "step": 990
    },
    {
      "epoch": 0.5865102639296188,
      "grad_norm": 5.476009368896484,
      "learning_rate": 4.459371614301192e-05,
      "loss": 0.0245,
      "step": 1000
    },
    {
      "epoch": 0.592375366568915,
      "grad_norm": 1.2682011127471924,
      "learning_rate": 4.448537378114843e-05,
      "loss": 0.0127,
      "step": 1010
    },
    {
      "epoch": 0.5982404692082112,
      "grad_norm": 6.694324493408203,
      "learning_rate": 4.4377031419284945e-05,
      "loss": 0.0226,
      "step": 1020
    },
    {
      "epoch": 0.6041055718475073,
      "grad_norm": 1.3645150661468506,
      "learning_rate": 4.4268689057421456e-05,
      "loss": 0.0144,
      "step": 1030
    },
    {
      "epoch": 0.6099706744868035,
      "grad_norm": 5.142198085784912,
      "learning_rate": 4.416034669555796e-05,
      "loss": 0.0176,
      "step": 1040
    },
    {
      "epoch": 0.6158357771260997,
      "grad_norm": 1.976951241493225,
      "learning_rate": 4.405200433369447e-05,
      "loss": 0.0266,
      "step": 1050
    },
    {
      "epoch": 0.6217008797653959,
      "grad_norm": 1.9132397174835205,
      "learning_rate": 4.394366197183099e-05,
      "loss": 0.0328,
      "step": 1060
    },
    {
      "epoch": 0.6275659824046921,
      "grad_norm": 0.9554237127304077,
      "learning_rate": 4.38353196099675e-05,
      "loss": 0.0311,
      "step": 1070
    },
    {
      "epoch": 0.6334310850439883,
      "grad_norm": 1.418505072593689,
      "learning_rate": 4.3726977248104014e-05,
      "loss": 0.0205,
      "step": 1080
    },
    {
      "epoch": 0.6392961876832844,
      "grad_norm": 1.686772346496582,
      "learning_rate": 4.361863488624052e-05,
      "loss": 0.0231,
      "step": 1090
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 0.7826172113418579,
      "learning_rate": 4.351029252437703e-05,
      "loss": 0.024,
      "step": 1100
    },
    {
      "epoch": 0.6510263929618768,
      "grad_norm": 2.0760345458984375,
      "learning_rate": 4.340195016251354e-05,
      "loss": 0.0204,
      "step": 1110
    },
    {
      "epoch": 0.656891495601173,
      "grad_norm": 1.7399033308029175,
      "learning_rate": 4.3293607800650054e-05,
      "loss": 0.0165,
      "step": 1120
    },
    {
      "epoch": 0.6627565982404692,
      "grad_norm": 1.938363790512085,
      "learning_rate": 4.3185265438786566e-05,
      "loss": 0.017,
      "step": 1130
    },
    {
      "epoch": 0.6686217008797654,
      "grad_norm": 4.907315254211426,
      "learning_rate": 4.3076923076923084e-05,
      "loss": 0.0244,
      "step": 1140
    },
    {
      "epoch": 0.6744868035190615,
      "grad_norm": 0.951277494430542,
      "learning_rate": 4.296858071505959e-05,
      "loss": 0.0177,
      "step": 1150
    },
    {
      "epoch": 0.6803519061583577,
      "grad_norm": 2.8571536540985107,
      "learning_rate": 4.28602383531961e-05,
      "loss": 0.0107,
      "step": 1160
    },
    {
      "epoch": 0.6862170087976539,
      "grad_norm": 4.217930316925049,
      "learning_rate": 4.275189599133261e-05,
      "loss": 0.019,
      "step": 1170
    },
    {
      "epoch": 0.6920821114369502,
      "grad_norm": 5.358763694763184,
      "learning_rate": 4.2643553629469124e-05,
      "loss": 0.0142,
      "step": 1180
    },
    {
      "epoch": 0.6979472140762464,
      "grad_norm": 1.1704353094100952,
      "learning_rate": 4.2535211267605635e-05,
      "loss": 0.021,
      "step": 1190
    },
    {
      "epoch": 0.7038123167155426,
      "grad_norm": 0.6873300075531006,
      "learning_rate": 4.242686890574215e-05,
      "loss": 0.0131,
      "step": 1200
    },
    {
      "epoch": 0.7096774193548387,
      "grad_norm": 0.8728815317153931,
      "learning_rate": 4.231852654387866e-05,
      "loss": 0.0155,
      "step": 1210
    },
    {
      "epoch": 0.7155425219941349,
      "grad_norm": 0.5841957926750183,
      "learning_rate": 4.221018418201517e-05,
      "loss": 0.0138,
      "step": 1220
    },
    {
      "epoch": 0.7214076246334311,
      "grad_norm": 2.72292423248291,
      "learning_rate": 4.210184182015168e-05,
      "loss": 0.0154,
      "step": 1230
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 3.6884212493896484,
      "learning_rate": 4.1993499458288194e-05,
      "loss": 0.0146,
      "step": 1240
    },
    {
      "epoch": 0.7331378299120235,
      "grad_norm": 0.3702751696109772,
      "learning_rate": 4.1885157096424705e-05,
      "loss": 0.0094,
      "step": 1250
    },
    {
      "epoch": 0.7390029325513197,
      "grad_norm": 2.5344738960266113,
      "learning_rate": 4.177681473456122e-05,
      "loss": 0.0265,
      "step": 1260
    },
    {
      "epoch": 0.7448680351906158,
      "grad_norm": 0.8168351650238037,
      "learning_rate": 4.166847237269773e-05,
      "loss": 0.0173,
      "step": 1270
    },
    {
      "epoch": 0.750733137829912,
      "grad_norm": 3.0414071083068848,
      "learning_rate": 4.156013001083423e-05,
      "loss": 0.0185,
      "step": 1280
    },
    {
      "epoch": 0.7565982404692082,
      "grad_norm": 3.424670696258545,
      "learning_rate": 4.1451787648970745e-05,
      "loss": 0.0216,
      "step": 1290
    },
    {
      "epoch": 0.7624633431085044,
      "grad_norm": 0.7243099808692932,
      "learning_rate": 4.134344528710726e-05,
      "loss": 0.0117,
      "step": 1300
    },
    {
      "epoch": 0.7683284457478006,
      "grad_norm": 2.333580493927002,
      "learning_rate": 4.1235102925243775e-05,
      "loss": 0.0136,
      "step": 1310
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 1.190661907196045,
      "learning_rate": 4.1126760563380286e-05,
      "loss": 0.0138,
      "step": 1320
    },
    {
      "epoch": 0.7800586510263929,
      "grad_norm": 2.198655128479004,
      "learning_rate": 4.10184182015168e-05,
      "loss": 0.0288,
      "step": 1330
    },
    {
      "epoch": 0.7859237536656891,
      "grad_norm": 3.7605550289154053,
      "learning_rate": 4.09100758396533e-05,
      "loss": 0.0165,
      "step": 1340
    },
    {
      "epoch": 0.7917888563049853,
      "grad_norm": 2.7459802627563477,
      "learning_rate": 4.0801733477789815e-05,
      "loss": 0.0167,
      "step": 1350
    },
    {
      "epoch": 0.7976539589442815,
      "grad_norm": 1.2568968534469604,
      "learning_rate": 4.0693391115926326e-05,
      "loss": 0.018,
      "step": 1360
    },
    {
      "epoch": 0.8035190615835777,
      "grad_norm": 4.848509788513184,
      "learning_rate": 4.0585048754062845e-05,
      "loss": 0.0199,
      "step": 1370
    },
    {
      "epoch": 0.8093841642228738,
      "grad_norm": 3.814544439315796,
      "learning_rate": 4.0476706392199356e-05,
      "loss": 0.0197,
      "step": 1380
    },
    {
      "epoch": 0.8152492668621701,
      "grad_norm": 3.7389025688171387,
      "learning_rate": 4.036836403033587e-05,
      "loss": 0.0164,
      "step": 1390
    },
    {
      "epoch": 0.8211143695014663,
      "grad_norm": 3.4222829341888428,
      "learning_rate": 4.026002166847237e-05,
      "loss": 0.0211,
      "step": 1400
    },
    {
      "epoch": 0.8269794721407625,
      "grad_norm": 0.8773980140686035,
      "learning_rate": 4.0151679306608884e-05,
      "loss": 0.0135,
      "step": 1410
    },
    {
      "epoch": 0.8328445747800587,
      "grad_norm": 0.4585646688938141,
      "learning_rate": 4.0043336944745396e-05,
      "loss": 0.0128,
      "step": 1420
    },
    {
      "epoch": 0.8387096774193549,
      "grad_norm": 4.22011137008667,
      "learning_rate": 3.993499458288191e-05,
      "loss": 0.011,
      "step": 1430
    },
    {
      "epoch": 0.844574780058651,
      "grad_norm": 0.6671493053436279,
      "learning_rate": 3.982665222101842e-05,
      "loss": 0.0166,
      "step": 1440
    },
    {
      "epoch": 0.8504398826979472,
      "grad_norm": 1.6846585273742676,
      "learning_rate": 3.971830985915493e-05,
      "loss": 0.0178,
      "step": 1450
    },
    {
      "epoch": 0.8563049853372434,
      "grad_norm": 1.7884533405303955,
      "learning_rate": 3.960996749729144e-05,
      "loss": 0.0129,
      "step": 1460
    },
    {
      "epoch": 0.8621700879765396,
      "grad_norm": 1.448573350906372,
      "learning_rate": 3.9501625135427954e-05,
      "loss": 0.016,
      "step": 1470
    },
    {
      "epoch": 0.8680351906158358,
      "grad_norm": 0.4691716730594635,
      "learning_rate": 3.9393282773564466e-05,
      "loss": 0.0177,
      "step": 1480
    },
    {
      "epoch": 0.873900293255132,
      "grad_norm": 2.448608875274658,
      "learning_rate": 3.928494041170098e-05,
      "loss": 0.0134,
      "step": 1490
    },
    {
      "epoch": 0.8797653958944281,
      "grad_norm": 3.9812233448028564,
      "learning_rate": 3.917659804983749e-05,
      "loss": 0.0209,
      "step": 1500
    },
    {
      "epoch": 0.8856304985337243,
      "grad_norm": 2.5593063831329346,
      "learning_rate": 3.9068255687974e-05,
      "loss": 0.0241,
      "step": 1510
    },
    {
      "epoch": 0.8914956011730205,
      "grad_norm": 0.5445829629898071,
      "learning_rate": 3.8959913326110505e-05,
      "loss": 0.012,
      "step": 1520
    },
    {
      "epoch": 0.8973607038123167,
      "grad_norm": 2.9569761753082275,
      "learning_rate": 3.8851570964247024e-05,
      "loss": 0.0172,
      "step": 1530
    },
    {
      "epoch": 0.9032258064516129,
      "grad_norm": 1.6807665824890137,
      "learning_rate": 3.8743228602383535e-05,
      "loss": 0.0122,
      "step": 1540
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.29172345995903015,
      "learning_rate": 3.863488624052005e-05,
      "loss": 0.0225,
      "step": 1550
    },
    {
      "epoch": 0.9149560117302052,
      "grad_norm": 2.6526272296905518,
      "learning_rate": 3.852654387865656e-05,
      "loss": 0.0148,
      "step": 1560
    },
    {
      "epoch": 0.9208211143695014,
      "grad_norm": 3.159996271133423,
      "learning_rate": 3.841820151679307e-05,
      "loss": 0.0134,
      "step": 1570
    },
    {
      "epoch": 0.9266862170087976,
      "grad_norm": 3.0480852127075195,
      "learning_rate": 3.8309859154929575e-05,
      "loss": 0.0157,
      "step": 1580
    },
    {
      "epoch": 0.9325513196480938,
      "grad_norm": 0.8675422072410583,
      "learning_rate": 3.820151679306609e-05,
      "loss": 0.0178,
      "step": 1590
    },
    {
      "epoch": 0.9384164222873901,
      "grad_norm": 2.1896984577178955,
      "learning_rate": 3.80931744312026e-05,
      "loss": 0.0117,
      "step": 1600
    },
    {
      "epoch": 0.9442815249266863,
      "grad_norm": 1.3476594686508179,
      "learning_rate": 3.798483206933912e-05,
      "loss": 0.0108,
      "step": 1610
    },
    {
      "epoch": 0.9501466275659824,
      "grad_norm": 0.6463199853897095,
      "learning_rate": 3.787648970747563e-05,
      "loss": 0.0191,
      "step": 1620
    },
    {
      "epoch": 0.9560117302052786,
      "grad_norm": 1.6571842432022095,
      "learning_rate": 3.776814734561214e-05,
      "loss": 0.0089,
      "step": 1630
    },
    {
      "epoch": 0.9618768328445748,
      "grad_norm": 3.092883586883545,
      "learning_rate": 3.7659804983748645e-05,
      "loss": 0.0115,
      "step": 1640
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.9108274579048157,
      "learning_rate": 3.7551462621885156e-05,
      "loss": 0.0094,
      "step": 1650
    },
    {
      "epoch": 0.9736070381231672,
      "grad_norm": 2.865462064743042,
      "learning_rate": 3.744312026002167e-05,
      "loss": 0.0113,
      "step": 1660
    },
    {
      "epoch": 0.9794721407624634,
      "grad_norm": 3.5765507221221924,
      "learning_rate": 3.733477789815818e-05,
      "loss": 0.0146,
      "step": 1670
    },
    {
      "epoch": 0.9853372434017595,
      "grad_norm": 2.7337465286254883,
      "learning_rate": 3.722643553629469e-05,
      "loss": 0.0243,
      "step": 1680
    },
    {
      "epoch": 0.9912023460410557,
      "grad_norm": 4.337994575500488,
      "learning_rate": 3.711809317443121e-05,
      "loss": 0.0197,
      "step": 1690
    },
    {
      "epoch": 0.9970674486803519,
      "grad_norm": 0.5181761384010315,
      "learning_rate": 3.7009750812567715e-05,
      "loss": 0.0104,
      "step": 1700
    },
    {
      "epoch": 1.002932551319648,
      "grad_norm": 1.3983606100082397,
      "learning_rate": 3.6901408450704226e-05,
      "loss": 0.0127,
      "step": 1710
    },
    {
      "epoch": 1.0087976539589443,
      "grad_norm": 0.8616273999214172,
      "learning_rate": 3.679306608884074e-05,
      "loss": 0.0093,
      "step": 1720
    },
    {
      "epoch": 1.0146627565982405,
      "grad_norm": 4.700988292694092,
      "learning_rate": 3.668472372697725e-05,
      "loss": 0.0199,
      "step": 1730
    },
    {
      "epoch": 1.0205278592375366,
      "grad_norm": 1.5445499420166016,
      "learning_rate": 3.657638136511376e-05,
      "loss": 0.0081,
      "step": 1740
    },
    {
      "epoch": 1.0263929618768328,
      "grad_norm": 1.7290083169937134,
      "learning_rate": 3.646803900325027e-05,
      "loss": 0.0187,
      "step": 1750
    },
    {
      "epoch": 1.032258064516129,
      "grad_norm": 1.9020334482192993,
      "learning_rate": 3.6359696641386784e-05,
      "loss": 0.0144,
      "step": 1760
    },
    {
      "epoch": 1.0381231671554252,
      "grad_norm": 0.6471778750419617,
      "learning_rate": 3.6251354279523296e-05,
      "loss": 0.0115,
      "step": 1770
    },
    {
      "epoch": 1.0439882697947214,
      "grad_norm": 2.929093599319458,
      "learning_rate": 3.614301191765981e-05,
      "loss": 0.0167,
      "step": 1780
    },
    {
      "epoch": 1.0498533724340176,
      "grad_norm": 1.2861549854278564,
      "learning_rate": 3.603466955579632e-05,
      "loss": 0.0136,
      "step": 1790
    },
    {
      "epoch": 1.0557184750733137,
      "grad_norm": 2.8851354122161865,
      "learning_rate": 3.592632719393283e-05,
      "loss": 0.0136,
      "step": 1800
    },
    {
      "epoch": 1.06158357771261,
      "grad_norm": 1.2886507511138916,
      "learning_rate": 3.581798483206934e-05,
      "loss": 0.0115,
      "step": 1810
    },
    {
      "epoch": 1.067448680351906,
      "grad_norm": 1.7136684656143188,
      "learning_rate": 3.5709642470205854e-05,
      "loss": 0.0109,
      "step": 1820
    },
    {
      "epoch": 1.0733137829912023,
      "grad_norm": 2.2909739017486572,
      "learning_rate": 3.560130010834236e-05,
      "loss": 0.0119,
      "step": 1830
    },
    {
      "epoch": 1.0791788856304985,
      "grad_norm": 2.4519524574279785,
      "learning_rate": 3.549295774647888e-05,
      "loss": 0.0154,
      "step": 1840
    },
    {
      "epoch": 1.0850439882697946,
      "grad_norm": 2.0411393642425537,
      "learning_rate": 3.538461538461539e-05,
      "loss": 0.0167,
      "step": 1850
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 4.16176700592041,
      "learning_rate": 3.52762730227519e-05,
      "loss": 0.0126,
      "step": 1860
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 0.6198504567146301,
      "learning_rate": 3.516793066088841e-05,
      "loss": 0.0133,
      "step": 1870
    },
    {
      "epoch": 1.1026392961876832,
      "grad_norm": 1.6029118299484253,
      "learning_rate": 3.505958829902492e-05,
      "loss": 0.0108,
      "step": 1880
    },
    {
      "epoch": 1.1085043988269794,
      "grad_norm": 2.381258487701416,
      "learning_rate": 3.495124593716143e-05,
      "loss": 0.0091,
      "step": 1890
    },
    {
      "epoch": 1.1143695014662756,
      "grad_norm": 2.277298927307129,
      "learning_rate": 3.484290357529794e-05,
      "loss": 0.0173,
      "step": 1900
    },
    {
      "epoch": 1.1202346041055717,
      "grad_norm": 0.6369751691818237,
      "learning_rate": 3.473456121343445e-05,
      "loss": 0.0097,
      "step": 1910
    },
    {
      "epoch": 1.126099706744868,
      "grad_norm": 2.223329782485962,
      "learning_rate": 3.462621885157097e-05,
      "loss": 0.0136,
      "step": 1920
    },
    {
      "epoch": 1.131964809384164,
      "grad_norm": 0.8173966407775879,
      "learning_rate": 3.451787648970748e-05,
      "loss": 0.0105,
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.1378299120234603, | |
| "grad_norm": 1.2677998542785645, | |
| "learning_rate": 3.440953412784399e-05, | |
| "loss": 0.0107, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.1436950146627567, | |
| "grad_norm": 0.6888647079467773, | |
| "learning_rate": 3.43011917659805e-05, | |
| "loss": 0.0122, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.1495601173020529, | |
| "grad_norm": 1.7134896516799927, | |
| "learning_rate": 3.419284940411701e-05, | |
| "loss": 0.0117, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.155425219941349, | |
| "grad_norm": 1.2958227396011353, | |
| "learning_rate": 3.408450704225352e-05, | |
| "loss": 0.0142, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.1612903225806452, | |
| "grad_norm": 1.2160145044326782, | |
| "learning_rate": 3.397616468039003e-05, | |
| "loss": 0.0161, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.1671554252199414, | |
| "grad_norm": 0.735089898109436, | |
| "learning_rate": 3.3867822318526545e-05, | |
| "loss": 0.0125, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.1730205278592376, | |
| "grad_norm": 2.7560253143310547, | |
| "learning_rate": 3.3759479956663056e-05, | |
| "loss": 0.0088, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.1788856304985338, | |
| "grad_norm": 0.9203332662582397, | |
| "learning_rate": 3.365113759479957e-05, | |
| "loss": 0.0123, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.18475073313783, | |
| "grad_norm": 1.0320255756378174, | |
| "learning_rate": 3.354279523293608e-05, | |
| "loss": 0.0125, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.1906158357771262, | |
| "grad_norm": 1.9456615447998047, | |
| "learning_rate": 3.343445287107259e-05, | |
| "loss": 0.016, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.1964809384164223, | |
| "grad_norm": 0.907472550868988, | |
| "learning_rate": 3.33261105092091e-05, | |
| "loss": 0.0106, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.2023460410557185, | |
| "grad_norm": 0.8324399590492249, | |
| "learning_rate": 3.3217768147345614e-05, | |
| "loss": 0.0094, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.2082111436950147, | |
| "grad_norm": 0.7013159394264221, | |
| "learning_rate": 3.3109425785482126e-05, | |
| "loss": 0.0113, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.2140762463343109, | |
| "grad_norm": 0.7463244795799255, | |
| "learning_rate": 3.300108342361863e-05, | |
| "loss": 0.0111, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.219941348973607, | |
| "grad_norm": 0.5735973119735718, | |
| "learning_rate": 3.289274106175515e-05, | |
| "loss": 0.0101, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.2258064516129032, | |
| "grad_norm": 0.7958659529685974, | |
| "learning_rate": 3.278439869989166e-05, | |
| "loss": 0.0111, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.2316715542521994, | |
| "grad_norm": 2.5547595024108887, | |
| "learning_rate": 3.267605633802817e-05, | |
| "loss": 0.0093, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.2375366568914956, | |
| "grad_norm": 1.760979175567627, | |
| "learning_rate": 3.2567713976164684e-05, | |
| "loss": 0.0112, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.2434017595307918, | |
| "grad_norm": 3.6341629028320312, | |
| "learning_rate": 3.2459371614301196e-05, | |
| "loss": 0.0124, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.249266862170088, | |
| "grad_norm": 3.12435245513916, | |
| "learning_rate": 3.23510292524377e-05, | |
| "loss": 0.013, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.2551319648093842, | |
| "grad_norm": 2.499950885772705, | |
| "learning_rate": 3.224268689057421e-05, | |
| "loss": 0.0085, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.2609970674486803, | |
| "grad_norm": 3.02447247505188, | |
| "learning_rate": 3.2134344528710724e-05, | |
| "loss": 0.0103, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 1.2668621700879765, | |
| "grad_norm": 1.0911037921905518, | |
| "learning_rate": 3.202600216684724e-05, | |
| "loss": 0.0131, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 1.2727272727272727, | |
| "grad_norm": 1.6208757162094116, | |
| "learning_rate": 3.1917659804983754e-05, | |
| "loss": 0.0103, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 1.278592375366569, | |
| "grad_norm": 1.3921608924865723, | |
| "learning_rate": 3.1809317443120266e-05, | |
| "loss": 0.009, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 1.284457478005865, | |
| "grad_norm": 3.066075325012207, | |
| "learning_rate": 3.170097508125677e-05, | |
| "loss": 0.0092, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 1.2903225806451613, | |
| "grad_norm": 0.49130627512931824, | |
| "learning_rate": 3.159263271939328e-05, | |
| "loss": 0.0126, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 1.2961876832844574, | |
| "grad_norm": 1.3364100456237793, | |
| "learning_rate": 3.1484290357529794e-05, | |
| "loss": 0.0129, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 1.3020527859237536, | |
| "grad_norm": 2.61354660987854, | |
| "learning_rate": 3.1375947995666305e-05, | |
| "loss": 0.0119, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 1.3079178885630498, | |
| "grad_norm": 0.6985631585121155, | |
| "learning_rate": 3.1267605633802824e-05, | |
| "loss": 0.0106, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 1.313782991202346, | |
| "grad_norm": 3.9875142574310303, | |
| "learning_rate": 3.115926327193933e-05, | |
| "loss": 0.011, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 1.3196480938416422, | |
| "grad_norm": 1.963915467262268, | |
| "learning_rate": 3.105092091007584e-05, | |
| "loss": 0.0108, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 1.3255131964809383, | |
| "grad_norm": 0.29517853260040283, | |
| "learning_rate": 3.094257854821235e-05, | |
| "loss": 0.0092, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 1.3313782991202345, | |
| "grad_norm": 2.272747278213501, | |
| "learning_rate": 3.083423618634886e-05, | |
| "loss": 0.0112, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 1.3372434017595307, | |
| "grad_norm": 2.0269525051116943, | |
| "learning_rate": 3.0725893824485375e-05, | |
| "loss": 0.0107, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 1.343108504398827, | |
| "grad_norm": 1.2506771087646484, | |
| "learning_rate": 3.0617551462621887e-05, | |
| "loss": 0.0082, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 1.3489736070381233, | |
| "grad_norm": 1.7308708429336548, | |
| "learning_rate": 3.0509209100758395e-05, | |
| "loss": 0.0102, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 1.3548387096774195, | |
| "grad_norm": 2.230759859085083, | |
| "learning_rate": 3.0400866738894913e-05, | |
| "loss": 0.0079, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 1.3607038123167157, | |
| "grad_norm": 1.599900245666504, | |
| "learning_rate": 3.029252437703142e-05, | |
| "loss": 0.0095, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 1.3665689149560118, | |
| "grad_norm": 1.4094127416610718, | |
| "learning_rate": 3.0184182015167933e-05, | |
| "loss": 0.0061, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 1.372434017595308, | |
| "grad_norm": 1.2801002264022827, | |
| "learning_rate": 3.0075839653304445e-05, | |
| "loss": 0.0099, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 1.3782991202346042, | |
| "grad_norm": 2.3603005409240723, | |
| "learning_rate": 2.9967497291440956e-05, | |
| "loss": 0.0094, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 1.3841642228739004, | |
| "grad_norm": 2.6935067176818848, | |
| "learning_rate": 2.9859154929577465e-05, | |
| "loss": 0.0146, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 1.3900293255131966, | |
| "grad_norm": 1.099675178527832, | |
| "learning_rate": 2.9750812567713976e-05, | |
| "loss": 0.0073, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 1.3958944281524928, | |
| "grad_norm": 1.6118828058242798, | |
| "learning_rate": 2.9642470205850488e-05, | |
| "loss": 0.0079, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 1.401759530791789, | |
| "grad_norm": 0.4347597062587738, | |
| "learning_rate": 2.9534127843987003e-05, | |
| "loss": 0.0083, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 1.4076246334310851, | |
| "grad_norm": 0.7919577956199646, | |
| "learning_rate": 2.9425785482123514e-05, | |
| "loss": 0.0069, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.4134897360703813, | |
| "grad_norm": 0.8516518473625183, | |
| "learning_rate": 2.9317443120260023e-05, | |
| "loss": 0.0095, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.4193548387096775, | |
| "grad_norm": 2.754093885421753, | |
| "learning_rate": 2.9209100758396534e-05, | |
| "loss": 0.0155, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.4252199413489737, | |
| "grad_norm": 3.3311378955841064, | |
| "learning_rate": 2.9100758396533046e-05, | |
| "loss": 0.0147, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 1.4310850439882699, | |
| "grad_norm": 1.4020462036132812, | |
| "learning_rate": 2.8992416034669557e-05, | |
| "loss": 0.0083, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 1.436950146627566, | |
| "grad_norm": 1.8778059482574463, | |
| "learning_rate": 2.8884073672806066e-05, | |
| "loss": 0.0125, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 1.4428152492668622, | |
| "grad_norm": 0.8313784003257751, | |
| "learning_rate": 2.8775731310942577e-05, | |
| "loss": 0.0089, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 1.4486803519061584, | |
| "grad_norm": 1.4605448246002197, | |
| "learning_rate": 2.8667388949079092e-05, | |
| "loss": 0.0186, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 1.4545454545454546, | |
| "grad_norm": 5.201650619506836, | |
| "learning_rate": 2.8559046587215604e-05, | |
| "loss": 0.0149, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 1.4604105571847508, | |
| "grad_norm": 1.246205449104309, | |
| "learning_rate": 2.8450704225352116e-05, | |
| "loss": 0.013, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 1.466275659824047, | |
| "grad_norm": 1.1601160764694214, | |
| "learning_rate": 2.8342361863488627e-05, | |
| "loss": 0.0114, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.4721407624633431, | |
| "grad_norm": 3.673280954360962, | |
| "learning_rate": 2.8234019501625135e-05, | |
| "loss": 0.0099, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.4780058651026393, | |
| "grad_norm": 1.7461609840393066, | |
| "learning_rate": 2.8125677139761647e-05, | |
| "loss": 0.0108, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.4838709677419355, | |
| "grad_norm": 2.374141216278076, | |
| "learning_rate": 2.801733477789816e-05, | |
| "loss": 0.0099, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.4897360703812317, | |
| "grad_norm": 0.3864287734031677, | |
| "learning_rate": 2.7908992416034667e-05, | |
| "loss": 0.0081, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.4956011730205279, | |
| "grad_norm": 2.1756975650787354, | |
| "learning_rate": 2.7800650054171185e-05, | |
| "loss": 0.0082, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.501466275659824, | |
| "grad_norm": 1.4991483688354492, | |
| "learning_rate": 2.7692307692307694e-05, | |
| "loss": 0.0086, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.5073313782991202, | |
| "grad_norm": 1.5785051584243774, | |
| "learning_rate": 2.7583965330444205e-05, | |
| "loss": 0.0078, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.5131964809384164, | |
| "grad_norm": 0.8412874341011047, | |
| "learning_rate": 2.7475622968580717e-05, | |
| "loss": 0.0102, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.5190615835777126, | |
| "grad_norm": 1.034793734550476, | |
| "learning_rate": 2.736728060671723e-05, | |
| "loss": 0.0114, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.5249266862170088, | |
| "grad_norm": 0.5589306950569153, | |
| "learning_rate": 2.7258938244853737e-05, | |
| "loss": 0.0108, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.530791788856305, | |
| "grad_norm": 0.5028925538063049, | |
| "learning_rate": 2.7150595882990248e-05, | |
| "loss": 0.0107, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.5366568914956011, | |
| "grad_norm": 0.5622811913490295, | |
| "learning_rate": 2.704225352112676e-05, | |
| "loss": 0.0108, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.5425219941348973, | |
| "grad_norm": 1.4025564193725586, | |
| "learning_rate": 2.6933911159263275e-05, | |
| "loss": 0.0102, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.5483870967741935, | |
| "grad_norm": 2.0565545558929443, | |
| "learning_rate": 2.6825568797399786e-05, | |
| "loss": 0.0068, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.5542521994134897, | |
| "grad_norm": 0.45606544613838196, | |
| "learning_rate": 2.6717226435536298e-05, | |
| "loss": 0.0156, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.5601173020527859, | |
| "grad_norm": 0.5702988505363464, | |
| "learning_rate": 2.6608884073672806e-05, | |
| "loss": 0.0065, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.565982404692082, | |
| "grad_norm": 0.6669889092445374, | |
| "learning_rate": 2.6500541711809318e-05, | |
| "loss": 0.0157, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.5718475073313782, | |
| "grad_norm": 3.8566477298736572, | |
| "learning_rate": 2.639219934994583e-05, | |
| "loss": 0.0119, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.5777126099706744, | |
| "grad_norm": 1.1893422603607178, | |
| "learning_rate": 2.6283856988082338e-05, | |
| "loss": 0.0053, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.5835777126099706, | |
| "grad_norm": 0.36327511072158813, | |
| "learning_rate": 2.6175514626218856e-05, | |
| "loss": 0.005, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.5894428152492668, | |
| "grad_norm": 0.6985073685646057, | |
| "learning_rate": 2.6067172264355368e-05, | |
| "loss": 0.0093, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.595307917888563, | |
| "grad_norm": 1.7380647659301758, | |
| "learning_rate": 2.5958829902491876e-05, | |
| "loss": 0.0062, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.6011730205278591, | |
| "grad_norm": 0.3573593199253082, | |
| "learning_rate": 2.5850487540628388e-05, | |
| "loss": 0.0077, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.6070381231671553, | |
| "grad_norm": 3.5947067737579346, | |
| "learning_rate": 2.57421451787649e-05, | |
| "loss": 0.0103, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.6129032258064515, | |
| "grad_norm": 1.3845021724700928, | |
| "learning_rate": 2.5633802816901408e-05, | |
| "loss": 0.0078, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.6187683284457477, | |
| "grad_norm": 0.6750550270080566, | |
| "learning_rate": 2.552546045503792e-05, | |
| "loss": 0.0069, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.6246334310850439, | |
| "grad_norm": 0.5807213187217712, | |
| "learning_rate": 2.541711809317443e-05, | |
| "loss": 0.0062, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.63049853372434, | |
| "grad_norm": 0.4663524925708771, | |
| "learning_rate": 2.5308775731310946e-05, | |
| "loss": 0.01, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.6363636363636362, | |
| "grad_norm": 0.9029988050460815, | |
| "learning_rate": 2.5200433369447457e-05, | |
| "loss": 0.0066, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.6422287390029324, | |
| "grad_norm": 0.5071028470993042, | |
| "learning_rate": 2.509209100758397e-05, | |
| "loss": 0.0091, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.6480938416422286, | |
| "grad_norm": 1.9077457189559937, | |
| "learning_rate": 2.4983748645720477e-05, | |
| "loss": 0.0121, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.6539589442815248, | |
| "grad_norm": 0.7507709860801697, | |
| "learning_rate": 2.487540628385699e-05, | |
| "loss": 0.0122, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.659824046920821, | |
| "grad_norm": 0.7139337062835693, | |
| "learning_rate": 2.47670639219935e-05, | |
| "loss": 0.0058, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.6656891495601172, | |
| "grad_norm": 2.0261781215667725, | |
| "learning_rate": 2.4658721560130012e-05, | |
| "loss": 0.0078, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.6715542521994133, | |
| "grad_norm": 0.9489262700080872, | |
| "learning_rate": 2.4550379198266524e-05, | |
| "loss": 0.0119, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.6774193548387095, | |
| "grad_norm": 0.3388541340827942, | |
| "learning_rate": 2.4442036836403035e-05, | |
| "loss": 0.0053, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 1.6832844574780057, | |
| "grad_norm": 0.9326663017272949, | |
| "learning_rate": 2.4333694474539544e-05, | |
| "loss": 0.008, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 1.689149560117302, | |
| "grad_norm": 1.6251760721206665, | |
| "learning_rate": 2.422535211267606e-05, | |
| "loss": 0.0088, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 1.6950146627565983, | |
| "grad_norm": 0.45517489314079285, | |
| "learning_rate": 2.411700975081257e-05, | |
| "loss": 0.0114, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 1.7008797653958945, | |
| "grad_norm": 1.6923969984054565, | |
| "learning_rate": 2.400866738894908e-05, | |
| "loss": 0.0082, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.7067448680351907, | |
| "grad_norm": 1.6740838289260864, | |
| "learning_rate": 2.390032502708559e-05, | |
| "loss": 0.0067, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.7126099706744868, | |
| "grad_norm": 1.280557632446289, | |
| "learning_rate": 2.3791982665222105e-05, | |
| "loss": 0.0072, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.718475073313783, | |
| "grad_norm": 3.185971260070801, | |
| "learning_rate": 2.3683640303358613e-05, | |
| "loss": 0.0102, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.7243401759530792, | |
| "grad_norm": 0.6263673901557922, | |
| "learning_rate": 2.3575297941495125e-05, | |
| "loss": 0.0057, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.7302052785923754, | |
| "grad_norm": 0.5889501571655273, | |
| "learning_rate": 2.3466955579631637e-05, | |
| "loss": 0.0064, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.7360703812316716, | |
| "grad_norm": 1.5832338333129883, | |
| "learning_rate": 2.3358613217768148e-05, | |
| "loss": 0.0124, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.7419354838709677, | |
| "grad_norm": 1.794831395149231, | |
| "learning_rate": 2.325027085590466e-05, | |
| "loss": 0.0085, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.747800586510264, | |
| "grad_norm": 0.6386451125144958, | |
| "learning_rate": 2.314192849404117e-05, | |
| "loss": 0.0069, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.7536656891495601, | |
| "grad_norm": 1.312626600265503, | |
| "learning_rate": 2.3033586132177683e-05, | |
| "loss": 0.0064, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.7595307917888563, | |
| "grad_norm": 1.8232412338256836, | |
| "learning_rate": 2.2925243770314195e-05, | |
| "loss": 0.0066, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.7653958944281525, | |
| "grad_norm": 0.3552861213684082, | |
| "learning_rate": 2.2816901408450706e-05, | |
| "loss": 0.0058, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.7712609970674487, | |
| "grad_norm": 1.736221194267273, | |
| "learning_rate": 2.2708559046587215e-05, | |
| "loss": 0.0077, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.7771260997067448, | |
| "grad_norm": 0.21436937153339386, | |
| "learning_rate": 2.2600216684723726e-05, | |
| "loss": 0.0059, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.782991202346041, | |
| "grad_norm": 0.6992660164833069, | |
| "learning_rate": 2.249187432286024e-05, | |
| "loss": 0.009, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.7888563049853372, | |
| "grad_norm": 0.7840091586112976, | |
| "learning_rate": 2.238353196099675e-05, | |
| "loss": 0.0067, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.7947214076246334, | |
| "grad_norm": 2.9690425395965576, | |
| "learning_rate": 2.227518959913326e-05, | |
| "loss": 0.0072, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.8005865102639296, | |
| "grad_norm": 1.8144841194152832, | |
| "learning_rate": 2.2166847237269773e-05, | |
| "loss": 0.007, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.8064516129032258, | |
| "grad_norm": 0.38126352429389954, | |
| "learning_rate": 2.2058504875406284e-05, | |
| "loss": 0.0072, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.812316715542522, | |
| "grad_norm": 0.4111306369304657, | |
| "learning_rate": 2.1950162513542796e-05, | |
| "loss": 0.008, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.8181818181818183, | |
| "grad_norm": 0.7458495497703552, | |
| "learning_rate": 2.1841820151679307e-05, | |
| "loss": 0.0063, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.8240469208211145, | |
| "grad_norm": 0.7662573456764221, | |
| "learning_rate": 2.173347778981582e-05, | |
| "loss": 0.0063, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.8299120234604107, | |
| "grad_norm": 0.8057358860969543, | |
| "learning_rate": 2.162513542795233e-05, | |
| "loss": 0.0083, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.835777126099707, | |
| "grad_norm": 2.0041463375091553, | |
| "learning_rate": 2.1516793066088842e-05, | |
| "loss": 0.0074, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.841642228739003, | |
| "grad_norm": 0.4772699475288391, | |
| "learning_rate": 2.1408450704225354e-05, | |
| "loss": 0.0102, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.8475073313782993, | |
| "grad_norm": 1.6266963481903076, | |
| "learning_rate": 2.1300108342361866e-05, | |
| "loss": 0.0099, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.8533724340175954, | |
| "grad_norm": 0.8952119946479797, | |
| "learning_rate": 2.1191765980498377e-05, | |
| "loss": 0.0051, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.8592375366568916, | |
| "grad_norm": 2.7058868408203125, | |
| "learning_rate": 2.108342361863489e-05, | |
| "loss": 0.0085, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.8651026392961878, | |
| "grad_norm": 1.5164512395858765, | |
| "learning_rate": 2.0975081256771397e-05, | |
| "loss": 0.0072, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.870967741935484, | |
| "grad_norm": 0.37659189105033875, | |
| "learning_rate": 2.0866738894907912e-05, | |
| "loss": 0.007, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.8768328445747802, | |
| "grad_norm": 0.7300402522087097, | |
| "learning_rate": 2.075839653304442e-05, | |
| "loss": 0.0083, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.8826979472140764, | |
| "grad_norm": 0.4265812933444977, | |
| "learning_rate": 2.0650054171180932e-05, | |
| "loss": 0.0094, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.8885630498533725, | |
| "grad_norm": 0.6551434993743896, | |
| "learning_rate": 2.0541711809317444e-05, | |
| "loss": 0.0043, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.8944281524926687, | |
| "grad_norm": 0.5182037353515625, | |
| "learning_rate": 2.0433369447453955e-05, | |
| "loss": 0.0046, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.900293255131965, | |
| "grad_norm": 0.5181695818901062, | |
| "learning_rate": 2.0325027085590467e-05, | |
| "loss": 0.0054, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.906158357771261, | |
| "grad_norm": 0.30220693349838257, | |
| "learning_rate": 2.021668472372698e-05, | |
| "loss": 0.0031, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.9120234604105573, | |
| "grad_norm": 0.4141380190849304, | |
| "learning_rate": 2.010834236186349e-05, | |
| "loss": 0.0041, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.9178885630498534, | |
| "grad_norm": 1.5024000406265259, | |
| "learning_rate": 2e-05, | |
| "loss": 0.0036, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.9237536656891496, | |
| "grad_norm": 0.37273481488227844, | |
| "learning_rate": 1.9891657638136513e-05, | |
| "loss": 0.0041, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.9296187683284458, | |
| "grad_norm": 0.7267457246780396, | |
| "learning_rate": 1.9783315276273025e-05, | |
| "loss": 0.0086, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.935483870967742, | |
| "grad_norm": 1.0580945014953613, | |
| "learning_rate": 1.9674972914409533e-05, | |
| "loss": 0.0049, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.9413489736070382, | |
| "grad_norm": 0.5923478603363037, | |
| "learning_rate": 1.9566630552546048e-05, | |
| "loss": 0.0074, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.9472140762463344, | |
| "grad_norm": 2.4155588150024414, | |
| "learning_rate": 1.945828819068256e-05, | |
| "loss": 0.0104, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.9530791788856305, | |
| "grad_norm": 2.4885783195495605, | |
| "learning_rate": 1.9349945828819068e-05, | |
| "loss": 0.0059, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.9589442815249267, | |
| "grad_norm": 1.2375839948654175, | |
| "learning_rate": 1.924160346695558e-05, | |
| "loss": 0.0108, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.964809384164223, | |
| "grad_norm": 2.7441036701202393, | |
| "learning_rate": 1.9133261105092095e-05, | |
| "loss": 0.0055, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.970674486803519, | |
| "grad_norm": 1.8821948766708374, | |
| "learning_rate": 1.9024918743228603e-05, | |
| "loss": 0.0043, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.9765395894428153, | |
| "grad_norm": 1.3677663803100586, | |
| "learning_rate": 1.8916576381365114e-05, | |
| "loss": 0.0047, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.9824046920821115, | |
| "grad_norm": 0.37202900648117065, | |
| "learning_rate": 1.8808234019501626e-05, | |
| "loss": 0.0043, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.9882697947214076, | |
| "grad_norm": 1.2205904722213745, | |
| "learning_rate": 1.8699891657638138e-05, | |
| "loss": 0.0044, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.9941348973607038, | |
| "grad_norm": 1.4114491939544678, | |
| "learning_rate": 1.859154929577465e-05, | |
| "loss": 0.0043, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.8728663325309753, | |
| "learning_rate": 1.848320693391116e-05, | |
| "loss": 0.0041, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 2.005865102639296, | |
| "grad_norm": 1.2277451753616333, | |
| "learning_rate": 1.837486457204767e-05, | |
| "loss": 0.0047, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 2.0117302052785924, | |
| "grad_norm": 0.4225451946258545, | |
| "learning_rate": 1.8266522210184184e-05, | |
| "loss": 0.0025, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 2.0175953079178885, | |
| "grad_norm": 0.6460304856300354, | |
| "learning_rate": 1.8158179848320696e-05, | |
| "loss": 0.0035, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 2.0234604105571847, | |
| "grad_norm": 0.5473723411560059, | |
| "learning_rate": 1.8049837486457204e-05, | |
| "loss": 0.0037, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 2.029325513196481, | |
| "grad_norm": 2.4837288856506348, | |
| "learning_rate": 1.7941495124593716e-05, | |
| "loss": 0.0062, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 2.035190615835777, | |
| "grad_norm": 0.5354294180870056, | |
| "learning_rate": 1.783315276273023e-05, | |
| "loss": 0.0051, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 2.0410557184750733, | |
| "grad_norm": 0.6833767294883728, | |
| "learning_rate": 1.772481040086674e-05, | |
| "loss": 0.0026, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 2.0469208211143695, | |
| "grad_norm": 1.8108445405960083, | |
| "learning_rate": 1.761646803900325e-05, | |
| "loss": 0.0046, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 2.0527859237536656, | |
| "grad_norm": 0.46405455470085144, | |
| "learning_rate": 1.7508125677139762e-05, | |
| "loss": 0.0036, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 2.058651026392962, | |
| "grad_norm": 1.3793003559112549, | |
| "learning_rate": 1.7399783315276274e-05, | |
| "loss": 0.0049, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 2.064516129032258, | |
| "grad_norm": 1.083612084388733, | |
| "learning_rate": 1.7291440953412785e-05, | |
| "loss": 0.0035, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 2.070381231671554, | |
| "grad_norm": 1.7469717264175415, | |
| "learning_rate": 1.7183098591549297e-05, | |
| "loss": 0.0035, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 2.0762463343108504, | |
| "grad_norm": 1.8320294618606567, | |
| "learning_rate": 1.707475622968581e-05, | |
| "loss": 0.0046, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 2.0821114369501466, | |
| "grad_norm": 0.7071532011032104, | |
| "learning_rate": 1.696641386782232e-05, | |
| "loss": 0.0041, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 2.0879765395894427, | |
| "grad_norm": 0.39700040221214294, | |
| "learning_rate": 1.6858071505958832e-05, | |
| "loss": 0.0029, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 2.093841642228739, | |
| "grad_norm": 1.1924680471420288, | |
| "learning_rate": 1.674972914409534e-05, | |
| "loss": 0.0071, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 2.099706744868035, | |
| "grad_norm": 1.3607029914855957, | |
| "learning_rate": 1.6641386782231855e-05, | |
| "loss": 0.0126, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 2.1055718475073313, | |
| "grad_norm": 0.5861091017723083, | |
| "learning_rate": 1.6533044420368367e-05, | |
| "loss": 0.0037, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.1114369501466275, | |
| "grad_norm": 0.22819814085960388, | |
| "learning_rate": 1.6424702058504875e-05, | |
| "loss": 0.0026, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 2.1173020527859236, | |
| "grad_norm": 0.15041030943393707, | |
| "learning_rate": 1.6316359696641387e-05, | |
| "loss": 0.0034, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.12316715542522, | |
| "grad_norm": 0.5198802947998047, | |
| "learning_rate": 1.62080173347779e-05, | |
| "loss": 0.0032, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.129032258064516, | |
| "grad_norm": 0.1846199780702591, | |
| "learning_rate": 1.609967497291441e-05, | |
| "loss": 0.0029, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.134897360703812, | |
| "grad_norm": 0.6416236758232117, | |
| "learning_rate": 1.599133261105092e-05, | |
| "loss": 0.0033, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.1407624633431084, | |
| "grad_norm": 0.46757569909095764, | |
| "learning_rate": 1.5882990249187433e-05, | |
| "loss": 0.0033, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.1466275659824046, | |
| "grad_norm": 2.260544538497925, | |
| "learning_rate": 1.5774647887323945e-05, | |
| "loss": 0.004, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.1524926686217007, | |
| "grad_norm": 0.4333725869655609, | |
| "learning_rate": 1.5666305525460456e-05, | |
| "loss": 0.0026, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.158357771260997, | |
| "grad_norm": 0.7353067398071289, | |
| "learning_rate": 1.5557963163596968e-05, | |
| "loss": 0.003, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.164222873900293, | |
| "grad_norm": 0.9456650614738464, | |
| "learning_rate": 1.5449620801733476e-05, | |
| "loss": 0.0038, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.1700879765395893, | |
| "grad_norm": 0.4781268239021301, | |
| "learning_rate": 1.534127843986999e-05, | |
| "loss": 0.0043, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.1759530791788855, | |
| "grad_norm": 0.488057404756546, | |
| "learning_rate": 1.5232936078006501e-05, | |
| "loss": 0.0025, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.1818181818181817, | |
| "grad_norm": 0.2408045530319214, | |
| "learning_rate": 1.5124593716143013e-05, | |
| "loss": 0.0022, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.187683284457478, | |
| "grad_norm": 0.3382255434989929, | |
| "learning_rate": 1.5016251354279523e-05, | |
| "loss": 0.0024, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.193548387096774, | |
| "grad_norm": 0.3861747682094574, | |
| "learning_rate": 1.4907908992416036e-05, | |
| "loss": 0.0028, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.19941348973607, | |
| "grad_norm": 0.45839062333106995, | |
| "learning_rate": 1.4799566630552548e-05, | |
| "loss": 0.0033, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.2052785923753664, | |
| "grad_norm": 0.7637222409248352, | |
| "learning_rate": 1.4691224268689057e-05, | |
| "loss": 0.0023, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.2111436950146626, | |
| "grad_norm": 0.710662841796875, | |
| "learning_rate": 1.4582881906825569e-05, | |
| "loss": 0.0045, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.2170087976539588, | |
| "grad_norm": 1.2684130668640137, | |
| "learning_rate": 1.4474539544962082e-05, | |
| "loss": 0.0039, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.222873900293255, | |
| "grad_norm": 0.8279300928115845, | |
| "learning_rate": 1.4366197183098592e-05, | |
| "loss": 0.0032, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.228739002932551, | |
| "grad_norm": 1.0345526933670044, | |
| "learning_rate": 1.4257854821235104e-05, | |
| "loss": 0.003, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.2346041055718473, | |
| "grad_norm": 0.5127742290496826, | |
| "learning_rate": 1.4149512459371614e-05, | |
| "loss": 0.0032, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.2404692082111435, | |
| "grad_norm": 0.7230274677276611, | |
| "learning_rate": 1.4041170097508127e-05, | |
| "loss": 0.006, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.2463343108504397, | |
| "grad_norm": 0.7685052156448364, | |
| "learning_rate": 1.3932827735644637e-05, | |
| "loss": 0.0027, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.252199413489736, | |
| "grad_norm": 1.4992603063583374, | |
| "learning_rate": 1.3824485373781149e-05, | |
| "loss": 0.0041, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.258064516129032, | |
| "grad_norm": 0.30322280526161194, | |
| "learning_rate": 1.3716143011917659e-05, | |
| "loss": 0.0024, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.263929618768328, | |
| "grad_norm": 0.2453003227710724, | |
| "learning_rate": 1.3607800650054172e-05, | |
| "loss": 0.0022, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.2697947214076244, | |
| "grad_norm": 1.1497894525527954, | |
| "learning_rate": 1.3499458288190684e-05, | |
| "loss": 0.0024, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.2756598240469206, | |
| "grad_norm": 1.0339840650558472, | |
| "learning_rate": 1.3391115926327194e-05, | |
| "loss": 0.0027, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.281524926686217, | |
| "grad_norm": 1.244283676147461, | |
| "learning_rate": 1.3282773564463705e-05, | |
| "loss": 0.0033, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.2873900293255134, | |
| "grad_norm": 0.17964577674865723, | |
| "learning_rate": 1.3174431202600218e-05, | |
| "loss": 0.003, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.2932551319648096, | |
| "grad_norm": 0.437898188829422, | |
| "learning_rate": 1.3066088840736728e-05, | |
| "loss": 0.0025, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.2991202346041058, | |
| "grad_norm": 0.6417920589447021, | |
| "learning_rate": 1.295774647887324e-05, | |
| "loss": 0.0022, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.304985337243402, | |
| "grad_norm": 0.9770957827568054, | |
| "learning_rate": 1.284940411700975e-05, | |
| "loss": 0.0042, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.310850439882698, | |
| "grad_norm": 0.2995135486125946, | |
| "learning_rate": 1.2741061755146263e-05, | |
| "loss": 0.0031, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.3167155425219943, | |
| "grad_norm": 0.23019449412822723, | |
| "learning_rate": 1.2632719393282775e-05, | |
| "loss": 0.0038, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.3225806451612905, | |
| "grad_norm": 1.9633790254592896, | |
| "learning_rate": 1.2524377031419285e-05, | |
| "loss": 0.0052, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.3284457478005867, | |
| "grad_norm": 0.5719003081321716, | |
| "learning_rate": 1.2416034669555796e-05, | |
| "loss": 0.0033, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.334310850439883, | |
| "grad_norm": 0.4849315881729126, | |
| "learning_rate": 1.230769230769231e-05, | |
| "loss": 0.0029, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.340175953079179, | |
| "grad_norm": 0.5165572166442871, | |
| "learning_rate": 1.219934994582882e-05, | |
| "loss": 0.0032, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.346041055718475, | |
| "grad_norm": 6.257429599761963, | |
| "learning_rate": 1.2091007583965331e-05, | |
| "loss": 0.0051, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.3519061583577714, | |
| "grad_norm": 0.1021239385008812, | |
| "learning_rate": 1.1982665222101843e-05, | |
| "loss": 0.0029, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.3577712609970676, | |
| "grad_norm": 0.5569911599159241, | |
| "learning_rate": 1.1874322860238355e-05, | |
| "loss": 0.0028, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.3636363636363638, | |
| "grad_norm": 0.21338407695293427, | |
| "learning_rate": 1.1765980498374864e-05, | |
| "loss": 0.0023, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.36950146627566, | |
| "grad_norm": 0.9345069527626038, | |
| "learning_rate": 1.1657638136511378e-05, | |
| "loss": 0.0038, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.375366568914956, | |
| "grad_norm": 1.0866254568099976, | |
| "learning_rate": 1.1549295774647888e-05, | |
| "loss": 0.0036, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.3812316715542523, | |
| "grad_norm": 0.9882946014404297, | |
| "learning_rate": 1.14409534127844e-05, | |
| "loss": 0.0036, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.3870967741935485, | |
| "grad_norm": 0.50217604637146, | |
| "learning_rate": 1.1332611050920911e-05, | |
| "loss": 0.0025, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.3929618768328447, | |
| "grad_norm": 1.3795546293258667, | |
| "learning_rate": 1.1224268689057423e-05, | |
| "loss": 0.0042, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.398826979472141, | |
| "grad_norm": 0.7706465125083923, | |
| "learning_rate": 1.1115926327193932e-05, | |
| "loss": 0.0027, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.404692082111437, | |
| "grad_norm": 0.30888229608535767, | |
| "learning_rate": 1.1007583965330446e-05, | |
| "loss": 0.0027, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.410557184750733, | |
| "grad_norm": 0.16666728258132935, | |
| "learning_rate": 1.0899241603466956e-05, | |
| "loss": 0.0025, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.4164222873900294, | |
| "grad_norm": 0.40352991223335266, | |
| "learning_rate": 1.0790899241603467e-05, | |
| "loss": 0.0023, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.4222873900293256, | |
| "grad_norm": 0.7607168555259705, | |
| "learning_rate": 1.0682556879739979e-05, | |
| "loss": 0.0037, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.4281524926686218, | |
| "grad_norm": 0.6904723644256592, | |
| "learning_rate": 1.057421451787649e-05, | |
| "loss": 0.0033, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.434017595307918, | |
| "grad_norm": 0.4458531141281128, | |
| "learning_rate": 1.0465872156013e-05, | |
| "loss": 0.0027, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.439882697947214, | |
| "grad_norm": 0.9180742502212524, | |
| "learning_rate": 1.0357529794149514e-05, | |
| "loss": 0.0023, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.4457478005865103, | |
| "grad_norm": 0.2105313539505005, | |
| "learning_rate": 1.0249187432286024e-05, | |
| "loss": 0.0033, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.4516129032258065, | |
| "grad_norm": 0.45809683203697205, | |
| "learning_rate": 1.0140845070422535e-05, | |
| "loss": 0.0025, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.4574780058651027, | |
| "grad_norm": 0.8669016361236572, | |
| "learning_rate": 1.0032502708559047e-05, | |
| "loss": 0.0043, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.463343108504399, | |
| "grad_norm": 0.4931950569152832, | |
| "learning_rate": 9.924160346695559e-06, | |
| "loss": 0.0021, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.469208211143695, | |
| "grad_norm": 0.13325130939483643, | |
| "learning_rate": 9.81581798483207e-06, | |
| "loss": 0.0028, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.4750733137829912, | |
| "grad_norm": 0.5872472524642944, | |
| "learning_rate": 9.707475622968582e-06, | |
| "loss": 0.0024, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.4809384164222874, | |
| "grad_norm": 1.469139814376831, | |
| "learning_rate": 9.599133261105092e-06, | |
| "loss": 0.0046, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.4868035190615836, | |
| "grad_norm": 1.5533734560012817, | |
| "learning_rate": 9.490790899241603e-06, | |
| "loss": 0.0051, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.4926686217008798, | |
| "grad_norm": 0.7671976089477539, | |
| "learning_rate": 9.382448537378115e-06, | |
| "loss": 0.0027, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.498533724340176, | |
| "grad_norm": 0.3293214440345764, | |
| "learning_rate": 9.274106175514627e-06, | |
| "loss": 0.0037, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.504398826979472, | |
| "grad_norm": 0.6871675848960876, | |
| "learning_rate": 9.165763813651138e-06, | |
| "loss": 0.002, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.5102639296187683, | |
| "grad_norm": 0.7354288101196289, | |
| "learning_rate": 9.05742145178765e-06, | |
| "loss": 0.0028, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.5161290322580645, | |
| "grad_norm": 0.7671726942062378, | |
| "learning_rate": 8.94907908992416e-06, | |
| "loss": 0.0022, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.5219941348973607, | |
| "grad_norm": 1.1531054973602295, | |
| "learning_rate": 8.840736728060673e-06, | |
| "loss": 0.0032, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.527859237536657, | |
| "grad_norm": 0.648115873336792, | |
| "learning_rate": 8.732394366197183e-06, | |
| "loss": 0.0026, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.533724340175953, | |
| "grad_norm": 0.579592764377594, | |
| "learning_rate": 8.624052004333695e-06, | |
| "loss": 0.0017, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.5395894428152492, | |
| "grad_norm": 1.3247286081314087, | |
| "learning_rate": 8.515709642470206e-06, | |
| "loss": 0.0039, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.5454545454545454, | |
| "grad_norm": 0.200743168592453, | |
| "learning_rate": 8.407367280606718e-06, | |
| "loss": 0.0037, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.5513196480938416, | |
| "grad_norm": 0.6144959926605225, | |
| "learning_rate": 8.299024918743228e-06, | |
| "loss": 0.0026, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.557184750733138, | |
| "grad_norm": 0.6242021918296814, | |
| "learning_rate": 8.190682556879741e-06, | |
| "loss": 0.0028, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.563049853372434, | |
| "grad_norm": 0.5484371185302734, | |
| "learning_rate": 8.082340195016251e-06, | |
| "loss": 0.0048, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.56891495601173, | |
| "grad_norm": 0.181168332695961, | |
| "learning_rate": 7.973997833152763e-06, | |
| "loss": 0.0026, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.5747800586510263, | |
| "grad_norm": 0.9735661745071411, | |
| "learning_rate": 7.865655471289274e-06, | |
| "loss": 0.0035, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.5806451612903225, | |
| "grad_norm": 0.4242919683456421, | |
| "learning_rate": 7.757313109425786e-06, | |
| "loss": 0.0025, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.5865102639296187, | |
| "grad_norm": 0.313275009393692, | |
| "learning_rate": 7.648970747562296e-06, | |
| "loss": 0.0024, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.592375366568915, | |
| "grad_norm": 0.3656627833843231, | |
| "learning_rate": 7.540628385698808e-06, | |
| "loss": 0.0029, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.598240469208211, | |
| "grad_norm": 0.2241465449333191, | |
| "learning_rate": 7.432286023835321e-06, | |
| "loss": 0.002, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.6041055718475072, | |
| "grad_norm": 0.3788506090641022, | |
| "learning_rate": 7.3239436619718316e-06, | |
| "loss": 0.002, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.6099706744868034, | |
| "grad_norm": 0.2104683667421341, | |
| "learning_rate": 7.215601300108343e-06, | |
| "loss": 0.0021, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.6158357771260996, | |
| "grad_norm": 0.6747612953186035, | |
| "learning_rate": 7.107258938244854e-06, | |
| "loss": 0.0023, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.621700879765396, | |
| "grad_norm": 0.5167836546897888, | |
| "learning_rate": 6.9989165763813664e-06, | |
| "loss": 0.002, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.627565982404692, | |
| "grad_norm": 0.4595435857772827, | |
| "learning_rate": 6.890574214517876e-06, | |
| "loss": 0.0018, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.633431085043988, | |
| "grad_norm": 0.38868948817253113, | |
| "learning_rate": 6.782231852654389e-06, | |
| "loss": 0.0074, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.6392961876832843, | |
| "grad_norm": 1.027677059173584, | |
| "learning_rate": 6.6738894907909e-06, | |
| "loss": 0.0018, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.6451612903225805, | |
| "grad_norm": 0.3188434839248657, | |
| "learning_rate": 6.565547128927411e-06, | |
| "loss": 0.0017, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.6510263929618767, | |
| "grad_norm": 0.3385601043701172, | |
| "learning_rate": 6.457204767063922e-06, | |
| "loss": 0.0025, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.656891495601173, | |
| "grad_norm": 0.8369510173797607, | |
| "learning_rate": 6.3488624052004345e-06, | |
| "loss": 0.0021, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.662756598240469, | |
| "grad_norm": 0.32399484515190125, | |
| "learning_rate": 6.240520043336945e-06, | |
| "loss": 0.002, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.6686217008797652, | |
| "grad_norm": 0.48848479986190796, | |
| "learning_rate": 6.132177681473456e-06, | |
| "loss": 0.002, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.6744868035190614, | |
| "grad_norm": 0.4203287661075592, | |
| "learning_rate": 6.023835319609968e-06, | |
| "loss": 0.0024, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.6803519061583576, | |
| "grad_norm": 0.17682945728302002, | |
| "learning_rate": 5.915492957746479e-06, | |
| "loss": 0.0023, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.686217008797654, | |
| "grad_norm": 0.27645328640937805, | |
| "learning_rate": 5.80715059588299e-06, | |
| "loss": 0.0016, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.6920821114369504, | |
| "grad_norm": 0.341978520154953, | |
| "learning_rate": 5.698808234019502e-06, | |
| "loss": 0.0024, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.6979472140762466, | |
| "grad_norm": 0.6040353178977966, | |
| "learning_rate": 5.590465872156013e-06, | |
| "loss": 0.0029, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.703812316715543, | |
| "grad_norm": 0.4571681320667267, | |
| "learning_rate": 5.482123510292524e-06, | |
| "loss": 0.0038, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.709677419354839, | |
| "grad_norm": 0.6993141174316406, | |
| "learning_rate": 5.373781148429036e-06, | |
| "loss": 0.0029, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.715542521994135, | |
| "grad_norm": 0.45035046339035034, | |
| "learning_rate": 5.265438786565547e-06, | |
| "loss": 0.0018, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.7214076246334313, | |
| "grad_norm": 0.23612183332443237, | |
| "learning_rate": 5.157096424702059e-06, | |
| "loss": 0.0021, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.7272727272727275, | |
| "grad_norm": 0.5207453370094299, | |
| "learning_rate": 5.0487540628385705e-06, | |
| "loss": 0.0024, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.7331378299120237, | |
| "grad_norm": 0.7961679100990295, | |
| "learning_rate": 4.940411700975082e-06, | |
| "loss": 0.0024, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.73900293255132, | |
| "grad_norm": 0.42311325669288635, | |
| "learning_rate": 4.832069339111593e-06, | |
| "loss": 0.0016, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.744868035190616, | |
| "grad_norm": 0.26033857464790344, | |
| "learning_rate": 4.7237269772481045e-06, | |
| "loss": 0.0027, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.7507331378299122, | |
| "grad_norm": 0.2687263488769531, | |
| "learning_rate": 4.615384615384616e-06, | |
| "loss": 0.0024, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.7565982404692084, | |
| "grad_norm": 0.1565217226743698, | |
| "learning_rate": 4.507042253521127e-06, | |
| "loss": 0.002, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.7624633431085046, | |
| "grad_norm": 0.3572985827922821, | |
| "learning_rate": 4.3986998916576385e-06, | |
| "loss": 0.0024, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.768328445747801, | |
| "grad_norm": 0.7106755971908569, | |
| "learning_rate": 4.29035752979415e-06, | |
| "loss": 0.0015, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.774193548387097, | |
| "grad_norm": 0.38818755745887756, | |
| "learning_rate": 4.182015167930661e-06, | |
| "loss": 0.0017, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.780058651026393, | |
| "grad_norm": 0.19889351725578308, | |
| "learning_rate": 4.0736728060671726e-06, | |
| "loss": 0.0031, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.7859237536656893, | |
| "grad_norm": 0.7225050330162048, | |
| "learning_rate": 3.965330444203684e-06, | |
| "loss": 0.0021, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.7917888563049855, | |
| "grad_norm": 0.5237097144126892, | |
| "learning_rate": 3.856988082340195e-06, | |
| "loss": 0.0016, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.7976539589442817, | |
| "grad_norm": 0.3217989504337311, | |
| "learning_rate": 3.7486457204767066e-06, | |
| "loss": 0.0037, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.803519061583578, | |
| "grad_norm": 0.3693365454673767, | |
| "learning_rate": 3.640303358613218e-06, | |
| "loss": 0.0019, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.809384164222874, | |
| "grad_norm": 0.7291551232337952, | |
| "learning_rate": 3.5319609967497294e-06, | |
| "loss": 0.0037, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.8152492668621703, | |
| "grad_norm": 0.5343610644340515, | |
| "learning_rate": 3.4236186348862406e-06, | |
| "loss": 0.0056, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.8211143695014664, | |
| "grad_norm": 0.4685341417789459, | |
| "learning_rate": 3.315276273022752e-06, | |
| "loss": 0.0037, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.8269794721407626, | |
| "grad_norm": 0.4689994752407074, | |
| "learning_rate": 3.2069339111592634e-06, | |
| "loss": 0.0016, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.832844574780059, | |
| "grad_norm": 0.27341169118881226, | |
| "learning_rate": 3.098591549295775e-06, | |
| "loss": 0.0018, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.838709677419355, | |
| "grad_norm": 0.4681690037250519, | |
| "learning_rate": 2.9902491874322862e-06, | |
| "loss": 0.0027, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.844574780058651, | |
| "grad_norm": 0.15625539422035217, | |
| "learning_rate": 2.8819068255687974e-06, | |
| "loss": 0.0014, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.8504398826979473, | |
| "grad_norm": 0.14638853073120117, | |
| "learning_rate": 2.773564463705309e-06, | |
| "loss": 0.0024, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.8563049853372435, | |
| "grad_norm": 0.3259941041469574, | |
| "learning_rate": 2.6652221018418202e-06, | |
| "loss": 0.0024, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.8621700879765397, | |
| "grad_norm": 0.2885695993900299, | |
| "learning_rate": 2.5568797399783314e-06, | |
| "loss": 0.0018, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.868035190615836, | |
| "grad_norm": 0.13875146210193634, | |
| "learning_rate": 2.448537378114843e-06, | |
| "loss": 0.0032, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.873900293255132, | |
| "grad_norm": 0.42090490460395813, | |
| "learning_rate": 2.3401950162513543e-06, | |
| "loss": 0.0022, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.8797653958944283, | |
| "grad_norm": 0.587674617767334, | |
| "learning_rate": 2.231852654387866e-06, | |
| "loss": 0.0022, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.8856304985337244, | |
| "grad_norm": 0.2683647572994232, | |
| "learning_rate": 2.123510292524377e-06, | |
| "loss": 0.0028, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.8914956011730206, | |
| "grad_norm": 0.14802850782871246, | |
| "learning_rate": 2.0151679306608883e-06, | |
| "loss": 0.0027, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.897360703812317, | |
| "grad_norm": 0.8457247018814087, | |
| "learning_rate": 1.9068255687973997e-06, | |
| "loss": 0.0016, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.903225806451613, | |
| "grad_norm": 0.6568015217781067, | |
| "learning_rate": 1.798483206933911e-06, | |
| "loss": 0.0019, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.909090909090909, | |
| "grad_norm": 0.8183419108390808, | |
| "learning_rate": 1.6901408450704225e-06, | |
| "loss": 0.0033, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.9149560117302054, | |
| "grad_norm": 0.717965841293335, | |
| "learning_rate": 1.5817984832069341e-06, | |
| "loss": 0.0019, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.9208211143695015, | |
| "grad_norm": 0.3152722120285034, | |
| "learning_rate": 1.4734561213434453e-06, | |
| "loss": 0.0012, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.9266862170087977, | |
| "grad_norm": 0.26775985956192017, | |
| "learning_rate": 1.3651137594799567e-06, | |
| "loss": 0.0021, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.932551319648094, | |
| "grad_norm": 0.44031721353530884, | |
| "learning_rate": 1.2567713976164681e-06, | |
| "loss": 0.0019, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.93841642228739, | |
| "grad_norm": 0.4502199590206146, | |
| "learning_rate": 1.1484290357529793e-06, | |
| "loss": 0.0017, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.9442815249266863, | |
| "grad_norm": 0.18106575310230255, | |
| "learning_rate": 1.040086673889491e-06, | |
| "loss": 0.0018, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.9501466275659824, | |
| "grad_norm": 1.1088088750839233, | |
| "learning_rate": 9.317443120260022e-07, | |
| "loss": 0.0024, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.9560117302052786, | |
| "grad_norm": 0.2301834672689438, | |
| "learning_rate": 8.234019501625137e-07, | |
| "loss": 0.0016, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.961876832844575, | |
| "grad_norm": 0.23097343742847443, | |
| "learning_rate": 7.15059588299025e-07, | |
| "loss": 0.0016, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.967741935483871, | |
| "grad_norm": 0.47115635871887207, | |
| "learning_rate": 6.067172264355364e-07, | |
| "loss": 0.0018, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.973607038123167, | |
| "grad_norm": 0.7237260341644287, | |
| "learning_rate": 4.983748645720477e-07, | |
| "loss": 0.0031, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.9794721407624634, | |
| "grad_norm": 0.5529869794845581, | |
| "learning_rate": 3.900325027085591e-07, | |
| "loss": 0.0025, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.9853372434017595, | |
| "grad_norm": 0.2470860332250595, | |
| "learning_rate": 2.8169014084507043e-07, | |
| "loss": 0.0023, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.9912023460410557, | |
| "grad_norm": 0.6672024726867676, | |
| "learning_rate": 1.7334777898158182e-07, | |
| "loss": 0.0014, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.997067448680352, | |
| "grad_norm": 0.777362585067749, | |
| "learning_rate": 6.500541711809318e-08, | |
| "loss": 0.0027, | |
| "step": 5110 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5115, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.0761671760758784e+16, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |