{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3028900761431997,
  "eval_steps": 500,
  "global_step": 900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00033654452904799967,
      "grad_norm": 4.331892967224121,
      "learning_rate": 0.0,
      "loss": 5.5158,
      "step": 1
    },
    {
      "epoch": 0.0006730890580959993,
      "grad_norm": 4.539519786834717,
      "learning_rate": 6.711409395973154e-07,
      "loss": 5.5718,
      "step": 2
    },
    {
      "epoch": 0.0013461781161919987,
      "grad_norm": 4.208465576171875,
      "learning_rate": 2.013422818791946e-06,
      "loss": 5.4905,
      "step": 4
    },
    {
      "epoch": 0.002019267174287998,
      "grad_norm": 3.891338586807251,
      "learning_rate": 3.3557046979865773e-06,
      "loss": 5.4511,
      "step": 6
    },
    {
      "epoch": 0.0026923562323839974,
      "grad_norm": 2.979590654373169,
      "learning_rate": 4.697986577181209e-06,
      "loss": 5.3311,
      "step": 8
    },
    {
      "epoch": 0.0033654452904799967,
      "grad_norm": 3.1792373657226562,
      "learning_rate": 6.04026845637584e-06,
      "loss": 5.3782,
      "step": 10
    },
    {
      "epoch": 0.004038534348575996,
      "grad_norm": 3.9909653663635254,
      "learning_rate": 7.382550335570471e-06,
      "loss": 5.2583,
      "step": 12
    },
    {
      "epoch": 0.004711623406671995,
      "grad_norm": 3.31044340133667,
      "learning_rate": 8.724832214765101e-06,
      "loss": 5.3198,
      "step": 14
    },
    {
      "epoch": 0.005384712464767995,
      "grad_norm": 3.3201687335968018,
      "learning_rate": 1.006711409395973e-05,
      "loss": 5.1138,
      "step": 16
    },
    {
      "epoch": 0.006057801522863994,
      "grad_norm": 2.8034451007843018,
      "learning_rate": 1.1409395973154363e-05,
      "loss": 5.0022,
      "step": 18
    },
    {
      "epoch": 0.0067308905809599934,
      "grad_norm": 2.044506072998047,
      "learning_rate": 1.2751677852348994e-05,
      "loss": 4.8879,
      "step": 20
    },
    {
      "epoch": 0.007403979639055993,
      "grad_norm": 1.6194826364517212,
      "learning_rate": 1.4093959731543624e-05,
      "loss": 4.7436,
      "step": 22
    },
    {
      "epoch": 0.008077068697151992,
      "grad_norm": 1.4383995532989502,
      "learning_rate": 1.5436241610738255e-05,
      "loss": 4.5798,
      "step": 24
    },
    {
      "epoch": 0.00875015775524799,
      "grad_norm": 1.174633264541626,
      "learning_rate": 1.6778523489932888e-05,
      "loss": 4.4188,
      "step": 26
    },
    {
      "epoch": 0.00942324681334399,
      "grad_norm": 1.1277130842208862,
      "learning_rate": 1.8120805369127517e-05,
      "loss": 4.4374,
      "step": 28
    },
    {
      "epoch": 0.01009633587143999,
      "grad_norm": 1.0426617860794067,
      "learning_rate": 1.946308724832215e-05,
      "loss": 4.2195,
      "step": 30
    },
    {
      "epoch": 0.01076942492953599,
      "grad_norm": 0.9180749654769897,
      "learning_rate": 2.080536912751678e-05,
      "loss": 4.1755,
      "step": 32
    },
    {
      "epoch": 0.011442513987631988,
      "grad_norm": 1.0104376077651978,
      "learning_rate": 2.2147651006711412e-05,
      "loss": 4.0892,
      "step": 34
    },
    {
      "epoch": 0.012115603045727988,
      "grad_norm": 0.9427777528762817,
      "learning_rate": 2.348993288590604e-05,
      "loss": 4.0498,
      "step": 36
    },
    {
      "epoch": 0.012788692103823987,
      "grad_norm": 1.0120079517364502,
      "learning_rate": 2.4832214765100674e-05,
      "loss": 3.9471,
      "step": 38
    },
    {
      "epoch": 0.013461781161919987,
      "grad_norm": 1.0842680931091309,
      "learning_rate": 2.6174496644295304e-05,
      "loss": 3.8882,
      "step": 40
    },
    {
      "epoch": 0.014134870220015985,
      "grad_norm": 1.3515102863311768,
      "learning_rate": 2.7516778523489933e-05,
      "loss": 3.8274,
      "step": 42
    },
    {
      "epoch": 0.014807959278111986,
      "grad_norm": 1.1003209352493286,
      "learning_rate": 2.885906040268457e-05,
      "loss": 3.7735,
      "step": 44
    },
    {
      "epoch": 0.015481048336207984,
      "grad_norm": 0.9302487373352051,
      "learning_rate": 3.02013422818792e-05,
      "loss": 3.731,
      "step": 46
    },
    {
      "epoch": 0.016154137394303984,
      "grad_norm": 1.0311543941497803,
      "learning_rate": 3.1543624161073825e-05,
      "loss": 3.6608,
      "step": 48
    },
    {
      "epoch": 0.016827226452399983,
      "grad_norm": 1.0927435159683228,
      "learning_rate": 3.288590604026846e-05,
      "loss": 3.5962,
      "step": 50
    },
    {
      "epoch": 0.01750031551049598,
      "grad_norm": 1.3255321979522705,
      "learning_rate": 3.422818791946309e-05,
      "loss": 3.5669,
      "step": 52
    },
    {
      "epoch": 0.018173404568591983,
      "grad_norm": 0.9304305911064148,
      "learning_rate": 3.557046979865772e-05,
      "loss": 3.5456,
      "step": 54
    },
    {
      "epoch": 0.01884649362668798,
      "grad_norm": 1.159280776977539,
      "learning_rate": 3.6912751677852356e-05,
      "loss": 3.5189,
      "step": 56
    },
    {
      "epoch": 0.01951958268478398,
      "grad_norm": 0.9376134276390076,
      "learning_rate": 3.8255033557046985e-05,
      "loss": 3.432,
      "step": 58
    },
    {
      "epoch": 0.02019267174287998,
      "grad_norm": 1.292802095413208,
      "learning_rate": 3.959731543624161e-05,
      "loss": 3.4564,
      "step": 60
    },
    {
      "epoch": 0.02086576080097598,
      "grad_norm": 1.2383852005004883,
      "learning_rate": 4.0939597315436244e-05,
      "loss": 3.4194,
      "step": 62
    },
    {
      "epoch": 0.02153884985907198,
      "grad_norm": 0.8546445369720459,
      "learning_rate": 4.228187919463087e-05,
      "loss": 3.3873,
      "step": 64
    },
    {
      "epoch": 0.022211938917167977,
      "grad_norm": 1.4104743003845215,
      "learning_rate": 4.36241610738255e-05,
      "loss": 3.3647,
      "step": 66
    },
    {
      "epoch": 0.022885027975263976,
      "grad_norm": 1.3548426628112793,
      "learning_rate": 4.496644295302014e-05,
      "loss": 3.3213,
      "step": 68
    },
    {
      "epoch": 0.023558117033359978,
      "grad_norm": 1.1530455350875854,
      "learning_rate": 4.630872483221477e-05,
      "loss": 3.2789,
      "step": 70
    },
    {
      "epoch": 0.024231206091455976,
      "grad_norm": 1.3092457056045532,
      "learning_rate": 4.76510067114094e-05,
      "loss": 3.2922,
      "step": 72
    },
    {
      "epoch": 0.024904295149551975,
      "grad_norm": 1.577699065208435,
      "learning_rate": 4.8993288590604034e-05,
      "loss": 3.2308,
      "step": 74
    },
    {
      "epoch": 0.025577384207647973,
      "grad_norm": 1.3348486423492432,
      "learning_rate": 5.033557046979866e-05,
      "loss": 3.2656,
      "step": 76
    },
    {
      "epoch": 0.026250473265743975,
      "grad_norm": 1.1966625452041626,
      "learning_rate": 5.167785234899329e-05,
      "loss": 3.2703,
      "step": 78
    },
    {
      "epoch": 0.026923562323839974,
      "grad_norm": 1.3125278949737549,
      "learning_rate": 5.302013422818792e-05,
      "loss": 3.2034,
      "step": 80
    },
    {
      "epoch": 0.027596651381935972,
      "grad_norm": 1.1957862377166748,
      "learning_rate": 5.436241610738255e-05,
      "loss": 3.2476,
      "step": 82
    },
    {
      "epoch": 0.02826974044003197,
      "grad_norm": 1.2177337408065796,
      "learning_rate": 5.570469798657718e-05,
      "loss": 3.2166,
      "step": 84
    },
    {
      "epoch": 0.028942829498127973,
      "grad_norm": 0.8806389570236206,
      "learning_rate": 5.704697986577181e-05,
      "loss": 3.1722,
      "step": 86
    },
    {
      "epoch": 0.02961591855622397,
      "grad_norm": 1.8180561065673828,
      "learning_rate": 5.838926174496645e-05,
      "loss": 3.1597,
      "step": 88
    },
    {
      "epoch": 0.03028900761431997,
      "grad_norm": 1.1676297187805176,
      "learning_rate": 5.973154362416108e-05,
      "loss": 3.1829,
      "step": 90
    },
    {
      "epoch": 0.030962096672415968,
      "grad_norm": 1.0163198709487915,
      "learning_rate": 6.107382550335571e-05,
      "loss": 3.1643,
      "step": 92
    },
    {
      "epoch": 0.03163518573051197,
      "grad_norm": 1.0734015703201294,
      "learning_rate": 6.241610738255034e-05,
      "loss": 3.1692,
      "step": 94
    },
    {
      "epoch": 0.03230827478860797,
      "grad_norm": 1.4144916534423828,
      "learning_rate": 6.375838926174497e-05,
      "loss": 3.1217,
      "step": 96
    },
    {
      "epoch": 0.03298136384670397,
      "grad_norm": 1.5647915601730347,
      "learning_rate": 6.51006711409396e-05,
      "loss": 3.1324,
      "step": 98
    },
    {
      "epoch": 0.033654452904799965,
      "grad_norm": 1.1999105215072632,
      "learning_rate": 6.644295302013423e-05,
      "loss": 3.1103,
      "step": 100
    },
    {
      "epoch": 0.03432754196289597,
      "grad_norm": 1.512838363647461,
      "learning_rate": 6.778523489932886e-05,
      "loss": 3.1036,
      "step": 102
    },
    {
      "epoch": 0.03500063102099196,
      "grad_norm": 1.1092990636825562,
      "learning_rate": 6.912751677852349e-05,
      "loss": 3.1244,
      "step": 104
    },
    {
      "epoch": 0.035673720079087964,
      "grad_norm": 1.2763620615005493,
      "learning_rate": 7.046979865771812e-05,
      "loss": 3.0989,
      "step": 106
    },
    {
      "epoch": 0.036346809137183966,
      "grad_norm": 1.2328648567199707,
      "learning_rate": 7.181208053691275e-05,
      "loss": 3.0506,
      "step": 108
    },
    {
      "epoch": 0.03701989819527996,
      "grad_norm": 1.6039047241210938,
      "learning_rate": 7.315436241610739e-05,
      "loss": 3.0589,
      "step": 110
    },
    {
      "epoch": 0.03769298725337596,
      "grad_norm": 1.7161307334899902,
      "learning_rate": 7.449664429530202e-05,
      "loss": 3.0296,
      "step": 112
    },
    {
      "epoch": 0.03836607631147196,
      "grad_norm": 1.2628991603851318,
      "learning_rate": 7.583892617449665e-05,
      "loss": 3.063,
      "step": 114
    },
    {
      "epoch": 0.03903916536956796,
      "grad_norm": 1.1414180994033813,
      "learning_rate": 7.718120805369128e-05,
      "loss": 3.0366,
      "step": 116
    },
    {
      "epoch": 0.03971225442766396,
      "grad_norm": 1.5152932405471802,
      "learning_rate": 7.852348993288591e-05,
      "loss": 3.0241,
      "step": 118
    },
    {
      "epoch": 0.04038534348575996,
      "grad_norm": 1.6119567155838013,
      "learning_rate": 7.986577181208054e-05,
      "loss": 3.0139,
      "step": 120
    },
    {
      "epoch": 0.04105843254385596,
      "grad_norm": 1.6078091859817505,
      "learning_rate": 8.120805369127518e-05,
      "loss": 3.0288,
      "step": 122
    },
    {
      "epoch": 0.04173152160195196,
      "grad_norm": 1.1092705726623535,
      "learning_rate": 8.255033557046981e-05,
      "loss": 2.9996,
      "step": 124
    },
    {
      "epoch": 0.042404610660047956,
      "grad_norm": 1.2352242469787598,
      "learning_rate": 8.389261744966444e-05,
      "loss": 2.9872,
      "step": 126
    },
    {
      "epoch": 0.04307769971814396,
      "grad_norm": 1.636400580406189,
      "learning_rate": 8.523489932885907e-05,
      "loss": 2.9814,
      "step": 128
    },
    {
      "epoch": 0.04375078877623995,
      "grad_norm": 1.4877128601074219,
      "learning_rate": 8.65771812080537e-05,
      "loss": 2.9756,
      "step": 130
    },
    {
      "epoch": 0.044423877834335955,
      "grad_norm": 1.2983709573745728,
      "learning_rate": 8.791946308724833e-05,
      "loss": 2.9756,
      "step": 132
    },
    {
      "epoch": 0.04509696689243196,
      "grad_norm": 1.7350983619689941,
      "learning_rate": 8.926174496644296e-05,
      "loss": 2.9579,
      "step": 134
    },
    {
      "epoch": 0.04577005595052795,
      "grad_norm": 0.978854775428772,
      "learning_rate": 9.060402684563759e-05,
      "loss": 2.9269,
      "step": 136
    },
    {
      "epoch": 0.046443145008623954,
      "grad_norm": 1.362163782119751,
      "learning_rate": 9.194630872483221e-05,
      "loss": 2.99,
      "step": 138
    },
    {
      "epoch": 0.047116234066719956,
      "grad_norm": 1.328202247619629,
      "learning_rate": 9.328859060402684e-05,
      "loss": 2.966,
      "step": 140
    },
    {
      "epoch": 0.04778932312481595,
      "grad_norm": 1.2497445344924927,
      "learning_rate": 9.463087248322147e-05,
      "loss": 2.9254,
      "step": 142
    },
    {
      "epoch": 0.04846241218291195,
      "grad_norm": 1.071092128753662,
      "learning_rate": 9.59731543624161e-05,
      "loss": 2.9597,
      "step": 144
    },
    {
      "epoch": 0.04913550124100795,
      "grad_norm": 1.451729416847229,
      "learning_rate": 9.731543624161075e-05,
      "loss": 2.9605,
      "step": 146
    },
    {
      "epoch": 0.04980859029910395,
      "grad_norm": 1.328731656074524,
      "learning_rate": 9.865771812080538e-05,
      "loss": 2.9493,
      "step": 148
    },
    {
      "epoch": 0.05048167935719995,
      "grad_norm": 1.4675222635269165,
      "learning_rate": 0.0001,
      "loss": 2.9298,
      "step": 150
    },
    {
      "epoch": 0.05115476841529595,
      "grad_norm": 1.208961009979248,
      "learning_rate": 9.99998761551904e-05,
      "loss": 2.9382,
      "step": 152
    },
    {
      "epoch": 0.05182785747339195,
      "grad_norm": 1.0392056703567505,
      "learning_rate": 9.999950462137508e-05,
      "loss": 2.8829,
      "step": 154
    },
    {
      "epoch": 0.05250094653148795,
      "grad_norm": 1.378711462020874,
      "learning_rate": 9.999888540039458e-05,
      "loss": 2.9041,
      "step": 156
    },
    {
      "epoch": 0.053174035589583946,
      "grad_norm": 1.2687252759933472,
      "learning_rate": 9.999801849531635e-05,
      "loss": 2.9148,
      "step": 158
    },
    {
      "epoch": 0.05384712464767995,
      "grad_norm": 1.2382102012634277,
      "learning_rate": 9.999690391043487e-05,
      "loss": 2.9107,
      "step": 160
    },
    {
      "epoch": 0.05452021370577594,
      "grad_norm": 1.0215928554534912,
      "learning_rate": 9.999554165127159e-05,
      "loss": 2.9187,
      "step": 162
    },
    {
      "epoch": 0.055193302763871945,
      "grad_norm": 1.2638540267944336,
      "learning_rate": 9.99939317245748e-05,
      "loss": 2.9261,
      "step": 164
    },
    {
      "epoch": 0.055866391821967946,
      "grad_norm": 1.3826959133148193,
      "learning_rate": 9.999207413831982e-05,
      "loss": 2.8944,
      "step": 166
    },
    {
      "epoch": 0.05653948088006394,
      "grad_norm": 1.0764875411987305,
      "learning_rate": 9.998996890170867e-05,
      "loss": 2.9031,
      "step": 168
    },
    {
      "epoch": 0.05721256993815994,
      "grad_norm": 1.5163936614990234,
      "learning_rate": 9.99876160251703e-05,
      "loss": 2.8687,
      "step": 170
    },
    {
      "epoch": 0.057885658996255945,
      "grad_norm": 1.5821291208267212,
      "learning_rate": 9.998501552036037e-05,
      "loss": 2.8828,
      "step": 172
    },
    {
      "epoch": 0.05855874805435194,
      "grad_norm": 1.4572120904922485,
      "learning_rate": 9.998216740016124e-05,
      "loss": 2.8898,
      "step": 174
    },
    {
      "epoch": 0.05923183711244794,
      "grad_norm": 1.2185218334197998,
      "learning_rate": 9.99790716786819e-05,
      "loss": 2.8456,
      "step": 176
    },
    {
      "epoch": 0.05990492617054394,
      "grad_norm": 1.106446385383606,
      "learning_rate": 9.99757283712579e-05,
      "loss": 2.8793,
      "step": 178
    },
    {
      "epoch": 0.06057801522863994,
      "grad_norm": 1.1448893547058105,
      "learning_rate": 9.997213749445129e-05,
      "loss": 2.8579,
      "step": 180
    },
    {
      "epoch": 0.06125110428673594,
      "grad_norm": 1.1587834358215332,
      "learning_rate": 9.996829906605056e-05,
      "loss": 2.8839,
      "step": 182
    },
    {
      "epoch": 0.061924193344831936,
      "grad_norm": 1.0969592332839966,
      "learning_rate": 9.996421310507046e-05,
      "loss": 2.8638,
      "step": 184
    },
    {
      "epoch": 0.06259728240292793,
      "grad_norm": 0.9740116000175476,
      "learning_rate": 9.9959879631752e-05,
      "loss": 2.8455,
      "step": 186
    },
    {
      "epoch": 0.06327037146102393,
      "grad_norm": 1.2307910919189453,
      "learning_rate": 9.995529866756231e-05,
      "loss": 2.8534,
      "step": 188
    },
    {
      "epoch": 0.06394346051911994,
      "grad_norm": 1.5021939277648926,
      "learning_rate": 9.995047023519452e-05,
      "loss": 2.8469,
      "step": 190
    },
    {
      "epoch": 0.06461654957721594,
      "grad_norm": 1.1044224500656128,
      "learning_rate": 9.994539435856771e-05,
      "loss": 2.8429,
      "step": 192
    },
    {
      "epoch": 0.06528963863531194,
      "grad_norm": 1.4586883783340454,
      "learning_rate": 9.99400710628267e-05,
      "loss": 2.836,
      "step": 194
    },
    {
      "epoch": 0.06596272769340794,
      "grad_norm": 1.2613426446914673,
      "learning_rate": 9.993450037434199e-05,
      "loss": 2.8243,
      "step": 196
    },
    {
      "epoch": 0.06663581675150393,
      "grad_norm": 1.0347422361373901,
      "learning_rate": 9.992868232070963e-05,
      "loss": 2.7965,
      "step": 198
    },
    {
      "epoch": 0.06730890580959993,
      "grad_norm": 2.1357574462890625,
      "learning_rate": 9.992261693075103e-05,
      "loss": 2.8486,
      "step": 200
    },
    {
      "epoch": 0.06798199486769593,
      "grad_norm": 1.0357908010482788,
      "learning_rate": 9.991630423451286e-05,
      "loss": 2.8386,
      "step": 202
    },
    {
      "epoch": 0.06865508392579193,
      "grad_norm": 1.1383159160614014,
      "learning_rate": 9.990974426326696e-05,
      "loss": 2.7874,
      "step": 204
    },
    {
      "epoch": 0.06932817298388794,
      "grad_norm": 0.8452678322792053,
      "learning_rate": 9.990293704951001e-05,
      "loss": 2.786,
      "step": 206
    },
    {
      "epoch": 0.07000126204198392,
      "grad_norm": 0.9482727646827698,
      "learning_rate": 9.989588262696357e-05,
      "loss": 2.8156,
      "step": 208
    },
    {
      "epoch": 0.07067435110007993,
      "grad_norm": 0.8251766562461853,
      "learning_rate": 9.988858103057378e-05,
      "loss": 2.7588,
      "step": 210
    },
    {
      "epoch": 0.07134744015817593,
      "grad_norm": 1.211065649986267,
      "learning_rate": 9.988103229651121e-05,
      "loss": 2.7623,
      "step": 212
    },
    {
      "epoch": 0.07202052921627193,
      "grad_norm": 0.8990377187728882,
      "learning_rate": 9.987323646217075e-05,
      "loss": 2.8164,
      "step": 214
    },
    {
      "epoch": 0.07269361827436793,
      "grad_norm": 0.9878025054931641,
      "learning_rate": 9.986519356617132e-05,
      "loss": 2.7847,
      "step": 216
    },
    {
      "epoch": 0.07336670733246392,
      "grad_norm": 0.7551445364952087,
      "learning_rate": 9.985690364835576e-05,
      "loss": 2.8111,
      "step": 218
    },
    {
      "epoch": 0.07403979639055992,
      "grad_norm": 0.9582260251045227,
      "learning_rate": 9.984836674979062e-05,
      "loss": 2.793,
      "step": 220
    },
    {
      "epoch": 0.07471288544865592,
      "grad_norm": 0.8087739944458008,
      "learning_rate": 9.983958291276591e-05,
      "loss": 2.7464,
      "step": 222
    },
    {
      "epoch": 0.07538597450675193,
      "grad_norm": 1.2373522520065308,
      "learning_rate": 9.983055218079493e-05,
      "loss": 2.7656,
      "step": 224
    },
    {
      "epoch": 0.07605906356484793,
      "grad_norm": 0.9746289849281311,
      "learning_rate": 9.982127459861408e-05,
      "loss": 2.7765,
      "step": 226
    },
    {
      "epoch": 0.07673215262294392,
      "grad_norm": 0.6946307420730591,
      "learning_rate": 9.981175021218255e-05,
      "loss": 2.7491,
      "step": 228
    },
    {
      "epoch": 0.07740524168103992,
      "grad_norm": 0.8959107398986816,
      "learning_rate": 9.980197906868215e-05,
      "loss": 2.7565,
      "step": 230
    },
    {
      "epoch": 0.07807833073913592,
      "grad_norm": 0.9889335036277771,
      "learning_rate": 9.979196121651716e-05,
      "loss": 2.7974,
      "step": 232
    },
    {
      "epoch": 0.07875141979723192,
      "grad_norm": 0.9802746176719666,
      "learning_rate": 9.978169670531388e-05,
      "loss": 2.7772,
      "step": 234
    },
    {
      "epoch": 0.07942450885532792,
      "grad_norm": 0.6934760808944702,
      "learning_rate": 9.977118558592059e-05,
      "loss": 2.7602,
      "step": 236
    },
    {
      "epoch": 0.08009759791342393,
      "grad_norm": 0.8996357917785645,
      "learning_rate": 9.97604279104072e-05,
      "loss": 2.7669,
      "step": 238
    },
    {
      "epoch": 0.08077068697151991,
      "grad_norm": 0.8844061493873596,
      "learning_rate": 9.974942373206499e-05,
      "loss": 2.7458,
      "step": 240
    },
    {
      "epoch": 0.08144377602961592,
      "grad_norm": 1.023626685142517,
      "learning_rate": 9.973817310540638e-05,
      "loss": 2.7639,
      "step": 242
    },
    {
      "epoch": 0.08211686508771192,
      "grad_norm": 0.8241132497787476,
      "learning_rate": 9.972667608616466e-05,
      "loss": 2.7457,
      "step": 244
    },
    {
      "epoch": 0.08278995414580792,
      "grad_norm": 0.7864794135093689,
      "learning_rate": 9.971493273129364e-05,
      "loss": 2.763,
      "step": 246
    },
    {
      "epoch": 0.08346304320390392,
      "grad_norm": 1.071751356124878,
      "learning_rate": 9.970294309896747e-05,
      "loss": 2.7347,
      "step": 248
    },
    {
      "epoch": 0.08413613226199991,
      "grad_norm": 0.8978875279426575,
      "learning_rate": 9.969070724858031e-05,
      "loss": 2.7807,
      "step": 250
    },
    {
      "epoch": 0.08480922132009591,
      "grad_norm": 0.9984204769134521,
      "learning_rate": 9.967822524074602e-05,
      "loss": 2.7399,
      "step": 252
    },
    {
      "epoch": 0.08548231037819191,
      "grad_norm": 0.8611739873886108,
      "learning_rate": 9.966549713729787e-05,
      "loss": 2.753,
      "step": 254
    },
    {
      "epoch": 0.08615539943628792,
      "grad_norm": 0.8647720217704773,
      "learning_rate": 9.965252300128826e-05,
      "loss": 2.7224,
      "step": 256
    },
    {
      "epoch": 0.08682848849438392,
      "grad_norm": 0.8688477873802185,
      "learning_rate": 9.963930289698833e-05,
      "loss": 2.6879,
      "step": 258
    },
    {
      "epoch": 0.0875015775524799,
      "grad_norm": 1.1445469856262207,
      "learning_rate": 9.962583688988778e-05,
      "loss": 2.739,
      "step": 260
    },
    {
      "epoch": 0.08817466661057591,
      "grad_norm": 0.8668599128723145,
      "learning_rate": 9.961212504669437e-05,
      "loss": 2.6962,
      "step": 262
    },
    {
      "epoch": 0.08884775566867191,
      "grad_norm": 0.905125617980957,
      "learning_rate": 9.959816743533375e-05,
      "loss": 2.7239,
      "step": 264
    },
    {
      "epoch": 0.08952084472676791,
      "grad_norm": 0.8252028822898865,
      "learning_rate": 9.958396412494901e-05,
      "loss": 2.7381,
      "step": 266
    },
    {
      "epoch": 0.09019393378486391,
      "grad_norm": 0.7380514740943909,
      "learning_rate": 9.956951518590043e-05,
      "loss": 2.7135,
      "step": 268
    },
    {
      "epoch": 0.09086702284295992,
      "grad_norm": 0.7395239472389221,
      "learning_rate": 9.955482068976502e-05,
      "loss": 2.6954,
      "step": 270
    },
    {
      "epoch": 0.0915401119010559,
      "grad_norm": 0.6564229726791382,
      "learning_rate": 9.953988070933628e-05,
      "loss": 2.7145,
      "step": 272
    },
    {
      "epoch": 0.0922132009591519,
      "grad_norm": 0.7306910157203674,
      "learning_rate": 9.952469531862378e-05,
      "loss": 2.6951,
      "step": 274
    },
    {
      "epoch": 0.09288629001724791,
      "grad_norm": 0.6810031533241272,
      "learning_rate": 9.950926459285277e-05,
      "loss": 2.7201,
      "step": 276
    },
    {
      "epoch": 0.09355937907534391,
      "grad_norm": 0.6724168658256531,
      "learning_rate": 9.949358860846388e-05,
      "loss": 2.7112,
      "step": 278
    },
    {
      "epoch": 0.09423246813343991,
      "grad_norm": 0.7065703272819519,
      "learning_rate": 9.947766744311268e-05,
      "loss": 2.6884,
      "step": 280
    },
    {
      "epoch": 0.0949055571915359,
      "grad_norm": 0.8231908679008484,
      "learning_rate": 9.946150117566931e-05,
      "loss": 2.7286,
      "step": 282
    },
    {
      "epoch": 0.0955786462496319,
      "grad_norm": 0.9570270776748657,
      "learning_rate": 9.944508988621812e-05,
      "loss": 2.7166,
      "step": 284
    },
    {
      "epoch": 0.0962517353077279,
      "grad_norm": 0.9357023239135742,
      "learning_rate": 9.94284336560572e-05,
      "loss": 2.6768,
      "step": 286
    },
    {
      "epoch": 0.0969248243658239,
      "grad_norm": 0.6350796222686768,
      "learning_rate": 9.941153256769809e-05,
      "loss": 2.6921,
      "step": 288
    },
    {
      "epoch": 0.09759791342391991,
      "grad_norm": 0.6700872778892517,
      "learning_rate": 9.939438670486525e-05,
      "loss": 2.6847,
      "step": 290
    },
    {
      "epoch": 0.0982710024820159,
      "grad_norm": 0.6851752400398254,
      "learning_rate": 9.937699615249572e-05,
      "loss": 2.6586,
      "step": 292
    },
    {
      "epoch": 0.0989440915401119,
      "grad_norm": 0.7098946571350098,
      "learning_rate": 9.935936099673871e-05,
      "loss": 2.6793,
      "step": 294
    },
    {
      "epoch": 0.0996171805982079,
      "grad_norm": 0.680543839931488,
      "learning_rate": 9.934148132495511e-05,
      "loss": 2.6763,
      "step": 296
    },
    {
      "epoch": 0.1002902696563039,
      "grad_norm": 0.6832155585289001,
      "learning_rate": 9.932335722571709e-05,
      "loss": 2.6768,
      "step": 298
    },
    {
      "epoch": 0.1009633587143999,
      "grad_norm": 0.8236553072929382,
      "learning_rate": 9.930498878880768e-05,
      "loss": 2.6738,
      "step": 300
    },
    {
      "epoch": 0.10163644777249589,
      "grad_norm": 1.2765145301818848,
      "learning_rate": 9.928637610522032e-05,
      "loss": 2.6575,
      "step": 302
    },
    {
      "epoch": 0.1023095368305919,
      "grad_norm": 0.8765101432800293,
      "learning_rate": 9.926751926715836e-05,
      "loss": 2.6521,
      "step": 304
    },
    {
      "epoch": 0.1029826258886879,
      "grad_norm": 0.7371405959129333,
      "learning_rate": 9.924841836803467e-05,
      "loss": 2.6916,
      "step": 306
    },
    {
      "epoch": 0.1036557149467839,
      "grad_norm": 0.8433207273483276,
      "learning_rate": 9.922907350247115e-05,
      "loss": 2.6605,
      "step": 308
    },
    {
      "epoch": 0.1043288040048799,
      "grad_norm": 0.7216758131980896,
      "learning_rate": 9.920948476629821e-05,
      "loss": 2.6686,
      "step": 310
    },
    {
      "epoch": 0.1050018930629759,
      "grad_norm": 0.7831395864486694,
      "learning_rate": 9.918965225655442e-05,
      "loss": 2.6783,
      "step": 312
    },
    {
      "epoch": 0.10567498212107189,
      "grad_norm": 0.5831722617149353,
      "learning_rate": 9.916957607148591e-05,
      "loss": 2.6633,
      "step": 314
    },
    {
      "epoch": 0.10634807117916789,
      "grad_norm": 0.6143165826797485,
      "learning_rate": 9.91492563105459e-05,
      "loss": 2.6785,
      "step": 316
    },
    {
      "epoch": 0.1070211602372639,
      "grad_norm": 0.8080072999000549,
      "learning_rate": 9.912869307439432e-05,
      "loss": 2.672,
      "step": 318
    },
    {
      "epoch": 0.1076942492953599,
      "grad_norm": 1.0352627038955688,
      "learning_rate": 9.910788646489713e-05,
      "loss": 2.6474,
      "step": 320
    },
    {
      "epoch": 0.1083673383534559,
      "grad_norm": 0.6871941685676575,
      "learning_rate": 9.908683658512597e-05,
      "loss": 2.6387,
      "step": 322
    },
    {
      "epoch": 0.10904042741155189,
      "grad_norm": 0.7802032828330994,
      "learning_rate": 9.906554353935758e-05,
      "loss": 2.6584,
      "step": 324
    },
    {
      "epoch": 0.10971351646964789,
      "grad_norm": 0.7139161825180054,
      "learning_rate": 9.904400743307326e-05,
      "loss": 2.6728,
      "step": 326
    },
    {
      "epoch": 0.11038660552774389,
      "grad_norm": 1.0033543109893799,
      "learning_rate": 9.902222837295844e-05,
      "loss": 2.666,
      "step": 328
    },
    {
      "epoch": 0.11105969458583989,
      "grad_norm": 1.0726121664047241,
      "learning_rate": 9.900020646690205e-05,
      "loss": 2.6566,
      "step": 330
    },
    {
      "epoch": 0.11173278364393589,
      "grad_norm": 0.8290823698043823,
      "learning_rate": 9.897794182399606e-05,
      "loss": 2.6592,
      "step": 332
    },
    {
      "epoch": 0.11240587270203188,
      "grad_norm": 0.7432393431663513,
      "learning_rate": 9.895543455453486e-05,
      "loss": 2.6604,
      "step": 334
    },
    {
      "epoch": 0.11307896176012788,
      "grad_norm": 0.6097532510757446,
      "learning_rate": 9.893268477001479e-05,
      "loss": 2.6613,
      "step": 336
    },
    {
      "epoch": 0.11375205081822388,
      "grad_norm": 0.6818569898605347,
      "learning_rate": 9.890969258313358e-05,
      "loss": 2.6238,
      "step": 338
    },
    {
      "epoch": 0.11442513987631989,
      "grad_norm": 0.6779484748840332,
      "learning_rate": 9.888645810778974e-05,
      "loss": 2.6512,
      "step": 340
    },
    {
      "epoch": 0.11509822893441589,
      "grad_norm": 0.697517454624176,
      "learning_rate": 9.886298145908203e-05,
      "loss": 2.6278,
      "step": 342
    },
    {
      "epoch": 0.11577131799251189,
      "grad_norm": 0.700210452079773,
      "learning_rate": 9.88392627533089e-05,
      "loss": 2.6592,
      "step": 344
    },
    {
      "epoch": 0.11644440705060788,
      "grad_norm": 0.6507501602172852,
      "learning_rate": 9.88153021079679e-05,
      "loss": 2.6682,
      "step": 346
    },
    {
      "epoch": 0.11711749610870388,
      "grad_norm": 0.7211961150169373,
      "learning_rate": 9.87910996417551e-05,
      "loss": 2.6919,
      "step": 348
    },
    {
      "epoch": 0.11779058516679988,
      "grad_norm": 0.7282651662826538,
      "learning_rate": 9.876665547456446e-05,
      "loss": 2.6148,
      "step": 350
    },
    {
      "epoch": 0.11846367422489588,
      "grad_norm": 0.6177457571029663,
      "learning_rate": 9.874196972748735e-05,
      "loss": 2.6072,
      "step": 352
    },
    {
      "epoch": 0.11913676328299189,
      "grad_norm": 0.5497561693191528,
      "learning_rate": 9.871704252281179e-05,
      "loss": 2.6493,
      "step": 354
    },
    {
      "epoch": 0.11980985234108787,
      "grad_norm": 0.6414313912391663,
      "learning_rate": 9.869187398402202e-05,
      "loss": 2.6505,
      "step": 356
    },
    {
      "epoch": 0.12048294139918388,
      "grad_norm": 0.6123400330543518,
      "learning_rate": 9.866646423579773e-05,
      "loss": 2.6168,
      "step": 358
    },
    {
      "epoch": 0.12115603045727988,
      "grad_norm": 0.5811213850975037,
      "learning_rate": 9.864081340401354e-05,
      "loss": 2.6367,
      "step": 360
    },
    {
      "epoch": 0.12182911951537588,
      "grad_norm": 0.7445274591445923,
      "learning_rate": 9.861492161573837e-05,
      "loss": 2.6288,
      "step": 362
    },
    {
      "epoch": 0.12250220857347188,
      "grad_norm": 0.9515259861946106,
      "learning_rate": 9.858878899923473e-05,
      "loss": 2.6458,
      "step": 364
    },
    {
      "epoch": 0.12317529763156787,
      "grad_norm": 0.7661240696907043,
      "learning_rate": 9.856241568395818e-05,
      "loss": 2.6289,
      "step": 366
    },
    {
      "epoch": 0.12384838668966387,
      "grad_norm": 0.704278826713562,
      "learning_rate": 9.853580180055669e-05,
      "loss": 2.6043,
      "step": 368
    },
    {
      "epoch": 0.12452147574775987,
      "grad_norm": 0.6069918274879456,
      "learning_rate": 9.850894748086986e-05,
      "loss": 2.6193,
      "step": 370
    },
    {
      "epoch": 0.12519456480585586,
      "grad_norm": 0.8171132802963257,
      "learning_rate": 9.848185285792844e-05,
      "loss": 2.6482,
      "step": 372
    },
    {
      "epoch": 0.12586765386395188,
      "grad_norm": 0.7318093776702881,
      "learning_rate": 9.845451806595354e-05,
      "loss": 2.6285,
      "step": 374
    },
    {
      "epoch": 0.12654074292204787,
      "grad_norm": 0.7122377157211304,
      "learning_rate": 9.84269432403561e-05,
      "loss": 2.6147,
      "step": 376
    },
    {
      "epoch": 0.12721383198014388,
      "grad_norm": 0.6400437355041504,
      "learning_rate": 9.839912851773602e-05,
      "loss": 2.576,
      "step": 378
    },
    {
      "epoch": 0.12788692103823987,
      "grad_norm": 0.6208680868148804,
      "learning_rate": 9.837107403588171e-05,
      "loss": 2.6056,
      "step": 380
    },
    {
      "epoch": 0.12856001009633586,
      "grad_norm": 0.6690713167190552,
      "learning_rate": 9.834277993376924e-05,
      "loss": 2.5884,
      "step": 382
    },
    {
      "epoch": 0.12923309915443187,
      "grad_norm": 0.613250732421875,
      "learning_rate": 9.831424635156168e-05,
      "loss": 2.5824,
      "step": 384
    },
    {
      "epoch": 0.12990618821252786,
      "grad_norm": 0.6921446323394775,
      "learning_rate": 9.82854734306085e-05,
      "loss": 2.6239,
      "step": 386
    },
    {
      "epoch": 0.13057927727062388,
      "grad_norm": 0.6627881526947021,
      "learning_rate": 9.825646131344478e-05,
      "loss": 2.5859,
      "step": 388
    },
    {
      "epoch": 0.13125236632871987,
      "grad_norm": 0.7399138808250427,
      "learning_rate": 9.822721014379052e-05,
      "loss": 2.6492,
      "step": 390
    },
    {
      "epoch": 0.13192545538681588,
      "grad_norm": 0.633929967880249,
      "learning_rate": 9.819772006654993e-05,
      "loss": 2.5935,
      "step": 392
    },
    {
      "epoch": 0.13259854444491187,
      "grad_norm": 0.6434243321418762,
      "learning_rate": 9.816799122781075e-05,
      "loss": 2.6231,
      "step": 394
    },
    {
      "epoch": 0.13327163350300786,
      "grad_norm": 0.6136692762374878,
      "learning_rate": 9.813802377484346e-05,
      "loss": 2.5803,
      "step": 396
    },
    {
      "epoch": 0.13394472256110387,
      "grad_norm": 0.6683850288391113,
      "learning_rate": 9.810781785610062e-05,
      "loss": 2.5854,
      "step": 398
    },
    {
      "epoch": 0.13461781161919986,
      "grad_norm": 0.705646276473999,
      "learning_rate": 9.807737362121605e-05,
      "loss": 2.5954,
      "step": 400
    },
    {
      "epoch": 0.13529090067729588,
      "grad_norm": 0.7973448038101196,
      "learning_rate": 9.80466912210042e-05,
      "loss": 2.5824,
      "step": 402
    },
    {
      "epoch": 0.13596398973539187,
      "grad_norm": 0.7568982839584351,
      "learning_rate": 9.801577080745929e-05,
      "loss": 2.5875,
      "step": 404
    },
    {
      "epoch": 0.13663707879348785,
      "grad_norm": 0.6286936402320862,
      "learning_rate": 9.798461253375462e-05,
      "loss": 2.6047,
      "step": 406
    },
    {
      "epoch": 0.13731016785158387,
      "grad_norm": 0.6657843589782715,
      "learning_rate": 9.795321655424184e-05,
      "loss": 2.6066,
      "step": 408
    },
    {
      "epoch": 0.13798325690967986,
      "grad_norm": 0.6954363584518433,
      "learning_rate": 9.792158302445009e-05,
      "loss": 2.5885,
      "step": 410
    },
    {
      "epoch": 0.13865634596777587,
      "grad_norm": 0.9607271552085876,
      "learning_rate": 9.788971210108533e-05,
      "loss": 2.6212,
      "step": 412
    },
    {
      "epoch": 0.13932943502587186,
      "grad_norm": 0.9636927843093872,
      "learning_rate": 9.785760394202948e-05,
      "loss": 2.5552,
      "step": 414
    },
    {
      "epoch": 0.14000252408396785,
      "grad_norm": 0.6435789465904236,
      "learning_rate": 9.782525870633967e-05,
      "loss": 2.6199,
      "step": 416
    },
    {
      "epoch": 0.14067561314206387,
      "grad_norm": 0.7337214350700378,
      "learning_rate": 9.779267655424754e-05,
      "loss": 2.5683,
      "step": 418
    },
    {
      "epoch": 0.14134870220015985,
      "grad_norm": 0.6656831502914429,
      "learning_rate": 9.775985764715827e-05,
      "loss": 2.5735,
      "step": 420
    },
    {
      "epoch": 0.14202179125825587,
      "grad_norm": 0.6302557587623596,
      "learning_rate": 9.772680214764992e-05,
      "loss": 2.5752,
      "step": 422
    },
    {
      "epoch": 0.14269488031635186,
      "grad_norm": 0.7694079279899597,
      "learning_rate": 9.769351021947258e-05,
      "loss": 2.5742,
      "step": 424
    },
    {
      "epoch": 0.14336796937444785,
      "grad_norm": 0.743331253528595,
      "learning_rate": 9.765998202754753e-05,
      "loss": 2.5968,
      "step": 426
    },
    {
      "epoch": 0.14404105843254386,
      "grad_norm": 0.7328934669494629,
      "learning_rate": 9.762621773796647e-05,
      "loss": 2.595,
      "step": 428
    },
    {
      "epoch": 0.14471414749063985,
      "grad_norm": 0.786054253578186,
      "learning_rate": 9.759221751799072e-05,
      "loss": 2.6048,
      "step": 430
    },
    {
      "epoch": 0.14538723654873587,
      "grad_norm": 0.7093940377235413,
      "learning_rate": 9.755798153605027e-05,
      "loss": 2.5766,
      "step": 432
    },
    {
      "epoch": 0.14606032560683185,
      "grad_norm": 0.6153858304023743,
      "learning_rate": 9.752350996174308e-05,
      "loss": 2.5598,
      "step": 434
    },
    {
      "epoch": 0.14673341466492784,
      "grad_norm": 0.7648425102233887,
      "learning_rate": 9.748880296583418e-05,
      "loss": 2.5768,
      "step": 436
    },
    {
      "epoch": 0.14740650372302386,
      "grad_norm": 0.777052640914917,
      "learning_rate": 9.745386072025481e-05,
      "loss": 2.5571,
      "step": 438
    },
    {
      "epoch": 0.14807959278111985,
      "grad_norm": 0.6382849812507629,
      "learning_rate": 9.741868339810161e-05,
      "loss": 2.564,
      "step": 440
    },
    {
      "epoch": 0.14875268183921586,
      "grad_norm": 0.6427432298660278,
      "learning_rate": 9.738327117363571e-05,
      "loss": 2.5625,
      "step": 442
    },
    {
      "epoch": 0.14942577089731185,
      "grad_norm": 0.6348729133605957,
      "learning_rate": 9.734762422228194e-05,
      "loss": 2.5622,
      "step": 444
    },
    {
      "epoch": 0.15009885995540784,
      "grad_norm": 0.7136998176574707,
      "learning_rate": 9.731174272062791e-05,
      "loss": 2.5386,
      "step": 446
    },
    {
      "epoch": 0.15077194901350385,
      "grad_norm": 0.7903639078140259,
      "learning_rate": 9.727562684642308e-05,
      "loss": 2.5593,
      "step": 448
    },
    {
      "epoch": 0.15144503807159984,
      "grad_norm": 0.6537742018699646,
      "learning_rate": 9.723927677857805e-05,
      "loss": 2.5373,
      "step": 450
    },
    {
      "epoch": 0.15211812712969586,
      "grad_norm": 0.6430116295814514,
      "learning_rate": 9.720269269716347e-05,
      "loss": 2.583,
      "step": 452
    },
    {
      "epoch": 0.15279121618779185,
      "grad_norm": 0.5976120829582214,
      "learning_rate": 9.716587478340928e-05,
      "loss": 2.5729,
      "step": 454
    },
    {
      "epoch": 0.15346430524588783,
      "grad_norm": 0.7713409066200256,
      "learning_rate": 9.712882321970383e-05,
      "loss": 2.5858,
      "step": 456
    },
    {
      "epoch": 0.15413739430398385,
      "grad_norm": 0.6205691695213318,
      "learning_rate": 9.70915381895928e-05,
      "loss": 2.5248,
      "step": 458
    },
    {
      "epoch": 0.15481048336207984,
      "grad_norm": 0.5880588293075562,
      "learning_rate": 9.705401987777855e-05,
      "loss": 2.5811,
      "step": 460
    },
    {
      "epoch": 0.15548357242017585,
      "grad_norm": 0.645301342010498,
      "learning_rate": 9.701626847011899e-05,
      "loss": 2.5677,
      "step": 462
    },
    {
      "epoch": 0.15615666147827184,
      "grad_norm": 0.5872762799263,
      "learning_rate": 9.697828415362674e-05,
      "loss": 2.5777,
      "step": 464
    },
    {
      "epoch": 0.15682975053636786,
      "grad_norm": 0.5601847767829895,
      "learning_rate": 9.694006711646823e-05,
      "loss": 2.5563,
      "step": 466
    },
    {
      "epoch": 0.15750283959446384,
      "grad_norm": 0.6171532869338989,
      "learning_rate": 9.690161754796274e-05,
      "loss": 2.5559,
      "step": 468
    },
    {
      "epoch": 0.15817592865255983,
      "grad_norm": 0.5213554501533508,
      "learning_rate": 9.686293563858142e-05,
      "loss": 2.5543,
      "step": 470
    },
    {
      "epoch": 0.15884901771065585,
      "grad_norm": 0.5173961520195007,
      "learning_rate": 9.682402157994643e-05,
      "loss": 2.5674,
      "step": 472
    },
    {
      "epoch": 0.15952210676875184,
      "grad_norm": 0.5519447326660156,
      "learning_rate": 9.678487556482996e-05,
      "loss": 2.5385,
      "step": 474
    },
    {
      "epoch": 0.16019519582684785,
      "grad_norm": 0.6517807841300964,
      "learning_rate": 9.674549778715322e-05,
      "loss": 2.5712,
      "step": 476
    },
    {
      "epoch": 0.16086828488494384,
      "grad_norm": 0.745688796043396,
      "learning_rate": 9.670588844198554e-05,
      "loss": 2.5743,
      "step": 478
    },
    {
      "epoch": 0.16154137394303983,
      "grad_norm": 0.6915059685707092,
      "learning_rate": 9.666604772554342e-05,
      "loss": 2.5536,
      "step": 480
    },
    {
      "epoch": 0.16221446300113584,
      "grad_norm": 0.6770395636558533,
      "learning_rate": 9.662597583518946e-05,
      "loss": 2.5501,
      "step": 482
    },
    {
      "epoch": 0.16288755205923183,
      "grad_norm": 0.7015348076820374,
      "learning_rate": 9.658567296943151e-05,
      "loss": 2.5488,
      "step": 484
    },
    {
      "epoch": 0.16356064111732785,
      "grad_norm": 0.6871614456176758,
      "learning_rate": 9.654513932792157e-05,
      "loss": 2.55,
      "step": 486
    },
    {
      "epoch": 0.16423373017542384,
      "grad_norm": 0.634372353553772,
      "learning_rate": 9.650437511145494e-05,
      "loss": 2.5231,
      "step": 488
    },
    {
      "epoch": 0.16490681923351982,
      "grad_norm": 0.6015982031822205,
      "learning_rate": 9.646338052196904e-05,
      "loss": 2.5338,
      "step": 490
    },
    {
      "epoch": 0.16557990829161584,
      "grad_norm": 0.5995615124702454,
      "learning_rate": 9.642215576254256e-05,
      "loss": 2.5449,
      "step": 492
    },
    {
      "epoch": 0.16625299734971183,
      "grad_norm": 0.5862424373626709,
      "learning_rate": 9.638070103739443e-05,
      "loss": 2.5564,
      "step": 494
    },
    {
      "epoch": 0.16692608640780784,
      "grad_norm": 0.49670928716659546,
      "learning_rate": 9.633901655188271e-05,
      "loss": 2.5283,
      "step": 496
    },
    {
      "epoch": 0.16759917546590383,
      "grad_norm": 0.5854897499084473,
      "learning_rate": 9.629710251250372e-05,
      "loss": 2.5142,
      "step": 498
    },
    {
      "epoch": 0.16827226452399982,
      "grad_norm": 0.7148288488388062,
      "learning_rate": 9.625495912689088e-05,
      "loss": 2.5528,
      "step": 500
    },
    {
      "epoch": 0.16894535358209584,
      "grad_norm": 0.7020834684371948,
      "learning_rate": 9.62125866038138e-05,
      "loss": 2.5157,
      "step": 502
    },
    {
      "epoch": 0.16961844264019182,
      "grad_norm": 0.5895742774009705,
      "learning_rate": 9.616998515317714e-05,
      "loss": 2.5492,
      "step": 504
    },
    {
      "epoch": 0.17029153169828784,
      "grad_norm": 0.5936703681945801,
      "learning_rate": 9.612715498601966e-05,
      "loss": 2.5172,
      "step": 506
    },
    {
      "epoch": 0.17096462075638383,
      "grad_norm": 0.6276780962944031,
      "learning_rate": 9.608409631451311e-05,
      "loss": 2.5448,
      "step": 508
    },
    {
      "epoch": 0.17163770981447982,
      "grad_norm": 0.6327413320541382,
      "learning_rate": 9.60408093519612e-05,
      "loss": 2.4831,
      "step": 510
    },
    {
      "epoch": 0.17231079887257583,
      "grad_norm": 0.7642117142677307,
      "learning_rate": 9.599729431279856e-05,
      "loss": 2.5209,
      "step": 512
    },
    {
      "epoch": 0.17298388793067182,
      "grad_norm": 0.7974164485931396,
      "learning_rate": 9.595355141258966e-05,
      "loss": 2.5102,
      "step": 514
    },
    {
      "epoch": 0.17365697698876784,
      "grad_norm": 0.7302394509315491,
      "learning_rate": 9.590958086802776e-05,
      "loss": 2.4705,
      "step": 516
    },
    {
      "epoch": 0.17433006604686382,
      "grad_norm": 0.5221154093742371,
      "learning_rate": 9.586538289693378e-05,
      "loss": 2.5355,
      "step": 518
    },
    {
      "epoch": 0.1750031551049598,
      "grad_norm": 0.7747745513916016,
      "learning_rate": 9.582095771825533e-05,
      "loss": 2.5375,
      "step": 520
    },
    {
      "epoch": 0.17567624416305583,
      "grad_norm": 0.6088679432868958,
      "learning_rate": 9.577630555206549e-05,
      "loss": 2.546,
      "step": 522
    },
    {
      "epoch": 0.17634933322115182,
      "grad_norm": 0.7077321410179138,
      "learning_rate": 9.573142661956183e-05,
      "loss": 2.5199,
      "step": 524
    },
    {
      "epoch": 0.17702242227924783,
      "grad_norm": 0.7267037630081177,
      "learning_rate": 9.568632114306524e-05,
      "loss": 2.5125,
      "step": 526
    },
    {
      "epoch": 0.17769551133734382,
      "grad_norm": 0.591948926448822,
      "learning_rate": 9.564098934601895e-05,
      "loss": 2.5411,
      "step": 528
    },
    {
      "epoch": 0.1783686003954398,
      "grad_norm": 0.5513604283332825,
      "learning_rate": 9.559543145298721e-05,
      "loss": 2.5178,
      "step": 530
    },
    {
      "epoch": 0.17904168945353582,
      "grad_norm": 0.6161476373672485,
      "learning_rate": 9.55496476896544e-05,
      "loss": 2.5123,
      "step": 532
    },
    {
      "epoch": 0.1797147785116318,
      "grad_norm": 0.5339455604553223,
      "learning_rate": 9.550363828282375e-05,
      "loss": 2.532,
      "step": 534
    },
    {
      "epoch": 0.18038786756972783,
      "grad_norm": 0.5792993307113647,
      "learning_rate": 9.545740346041634e-05,
      "loss": 2.5163,
      "step": 536
    },
    {
      "epoch": 0.18106095662782382,
      "grad_norm": 0.6918086409568787,
      "learning_rate": 9.541094345146986e-05,
      "loss": 2.516,
      "step": 538
    },
    {
      "epoch": 0.18173404568591983,
      "grad_norm": 0.577778160572052,
      "learning_rate": 9.536425848613754e-05,
      "loss": 2.4866,
      "step": 540
    },
    {
      "epoch": 0.18240713474401582,
      "grad_norm": 0.6180362105369568,
      "learning_rate": 9.531734879568703e-05,
      "loss": 2.5186,
      "step": 542
    },
    {
      "epoch": 0.1830802238021118,
      "grad_norm": 0.6912726163864136,
      "learning_rate": 9.527021461249919e-05,
      "loss": 2.5302,
      "step": 544
    },
    {
      "epoch": 0.18375331286020782,
      "grad_norm": 0.6083509922027588,
      "learning_rate": 9.522285617006695e-05,
      "loss": 2.5226,
      "step": 546
    },
    {
      "epoch": 0.1844264019183038,
      "grad_norm": 0.6165098547935486,
      "learning_rate": 9.517527370299424e-05,
      "loss": 2.5136,
      "step": 548
    },
    {
      "epoch": 0.18509949097639983,
      "grad_norm": 0.6208174228668213,
      "learning_rate": 9.512746744699473e-05,
      "loss": 2.4967,
      "step": 550
    },
    {
      "epoch": 0.18577258003449582,
      "grad_norm": 0.5808335542678833,
      "learning_rate": 9.507943763889062e-05,
      "loss": 2.5075,
      "step": 552
    },
    {
      "epoch": 0.1864456690925918,
      "grad_norm": 0.5772544741630554,
      "learning_rate": 9.503118451661168e-05,
      "loss": 2.517,
      "step": 554
    },
    {
      "epoch": 0.18711875815068782,
      "grad_norm": 0.5901827812194824,
      "learning_rate": 9.498270831919383e-05,
      "loss": 2.5008,
      "step": 556
    },
    {
      "epoch": 0.1877918472087838,
      "grad_norm": 0.6181182265281677,
      "learning_rate": 9.493400928677809e-05,
      "loss": 2.4841,
      "step": 558
    },
    {
      "epoch": 0.18846493626687982,
      "grad_norm": 0.6515311002731323,
      "learning_rate": 9.488508766060935e-05,
      "loss": 2.5213,
      "step": 560
    },
    {
      "epoch": 0.1891380253249758,
      "grad_norm": 0.5359539985656738,
      "learning_rate": 9.483594368303521e-05,
      "loss": 2.5276,
      "step": 562
    },
    {
      "epoch": 0.1898111143830718,
      "grad_norm": 0.7273654937744141,
      "learning_rate": 9.47865775975047e-05,
      "loss": 2.4901,
      "step": 564
    },
    {
      "epoch": 0.19048420344116782,
      "grad_norm": 0.642062246799469,
      "learning_rate": 9.473698964856719e-05,
      "loss": 2.4732,
      "step": 566
    },
    {
      "epoch": 0.1911572924992638,
      "grad_norm": 0.6698111891746521,
      "learning_rate": 9.468718008187106e-05,
      "loss": 2.5044,
      "step": 568
    },
    {
      "epoch": 0.19183038155735982,
      "grad_norm": 0.6473941206932068,
      "learning_rate": 9.463714914416255e-05,
      "loss": 2.5149,
      "step": 570
    },
    {
      "epoch": 0.1925034706154558,
      "grad_norm": 0.5659685730934143,
      "learning_rate": 9.458689708328459e-05,
      "loss": 2.4881,
      "step": 572
    },
    {
      "epoch": 0.1931765596735518,
      "grad_norm": 0.736530601978302,
      "learning_rate": 9.453642414817541e-05,
      "loss": 2.5245,
      "step": 574
    },
    {
      "epoch": 0.1938496487316478,
      "grad_norm": 0.6213739514350891,
      "learning_rate": 9.448573058886747e-05,
      "loss": 2.5026,
      "step": 576
    },
    {
      "epoch": 0.1945227377897438,
      "grad_norm": 0.6373304724693298,
      "learning_rate": 9.443481665648615e-05,
      "loss": 2.5033,
      "step": 578
    },
    {
      "epoch": 0.19519582684783982,
      "grad_norm": 0.7117682695388794,
      "learning_rate": 9.438368260324846e-05,
      "loss": 2.5133,
      "step": 580
    },
    {
      "epoch": 0.1958689159059358,
      "grad_norm": 0.566567063331604,
      "learning_rate": 9.433232868246192e-05,
      "loss": 2.4923,
      "step": 582
    },
    {
      "epoch": 0.1965420049640318,
      "grad_norm": 0.6423395872116089,
      "learning_rate": 9.428075514852319e-05,
      "loss": 2.4717,
      "step": 584
    },
    {
      "epoch": 0.1972150940221278,
      "grad_norm": 0.6600716710090637,
      "learning_rate": 9.422896225691683e-05,
      "loss": 2.5027,
      "step": 586
    },
    {
      "epoch": 0.1978881830802238,
      "grad_norm": 0.5523872375488281,
      "learning_rate": 9.417695026421409e-05,
      "loss": 2.4834,
      "step": 588
    },
    {
      "epoch": 0.1985612721383198,
      "grad_norm": 0.6183229088783264,
      "learning_rate": 9.412471942807157e-05,
      "loss": 2.5416,
      "step": 590
    },
    {
      "epoch": 0.1992343611964158,
      "grad_norm": 0.5662925839424133,
      "learning_rate": 9.407227000723e-05,
      "loss": 2.4994,
      "step": 592
    },
    {
      "epoch": 0.1999074502545118,
      "grad_norm": 0.58561110496521,
      "learning_rate": 9.401960226151291e-05,
      "loss": 2.4957,
      "step": 594
    },
    {
      "epoch": 0.2005805393126078,
      "grad_norm": 0.5408748984336853,
      "learning_rate": 9.396671645182539e-05,
      "loss": 2.4932,
      "step": 596
    },
    {
      "epoch": 0.2012536283707038,
      "grad_norm": 0.5413419008255005,
      "learning_rate": 9.391361284015274e-05,
      "loss": 2.4623,
      "step": 598
    },
    {
      "epoch": 0.2019267174287998,
      "grad_norm": 0.5581361651420593,
      "learning_rate": 9.386029168955925e-05,
      "loss": 2.4636,
      "step": 600
    },
    {
      "epoch": 0.2025998064868958,
      "grad_norm": 0.5392501950263977,
      "learning_rate": 9.380675326418683e-05,
      "loss": 2.5353,
      "step": 602
    },
    {
      "epoch": 0.20327289554499178,
      "grad_norm": 0.614374041557312,
      "learning_rate": 9.37529978292537e-05,
      "loss": 2.4918,
      "step": 604
    },
    {
      "epoch": 0.2039459846030878,
      "grad_norm": 0.5186980962753296,
      "learning_rate": 9.369902565105315e-05,
      "loss": 2.4829,
      "step": 606
    },
    {
      "epoch": 0.2046190736611838,
      "grad_norm": 0.6255108118057251,
      "learning_rate": 9.364483699695215e-05,
      "loss": 2.5047,
      "step": 608
    },
    {
      "epoch": 0.2052921627192798,
      "grad_norm": 0.7298846244812012,
      "learning_rate": 9.359043213539e-05,
      "loss": 2.5422,
      "step": 610
    },
    {
      "epoch": 0.2059652517773758,
      "grad_norm": 0.7413848042488098,
      "learning_rate": 9.353581133587712e-05,
      "loss": 2.4572,
      "step": 612
    },
    {
      "epoch": 0.2066383408354718,
      "grad_norm": 0.7455918788909912,
      "learning_rate": 9.348097486899362e-05,
      "loss": 2.4967,
      "step": 614
    },
    {
      "epoch": 0.2073114298935678,
      "grad_norm": 0.5773429870605469,
      "learning_rate": 9.342592300638795e-05,
      "loss": 2.4956,
      "step": 616
    },
    {
      "epoch": 0.20798451895166378,
      "grad_norm": 0.5804847478866577,
      "learning_rate": 9.337065602077562e-05,
      "loss": 2.5137,
      "step": 618
    },
    {
      "epoch": 0.2086576080097598,
      "grad_norm": 0.5692312121391296,
      "learning_rate": 9.331517418593778e-05,
      "loss": 2.4982,
      "step": 620
    },
    {
      "epoch": 0.2093306970678558,
      "grad_norm": 0.5889792442321777,
      "learning_rate": 9.325947777671996e-05,
      "loss": 2.5212,
      "step": 622
    },
    {
      "epoch": 0.2100037861259518,
      "grad_norm": 0.6200716495513916,
      "learning_rate": 9.320356706903059e-05,
      "loss": 2.4906,
      "step": 624
    },
    {
      "epoch": 0.2106768751840478,
      "grad_norm": 0.5470912456512451,
      "learning_rate": 9.314744233983969e-05,
      "loss": 2.4984,
      "step": 626
    },
    {
      "epoch": 0.21134996424214378,
      "grad_norm": 0.5696718692779541,
      "learning_rate": 9.309110386717755e-05,
      "loss": 2.4749,
      "step": 628
    },
    {
      "epoch": 0.2120230533002398,
      "grad_norm": 0.5643149614334106,
      "learning_rate": 9.303455193013322e-05,
      "loss": 2.465,
      "step": 630
    },
    {
      "epoch": 0.21269614235833578,
      "grad_norm": 0.581017017364502,
      "learning_rate": 9.29777868088533e-05,
      "loss": 2.4705,
      "step": 632
    },
    {
      "epoch": 0.2133692314164318,
      "grad_norm": 0.5466561913490295,
      "learning_rate": 9.29208087845404e-05,
      "loss": 2.4705,
      "step": 634
    },
    {
      "epoch": 0.2140423204745278,
      "grad_norm": 0.62199467420578,
      "learning_rate": 9.286361813945181e-05,
      "loss": 2.468,
      "step": 636
    },
    {
      "epoch": 0.21471540953262377,
      "grad_norm": 0.6129001975059509,
      "learning_rate": 9.280621515689814e-05,
      "loss": 2.4807,
      "step": 638
    },
    {
      "epoch": 0.2153884985907198,
      "grad_norm": 0.5707617998123169,
      "learning_rate": 9.274860012124182e-05,
      "loss": 2.4628,
      "step": 640
    },
    {
      "epoch": 0.21606158764881578,
      "grad_norm": 0.5998787879943848,
      "learning_rate": 9.269077331789578e-05,
      "loss": 2.469,
      "step": 642
    },
    {
      "epoch": 0.2167346767069118,
      "grad_norm": 0.4872931241989136,
      "learning_rate": 9.263273503332201e-05,
      "loss": 2.458,
      "step": 644
    },
    {
      "epoch": 0.21740776576500778,
      "grad_norm": 0.48162540793418884,
      "learning_rate": 9.257448555503013e-05,
      "loss": 2.4706,
      "step": 646
    },
    {
      "epoch": 0.21808085482310377,
      "grad_norm": 0.5514042377471924,
      "learning_rate": 9.251602517157594e-05,
      "loss": 2.4834,
      "step": 648
    },
    {
      "epoch": 0.21875394388119979,
      "grad_norm": 0.4928090572357178,
      "learning_rate": 9.245735417256004e-05,
      "loss": 2.4816,
      "step": 650
    },
    {
      "epoch": 0.21942703293929577,
      "grad_norm": 0.5588257908821106,
      "learning_rate": 9.239847284862639e-05,
      "loss": 2.4726,
      "step": 652
    },
    {
      "epoch": 0.2201001219973918,
      "grad_norm": 0.4884628355503082,
      "learning_rate": 9.233938149146086e-05,
      "loss": 2.4834,
      "step": 654
    },
    {
      "epoch": 0.22077321105548778,
      "grad_norm": 0.5175626277923584,
      "learning_rate": 9.228008039378972e-05,
      "loss": 2.4571,
      "step": 656
    },
    {
      "epoch": 0.22144630011358377,
      "grad_norm": 0.48624858260154724,
      "learning_rate": 9.222056984937835e-05,
      "loss": 2.5005,
      "step": 658
    },
    {
      "epoch": 0.22211938917167978,
      "grad_norm": 0.6215223073959351,
      "learning_rate": 9.21608501530296e-05,
      "loss": 2.4743,
      "step": 660
    },
    {
      "epoch": 0.22279247822977577,
      "grad_norm": 0.6394065618515015,
      "learning_rate": 9.210092160058245e-05,
      "loss": 2.5083,
      "step": 662
    },
    {
      "epoch": 0.22346556728787179,
      "grad_norm": 0.6800668835639954,
      "learning_rate": 9.20407844889105e-05,
      "loss": 2.4848,
      "step": 664
    },
    {
      "epoch": 0.22413865634596777,
      "grad_norm": 0.7512221932411194,
      "learning_rate": 9.198043911592053e-05,
      "loss": 2.4759,
      "step": 666
    },
    {
      "epoch": 0.22481174540406376,
      "grad_norm": 0.7012692093849182,
      "learning_rate": 9.1919885780551e-05,
      "loss": 2.4484,
      "step": 668
    },
    {
      "epoch": 0.22548483446215978,
      "grad_norm": 0.7792785167694092,
      "learning_rate": 9.185912478277052e-05,
      "loss": 2.4628,
      "step": 670
    },
    {
      "epoch": 0.22615792352025577,
      "grad_norm": 0.6083918213844299,
      "learning_rate": 9.17981564235765e-05,
      "loss": 2.4606,
      "step": 672
    },
    {
      "epoch": 0.22683101257835178,
      "grad_norm": 0.6361806988716125,
      "learning_rate": 9.173698100499351e-05,
      "loss": 2.4683,
      "step": 674
    },
    {
      "epoch": 0.22750410163644777,
      "grad_norm": 0.572401762008667,
      "learning_rate": 9.167559883007188e-05,
      "loss": 2.4802,
      "step": 676
    },
| { | |
| "epoch": 0.22817719069454379, | |
| "grad_norm": 0.6176139712333679, | |
| "learning_rate": 9.161401020288616e-05, | |
| "loss": 2.4904, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.22885027975263977, | |
| "grad_norm": 0.5795890688896179, | |
| "learning_rate": 9.155221542853362e-05, | |
| "loss": 2.489, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.22952336881073576, | |
| "grad_norm": 0.544706404209137, | |
| "learning_rate": 9.149021481313276e-05, | |
| "loss": 2.4342, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.23019645786883178, | |
| "grad_norm": 0.5396670699119568, | |
| "learning_rate": 9.142800866382173e-05, | |
| "loss": 2.4824, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.23086954692692777, | |
| "grad_norm": 0.5719538927078247, | |
| "learning_rate": 9.13655972887569e-05, | |
| "loss": 2.4956, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.23154263598502378, | |
| "grad_norm": 0.5415273308753967, | |
| "learning_rate": 9.130298099711125e-05, | |
| "loss": 2.4569, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.23221572504311977, | |
| "grad_norm": 0.49953708052635193, | |
| "learning_rate": 9.124016009907287e-05, | |
| "loss": 2.4281, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.23288881410121576, | |
| "grad_norm": 0.5383540987968445, | |
| "learning_rate": 9.11771349058435e-05, | |
| "loss": 2.4527, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.23356190315931177, | |
| "grad_norm": 0.5342861413955688, | |
| "learning_rate": 9.111390572963683e-05, | |
| "loss": 2.4786, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.23423499221740776, | |
| "grad_norm": 0.5322859883308411, | |
| "learning_rate": 9.105047288367707e-05, | |
| "loss": 2.4587, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.23490808127550378, | |
| "grad_norm": 0.5226521492004395, | |
| "learning_rate": 9.098683668219737e-05, | |
| "loss": 2.4605, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.23558117033359977, | |
| "grad_norm": 0.5837043523788452, | |
| "learning_rate": 9.092299744043826e-05, | |
| "loss": 2.4591, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.23625425939169575, | |
| "grad_norm": 0.5048717260360718, | |
| "learning_rate": 9.08589554746461e-05, | |
| "loss": 2.4809, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.23692734844979177, | |
| "grad_norm": 0.6950474381446838, | |
| "learning_rate": 9.079471110207149e-05, | |
| "loss": 2.4261, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.23760043750788776, | |
| "grad_norm": 0.6609525680541992, | |
| "learning_rate": 9.07302646409677e-05, | |
| "loss": 2.4631, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.23827352656598377, | |
| "grad_norm": 0.7101286053657532, | |
| "learning_rate": 9.066561641058912e-05, | |
| "loss": 2.4427, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.23894661562407976, | |
| "grad_norm": 0.5411195755004883, | |
| "learning_rate": 9.060076673118967e-05, | |
| "loss": 2.4933, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.23961970468217575, | |
| "grad_norm": 0.5625993609428406, | |
| "learning_rate": 9.05357159240212e-05, | |
| "loss": 2.4357, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.24029279374027177, | |
| "grad_norm": 0.8118378520011902, | |
| "learning_rate": 9.04704643113319e-05, | |
| "loss": 2.4366, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.24096588279836775, | |
| "grad_norm": 0.8360730409622192, | |
| "learning_rate": 9.040501221636472e-05, | |
| "loss": 2.4807, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.24163897185646377, | |
| "grad_norm": 0.6942176818847656, | |
| "learning_rate": 9.033935996335573e-05, | |
| "loss": 2.4435, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.24231206091455976, | |
| "grad_norm": 0.6470984220504761, | |
| "learning_rate": 9.027350787753257e-05, | |
| "loss": 2.4413, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.24298514997265575, | |
| "grad_norm": 0.6180524826049805, | |
| "learning_rate": 9.020745628511281e-05, | |
| "loss": 2.4708, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.24365823903075176, | |
| "grad_norm": 0.5775427222251892, | |
| "learning_rate": 9.014120551330232e-05, | |
| "loss": 2.4242, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.24433132808884775, | |
| "grad_norm": 0.4921644628047943, | |
| "learning_rate": 9.007475589029365e-05, | |
| "loss": 2.4759, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.24500441714694376, | |
| "grad_norm": 0.6223775148391724, | |
| "learning_rate": 9.000810774526448e-05, | |
| "loss": 2.4407, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.24567750620503975, | |
| "grad_norm": 0.5503790974617004, | |
| "learning_rate": 8.994126140837585e-05, | |
| "loss": 2.4631, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.24635059526313574, | |
| "grad_norm": 0.5364406704902649, | |
| "learning_rate": 8.987421721077062e-05, | |
| "loss": 2.4698, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.24702368432123176, | |
| "grad_norm": 0.5078843235969543, | |
| "learning_rate": 8.980697548457186e-05, | |
| "loss": 2.4727, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.24769677337932774, | |
| "grad_norm": 0.49923375248908997, | |
| "learning_rate": 8.973953656288112e-05, | |
| "loss": 2.4305, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.24836986243742376, | |
| "grad_norm": 0.5955345630645752, | |
| "learning_rate": 8.967190077977681e-05, | |
| "loss": 2.4578, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.24904295149551975, | |
| "grad_norm": 0.5475841760635376, | |
| "learning_rate": 8.960406847031253e-05, | |
| "loss": 2.4187, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.24971604055361574, | |
| "grad_norm": 0.5308858156204224, | |
| "learning_rate": 8.953603997051548e-05, | |
| "loss": 2.4664, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.2503891296117117, | |
| "grad_norm": 0.5813267827033997, | |
| "learning_rate": 8.946781561738474e-05, | |
| "loss": 2.4627, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.25106221866980777, | |
| "grad_norm": 0.5371124744415283, | |
| "learning_rate": 8.939939574888957e-05, | |
| "loss": 2.4528, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.25173530772790376, | |
| "grad_norm": 0.6726812720298767, | |
| "learning_rate": 8.933078070396778e-05, | |
| "loss": 2.4286, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.25240839678599974, | |
| "grad_norm": 0.5831666588783264, | |
| "learning_rate": 8.926197082252411e-05, | |
| "loss": 2.4551, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.25308148584409573, | |
| "grad_norm": 0.5222828388214111, | |
| "learning_rate": 8.919296644542837e-05, | |
| "loss": 2.4725, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.2537545749021917, | |
| "grad_norm": 0.5731188058853149, | |
| "learning_rate": 8.912376791451395e-05, | |
| "loss": 2.4416, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.25442766396028776, | |
| "grad_norm": 0.7226285338401794, | |
| "learning_rate": 8.905437557257599e-05, | |
| "loss": 2.4516, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.25510075301838375, | |
| "grad_norm": 0.683601975440979, | |
| "learning_rate": 8.898478976336975e-05, | |
| "loss": 2.4514, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.25577384207647974, | |
| "grad_norm": 0.623328685760498, | |
| "learning_rate": 8.89150108316089e-05, | |
| "loss": 2.4264, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.25644693113457573, | |
| "grad_norm": 0.5894498825073242, | |
| "learning_rate": 8.884503912296373e-05, | |
| "loss": 2.4572, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.2571200201926717, | |
| "grad_norm": 0.5245216488838196, | |
| "learning_rate": 8.877487498405961e-05, | |
| "loss": 2.4725, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.25779310925076776, | |
| "grad_norm": 0.5440263152122498, | |
| "learning_rate": 8.87045187624751e-05, | |
| "loss": 2.4494, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.25846619830886375, | |
| "grad_norm": 0.6193727850914001, | |
| "learning_rate": 8.86339708067403e-05, | |
| "loss": 2.4392, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.25913928736695974, | |
| "grad_norm": 0.5997888445854187, | |
| "learning_rate": 8.856323146633517e-05, | |
| "loss": 2.4388, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.2598123764250557, | |
| "grad_norm": 0.7109591960906982, | |
| "learning_rate": 8.849230109168767e-05, | |
| "loss": 2.409, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.2604854654831517, | |
| "grad_norm": 0.5998343825340271, | |
| "learning_rate": 8.842118003417218e-05, | |
| "loss": 2.4443, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.26115855454124776, | |
| "grad_norm": 0.5481546521186829, | |
| "learning_rate": 8.834986864610764e-05, | |
| "loss": 2.4334, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.26183164359934374, | |
| "grad_norm": 0.5310930013656616, | |
| "learning_rate": 8.827836728075588e-05, | |
| "loss": 2.4424, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.26250473265743973, | |
| "grad_norm": 0.49727270007133484, | |
| "learning_rate": 8.82066762923198e-05, | |
| "loss": 2.4348, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.2631778217155357, | |
| "grad_norm": 0.6515977382659912, | |
| "learning_rate": 8.813479603594168e-05, | |
| "loss": 2.4454, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.26385091077363176, | |
| "grad_norm": 0.7124093174934387, | |
| "learning_rate": 8.806272686770139e-05, | |
| "loss": 2.4265, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.26452399983172775, | |
| "grad_norm": 0.5826483964920044, | |
| "learning_rate": 8.799046914461461e-05, | |
| "loss": 2.4736, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.26519708888982374, | |
| "grad_norm": 0.5905554294586182, | |
| "learning_rate": 8.791802322463114e-05, | |
| "loss": 2.4467, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.26587017794791973, | |
| "grad_norm": 0.5929023027420044, | |
| "learning_rate": 8.784538946663297e-05, | |
| "loss": 2.4288, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.2665432670060157, | |
| "grad_norm": 0.5478795170783997, | |
| "learning_rate": 8.777256823043269e-05, | |
| "loss": 2.4387, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.26721635606411176, | |
| "grad_norm": 0.5086268782615662, | |
| "learning_rate": 8.769955987677159e-05, | |
| "loss": 2.4384, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.26788944512220775, | |
| "grad_norm": 0.6146414875984192, | |
| "learning_rate": 8.762636476731786e-05, | |
| "loss": 2.432, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.26856253418030374, | |
| "grad_norm": 0.5799776315689087, | |
| "learning_rate": 8.755298326466495e-05, | |
| "loss": 2.4305, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.2692356232383997, | |
| "grad_norm": 0.47361868619918823, | |
| "learning_rate": 8.747941573232951e-05, | |
| "loss": 2.4397, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.2699087122964957, | |
| "grad_norm": 0.4643039405345917, | |
| "learning_rate": 8.740566253474985e-05, | |
| "loss": 2.4515, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.27058180135459176, | |
| "grad_norm": 0.4562658369541168, | |
| "learning_rate": 8.733172403728401e-05, | |
| "loss": 2.4386, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.27125489041268774, | |
| "grad_norm": 0.5169376730918884, | |
| "learning_rate": 8.725760060620795e-05, | |
| "loss": 2.4331, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.27192797947078373, | |
| "grad_norm": 0.5017179250717163, | |
| "learning_rate": 8.718329260871374e-05, | |
| "loss": 2.4203, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.2726010685288797, | |
| "grad_norm": 0.5720140337944031, | |
| "learning_rate": 8.71088004129078e-05, | |
| "loss": 2.4548, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.2732741575869757, | |
| "grad_norm": 0.5228462219238281, | |
| "learning_rate": 8.703412438780898e-05, | |
| "loss": 2.3827, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.27394724664507175, | |
| "grad_norm": 0.4759220778942108, | |
| "learning_rate": 8.695926490334682e-05, | |
| "loss": 2.4321, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.27462033570316774, | |
| "grad_norm": 0.532883882522583, | |
| "learning_rate": 8.688422233035967e-05, | |
| "loss": 2.3892, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.2752934247612637, | |
| "grad_norm": 0.480090469121933, | |
| "learning_rate": 8.680899704059283e-05, | |
| "loss": 2.4134, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.2759665138193597, | |
| "grad_norm": 0.5285961627960205, | |
| "learning_rate": 8.673358940669679e-05, | |
| "loss": 2.448, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.2766396028774557, | |
| "grad_norm": 0.49371105432510376, | |
| "learning_rate": 8.665799980222528e-05, | |
| "loss": 2.4465, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.27731269193555175, | |
| "grad_norm": 0.4970341920852661, | |
| "learning_rate": 8.658222860163356e-05, | |
| "loss": 2.4171, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.27798578099364774, | |
| "grad_norm": 0.5542078614234924, | |
| "learning_rate": 8.650627618027638e-05, | |
| "loss": 2.3954, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.2786588700517437, | |
| "grad_norm": 0.506982684135437, | |
| "learning_rate": 8.643014291440629e-05, | |
| "loss": 2.4383, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.2793319591098397, | |
| "grad_norm": 0.5749984979629517, | |
| "learning_rate": 8.635382918117167e-05, | |
| "loss": 2.3878, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.2800050481679357, | |
| "grad_norm": 0.5519755482673645, | |
| "learning_rate": 8.627733535861493e-05, | |
| "loss": 2.4304, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.28067813722603174, | |
| "grad_norm": 0.6592808365821838, | |
| "learning_rate": 8.620066182567057e-05, | |
| "loss": 2.3806, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.28135122628412773, | |
| "grad_norm": 0.7210227847099304, | |
| "learning_rate": 8.612380896216336e-05, | |
| "loss": 2.4454, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.2820243153422237, | |
| "grad_norm": 0.6003880500793457, | |
| "learning_rate": 8.604677714880642e-05, | |
| "loss": 2.4387, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.2826974044003197, | |
| "grad_norm": 0.5979674458503723, | |
| "learning_rate": 8.596956676719936e-05, | |
| "loss": 2.4231, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.2833704934584157, | |
| "grad_norm": 0.6536392569541931, | |
| "learning_rate": 8.58921781998264e-05, | |
| "loss": 2.4038, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.28404358251651174, | |
| "grad_norm": 0.7131980657577515, | |
| "learning_rate": 8.581461183005441e-05, | |
| "loss": 2.3951, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.2847166715746077, | |
| "grad_norm": 0.7326920032501221, | |
| "learning_rate": 8.57368680421311e-05, | |
| "loss": 2.4102, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.2853897606327037, | |
| "grad_norm": 0.6664044260978699, | |
| "learning_rate": 8.565894722118307e-05, | |
| "loss": 2.3962, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.2860628496907997, | |
| "grad_norm": 0.5486345291137695, | |
| "learning_rate": 8.558084975321384e-05, | |
| "loss": 2.4255, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.2867359387488957, | |
| "grad_norm": 0.5411694049835205, | |
| "learning_rate": 8.55025760251021e-05, | |
| "loss": 2.4395, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.28740902780699173, | |
| "grad_norm": 0.490962415933609, | |
| "learning_rate": 8.542412642459963e-05, | |
| "loss": 2.3971, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.2880821168650877, | |
| "grad_norm": 0.5245395302772522, | |
| "learning_rate": 8.534550134032944e-05, | |
| "loss": 2.3977, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.2887552059231837, | |
| "grad_norm": 0.5046750903129578, | |
| "learning_rate": 8.526670116178391e-05, | |
| "loss": 2.3787, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.2894282949812797, | |
| "grad_norm": 0.5423001646995544, | |
| "learning_rate": 8.518772627932276e-05, | |
| "loss": 2.4143, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.2901013840393757, | |
| "grad_norm": 0.5712574124336243, | |
| "learning_rate": 8.510857708417114e-05, | |
| "loss": 2.4738, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.29077447309747173, | |
| "grad_norm": 0.6638187766075134, | |
| "learning_rate": 8.502925396841775e-05, | |
| "loss": 2.3985, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.2914475621555677, | |
| "grad_norm": 0.5826054215431213, | |
| "learning_rate": 8.494975732501282e-05, | |
| "loss": 2.4162, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.2921206512136637, | |
| "grad_norm": 0.5681880712509155, | |
| "learning_rate": 8.487008754776622e-05, | |
| "loss": 2.407, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.2927937402717597, | |
| "grad_norm": 0.5738682746887207, | |
| "learning_rate": 8.47902450313455e-05, | |
| "loss": 2.4274, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.2934668293298557, | |
| "grad_norm": 0.6157481670379639, | |
| "learning_rate": 8.47102301712739e-05, | |
| "loss": 2.4233, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.2941399183879517, | |
| "grad_norm": 0.6142215728759766, | |
| "learning_rate": 8.463004336392842e-05, | |
| "loss": 2.4127, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.2948130074460477, | |
| "grad_norm": 0.5124562978744507, | |
| "learning_rate": 8.454968500653787e-05, | |
| "loss": 2.4232, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.2954860965041437, | |
| "grad_norm": 0.6266094446182251, | |
| "learning_rate": 8.446915549718085e-05, | |
| "loss": 2.4318, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.2961591855622397, | |
| "grad_norm": 0.5722962021827698, | |
| "learning_rate": 8.438845523478385e-05, | |
| "loss": 2.4324, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.2968322746203357, | |
| "grad_norm": 0.5898362994194031, | |
| "learning_rate": 8.430758461911919e-05, | |
| "loss": 2.4183, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.2975053636784317, | |
| "grad_norm": 0.5322403907775879, | |
| "learning_rate": 8.422654405080315e-05, | |
| "loss": 2.4215, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.2981784527365277, | |
| "grad_norm": 0.5670167207717896, | |
| "learning_rate": 8.414533393129383e-05, | |
| "loss": 2.3962, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.2988515417946237, | |
| "grad_norm": 0.5440824031829834, | |
| "learning_rate": 8.406395466288935e-05, | |
| "loss": 2.4166, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.2995246308527197, | |
| "grad_norm": 0.534233570098877, | |
| "learning_rate": 8.398240664872566e-05, | |
| "loss": 2.4417, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.3001977199108157, | |
| "grad_norm": 0.5321791172027588, | |
| "learning_rate": 8.390069029277474e-05, | |
| "loss": 2.3973, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.3008708089689117, | |
| "grad_norm": 0.526823878288269, | |
| "learning_rate": 8.381880599984242e-05, | |
| "loss": 2.4064, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.3015438980270077, | |
| "grad_norm": 0.5071905255317688, | |
| "learning_rate": 8.37367541755665e-05, | |
| "loss": 2.4135, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.3022169870851037, | |
| "grad_norm": 0.5760087966918945, | |
| "learning_rate": 8.365453522641467e-05, | |
| "loss": 2.3872, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.3028900761431997, | |
| "grad_norm": 0.5876712799072266, | |
| "learning_rate": 8.357214955968256e-05, | |
| "loss": 2.4155, | |
| "step": 900 | |
| } | |
| ], | |
| "logging_steps": 2, | |
| "max_steps": 2972, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 300, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.0132651008589824e+18, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |