{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9133519673188562,
  "eval_steps": 500,
  "global_step": 531,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0017200602021070737,
      "grad_norm": 9.863033294677734,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 46.5684,
      "step": 1
    },
    {
      "epoch": 0.0034401204042141475,
      "grad_norm": 9.501362800598145,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 46.7594,
      "step": 2
    },
    {
      "epoch": 0.005160180606321221,
      "grad_norm": 10.306024551391602,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 47.7285,
      "step": 3
    },
    {
      "epoch": 0.006880240808428295,
      "grad_norm": 10.35928726196289,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 46.2416,
      "step": 4
    },
    {
      "epoch": 0.008600301010535369,
      "grad_norm": 10.284590721130371,
      "learning_rate": 5.000000000000001e-07,
      "loss": 46.4638,
      "step": 5
    },
    {
      "epoch": 0.010320361212642442,
      "grad_norm": 9.508557319641113,
      "learning_rate": 6.000000000000001e-07,
      "loss": 47.0967,
      "step": 6
    },
    {
      "epoch": 0.012040421414749517,
      "grad_norm": 9.53107738494873,
      "learning_rate": 7.000000000000001e-07,
      "loss": 46.8945,
      "step": 7
    },
    {
      "epoch": 0.01376048161685659,
      "grad_norm": 8.917387008666992,
      "learning_rate": 8.000000000000001e-07,
      "loss": 46.0318,
      "step": 8
    },
    {
      "epoch": 0.015480541818963663,
      "grad_norm": 9.046576499938965,
      "learning_rate": 9.000000000000001e-07,
      "loss": 46.7626,
      "step": 9
    },
    {
      "epoch": 0.017200602021070738,
      "grad_norm": 8.346121788024902,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 46.4023,
      "step": 10
    },
    {
      "epoch": 0.01892066222317781,
      "grad_norm": 7.368544578552246,
      "learning_rate": 1.1e-06,
      "loss": 46.9599,
      "step": 11
    },
    {
      "epoch": 0.020640722425284884,
      "grad_norm": 7.151632308959961,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 46.8859,
      "step": 12
    },
    {
      "epoch": 0.022360782627391957,
      "grad_norm": 6.858343124389648,
      "learning_rate": 1.3e-06,
      "loss": 47.0505,
      "step": 13
    },
    {
      "epoch": 0.024080842829499034,
      "grad_norm": 6.4683918952941895,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 46.6439,
      "step": 14
    },
    {
      "epoch": 0.025800903031606107,
      "grad_norm": 8.405121803283691,
      "learning_rate": 1.5e-06,
      "loss": 46.8736,
      "step": 15
    },
    {
      "epoch": 0.02752096323371318,
      "grad_norm": 10.191279411315918,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 46.2945,
      "step": 16
    },
    {
      "epoch": 0.029241023435820253,
      "grad_norm": 10.24738597869873,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 46.0618,
      "step": 17
    },
    {
      "epoch": 0.030961083637927326,
      "grad_norm": 9.236139297485352,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 47.0773,
      "step": 18
    },
    {
      "epoch": 0.0326811438400344,
      "grad_norm": 8.437068939208984,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 46.6797,
      "step": 19
    },
    {
      "epoch": 0.034401204042141476,
      "grad_norm": 8.35542106628418,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 46.8348,
      "step": 20
    },
    {
      "epoch": 0.03612126424424855,
      "grad_norm": 9.210644721984863,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 46.7521,
      "step": 21
    },
    {
      "epoch": 0.03784132444635562,
      "grad_norm": 9.216105461120605,
      "learning_rate": 2.2e-06,
      "loss": 46.4541,
      "step": 22
    },
    {
      "epoch": 0.0395613846484627,
      "grad_norm": 8.33311939239502,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 46.7192,
      "step": 23
    },
    {
      "epoch": 0.04128144485056977,
      "grad_norm": 7.9267449378967285,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 46.7559,
      "step": 24
    },
    {
      "epoch": 0.043001505052676844,
      "grad_norm": 7.363073348999023,
      "learning_rate": 2.5e-06,
      "loss": 45.9578,
      "step": 25
    },
    {
      "epoch": 0.044721565254783914,
      "grad_norm": 7.073836326599121,
      "learning_rate": 2.6e-06,
      "loss": 45.9155,
      "step": 26
    },
    {
      "epoch": 0.04644162545689099,
      "grad_norm": 7.0409088134765625,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 46.6551,
      "step": 27
    },
    {
      "epoch": 0.04816168565899807,
      "grad_norm": 6.685385227203369,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 46.3889,
      "step": 28
    },
    {
      "epoch": 0.04988174586110514,
      "grad_norm": 6.540524959564209,
      "learning_rate": 2.9e-06,
      "loss": 47.0662,
      "step": 29
    },
    {
      "epoch": 0.05160180606321221,
      "grad_norm": 6.627730846405029,
      "learning_rate": 3e-06,
      "loss": 46.101,
      "step": 30
    },
    {
      "epoch": 0.05332186626531928,
      "grad_norm": 7.054595470428467,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 46.7379,
      "step": 31
    },
    {
      "epoch": 0.05504192646742636,
      "grad_norm": 6.73618221282959,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 46.3172,
      "step": 32
    },
    {
      "epoch": 0.056761986669533436,
      "grad_norm": 5.943539619445801,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 47.0245,
      "step": 33
    },
    {
      "epoch": 0.058482046871640506,
      "grad_norm": 6.056912899017334,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 46.0208,
      "step": 34
    },
    {
      "epoch": 0.06020210707374758,
      "grad_norm": 5.7798309326171875,
      "learning_rate": 3.5e-06,
      "loss": 46.0746,
      "step": 35
    },
    {
      "epoch": 0.06192216727585465,
      "grad_norm": 5.896692276000977,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 46.4911,
      "step": 36
    },
    {
      "epoch": 0.06364222747796174,
      "grad_norm": 5.496098518371582,
      "learning_rate": 3.7e-06,
      "loss": 45.9957,
      "step": 37
    },
    {
      "epoch": 0.0653622876800688,
      "grad_norm": 5.253308296203613,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 45.7145,
      "step": 38
    },
    {
      "epoch": 0.06708234788217587,
      "grad_norm": 5.340756416320801,
      "learning_rate": 3.900000000000001e-06,
      "loss": 46.7068,
      "step": 39
    },
    {
      "epoch": 0.06880240808428295,
      "grad_norm": 5.312371730804443,
      "learning_rate": 4.000000000000001e-06,
      "loss": 46.4172,
      "step": 40
    },
    {
      "epoch": 0.07052246828639003,
      "grad_norm": 5.484511375427246,
      "learning_rate": 4.1e-06,
      "loss": 45.6433,
      "step": 41
    },
    {
      "epoch": 0.0722425284884971,
      "grad_norm": 5.260024547576904,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 46.1259,
      "step": 42
    },
    {
      "epoch": 0.07396258869060417,
      "grad_norm": 5.4440999031066895,
      "learning_rate": 4.3e-06,
      "loss": 46.2947,
      "step": 43
    },
    {
      "epoch": 0.07568264889271124,
      "grad_norm": 5.153471946716309,
      "learning_rate": 4.4e-06,
      "loss": 46.2882,
      "step": 44
    },
    {
      "epoch": 0.07740270909481832,
      "grad_norm": 5.6796488761901855,
      "learning_rate": 4.5e-06,
      "loss": 46.3314,
      "step": 45
    },
    {
      "epoch": 0.0791227692969254,
      "grad_norm": 5.253461837768555,
      "learning_rate": 4.600000000000001e-06,
      "loss": 46.4205,
      "step": 46
    },
    {
      "epoch": 0.08084282949903247,
      "grad_norm": 5.475714683532715,
      "learning_rate": 4.7e-06,
      "loss": 45.9359,
      "step": 47
    },
    {
      "epoch": 0.08256288970113954,
      "grad_norm": 5.880488872528076,
      "learning_rate": 4.800000000000001e-06,
      "loss": 46.0665,
      "step": 48
    },
    {
      "epoch": 0.08428294990324661,
      "grad_norm": 5.69149923324585,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 45.8811,
      "step": 49
    },
    {
      "epoch": 0.08600301010535369,
      "grad_norm": 5.086974143981934,
      "learning_rate": 5e-06,
      "loss": 46.1759,
      "step": 50
    },
    {
      "epoch": 0.08772307030746077,
      "grad_norm": 5.902041435241699,
      "learning_rate": 5.1e-06,
      "loss": 46.4326,
      "step": 51
    },
    {
      "epoch": 0.08944313050956783,
      "grad_norm": 5.740163803100586,
      "learning_rate": 5.2e-06,
      "loss": 45.607,
      "step": 52
    },
    {
      "epoch": 0.0911631907116749,
      "grad_norm": 5.547687530517578,
      "learning_rate": 5.300000000000001e-06,
      "loss": 45.8864,
      "step": 53
    },
    {
      "epoch": 0.09288325091378198,
      "grad_norm": 6.20143461227417,
      "learning_rate": 5.400000000000001e-06,
      "loss": 45.7776,
      "step": 54
    },
    {
      "epoch": 0.09460331111588906,
      "grad_norm": 5.544395446777344,
      "learning_rate": 5.500000000000001e-06,
      "loss": 46.3334,
      "step": 55
    },
    {
      "epoch": 0.09632337131799613,
      "grad_norm": 5.583694934844971,
      "learning_rate": 5.600000000000001e-06,
      "loss": 45.5733,
      "step": 56
    },
    {
      "epoch": 0.0980434315201032,
      "grad_norm": 5.751343250274658,
      "learning_rate": 5.7e-06,
      "loss": 45.2805,
      "step": 57
    },
    {
      "epoch": 0.09976349172221027,
      "grad_norm": 6.024663925170898,
      "learning_rate": 5.8e-06,
      "loss": 45.6569,
      "step": 58
    },
    {
      "epoch": 0.10148355192431735,
      "grad_norm": 5.834673881530762,
      "learning_rate": 5.9e-06,
      "loss": 45.7854,
      "step": 59
    },
    {
      "epoch": 0.10320361212642443,
      "grad_norm": 6.796127796173096,
      "learning_rate": 6e-06,
      "loss": 46.4184,
      "step": 60
    },
    {
      "epoch": 0.1049236723285315,
      "grad_norm": 6.303890705108643,
      "learning_rate": 6.1e-06,
      "loss": 46.0877,
      "step": 61
    },
    {
      "epoch": 0.10664373253063857,
      "grad_norm": 6.320569038391113,
      "learning_rate": 6.200000000000001e-06,
      "loss": 45.8802,
      "step": 62
    },
    {
      "epoch": 0.10836379273274564,
      "grad_norm": 6.015869617462158,
      "learning_rate": 6.300000000000001e-06,
      "loss": 45.6733,
      "step": 63
    },
    {
      "epoch": 0.11008385293485272,
      "grad_norm": 6.314846515655518,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 45.8531,
      "step": 64
    },
    {
      "epoch": 0.1118039131369598,
      "grad_norm": 6.106888294219971,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 45.2966,
      "step": 65
    },
    {
      "epoch": 0.11352397333906687,
      "grad_norm": 6.093003273010254,
      "learning_rate": 6.600000000000001e-06,
      "loss": 46.0552,
      "step": 66
    },
    {
      "epoch": 0.11524403354117393,
      "grad_norm": 5.634897708892822,
      "learning_rate": 6.700000000000001e-06,
      "loss": 45.3367,
      "step": 67
    },
    {
      "epoch": 0.11696409374328101,
      "grad_norm": 6.261721134185791,
      "learning_rate": 6.800000000000001e-06,
      "loss": 45.7655,
      "step": 68
    },
    {
      "epoch": 0.11868415394538809,
      "grad_norm": 5.926329135894775,
      "learning_rate": 6.9e-06,
      "loss": 45.1391,
      "step": 69
    },
    {
      "epoch": 0.12040421414749516,
      "grad_norm": 5.887923717498779,
      "learning_rate": 7e-06,
      "loss": 45.43,
      "step": 70
    },
    {
      "epoch": 0.12212427434960224,
      "grad_norm": 5.79194450378418,
      "learning_rate": 7.100000000000001e-06,
      "loss": 45.763,
      "step": 71
    },
    {
      "epoch": 0.1238443345517093,
      "grad_norm": 6.2670722007751465,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 45.137,
      "step": 72
    },
    {
      "epoch": 0.1255643947538164,
      "grad_norm": 5.60247278213501,
      "learning_rate": 7.3e-06,
      "loss": 45.7123,
      "step": 73
    },
    {
      "epoch": 0.12728445495592347,
      "grad_norm": 7.346188545227051,
      "learning_rate": 7.4e-06,
      "loss": 45.5004,
      "step": 74
    },
    {
      "epoch": 0.12900451515803052,
      "grad_norm": 6.078243255615234,
      "learning_rate": 7.500000000000001e-06,
      "loss": 45.5731,
      "step": 75
    },
    {
      "epoch": 0.1307245753601376,
      "grad_norm": 7.248181343078613,
      "learning_rate": 7.600000000000001e-06,
      "loss": 45.7506,
      "step": 76
    },
    {
      "epoch": 0.13244463556224467,
      "grad_norm": 5.837612628936768,
      "learning_rate": 7.7e-06,
      "loss": 45.7486,
      "step": 77
    },
    {
      "epoch": 0.13416469576435175,
      "grad_norm": 7.661535739898682,
      "learning_rate": 7.800000000000002e-06,
      "loss": 45.5743,
      "step": 78
    },
    {
      "epoch": 0.13588475596645883,
      "grad_norm": 5.925168991088867,
      "learning_rate": 7.9e-06,
      "loss": 45.0878,
      "step": 79
    },
    {
      "epoch": 0.1376048161685659,
      "grad_norm": 6.47566032409668,
      "learning_rate": 8.000000000000001e-06,
      "loss": 45.4567,
      "step": 80
    },
    {
      "epoch": 0.13932487637067298,
      "grad_norm": 5.9641852378845215,
      "learning_rate": 8.1e-06,
      "loss": 45.0619,
      "step": 81
    },
    {
      "epoch": 0.14104493657278006,
      "grad_norm": 6.4052324295043945,
      "learning_rate": 8.2e-06,
      "loss": 45.9375,
      "step": 82
    },
    {
      "epoch": 0.14276499677488713,
      "grad_norm": 6.149839401245117,
      "learning_rate": 8.3e-06,
      "loss": 45.5695,
      "step": 83
    },
    {
      "epoch": 0.1444850569769942,
      "grad_norm": 6.546520233154297,
      "learning_rate": 8.400000000000001e-06,
      "loss": 45.8453,
      "step": 84
    },
    {
      "epoch": 0.14620511717910126,
      "grad_norm": 6.221002578735352,
      "learning_rate": 8.5e-06,
      "loss": 44.9717,
      "step": 85
    },
    {
      "epoch": 0.14792517738120833,
      "grad_norm": 6.772427558898926,
      "learning_rate": 8.6e-06,
      "loss": 45.8218,
      "step": 86
    },
    {
      "epoch": 0.1496452375833154,
      "grad_norm": 7.193835258483887,
      "learning_rate": 8.700000000000001e-06,
      "loss": 45.4445,
      "step": 87
    },
    {
      "epoch": 0.1513652977854225,
      "grad_norm": 6.1274895668029785,
      "learning_rate": 8.8e-06,
      "loss": 45.127,
      "step": 88
    },
    {
      "epoch": 0.15308535798752956,
      "grad_norm": 7.984500885009766,
      "learning_rate": 8.900000000000001e-06,
      "loss": 44.9928,
      "step": 89
    },
    {
      "epoch": 0.15480541818963664,
      "grad_norm": 6.346322059631348,
      "learning_rate": 9e-06,
      "loss": 45.4616,
      "step": 90
    },
    {
      "epoch": 0.15652547839174372,
      "grad_norm": 7.460362911224365,
      "learning_rate": 9.100000000000001e-06,
      "loss": 45.2488,
      "step": 91
    },
    {
      "epoch": 0.1582455385938508,
      "grad_norm": 7.510838508605957,
      "learning_rate": 9.200000000000002e-06,
      "loss": 45.0087,
      "step": 92
    },
    {
      "epoch": 0.15996559879595787,
      "grad_norm": 6.567174911499023,
      "learning_rate": 9.3e-06,
      "loss": 44.3216,
      "step": 93
    },
    {
      "epoch": 0.16168565899806495,
      "grad_norm": 8.357160568237305,
      "learning_rate": 9.4e-06,
      "loss": 45.1102,
      "step": 94
    },
    {
      "epoch": 0.163405719200172,
      "grad_norm": 6.190718650817871,
      "learning_rate": 9.5e-06,
      "loss": 45.0187,
      "step": 95
    },
    {
      "epoch": 0.16512577940227907,
      "grad_norm": 6.47848653793335,
      "learning_rate": 9.600000000000001e-06,
      "loss": 45.9968,
      "step": 96
    },
    {
      "epoch": 0.16684583960438615,
      "grad_norm": 6.271050453186035,
      "learning_rate": 9.7e-06,
      "loss": 45.6172,
      "step": 97
    },
    {
      "epoch": 0.16856589980649322,
      "grad_norm": 7.013180255889893,
      "learning_rate": 9.800000000000001e-06,
      "loss": 45.1628,
      "step": 98
    },
    {
      "epoch": 0.1702859600086003,
      "grad_norm": 6.151601314544678,
      "learning_rate": 9.9e-06,
      "loss": 45.3331,
      "step": 99
    },
    {
      "epoch": 0.17200602021070738,
      "grad_norm": 6.522064685821533,
      "learning_rate": 1e-05,
      "loss": 45.01,
      "step": 100
    },
    {
      "epoch": 0.17372608041281445,
      "grad_norm": 6.936015605926514,
      "learning_rate": 9.999990859614876e-06,
      "loss": 44.7838,
      "step": 101
    },
    {
      "epoch": 0.17544614061492153,
      "grad_norm": 6.696622371673584,
      "learning_rate": 9.99996343849292e-06,
      "loss": 44.4096,
      "step": 102
    },
    {
      "epoch": 0.1771662008170286,
      "grad_norm": 6.770718574523926,
      "learning_rate": 9.999917736734387e-06,
      "loss": 44.6306,
      "step": 103
    },
    {
      "epoch": 0.17888626101913566,
      "grad_norm": 8.121861457824707,
      "learning_rate": 9.999853754506375e-06,
      "loss": 45.3421,
      "step": 104
    },
    {
      "epoch": 0.18060632122124273,
      "grad_norm": 7.101470947265625,
      "learning_rate": 9.999771492042807e-06,
      "loss": 45.415,
      "step": 105
    },
    {
      "epoch": 0.1823263814233498,
      "grad_norm": 8.22966194152832,
      "learning_rate": 9.99967094964445e-06,
      "loss": 45.3149,
      "step": 106
    },
    {
      "epoch": 0.18404644162545689,
      "grad_norm": 10.975841522216797,
      "learning_rate": 9.9995521276789e-06,
      "loss": 45.2939,
      "step": 107
    },
    {
      "epoch": 0.18576650182756396,
      "grad_norm": 7.771969795227051,
      "learning_rate": 9.999415026580592e-06,
      "loss": 45.2433,
      "step": 108
    },
    {
      "epoch": 0.18748656202967104,
      "grad_norm": 10.387533187866211,
      "learning_rate": 9.999259646850787e-06,
      "loss": 45.2594,
      "step": 109
    },
    {
      "epoch": 0.18920662223177812,
      "grad_norm": 8.546263694763184,
      "learning_rate": 9.999085989057578e-06,
      "loss": 45.0587,
      "step": 110
    },
    {
      "epoch": 0.1909266824338852,
      "grad_norm": 9.245797157287598,
      "learning_rate": 9.998894053835883e-06,
      "loss": 45.2273,
      "step": 111
    },
    {
      "epoch": 0.19264674263599227,
      "grad_norm": 9.295574188232422,
      "learning_rate": 9.998683841887449e-06,
      "loss": 45.5471,
      "step": 112
    },
    {
      "epoch": 0.19436680283809935,
      "grad_norm": 8.650074005126953,
      "learning_rate": 9.99845535398084e-06,
      "loss": 45.2802,
      "step": 113
    },
    {
      "epoch": 0.1960868630402064,
      "grad_norm": 11.102032661437988,
      "learning_rate": 9.998208590951449e-06,
      "loss": 45.3066,
      "step": 114
    },
    {
      "epoch": 0.19780692324231347,
      "grad_norm": 6.938677787780762,
      "learning_rate": 9.99794355370147e-06,
      "loss": 45.1454,
      "step": 115
    },
    {
      "epoch": 0.19952698344442055,
      "grad_norm": 16.639892578125,
      "learning_rate": 9.997660243199928e-06,
      "loss": 45.6125,
      "step": 116
    },
    {
      "epoch": 0.20124704364652762,
      "grad_norm": 11.0328369140625,
      "learning_rate": 9.99735866048265e-06,
      "loss": 45.4087,
      "step": 117
    },
    {
      "epoch": 0.2029671038486347,
      "grad_norm": 13.419515609741211,
      "learning_rate": 9.997038806652264e-06,
      "loss": 45.8139,
      "step": 118
    },
    {
      "epoch": 0.20468716405074178,
      "grad_norm": 13.69460678100586,
      "learning_rate": 9.996700682878206e-06,
      "loss": 45.304,
      "step": 119
    },
    {
      "epoch": 0.20640722425284885,
      "grad_norm": 7.643240928649902,
      "learning_rate": 9.996344290396713e-06,
      "loss": 45.1699,
      "step": 120
    },
    {
      "epoch": 0.20812728445495593,
      "grad_norm": 9.692140579223633,
      "learning_rate": 9.995969630510805e-06,
      "loss": 44.7309,
      "step": 121
    },
    {
      "epoch": 0.209847344657063,
      "grad_norm": 7.550359725952148,
      "learning_rate": 9.995576704590299e-06,
      "loss": 44.6299,
      "step": 122
    },
    {
      "epoch": 0.21156740485917008,
      "grad_norm": 6.425361156463623,
      "learning_rate": 9.995165514071793e-06,
      "loss": 44.8296,
      "step": 123
    },
    {
      "epoch": 0.21328746506127713,
      "grad_norm": 8.089837074279785,
      "learning_rate": 9.994736060458665e-06,
      "loss": 45.2622,
      "step": 124
    },
    {
      "epoch": 0.2150075252633842,
      "grad_norm": 6.491065979003906,
      "learning_rate": 9.994288345321059e-06,
      "loss": 44.3288,
      "step": 125
    },
    {
      "epoch": 0.21672758546549128,
      "grad_norm": 5.423704147338867,
      "learning_rate": 9.993822370295892e-06,
      "loss": 44.7233,
      "step": 126
    },
    {
      "epoch": 0.21844764566759836,
      "grad_norm": 7.901766777038574,
      "learning_rate": 9.993338137086841e-06,
      "loss": 45.0159,
      "step": 127
    },
    {
      "epoch": 0.22016770586970544,
      "grad_norm": 7.50944709777832,
      "learning_rate": 9.992835647464339e-06,
      "loss": 44.8785,
      "step": 128
    },
    {
      "epoch": 0.22188776607181251,
      "grad_norm": 7.400681972503662,
      "learning_rate": 9.992314903265561e-06,
      "loss": 45.3053,
      "step": 129
    },
    {
      "epoch": 0.2236078262739196,
      "grad_norm": 8.214972496032715,
      "learning_rate": 9.991775906394434e-06,
      "loss": 44.8066,
      "step": 130
    },
    {
      "epoch": 0.22532788647602667,
      "grad_norm": 7.00181770324707,
      "learning_rate": 9.991218658821609e-06,
      "loss": 44.5013,
      "step": 131
    },
    {
      "epoch": 0.22704794667813374,
      "grad_norm": 6.811863422393799,
      "learning_rate": 9.990643162584467e-06,
      "loss": 44.7327,
      "step": 132
    },
    {
      "epoch": 0.22876800688024082,
      "grad_norm": 8.760518074035645,
      "learning_rate": 9.99004941978712e-06,
      "loss": 44.6706,
      "step": 133
    },
    {
      "epoch": 0.23048806708234787,
      "grad_norm": 7.204503536224365,
      "learning_rate": 9.989437432600373e-06,
      "loss": 44.8113,
      "step": 134
    },
    {
      "epoch": 0.23220812728445495,
      "grad_norm": 8.635207176208496,
      "learning_rate": 9.988807203261752e-06,
      "loss": 45.3002,
      "step": 135
    },
    {
      "epoch": 0.23392818748656202,
      "grad_norm": 7.5076823234558105,
      "learning_rate": 9.988158734075468e-06,
      "loss": 44.919,
      "step": 136
    },
    {
      "epoch": 0.2356482476886691,
      "grad_norm": 8.329002380371094,
      "learning_rate": 9.98749202741243e-06,
      "loss": 44.9676,
      "step": 137
    },
    {
      "epoch": 0.23736830789077618,
      "grad_norm": 7.5155158042907715,
      "learning_rate": 9.986807085710213e-06,
      "loss": 45.167,
      "step": 138
    },
    {
      "epoch": 0.23908836809288325,
      "grad_norm": 8.341093063354492,
      "learning_rate": 9.986103911473075e-06,
      "loss": 45.1884,
      "step": 139
    },
    {
      "epoch": 0.24080842829499033,
      "grad_norm": 7.4146342277526855,
      "learning_rate": 9.985382507271928e-06,
      "loss": 44.8973,
      "step": 140
    },
    {
      "epoch": 0.2425284884970974,
      "grad_norm": 8.28603458404541,
      "learning_rate": 9.984642875744338e-06,
      "loss": 44.6793,
      "step": 141
    },
    {
      "epoch": 0.24424854869920448,
      "grad_norm": 7.7817583084106445,
      "learning_rate": 9.983885019594506e-06,
      "loss": 44.7361,
      "step": 142
    },
    {
      "epoch": 0.24596860890131156,
      "grad_norm": 8.71711254119873,
      "learning_rate": 9.983108941593277e-06,
      "loss": 44.514,
      "step": 143
    },
    {
      "epoch": 0.2476886691034186,
      "grad_norm": 8.746935844421387,
      "learning_rate": 9.982314644578111e-06,
      "loss": 45.4915,
      "step": 144
    },
    {
      "epoch": 0.24940872930552568,
      "grad_norm": 8.616266250610352,
      "learning_rate": 9.981502131453077e-06,
      "loss": 44.4955,
      "step": 145
    },
    {
      "epoch": 0.2511287895076328,
      "grad_norm": 8.73255443572998,
      "learning_rate": 9.980671405188852e-06,
      "loss": 44.7087,
      "step": 146
    },
    {
      "epoch": 0.25284884970973986,
      "grad_norm": 7.063310623168945,
      "learning_rate": 9.979822468822696e-06,
      "loss": 44.7221,
      "step": 147
    },
    {
      "epoch": 0.25456890991184694,
      "grad_norm": 8.635971069335938,
      "learning_rate": 9.978955325458453e-06,
      "loss": 44.5993,
      "step": 148
    },
    {
      "epoch": 0.25628897011395396,
      "grad_norm": 7.771418571472168,
      "learning_rate": 9.978069978266534e-06,
      "loss": 45.1399,
      "step": 149
    },
    {
      "epoch": 0.25800903031606104,
      "grad_norm": 9.317761421203613,
      "learning_rate": 9.9771664304839e-06,
      "loss": 44.8672,
      "step": 150
    },
    {
      "epoch": 0.2597290905181681,
      "grad_norm": 8.102662086486816,
      "learning_rate": 9.976244685414065e-06,
      "loss": 45.4121,
      "step": 151
    },
    {
      "epoch": 0.2614491507202752,
      "grad_norm": 12.453920364379883,
      "learning_rate": 9.97530474642707e-06,
      "loss": 44.6091,
      "step": 152
    },
    {
      "epoch": 0.26316921092238227,
      "grad_norm": 9.156878471374512,
      "learning_rate": 9.974346616959476e-06,
      "loss": 45.1213,
      "step": 153
    },
    {
      "epoch": 0.26488927112448934,
      "grad_norm": 11.89529037475586,
      "learning_rate": 9.973370300514353e-06,
      "loss": 44.5162,
      "step": 154
    },
    {
      "epoch": 0.2666093313265964,
      "grad_norm": 9.946409225463867,
      "learning_rate": 9.972375800661264e-06,
      "loss": 44.106,
      "step": 155
    },
    {
      "epoch": 0.2683293915287035,
      "grad_norm": 10.854037284851074,
      "learning_rate": 9.971363121036252e-06,
      "loss": 44.9188,
      "step": 156
    },
    {
      "epoch": 0.2700494517308106,
      "grad_norm": 9.196579933166504,
      "learning_rate": 9.970332265341833e-06,
      "loss": 44.6235,
      "step": 157
    },
    {
      "epoch": 0.27176951193291765,
      "grad_norm": 11.79194164276123,
      "learning_rate": 9.969283237346973e-06,
      "loss": 45.7572,
      "step": 158
    },
    {
      "epoch": 0.2734895721350247,
      "grad_norm": 7.789967060089111,
      "learning_rate": 9.968216040887078e-06,
      "loss": 44.8069,
      "step": 159
    },
    {
      "epoch": 0.2752096323371318,
      "grad_norm": 10.540818214416504,
      "learning_rate": 9.967130679863984e-06,
      "loss": 44.7341,
      "step": 160
    },
    {
      "epoch": 0.2769296925392389,
      "grad_norm": 7.4835405349731445,
      "learning_rate": 9.966027158245939e-06,
      "loss": 45.4445,
      "step": 161
    },
    {
      "epoch": 0.27864975274134596,
      "grad_norm": 9.472413063049316,
      "learning_rate": 9.964905480067585e-06,
      "loss": 44.5498,
      "step": 162
    },
    {
      "epoch": 0.28036981294345303,
      "grad_norm": 9.155404090881348,
      "learning_rate": 9.963765649429954e-06,
      "loss": 44.3932,
      "step": 163
    },
    {
      "epoch": 0.2820898731455601,
      "grad_norm": 8.14942455291748,
      "learning_rate": 9.962607670500442e-06,
      "loss": 44.5368,
      "step": 164
    },
    {
      "epoch": 0.2838099333476672,
      "grad_norm": 10.103939056396484,
      "learning_rate": 9.961431547512794e-06,
      "loss": 44.1877,
      "step": 165
    },
    {
      "epoch": 0.28552999354977426,
      "grad_norm": 7.972369194030762,
      "learning_rate": 9.960237284767103e-06,
      "loss": 44.775,
      "step": 166
    },
    {
      "epoch": 0.28725005375188134,
      "grad_norm": 13.627598762512207,
      "learning_rate": 9.959024886629772e-06,
      "loss": 44.3529,
      "step": 167
    },
    {
      "epoch": 0.2889701139539884,
      "grad_norm": 9.467161178588867,
      "learning_rate": 9.957794357533518e-06,
      "loss": 44.7574,
      "step": 168
    },
    {
      "epoch": 0.29069017415609544,
      "grad_norm": 12.242958068847656,
      "learning_rate": 9.956545701977347e-06,
      "loss": 44.5439,
      "step": 169
    },
    {
      "epoch": 0.2924102343582025,
      "grad_norm": 10.142478942871094,
      "learning_rate": 9.955278924526532e-06,
      "loss": 44.4951,
      "step": 170
    },
    {
      "epoch": 0.2941302945603096,
      "grad_norm": 12.610379219055176,
      "learning_rate": 9.95399402981261e-06,
      "loss": 44.7777,
      "step": 171
    },
    {
      "epoch": 0.29585035476241667,
      "grad_norm": 9.542120933532715,
      "learning_rate": 9.952691022533352e-06,
      "loss": 44.5877,
      "step": 172
    },
    {
      "epoch": 0.29757041496452374,
      "grad_norm": 13.538004875183105,
      "learning_rate": 9.951369907452752e-06,
      "loss": 44.4161,
      "step": 173
    },
    {
      "epoch": 0.2992904751666308,
      "grad_norm": 11.80547046661377,
      "learning_rate": 9.950030689401014e-06,
      "loss": 44.8214,
      "step": 174
    },
    {
      "epoch": 0.3010105353687379,
      "grad_norm": 11.983033180236816,
      "learning_rate": 9.948673373274523e-06,
      "loss": 44.7073,
      "step": 175
    },
    {
      "epoch": 0.302730595570845,
      "grad_norm": 10.844096183776855,
      "learning_rate": 9.947297964035837e-06,
      "loss": 44.4097,
      "step": 176
    },
    {
      "epoch": 0.30445065577295205,
      "grad_norm": 11.820161819458008,
      "learning_rate": 9.94590446671366e-06,
      "loss": 44.7555,
      "step": 177
    },
    {
      "epoch": 0.3061707159750591,
      "grad_norm": 11.587363243103027,
      "learning_rate": 9.94449288640284e-06,
      "loss": 44.2532,
      "step": 178
    },
    {
      "epoch": 0.3078907761771662,
      "grad_norm": 10.775352478027344,
      "learning_rate": 9.943063228264327e-06,
      "loss": 44.8222,
      "step": 179
    },
    {
      "epoch": 0.3096108363792733,
      "grad_norm": 12.077004432678223,
      "learning_rate": 9.941615497525172e-06,
      "loss": 45.0694,
      "step": 180
    },
    {
      "epoch": 0.31133089658138036,
      "grad_norm": 9.174921035766602,
      "learning_rate": 9.940149699478502e-06,
      "loss": 44.4424,
      "step": 181
    },
    {
      "epoch": 0.31305095678348743,
      "grad_norm": 9.949912071228027,
      "learning_rate": 9.938665839483503e-06,
      "loss": 44.82,
      "step": 182
    },
    {
      "epoch": 0.3147710169855945,
      "grad_norm": 10.341442108154297,
      "learning_rate": 9.937163922965394e-06,
      "loss": 43.993,
      "step": 183
    },
    {
      "epoch": 0.3164910771877016,
      "grad_norm": 7.373370170593262,
      "learning_rate": 9.93564395541541e-06,
      "loss": 44.6727,
      "step": 184
    },
    {
      "epoch": 0.31821113738980866,
      "grad_norm": 11.212616920471191,
      "learning_rate": 9.93410594239079e-06,
      "loss": 44.3283,
      "step": 185
    },
    {
      "epoch": 0.31993119759191574,
      "grad_norm": 8.05331802368164,
      "learning_rate": 9.932549889514747e-06,
      "loss": 44.6646,
      "step": 186
    },
    {
      "epoch": 0.3216512577940228,
      "grad_norm": 11.395249366760254,
      "learning_rate": 9.930975802476448e-06,
      "loss": 43.9711,
      "step": 187
    },
    {
      "epoch": 0.3233713179961299,
      "grad_norm": 7.730330944061279,
      "learning_rate": 9.929383687030999e-06,
      "loss": 45.2283,
      "step": 188
    },
    {
      "epoch": 0.3250913781982369,
      "grad_norm": 11.927477836608887,
      "learning_rate": 9.927773548999419e-06,
      "loss": 44.723,
      "step": 189
    },
    {
      "epoch": 0.326811438400344,
      "grad_norm": 8.933055877685547,
      "learning_rate": 9.92614539426862e-06,
      "loss": 44.5058,
      "step": 190
    },
    {
      "epoch": 0.32853149860245107,
      "grad_norm": 13.985485076904297,
      "learning_rate": 9.924499228791387e-06,
      "loss": 44.8917,
      "step": 191
    },
    {
      "epoch": 0.33025155880455814,
      "grad_norm": 13.561887741088867,
      "learning_rate": 9.922835058586353e-06,
      "loss": 44.6659,
      "step": 192
    },
    {
      "epoch": 0.3319716190066652,
      "grad_norm": 8.802722930908203,
      "learning_rate": 9.921152889737985e-06,
      "loss": 43.9996,
      "step": 193
    },
    {
      "epoch": 0.3336916792087723,
      "grad_norm": 10.387024879455566,
      "learning_rate": 9.919452728396548e-06,
      "loss": 44.7691,
      "step": 194
    },
    {
      "epoch": 0.3354117394108794,
      "grad_norm": 8.743358612060547,
      "learning_rate": 9.917734580778094e-06,
      "loss": 45.2034,
      "step": 195
    },
    {
      "epoch": 0.33713179961298645,
      "grad_norm": 7.972978591918945,
      "learning_rate": 9.915998453164435e-06,
      "loss": 45.5303,
      "step": 196
    },
    {
      "epoch": 0.3388518598150935,
      "grad_norm": 8.630827903747559,
      "learning_rate": 9.914244351903122e-06,
      "loss": 44.7464,
      "step": 197
    },
    {
      "epoch": 0.3405719200172006,
      "grad_norm": 6.4518609046936035,
      "learning_rate": 9.912472283407421e-06,
      "loss": 44.2976,
      "step": 198
    },
    {
      "epoch": 0.3422919802193077,
      "grad_norm": 8.886955261230469,
      "learning_rate": 9.910682254156284e-06,
      "loss": 44.8556,
      "step": 199
    },
    {
      "epoch": 0.34401204042141476,
      "grad_norm": 7.261415004730225,
      "learning_rate": 9.908874270694337e-06,
      "loss": 44.466,
      "step": 200
    },
    {
      "epoch": 0.34573210062352183,
      "grad_norm": 8.031500816345215,
      "learning_rate": 9.907048339631843e-06,
      "loss": 44.4753,
      "step": 201
    },
    {
      "epoch": 0.3474521608256289,
      "grad_norm": 7.2853569984436035,
      "learning_rate": 9.905204467644688e-06,
      "loss": 45.0669,
      "step": 202
    },
    {
      "epoch": 0.349172221027736,
      "grad_norm": 8.290436744689941,
      "learning_rate": 9.903342661474355e-06,
      "loss": 44.7901,
      "step": 203
    },
    {
      "epoch": 0.35089228122984306,
      "grad_norm": 9.512991905212402,
      "learning_rate": 9.901462927927891e-06,
      "loss": 44.7863,
      "step": 204
    },
    {
      "epoch": 0.35261234143195014,
      "grad_norm": 8.04033374786377,
      "learning_rate": 9.899565273877892e-06,
      "loss": 44.9776,
      "step": 205
    },
    {
      "epoch": 0.3543324016340572,
      "grad_norm": 11.140141487121582,
      "learning_rate": 9.897649706262474e-06,
      "loss": 45.1048,
      "step": 206
    },
    {
      "epoch": 0.3560524618361643,
      "grad_norm": 8.036794662475586,
      "learning_rate": 9.895716232085247e-06,
      "loss": 44.763,
      "step": 207
    },
    {
      "epoch": 0.3577725220382713,
      "grad_norm": 11.508170127868652,
      "learning_rate": 9.89376485841529e-06,
      "loss": 44.7094,
      "step": 208
    },
    {
      "epoch": 0.3594925822403784,
      "grad_norm": 8.577386856079102,
      "learning_rate": 9.891795592387127e-06,
      "loss": 44.5892,
      "step": 209
    },
    {
      "epoch": 0.36121264244248547,
      "grad_norm": 11.701868057250977,
      "learning_rate": 9.889808441200697e-06,
      "loss": 44.4664,
      "step": 210
    },
    {
      "epoch": 0.36293270264459254,
      "grad_norm": 7.955048561096191,
      "learning_rate": 9.887803412121331e-06,
      "loss": 44.5424,
      "step": 211
    },
    {
      "epoch": 0.3646527628466996,
      "grad_norm": 11.340240478515625,
      "learning_rate": 9.885780512479725e-06,
      "loss": 44.3322,
      "step": 212
    },
    {
      "epoch": 0.3663728230488067,
      "grad_norm": 8.020219802856445,
      "learning_rate": 9.88373974967191e-06,
      "loss": 44.2046,
      "step": 213
    },
    {
      "epoch": 0.36809288325091377,
      "grad_norm": 10.230839729309082,
      "learning_rate": 9.881681131159232e-06,
      "loss": 44.2038,
      "step": 214
    },
    {
      "epoch": 0.36981294345302085,
      "grad_norm": 8.711820602416992,
      "learning_rate": 9.879604664468315e-06,
      "loss": 44.0065,
      "step": 215
    },
    {
      "epoch": 0.3715330036551279,
      "grad_norm": 10.792113304138184,
      "learning_rate": 9.877510357191042e-06,
      "loss": 44.3272,
      "step": 216
    },
    {
      "epoch": 0.373253063857235,
      "grad_norm": 8.520330429077148,
      "learning_rate": 9.875398216984521e-06,
      "loss": 44.403,
      "step": 217
    },
    {
      "epoch": 0.3749731240593421,
      "grad_norm": 12.095443725585938,
      "learning_rate": 9.873268251571065e-06,
      "loss": 44.8801,
      "step": 218
    },
    {
      "epoch": 0.37669318426144915,
      "grad_norm": 10.553284645080566,
      "learning_rate": 9.871120468738156e-06,
      "loss": 44.1855,
      "step": 219
    },
    {
      "epoch": 0.37841324446355623,
      "grad_norm": 10.677011489868164,
      "learning_rate": 9.868954876338414e-06,
      "loss": 44.9765,
      "step": 220
    },
    {
      "epoch": 0.3801333046656633,
      "grad_norm": 10.681044578552246,
      "learning_rate": 9.866771482289585e-06,
      "loss": 44.3767,
      "step": 221
    },
    {
      "epoch": 0.3818533648677704,
      "grad_norm": 8.905034065246582,
      "learning_rate": 9.86457029457449e-06,
      "loss": 44.5735,
      "step": 222
    },
    {
      "epoch": 0.38357342506987746,
      "grad_norm": 11.448368072509766,
      "learning_rate": 9.86235132124101e-06,
      "loss": 44.3242,
      "step": 223
    },
    {
      "epoch": 0.38529348527198454,
      "grad_norm": 8.515959739685059,
      "learning_rate": 9.860114570402055e-06,
      "loss": 45.0004,
      "step": 224
    },
    {
      "epoch": 0.3870135454740916,
      "grad_norm": 11.419853210449219,
      "learning_rate": 9.85786005023553e-06,
      "loss": 44.7329,
      "step": 225
    },
    {
      "epoch": 0.3887336056761987,
      "grad_norm": 10.274301528930664,
      "learning_rate": 9.855587768984308e-06,
      "loss": 44.5693,
      "step": 226
    },
    {
      "epoch": 0.39045366587830577,
      "grad_norm": 9.408884048461914,
      "learning_rate": 9.8532977349562e-06,
      "loss": 44.2397,
      "step": 227
    },
    {
      "epoch": 0.3921737260804128,
      "grad_norm": 8.234622955322266,
      "learning_rate": 9.850989956523922e-06,
      "loss": 44.3023,
      "step": 228
    },
    {
      "epoch": 0.39389378628251986,
      "grad_norm": 10.358318328857422,
      "learning_rate": 9.848664442125068e-06,
      "loss": 44.6614,
      "step": 229
    },
    {
      "epoch": 0.39561384648462694,
      "grad_norm": 9.553954124450684,
      "learning_rate": 9.846321200262079e-06,
      "loss": 44.8496,
      "step": 230
    },
    {
      "epoch": 0.397333906686734,
      "grad_norm": 9.689641952514648,
      "learning_rate": 9.843960239502205e-06,
      "loss": 44.1763,
      "step": 231
    },
    {
      "epoch": 0.3990539668888411,
      "grad_norm": 8.881220817565918,
      "learning_rate": 9.841581568477483e-06,
      "loss": 44.5679,
      "step": 232
    },
    {
      "epoch": 0.40077402709094817,
      "grad_norm": 8.874262809753418,
      "learning_rate": 9.839185195884702e-06,
      "loss": 45.0497,
      "step": 233
    },
    {
      "epoch": 0.40249408729305525,
      "grad_norm": 8.762188911437988,
      "learning_rate": 9.836771130485367e-06,
      "loss": 44.415,
      "step": 234
    },
    {
      "epoch": 0.4042141474951623,
      "grad_norm": 7.233129501342773,
      "learning_rate": 9.834339381105676e-06,
      "loss": 44.4637,
      "step": 235
    },
    {
      "epoch": 0.4059342076972694,
      "grad_norm": 10.540146827697754,
      "learning_rate": 9.831889956636478e-06,
      "loss": 44.525,
      "step": 236
    },
    {
      "epoch": 0.4076542678993765,
      "grad_norm": 5.345295429229736,
      "learning_rate": 9.829422866033246e-06,
      "loss": 43.6553,
      "step": 237
    },
    {
      "epoch": 0.40937432810148355,
      "grad_norm": 9.131731033325195,
      "learning_rate": 9.826938118316044e-06,
      "loss": 44.2395,
      "step": 238
    },
    {
      "epoch": 0.41109438830359063,
      "grad_norm": 6.4219560623168945,
      "learning_rate": 9.82443572256949e-06,
      "loss": 44.541,
      "step": 239
    },
    {
      "epoch": 0.4128144485056977,
      "grad_norm": 7.843993663787842,
      "learning_rate": 9.821915687942729e-06,
      "loss": 44.6975,
      "step": 240
    },
    {
      "epoch": 0.4145345087078048,
      "grad_norm": 7.926616668701172,
      "learning_rate": 9.8193780236494e-06,
      "loss": 43.983,
      "step": 241
    },
    {
      "epoch": 0.41625456890991186,
      "grad_norm": 7.5367045402526855,
      "learning_rate": 9.81682273896759e-06,
      "loss": 43.9646,
      "step": 242
    },
    {
      "epoch": 0.41797462911201894,
      "grad_norm": 10.298775672912598,
      "learning_rate": 9.814249843239816e-06,
      "loss": 44.0679,
      "step": 243
    },
    {
      "epoch": 0.419694689314126,
      "grad_norm": 8.142918586730957,
      "learning_rate": 9.811659345872979e-06,
      "loss": 44.9597,
      "step": 244
    },
    {
      "epoch": 0.4214147495162331,
      "grad_norm": 11.297587394714355,
      "learning_rate": 9.809051256338338e-06,
      "loss": 44.3569,
      "step": 245
    },
    {
      "epoch": 0.42313480971834017,
      "grad_norm": 8.743409156799316,
      "learning_rate": 9.806425584171468e-06,
      "loss": 43.6217,
      "step": 246
    },
    {
      "epoch": 0.42485486992044724,
      "grad_norm": 9.554738998413086,
      "learning_rate": 9.803782338972235e-06,
      "loss": 44.3762,
      "step": 247
    },
    {
      "epoch": 0.42657493012255426,
      "grad_norm": 8.766114234924316,
      "learning_rate": 9.801121530404746e-06,
      "loss": 44.1824,
      "step": 248
    },
    {
      "epoch": 0.42829499032466134,
      "grad_norm": 8.465466499328613,
      "learning_rate": 9.798443168197332e-06,
      "loss": 44.0283,
      "step": 249
    },
    {
      "epoch": 0.4300150505267684,
      "grad_norm": 8.999267578125,
      "learning_rate": 9.795747262142494e-06,
      "loss": 44.1171,
      "step": 250
    },
    {
      "epoch": 0.4317351107288755,
      "grad_norm": 6.678277492523193,
      "learning_rate": 9.79303382209688e-06,
      "loss": 44.7172,
      "step": 251
    },
    {
      "epoch": 0.43345517093098257,
      "grad_norm": 9.430837631225586,
      "learning_rate": 9.790302857981247e-06,
      "loss": 44.3632,
      "step": 252
    },
    {
      "epoch": 0.43517523113308965,
      "grad_norm": 6.532567501068115,
      "learning_rate": 9.787554379780417e-06,
      "loss": 44.2348,
      "step": 253
    },
    {
      "epoch": 0.4368952913351967,
      "grad_norm": 9.008966445922852,
      "learning_rate": 9.784788397543254e-06,
      "loss": 43.9189,
      "step": 254
    },
    {
      "epoch": 0.4386153515373038,
      "grad_norm": 7.171030521392822,
      "learning_rate": 9.782004921382612e-06,
      "loss": 44.719,
      "step": 255
    },
    {
      "epoch": 0.4403354117394109,
      "grad_norm": 8.457947731018066,
      "learning_rate": 9.77920396147531e-06,
      "loss": 44.3203,
      "step": 256
    },
    {
      "epoch": 0.44205547194151795,
      "grad_norm": 8.303704261779785,
      "learning_rate": 9.77638552806209e-06,
      "loss": 44.6251,
      "step": 257
    },
    {
      "epoch": 0.44377553214362503,
      "grad_norm": 7.793144702911377,
      "learning_rate": 9.773549631447576e-06,
      "loss": 44.4527,
      "step": 258
    },
    {
      "epoch": 0.4454955923457321,
      "grad_norm": 9.074666976928711,
      "learning_rate": 9.770696282000245e-06,
      "loss": 44.4602,
      "step": 259
    },
    {
      "epoch": 0.4472156525478392,
      "grad_norm": 7.790366172790527,
      "learning_rate": 9.767825490152381e-06,
      "loss": 44.0525,
      "step": 260
    },
    {
      "epoch": 0.44893571274994626,
      "grad_norm": 6.790820598602295,
      "learning_rate": 9.764937266400042e-06,
      "loss": 44.3677,
      "step": 261
    },
    {
      "epoch": 0.45065577295205334,
      "grad_norm": 7.48856782913208,
      "learning_rate": 9.76203162130302e-06,
      "loss": 44.281,
      "step": 262
    },
    {
      "epoch": 0.4523758331541604,
      "grad_norm": 6.4276814460754395,
      "learning_rate": 9.759108565484796e-06,
      "loss": 44.5151,
      "step": 263
    },
    {
      "epoch": 0.4540958933562675,
      "grad_norm": 7.4707350730896,
      "learning_rate": 9.756168109632519e-06,
      "loss": 44.4653,
      "step": 264
    },
    {
      "epoch": 0.45581595355837456,
      "grad_norm": 6.78653621673584,
      "learning_rate": 9.753210264496943e-06,
      "loss": 45.1088,
      "step": 265
    },
    {
      "epoch": 0.45753601376048164,
      "grad_norm": 6.732356548309326,
      "learning_rate": 9.75023504089241e-06,
      "loss": 44.3737,
      "step": 266
    },
    {
      "epoch": 0.4592560739625887,
      "grad_norm": 6.069101810455322,
      "learning_rate": 9.747242449696794e-06,
      "loss": 44.5619,
      "step": 267
    },
    {
      "epoch": 0.46097613416469574,
      "grad_norm": 6.123370170593262,
      "learning_rate": 9.74423250185147e-06,
      "loss": 44.3021,
      "step": 268
    },
    {
      "epoch": 0.4626961943668028,
      "grad_norm": 6.104144096374512,
      "learning_rate": 9.74120520836127e-06,
      "loss": 44.2148,
      "step": 269
    },
    {
      "epoch": 0.4644162545689099,
      "grad_norm": 8.752744674682617,
      "learning_rate": 9.738160580294444e-06,
      "loss": 44.6205,
      "step": 270
    },
    {
      "epoch": 0.46613631477101697,
      "grad_norm": 7.354405879974365,
      "learning_rate": 9.735098628782624e-06,
      "loss": 44.459,
      "step": 271
    },
    {
      "epoch": 0.46785637497312405,
      "grad_norm": 8.801732063293457,
      "learning_rate": 9.732019365020778e-06,
      "loss": 44.635,
      "step": 272
    },
    {
      "epoch": 0.4695764351752311,
      "grad_norm": 7.110331058502197,
      "learning_rate": 9.728922800267162e-06,
      "loss": 44.3402,
      "step": 273
    },
    {
      "epoch": 0.4712964953773382,
      "grad_norm": 8.147953033447266,
      "learning_rate": 9.7258089458433e-06,
      "loss": 44.6291,
      "step": 274
    },
    {
      "epoch": 0.4730165555794453,
      "grad_norm": 7.08845853805542,
      "learning_rate": 9.722677813133921e-06,
      "loss": 45.0577,
      "step": 275
    },
    {
      "epoch": 0.47473661578155235,
      "grad_norm": 6.4095025062561035,
      "learning_rate": 9.719529413586928e-06,
      "loss": 43.9258,
      "step": 276
    },
    {
      "epoch": 0.47645667598365943,
      "grad_norm": 7.270499229431152,
      "learning_rate": 9.716363758713357e-06,
      "loss": 44.8198,
      "step": 277
    },
    {
      "epoch": 0.4781767361857665,
      "grad_norm": 7.040411949157715,
      "learning_rate": 9.713180860087328e-06,
      "loss": 44.1966,
      "step": 278
    },
    {
      "epoch": 0.4798967963878736,
      "grad_norm": 6.831189155578613,
      "learning_rate": 9.709980729346009e-06,
      "loss": 44.88,
      "step": 279
    },
    {
      "epoch": 0.48161685658998066,
      "grad_norm": 7.017982482910156,
      "learning_rate": 9.706763378189571e-06,
      "loss": 44.3914,
      "step": 280
    },
    {
      "epoch": 0.48333691679208773,
      "grad_norm": 7.631150245666504,
      "learning_rate": 9.703528818381144e-06,
      "loss": 44.3798,
      "step": 281
    },
    {
      "epoch": 0.4850569769941948,
      "grad_norm": 6.717972278594971,
      "learning_rate": 9.70027706174678e-06,
      "loss": 43.8038,
      "step": 282
    },
    {
      "epoch": 0.4867770371963019,
      "grad_norm": 8.252500534057617,
      "learning_rate": 9.697008120175402e-06,
      "loss": 44.4889,
      "step": 283
    },
    {
      "epoch": 0.48849709739840896,
      "grad_norm": 7.6612420082092285,
      "learning_rate": 9.693722005618763e-06,
      "loss": 44.2001,
      "step": 284
    },
    {
      "epoch": 0.49021715760051604,
      "grad_norm": 6.846263408660889,
      "learning_rate": 9.690418730091403e-06,
      "loss": 44.6985,
      "step": 285
    },
    {
      "epoch": 0.4919372178026231,
      "grad_norm": 6.461937427520752,
      "learning_rate": 9.687098305670606e-06,
      "loss": 44.2506,
      "step": 286
    },
    {
      "epoch": 0.49365727800473014,
      "grad_norm": 6.650141716003418,
      "learning_rate": 9.683760744496356e-06,
      "loss": 44.3858,
      "step": 287
    },
    {
      "epoch": 0.4953773382068372,
      "grad_norm": 6.5828986167907715,
      "learning_rate": 9.68040605877129e-06,
      "loss": 43.6322,
      "step": 288
    },
    {
      "epoch": 0.4970973984089443,
      "grad_norm": 6.385183334350586,
      "learning_rate": 9.677034260760658e-06,
      "loss": 44.7745,
      "step": 289
    },
    {
      "epoch": 0.49881745861105137,
      "grad_norm": 7.130415916442871,
      "learning_rate": 9.673645362792273e-06,
      "loss": 44.1543,
      "step": 290
    },
    {
      "epoch": 0.5005375188131584,
      "grad_norm": 6.580416202545166,
      "learning_rate": 9.670239377256467e-06,
      "loss": 43.8422,
      "step": 291
    },
    {
      "epoch": 0.5022575790152656,
      "grad_norm": 7.959731101989746,
      "learning_rate": 9.666816316606044e-06,
      "loss": 44.4367,
      "step": 292
    },
    {
      "epoch": 0.5039776392173726,
      "grad_norm": 6.089702606201172,
      "learning_rate": 9.663376193356249e-06,
      "loss": 43.9484,
      "step": 293
    },
    {
      "epoch": 0.5056976994194797,
      "grad_norm": 8.458806037902832,
      "learning_rate": 9.659919020084695e-06,
      "loss": 44.1408,
      "step": 294
    },
    {
      "epoch": 0.5074177596215868,
      "grad_norm": 6.733780860900879,
      "learning_rate": 9.656444809431344e-06,
      "loss": 43.9267,
      "step": 295
    },
    {
      "epoch": 0.5091378198236939,
      "grad_norm": 8.281241416931152,
      "learning_rate": 9.652953574098444e-06,
      "loss": 44.7447,
      "step": 296
    },
    {
      "epoch": 0.5108578800258009,
      "grad_norm": 7.2893195152282715,
      "learning_rate": 9.649445326850491e-06,
      "loss": 44.1749,
      "step": 297
    },
    {
      "epoch": 0.5125779402279079,
      "grad_norm": 8.188138961791992,
      "learning_rate": 9.645920080514176e-06,
      "loss": 44.5725,
      "step": 298
    },
    {
      "epoch": 0.5142980004300151,
      "grad_norm": 8.285508155822754,
      "learning_rate": 9.642377847978343e-06,
      "loss": 44.4519,
      "step": 299
    },
    {
      "epoch": 0.5160180606321221,
      "grad_norm": 12.107803344726562,
      "learning_rate": 9.638818642193939e-06,
      "loss": 43.6642,
      "step": 300
    },
    {
      "epoch": 0.5177381208342292,
      "grad_norm": 10.988150596618652,
      "learning_rate": 9.63524247617397e-06,
      "loss": 43.9385,
      "step": 301
    },
    {
      "epoch": 0.5194581810363362,
      "grad_norm": 12.65985107421875,
      "learning_rate": 9.631649362993447e-06,
      "loss": 44.304,
      "step": 302
    },
    {
      "epoch": 0.5211782412384434,
      "grad_norm": 12.63979721069336,
      "learning_rate": 9.62803931578935e-06,
      "loss": 44.2028,
      "step": 303
    },
    {
      "epoch": 0.5228983014405504,
      "grad_norm": 7.90657377243042,
      "learning_rate": 9.624412347760564e-06,
      "loss": 44.1649,
      "step": 304
    },
    {
      "epoch": 0.5246183616426575,
      "grad_norm": 9.31624698638916,
      "learning_rate": 9.620768472167844e-06,
      "loss": 43.996,
      "step": 305
    },
    {
      "epoch": 0.5263384218447645,
      "grad_norm": 8.557055473327637,
      "learning_rate": 9.61710770233376e-06,
      "loss": 44.3358,
      "step": 306
    },
    {
      "epoch": 0.5280584820468717,
      "grad_norm": 7.057743549346924,
      "learning_rate": 9.613430051642652e-06,
      "loss": 44.583,
      "step": 307
    },
    {
      "epoch": 0.5297785422489787,
      "grad_norm": 7.244456768035889,
      "learning_rate": 9.609735533540576e-06,
      "loss": 43.7423,
      "step": 308
    },
    {
      "epoch": 0.5314986024510858,
      "grad_norm": 6.6239333152771,
      "learning_rate": 9.606024161535261e-06,
      "loss": 43.719,
      "step": 309
    },
    {
      "epoch": 0.5332186626531928,
      "grad_norm": 7.109512805938721,
      "learning_rate": 9.602295949196052e-06,
      "loss": 43.8263,
      "step": 310
    },
    {
      "epoch": 0.5349387228553,
      "grad_norm": 7.938423156738281,
      "learning_rate": 9.59855091015387e-06,
      "loss": 43.513,
      "step": 311
    },
    {
      "epoch": 0.536658783057407,
      "grad_norm": 7.159519195556641,
      "learning_rate": 9.594789058101154e-06,
      "loss": 43.7476,
      "step": 312
    },
    {
      "epoch": 0.5383788432595141,
      "grad_norm": 9.051861763000488,
      "learning_rate": 9.591010406791814e-06,
      "loss": 44.6808,
      "step": 313
    },
    {
      "epoch": 0.5400989034616211,
      "grad_norm": 7.437441825866699,
      "learning_rate": 9.587214970041181e-06,
      "loss": 44.2018,
      "step": 314
    },
    {
      "epoch": 0.5418189636637283,
      "grad_norm": 8.533609390258789,
      "learning_rate": 9.58340276172596e-06,
      "loss": 44.35,
      "step": 315
    },
    {
      "epoch": 0.5435390238658353,
      "grad_norm": 7.41975736618042,
      "learning_rate": 9.579573795784167e-06,
      "loss": 44.1627,
      "step": 316
    },
    {
      "epoch": 0.5452590840679423,
      "grad_norm": 6.5013580322265625,
      "learning_rate": 9.575728086215093e-06,
      "loss": 44.0411,
      "step": 317
    },
    {
      "epoch": 0.5469791442700495,
      "grad_norm": 8.311059951782227,
      "learning_rate": 9.571865647079246e-06,
      "loss": 44.6953,
      "step": 318
    },
    {
      "epoch": 0.5486992044721565,
      "grad_norm": 5.960739612579346,
      "learning_rate": 9.567986492498299e-06,
      "loss": 44.1261,
      "step": 319
    },
    {
      "epoch": 0.5504192646742636,
      "grad_norm": 7.7150959968566895,
      "learning_rate": 9.564090636655033e-06,
      "loss": 44.0052,
      "step": 320
    },
    {
      "epoch": 0.5521393248763706,
      "grad_norm": 7.0516815185546875,
      "learning_rate": 9.560178093793304e-06,
      "loss": 44.4024,
      "step": 321
    },
    {
      "epoch": 0.5538593850784778,
      "grad_norm": 6.510403633117676,
      "learning_rate": 9.55624887821797e-06,
      "loss": 44.3171,
      "step": 322
    },
    {
      "epoch": 0.5555794452805848,
      "grad_norm": 6.586174488067627,
      "learning_rate": 9.552303004294845e-06,
      "loss": 44.1694,
      "step": 323
    },
    {
      "epoch": 0.5572995054826919,
      "grad_norm": 7.093349456787109,
      "learning_rate": 9.548340486450656e-06,
      "loss": 43.9714,
      "step": 324
    },
    {
      "epoch": 0.5590195656847989,
      "grad_norm": 5.638337135314941,
      "learning_rate": 9.544361339172976e-06,
      "loss": 43.9597,
      "step": 325
    },
    {
      "epoch": 0.5607396258869061,
      "grad_norm": 6.339056491851807,
      "learning_rate": 9.54036557701018e-06,
      "loss": 43.9718,
      "step": 326
    },
    {
      "epoch": 0.5624596860890131,
      "grad_norm": 6.259551525115967,
      "learning_rate": 9.536353214571393e-06,
      "loss": 44.1765,
      "step": 327
    },
    {
      "epoch": 0.5641797462911202,
      "grad_norm": 6.071033477783203,
      "learning_rate": 9.53232426652643e-06,
      "loss": 44.1298,
      "step": 328
    },
    {
      "epoch": 0.5658998064932272,
      "grad_norm": 5.966522216796875,
      "learning_rate": 9.528278747605741e-06,
      "loss": 43.5899,
      "step": 329
    },
    {
      "epoch": 0.5676198666953344,
      "grad_norm": 6.373861312866211,
      "learning_rate": 9.52421667260037e-06,
      "loss": 43.9747,
      "step": 330
    },
    {
      "epoch": 0.5693399268974414,
      "grad_norm": 7.79583215713501,
      "learning_rate": 9.52013805636189e-06,
      "loss": 44.1957,
      "step": 331
    },
    {
      "epoch": 0.5710599870995485,
      "grad_norm": 6.730131149291992,
      "learning_rate": 9.516042913802349e-06,
      "loss": 44.5105,
      "step": 332
    },
    {
      "epoch": 0.5727800473016555,
      "grad_norm": 6.649820804595947,
      "learning_rate": 9.511931259894219e-06,
      "loss": 43.4763,
      "step": 333
    },
    {
      "epoch": 0.5745001075037627,
      "grad_norm": 8.12640380859375,
      "learning_rate": 9.507803109670337e-06,
      "loss": 43.5421,
      "step": 334
    },
    {
      "epoch": 0.5762201677058697,
      "grad_norm": 7.1897783279418945,
      "learning_rate": 9.503658478223862e-06,
      "loss": 43.661,
      "step": 335
    },
    {
      "epoch": 0.5779402279079768,
      "grad_norm": 6.837246417999268,
      "learning_rate": 9.499497380708202e-06,
      "loss": 43.7997,
      "step": 336
    },
    {
      "epoch": 0.5796602881100839,
      "grad_norm": 8.631741523742676,
      "learning_rate": 9.495319832336969e-06,
      "loss": 43.8287,
      "step": 337
    },
    {
      "epoch": 0.5813803483121909,
      "grad_norm": 8.00960636138916,
      "learning_rate": 9.491125848383926e-06,
      "loss": 43.6861,
      "step": 338
    },
    {
      "epoch": 0.583100408514298,
      "grad_norm": 6.487185001373291,
      "learning_rate": 9.486915444182926e-06,
      "loss": 43.8275,
      "step": 339
    },
    {
      "epoch": 0.584820468716405,
      "grad_norm": 7.411306381225586,
      "learning_rate": 9.482688635127849e-06,
      "loss": 43.4639,
      "step": 340
    },
    {
      "epoch": 0.5865405289185122,
      "grad_norm": 6.86546516418457,
      "learning_rate": 9.478445436672566e-06,
      "loss": 44.094,
      "step": 341
    },
    {
      "epoch": 0.5882605891206192,
      "grad_norm": 8.094916343688965,
      "learning_rate": 9.474185864330861e-06,
      "loss": 43.8569,
      "step": 342
    },
    {
      "epoch": 0.5899806493227263,
      "grad_norm": 7.7384138107299805,
      "learning_rate": 9.469909933676388e-06,
      "loss": 43.8937,
      "step": 343
    },
| { | |
| "epoch": 0.5917007095248333, | |
| "grad_norm": 8.301685333251953, | |
| "learning_rate": 9.46561766034261e-06, | |
| "loss": 44.288, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5934207697269405, | |
| "grad_norm": 10.100611686706543, | |
| "learning_rate": 9.461309060022737e-06, | |
| "loss": 43.9889, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5951408299290475, | |
| "grad_norm": 6.778660774230957, | |
| "learning_rate": 9.456984148469674e-06, | |
| "loss": 43.697, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5968608901311546, | |
| "grad_norm": 11.071110725402832, | |
| "learning_rate": 9.452642941495967e-06, | |
| "loss": 44.3392, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5985809503332616, | |
| "grad_norm": 7.321798324584961, | |
| "learning_rate": 9.448285454973739e-06, | |
| "loss": 43.5908, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.6003010105353688, | |
| "grad_norm": 10.584439277648926, | |
| "learning_rate": 9.443911704834624e-06, | |
| "loss": 44.094, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.6020210707374758, | |
| "grad_norm": 7.291213035583496, | |
| "learning_rate": 9.439521707069737e-06, | |
| "loss": 43.7771, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.6037411309395829, | |
| "grad_norm": 10.645991325378418, | |
| "learning_rate": 9.435115477729577e-06, | |
| "loss": 44.2799, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.60546119114169, | |
| "grad_norm": 8.463363647460938, | |
| "learning_rate": 9.430693032924003e-06, | |
| "loss": 43.9206, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.6071812513437971, | |
| "grad_norm": 11.510597229003906, | |
| "learning_rate": 9.426254388822152e-06, | |
| "loss": 43.8839, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.6089013115459041, | |
| "grad_norm": 9.736458778381348, | |
| "learning_rate": 9.421799561652391e-06, | |
| "loss": 44.6627, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.6106213717480112, | |
| "grad_norm": 11.647321701049805, | |
| "learning_rate": 9.417328567702256e-06, | |
| "loss": 43.6068, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.6123414319501183, | |
| "grad_norm": 12.150289535522461, | |
| "learning_rate": 9.412841423318386e-06, | |
| "loss": 43.6395, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.6140614921522253, | |
| "grad_norm": 9.2246675491333, | |
| "learning_rate": 9.408338144906475e-06, | |
| "loss": 44.1518, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.6157815523543324, | |
| "grad_norm": 10.310601234436035, | |
| "learning_rate": 9.403818748931201e-06, | |
| "loss": 43.9735, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.6175016125564394, | |
| "grad_norm": 8.350564002990723, | |
| "learning_rate": 9.399283251916174e-06, | |
| "loss": 43.7255, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.6192216727585466, | |
| "grad_norm": 9.116833686828613, | |
| "learning_rate": 9.394731670443869e-06, | |
| "loss": 43.6732, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.6209417329606536, | |
| "grad_norm": 8.664816856384277, | |
| "learning_rate": 9.390164021155568e-06, | |
| "loss": 43.6788, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.6226617931627607, | |
| "grad_norm": 8.421276092529297, | |
| "learning_rate": 9.385580320751301e-06, | |
| "loss": 43.7357, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.6243818533648677, | |
| "grad_norm": 7.4578680992126465, | |
| "learning_rate": 9.380980585989782e-06, | |
| "loss": 43.7452, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.6261019135669749, | |
| "grad_norm": 8.058576583862305, | |
| "learning_rate": 9.376364833688352e-06, | |
| "loss": 43.839, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.6278219737690819, | |
| "grad_norm": 7.826845169067383, | |
| "learning_rate": 9.371733080722911e-06, | |
| "loss": 43.8903, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.629542033971189, | |
| "grad_norm": 7.76792573928833, | |
| "learning_rate": 9.367085344027862e-06, | |
| "loss": 44.3217, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.631262094173296, | |
| "grad_norm": 8.252405166625977, | |
| "learning_rate": 9.362421640596044e-06, | |
| "loss": 44.0905, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.6329821543754032, | |
| "grad_norm": 7.431006908416748, | |
| "learning_rate": 9.35774198747868e-06, | |
| "loss": 43.864, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.6347022145775102, | |
| "grad_norm": 9.33600902557373, | |
| "learning_rate": 9.353046401785297e-06, | |
| "loss": 43.0342, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.6364222747796173, | |
| "grad_norm": 7.454495906829834, | |
| "learning_rate": 9.348334900683685e-06, | |
| "loss": 43.7442, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.6381423349817243, | |
| "grad_norm": 8.429414749145508, | |
| "learning_rate": 9.343607501399812e-06, | |
| "loss": 43.7836, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.6398623951838315, | |
| "grad_norm": 7.481090545654297, | |
| "learning_rate": 9.338864221217783e-06, | |
| "loss": 43.9994, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.6415824553859385, | |
| "grad_norm": 7.106781482696533, | |
| "learning_rate": 9.33410507747976e-06, | |
| "loss": 43.6922, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.6433025155880456, | |
| "grad_norm": 7.0425615310668945, | |
| "learning_rate": 9.329330087585905e-06, | |
| "loss": 44.5278, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.6450225757901527, | |
| "grad_norm": 7.197376728057861, | |
| "learning_rate": 9.324539268994317e-06, | |
| "loss": 43.8955, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.6467426359922598, | |
| "grad_norm": 6.9038286209106445, | |
| "learning_rate": 9.319732639220965e-06, | |
| "loss": 43.9268, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.6484626961943668, | |
| "grad_norm": 7.032724380493164, | |
| "learning_rate": 9.31491021583963e-06, | |
| "loss": 43.5221, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.6501827563964738, | |
| "grad_norm": 7.234856605529785, | |
| "learning_rate": 9.310072016481832e-06, | |
| "loss": 43.4951, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.651902816598581, | |
| "grad_norm": 6.546868801116943, | |
| "learning_rate": 9.305218058836778e-06, | |
| "loss": 44.0876, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.653622876800688, | |
| "grad_norm": 6.904932498931885, | |
| "learning_rate": 9.300348360651282e-06, | |
| "loss": 44.5423, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.6553429370027951, | |
| "grad_norm": 7.505612850189209, | |
| "learning_rate": 9.295462939729711e-06, | |
| "loss": 43.9986, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.6570629972049021, | |
| "grad_norm": 5.859259605407715, | |
| "learning_rate": 9.290561813933916e-06, | |
| "loss": 43.8683, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.6587830574070093, | |
| "grad_norm": 8.047765731811523, | |
| "learning_rate": 9.285645001183167e-06, | |
| "loss": 44.4658, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.6605031176091163, | |
| "grad_norm": 6.570570945739746, | |
| "learning_rate": 9.280712519454092e-06, | |
| "loss": 43.6115, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.6622231778112234, | |
| "grad_norm": 6.266587734222412, | |
| "learning_rate": 9.2757643867806e-06, | |
| "loss": 43.6822, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.6639432380133304, | |
| "grad_norm": 7.281513214111328, | |
| "learning_rate": 9.270800621253833e-06, | |
| "loss": 43.8285, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.6656632982154376, | |
| "grad_norm": 6.563234806060791, | |
| "learning_rate": 9.265821241022074e-06, | |
| "loss": 43.6976, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.6673833584175446, | |
| "grad_norm": 6.870432376861572, | |
| "learning_rate": 9.26082626429071e-06, | |
| "loss": 43.9566, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.6691034186196517, | |
| "grad_norm": 8.11976432800293, | |
| "learning_rate": 9.255815709322142e-06, | |
| "loss": 43.8613, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.6708234788217587, | |
| "grad_norm": 7.040714740753174, | |
| "learning_rate": 9.250789594435735e-06, | |
| "loss": 43.3387, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6725435390238659, | |
| "grad_norm": 6.891185283660889, | |
| "learning_rate": 9.245747938007734e-06, | |
| "loss": 43.596, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.6742635992259729, | |
| "grad_norm": 7.045391082763672, | |
| "learning_rate": 9.240690758471216e-06, | |
| "loss": 43.1001, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.67598365942808, | |
| "grad_norm": 6.838486194610596, | |
| "learning_rate": 9.235618074316005e-06, | |
| "loss": 44.0918, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.677703719630187, | |
| "grad_norm": 8.006799697875977, | |
| "learning_rate": 9.230529904088621e-06, | |
| "loss": 43.4563, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.6794237798322942, | |
| "grad_norm": 7.786087989807129, | |
| "learning_rate": 9.225426266392191e-06, | |
| "loss": 44.1002, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.6811438400344012, | |
| "grad_norm": 7.782168388366699, | |
| "learning_rate": 9.220307179886408e-06, | |
| "loss": 44.15, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.6828639002365082, | |
| "grad_norm": 7.179986953735352, | |
| "learning_rate": 9.215172663287435e-06, | |
| "loss": 43.8326, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.6845839604386154, | |
| "grad_norm": 8.045145988464355, | |
| "learning_rate": 9.210022735367857e-06, | |
| "loss": 43.3196, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.6863040206407224, | |
| "grad_norm": 7.906603813171387, | |
| "learning_rate": 9.204857414956606e-06, | |
| "loss": 44.152, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.6880240808428295, | |
| "grad_norm": 8.266923904418945, | |
| "learning_rate": 9.199676720938886e-06, | |
| "loss": 44.158, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.6897441410449365, | |
| "grad_norm": 7.465760231018066, | |
| "learning_rate": 9.194480672256117e-06, | |
| "loss": 43.9078, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.6914642012470437, | |
| "grad_norm": 7.522243499755859, | |
| "learning_rate": 9.189269287905849e-06, | |
| "loss": 43.3097, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.6931842614491507, | |
| "grad_norm": 6.484007358551025, | |
| "learning_rate": 9.184042586941708e-06, | |
| "loss": 43.9014, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.6949043216512578, | |
| "grad_norm": 6.548778533935547, | |
| "learning_rate": 9.178800588473317e-06, | |
| "loss": 43.9104, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.6966243818533648, | |
| "grad_norm": 6.578863620758057, | |
| "learning_rate": 9.17354331166623e-06, | |
| "loss": 44.0022, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.698344442055472, | |
| "grad_norm": 7.083658695220947, | |
| "learning_rate": 9.168270775741863e-06, | |
| "loss": 43.7902, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.700064502257579, | |
| "grad_norm": 7.241711139678955, | |
| "learning_rate": 9.162982999977417e-06, | |
| "loss": 43.9464, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.7017845624596861, | |
| "grad_norm": 6.329436779022217, | |
| "learning_rate": 9.157680003705816e-06, | |
| "loss": 44.1072, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.7035046226617931, | |
| "grad_norm": 6.435650825500488, | |
| "learning_rate": 9.15236180631563e-06, | |
| "loss": 43.392, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.7052246828639003, | |
| "grad_norm": 5.681223392486572, | |
| "learning_rate": 9.14702842725101e-06, | |
| "loss": 44.3148, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.7069447430660073, | |
| "grad_norm": 6.71289587020874, | |
| "learning_rate": 9.14167988601161e-06, | |
| "loss": 43.8893, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.7086648032681144, | |
| "grad_norm": 6.497440814971924, | |
| "learning_rate": 9.13631620215252e-06, | |
| "loss": 44.1776, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.7103848634702215, | |
| "grad_norm": 7.291422367095947, | |
| "learning_rate": 9.130937395284199e-06, | |
| "loss": 43.8195, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.7121049236723286, | |
| "grad_norm": 6.935153961181641, | |
| "learning_rate": 9.125543485072386e-06, | |
| "loss": 43.9977, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.7138249838744356, | |
| "grad_norm": 6.302245140075684, | |
| "learning_rate": 9.120134491238054e-06, | |
| "loss": 43.677, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.7155450440765426, | |
| "grad_norm": 6.205868244171143, | |
| "learning_rate": 9.114710433557314e-06, | |
| "loss": 43.8423, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.7172651042786498, | |
| "grad_norm": 5.34831428527832, | |
| "learning_rate": 9.109271331861361e-06, | |
| "loss": 43.6707, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.7189851644807568, | |
| "grad_norm": 7.174152374267578, | |
| "learning_rate": 9.103817206036383e-06, | |
| "loss": 43.3579, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.7207052246828639, | |
| "grad_norm": 6.666977882385254, | |
| "learning_rate": 9.098348076023506e-06, | |
| "loss": 43.8424, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.7224252848849709, | |
| "grad_norm": 7.491025924682617, | |
| "learning_rate": 9.092863961818715e-06, | |
| "loss": 44.4333, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.7241453450870781, | |
| "grad_norm": 6.508261680603027, | |
| "learning_rate": 9.087364883472774e-06, | |
| "loss": 43.7001, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.7258654052891851, | |
| "grad_norm": 7.418080806732178, | |
| "learning_rate": 9.08185086109116e-06, | |
| "loss": 44.0001, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.7275854654912922, | |
| "grad_norm": 6.983603000640869, | |
| "learning_rate": 9.076321914833988e-06, | |
| "loss": 44.6241, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.7293055256933992, | |
| "grad_norm": 8.667305946350098, | |
| "learning_rate": 9.070778064915937e-06, | |
| "loss": 44.1089, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.7310255858955064, | |
| "grad_norm": 7.419984340667725, | |
| "learning_rate": 9.065219331606182e-06, | |
| "loss": 43.9046, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.7327456460976134, | |
| "grad_norm": 7.34318733215332, | |
| "learning_rate": 9.0596457352283e-06, | |
| "loss": 43.794, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.7344657062997205, | |
| "grad_norm": 7.931493759155273, | |
| "learning_rate": 9.054057296160221e-06, | |
| "loss": 44.6317, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.7361857665018275, | |
| "grad_norm": 6.583981037139893, | |
| "learning_rate": 9.048454034834143e-06, | |
| "loss": 43.5199, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.7379058267039347, | |
| "grad_norm": 8.499653816223145, | |
| "learning_rate": 9.042835971736446e-06, | |
| "loss": 43.8616, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.7396258869060417, | |
| "grad_norm": 6.757936000823975, | |
| "learning_rate": 9.037203127407642e-06, | |
| "loss": 44.0385, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.7413459471081488, | |
| "grad_norm": 8.30978012084961, | |
| "learning_rate": 9.031555522442268e-06, | |
| "loss": 43.4628, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.7430660073102558, | |
| "grad_norm": 7.357321262359619, | |
| "learning_rate": 9.025893177488848e-06, | |
| "loss": 43.6677, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.744786067512363, | |
| "grad_norm": 8.4613676071167, | |
| "learning_rate": 9.02021611324978e-06, | |
| "loss": 43.2542, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.74650612771447, | |
| "grad_norm": 8.782477378845215, | |
| "learning_rate": 9.014524350481287e-06, | |
| "loss": 44.0515, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.7482261879165771, | |
| "grad_norm": 6.701351165771484, | |
| "learning_rate": 9.008817909993332e-06, | |
| "loss": 44.0643, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.7499462481186842, | |
| "grad_norm": 7.1624884605407715, | |
| "learning_rate": 9.00309681264954e-06, | |
| "loss": 44.0639, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.7516663083207912, | |
| "grad_norm": 6.229190349578857, | |
| "learning_rate": 8.997361079367124e-06, | |
| "loss": 43.9093, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.7533863685228983, | |
| "grad_norm": 7.180543422698975, | |
| "learning_rate": 8.991610731116808e-06, | |
| "loss": 44.2652, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.7551064287250053, | |
| "grad_norm": 6.3346781730651855, | |
| "learning_rate": 8.985845788922753e-06, | |
| "loss": 43.2561, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.7568264889271125, | |
| "grad_norm": 7.270414352416992, | |
| "learning_rate": 8.980066273862473e-06, | |
| "loss": 43.9074, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.7585465491292195, | |
| "grad_norm": 6.431163311004639, | |
| "learning_rate": 8.974272207066767e-06, | |
| "loss": 43.9343, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.7602666093313266, | |
| "grad_norm": 6.415679931640625, | |
| "learning_rate": 8.968463609719636e-06, | |
| "loss": 44.2067, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.7619866695334336, | |
| "grad_norm": 6.083033561706543, | |
| "learning_rate": 8.962640503058206e-06, | |
| "loss": 43.7967, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.7637067297355408, | |
| "grad_norm": 6.942599773406982, | |
| "learning_rate": 8.956802908372652e-06, | |
| "loss": 43.4928, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.7654267899376478, | |
| "grad_norm": 6.515557765960693, | |
| "learning_rate": 8.95095084700612e-06, | |
| "loss": 43.6577, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.7671468501397549, | |
| "grad_norm": 7.167238235473633, | |
| "learning_rate": 8.945084340354646e-06, | |
| "loss": 43.4542, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.7688669103418619, | |
| "grad_norm": 7.30296516418457, | |
| "learning_rate": 8.939203409867084e-06, | |
| "loss": 43.123, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.7705869705439691, | |
| "grad_norm": 7.387278079986572, | |
| "learning_rate": 8.933308077045022e-06, | |
| "loss": 43.6603, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.7723070307460761, | |
| "grad_norm": 7.026780128479004, | |
| "learning_rate": 8.927398363442705e-06, | |
| "loss": 44.0497, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.7740270909481832, | |
| "grad_norm": 7.03558874130249, | |
| "learning_rate": 8.921474290666955e-06, | |
| "loss": 43.1461, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.7757471511502902, | |
| "grad_norm": 8.48353099822998, | |
| "learning_rate": 8.915535880377096e-06, | |
| "loss": 43.6771, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.7774672113523974, | |
| "grad_norm": 7.250082015991211, | |
| "learning_rate": 8.909583154284868e-06, | |
| "loss": 43.9369, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.7791872715545044, | |
| "grad_norm": 8.958197593688965, | |
| "learning_rate": 8.90361613415436e-06, | |
| "loss": 43.7911, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.7809073317566115, | |
| "grad_norm": 8.168319702148438, | |
| "learning_rate": 8.897634841801911e-06, | |
| "loss": 43.3905, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.7826273919587186, | |
| "grad_norm": 8.520408630371094, | |
| "learning_rate": 8.891639299096051e-06, | |
| "loss": 43.3708, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.7843474521608256, | |
| "grad_norm": 8.194758415222168, | |
| "learning_rate": 8.885629527957407e-06, | |
| "loss": 43.3692, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.7860675123629327, | |
| "grad_norm": 7.554206848144531, | |
| "learning_rate": 8.879605550358627e-06, | |
| "loss": 43.5693, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.7877875725650397, | |
| "grad_norm": 7.146202087402344, | |
| "learning_rate": 8.873567388324302e-06, | |
| "loss": 43.8261, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.7895076327671469, | |
| "grad_norm": 6.808493137359619, | |
| "learning_rate": 8.867515063930881e-06, | |
| "loss": 43.3648, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.7912276929692539, | |
| "grad_norm": 7.165658950805664, | |
| "learning_rate": 8.861448599306597e-06, | |
| "loss": 43.0367, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.792947753171361, | |
| "grad_norm": 6.527984142303467, | |
| "learning_rate": 8.855368016631377e-06, | |
| "loss": 43.4491, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.794667813373468, | |
| "grad_norm": 6.912752628326416, | |
| "learning_rate": 8.849273338136772e-06, | |
| "loss": 43.6405, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.7963878735755752, | |
| "grad_norm": 6.334918975830078, | |
| "learning_rate": 8.84316458610586e-06, | |
| "loss": 44.4637, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.7981079337776822, | |
| "grad_norm": 7.22133207321167, | |
| "learning_rate": 8.837041782873182e-06, | |
| "loss": 43.2829, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.7998279939797893, | |
| "grad_norm": 6.233572006225586, | |
| "learning_rate": 8.83090495082465e-06, | |
| "loss": 43.3993, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.8015480541818963, | |
| "grad_norm": 6.681156635284424, | |
| "learning_rate": 8.824754112397467e-06, | |
| "loss": 43.2356, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.8032681143840035, | |
| "grad_norm": 7.240959167480469, | |
| "learning_rate": 8.818589290080043e-06, | |
| "loss": 42.8966, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.8049881745861105, | |
| "grad_norm": 6.253081798553467, | |
| "learning_rate": 8.812410506411925e-06, | |
| "loss": 43.9822, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.8067082347882176, | |
| "grad_norm": 7.541505813598633, | |
| "learning_rate": 8.806217783983693e-06, | |
| "loss": 43.9604, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.8084282949903246, | |
| "grad_norm": 7.4928483963012695, | |
| "learning_rate": 8.800011145436893e-06, | |
| "loss": 43.8446, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.8101483551924318, | |
| "grad_norm": 6.140499591827393, | |
| "learning_rate": 8.793790613463956e-06, | |
| "loss": 43.6913, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.8118684153945388, | |
| "grad_norm": 7.944373607635498, | |
| "learning_rate": 8.787556210808101e-06, | |
| "loss": 43.4474, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.8135884755966459, | |
| "grad_norm": 6.9422101974487305, | |
| "learning_rate": 8.781307960263267e-06, | |
| "loss": 43.293, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.815308535798753, | |
| "grad_norm": 6.664095878601074, | |
| "learning_rate": 8.77504588467402e-06, | |
| "loss": 43.8227, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.81702859600086, | |
| "grad_norm": 7.298461437225342, | |
| "learning_rate": 8.768770006935475e-06, | |
| "loss": 43.7175, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.8187486562029671, | |
| "grad_norm": 6.43251895904541, | |
| "learning_rate": 8.762480349993204e-06, | |
| "loss": 43.143, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.8204687164050741, | |
| "grad_norm": 6.303859233856201, | |
| "learning_rate": 8.756176936843161e-06, | |
| "loss": 43.7655, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.8221887766071813, | |
| "grad_norm": 6.824503421783447, | |
| "learning_rate": 8.749859790531601e-06, | |
| "loss": 43.5909, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.8239088368092883, | |
| "grad_norm": 6.232965469360352, | |
| "learning_rate": 8.743528934154982e-06, | |
| "loss": 43.6798, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.8256288970113954, | |
| "grad_norm": 6.288873672485352, | |
| "learning_rate": 8.737184390859887e-06, | |
| "loss": 43.4713, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.8273489572135024, | |
| "grad_norm": 6.1072306632995605, | |
| "learning_rate": 8.730826183842947e-06, | |
| "loss": 43.4521, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.8290690174156096, | |
| "grad_norm": 7.3213701248168945, | |
| "learning_rate": 8.724454336350742e-06, | |
| "loss": 43.9662, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.8307890776177166, | |
| "grad_norm": 6.282354354858398, | |
| "learning_rate": 8.718068871679735e-06, | |
| "loss": 44.1781, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.8325091378198237, | |
| "grad_norm": 7.692941188812256, | |
| "learning_rate": 8.711669813176165e-06, | |
| "loss": 43.5585, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.8342291980219307, | |
| "grad_norm": 6.070176124572754, | |
| "learning_rate": 8.705257184235973e-06, | |
| "loss": 43.843, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.8359492582240379, | |
| "grad_norm": 7.584023952484131, | |
| "learning_rate": 8.698831008304723e-06, | |
| "loss": 43.5888, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.8376693184261449, | |
| "grad_norm": 8.037973403930664, | |
| "learning_rate": 8.6923913088775e-06, | |
| "loss": 43.4765, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.839389378628252, | |
| "grad_norm": 6.745630741119385, | |
| "learning_rate": 8.685938109498839e-06, | |
| "loss": 44.0438, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.841109438830359, | |
| "grad_norm": 6.660660743713379, | |
| "learning_rate": 8.679471433762633e-06, | |
| "loss": 43.5884, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.8428294990324662, | |
| "grad_norm": 7.205166339874268, | |
| "learning_rate": 8.672991305312042e-06, | |
| "loss": 43.5902, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.8445495592345732, | |
| "grad_norm": 6.969662666320801, | |
| "learning_rate": 8.666497747839413e-06, | |
| "loss": 43.339, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.8462696194366803, | |
| "grad_norm": 6.3066229820251465, | |
| "learning_rate": 8.659990785086195e-06, | |
| "loss": 43.8102, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.8479896796387874, | |
| "grad_norm": 7.804117202758789, | |
| "learning_rate": 8.653470440842847e-06, | |
| "loss": 43.7162, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.8497097398408945, | |
| "grad_norm": 6.339798450469971, | |
| "learning_rate": 8.646936738948747e-06, | |
| "loss": 43.3229, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.8514298000430015, | |
| "grad_norm": 8.31767749786377, | |
| "learning_rate": 8.64038970329212e-06, | |
| "loss": 43.8772, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.8531498602451085, | |
| "grad_norm": 7.365615367889404, | |
| "learning_rate": 8.633829357809937e-06, | |
| "loss": 43.2881, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.8548699204472157, | |
| "grad_norm": 8.474952697753906, | |
| "learning_rate": 8.627255726487831e-06, | |
| "loss": 43.7939, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.8565899806493227, | |
| "grad_norm": 6.960323333740234, | |
| "learning_rate": 8.620668833360009e-06, | |
| "loss": 43.499, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.8583100408514298, | |
| "grad_norm": 7.6085734367370605, | |
| "learning_rate": 8.614068702509169e-06, | |
| "loss": 43.2025, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.8600301010535368, | |
| "grad_norm": 5.805634498596191, | |
| "learning_rate": 8.607455358066404e-06, | |
| "loss": 44.0489, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.861750161255644, | |
| "grad_norm": 7.452625274658203, | |
| "learning_rate": 8.600828824211122e-06, | |
| "loss": 43.147, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.863470221457751, | |
| "grad_norm": 6.4528584480285645, | |
| "learning_rate": 8.594189125170952e-06, | |
| "loss": 43.5228, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.8651902816598581, | |
| "grad_norm": 7.375026702880859, | |
| "learning_rate": 8.587536285221656e-06, | |
| "loss": 44.272, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.8669103418619651, | |
| "grad_norm": 6.547872543334961, | |
| "learning_rate": 8.580870328687041e-06, | |
| "loss": 43.4759, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.8686304020640723, | |
| "grad_norm": 7.088862419128418, | |
| "learning_rate": 8.574191279938872e-06, | |
| "loss": 43.5528, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.8703504622661793, | |
| "grad_norm": 6.6691083908081055, | |
| "learning_rate": 8.567499163396777e-06, | |
| "loss": 43.4488, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.8720705224682864, | |
| "grad_norm": 7.416652202606201, | |
| "learning_rate": 8.560794003528171e-06, | |
| "loss": 43.5291, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.8737905826703934, | |
| "grad_norm": 5.969050884246826, | |
| "learning_rate": 8.554075824848146e-06, | |
| "loss": 43.5905, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.8755106428725006, | |
| "grad_norm": 7.501400470733643, | |
| "learning_rate": 8.5473446519194e-06, | |
| "loss": 44.0266, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.8772307030746076, | |
| "grad_norm": 6.1612548828125, | |
| "learning_rate": 8.540600509352139e-06, | |
| "loss": 44.2164, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.8789507632767147, | |
| "grad_norm": 7.144975662231445, | |
| "learning_rate": 8.533843421803985e-06, | |
| "loss": 43.1628, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.8806708234788218, | |
| "grad_norm": 6.905309200286865, | |
| "learning_rate": 8.527073413979894e-06, | |
| "loss": 43.8717, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.8823908836809289, | |
| "grad_norm": 7.095192909240723, | |
| "learning_rate": 8.520290510632055e-06, | |
| "loss": 43.612, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.8841109438830359, | |
| "grad_norm": 6.804945468902588, | |
| "learning_rate": 8.51349473655981e-06, | |
| "loss": 43.4807, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.8858310040851429, | |
| "grad_norm": 8.927321434020996, | |
| "learning_rate": 8.506686116609553e-06, | |
| "loss": 43.5119, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.8875510642872501, | |
| "grad_norm": 6.946136474609375, | |
| "learning_rate": 8.499864675674648e-06, | |
| "loss": 43.1227, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.8892711244893571, | |
| "grad_norm": 11.003009796142578, | |
| "learning_rate": 8.493030438695336e-06, | |
| "loss": 43.2844, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.8909911846914642, | |
| "grad_norm": 9.151321411132812, | |
| "learning_rate": 8.486183430658639e-06, | |
| "loss": 43.9351, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.8927112448935712, | |
| "grad_norm": 10.419197082519531, | |
| "learning_rate": 8.479323676598271e-06, | |
| "loss": 43.4703, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.8944313050956784, | |
| "grad_norm": 9.973617553710938, | |
| "learning_rate": 8.472451201594556e-06, | |
| "loss": 43.2196, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8961513652977854, | |
| "grad_norm": 7.706507682800293, | |
| "learning_rate": 8.465566030774314e-06, | |
| "loss": 43.3206, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.8978714254998925, | |
| "grad_norm": 7.455542087554932, | |
| "learning_rate": 8.458668189310793e-06, | |
| "loss": 43.2529, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.8995914857019995, | |
| "grad_norm": 7.595444202423096, | |
| "learning_rate": 8.451757702423566e-06, | |
| "loss": 43.5217, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.9013115459041067, | |
| "grad_norm": 7.0413899421691895, | |
| "learning_rate": 8.444834595378434e-06, | |
| "loss": 43.6686, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.9030316061062137, | |
| "grad_norm": 7.505328178405762, | |
| "learning_rate": 8.437898893487345e-06, | |
| "loss": 43.508, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.9047516663083208, | |
| "grad_norm": 6.636236667633057, | |
| "learning_rate": 8.430950622108292e-06, | |
| "loss": 43.6455, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.9064717265104278, | |
| "grad_norm": 8.014334678649902, | |
| "learning_rate": 8.42398980664523e-06, | |
| "loss": 43.9419, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.908191786712535, | |
| "grad_norm": 6.861055374145508, | |
| "learning_rate": 8.417016472547968e-06, | |
| "loss": 44.0091, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.909911846914642, | |
| "grad_norm": 7.528046607971191, | |
| "learning_rate": 8.41003064531209e-06, | |
| "loss": 43.9412, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.9116319071167491, | |
| "grad_norm": 6.380741596221924, | |
| "learning_rate": 8.403032350478857e-06, | |
| "loss": 43.1688, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.9133519673188562, | |
| "grad_norm": 8.248329162597656, | |
| "learning_rate": 8.396021613635116e-06, | |
| "loss": 43.241, | |
| "step": 531 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1743, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 59, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 9.448505275235225e+19, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
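
The block above is the standard `trainer_state.json` checkpoint file written by the Hugging Face `transformers` Trainer: `log_history` holds one record per optimizer step (at `logging_steps: 1`), and the trailing fields record run-level settings such as `max_steps`, `save_steps`, and `train_batch_size`. As a minimal sketch only, assuming the file is saved under its usual name (the filename and the printed summary are illustrative choices, not part of the original log), the records can be inspected with the standard library:

    import json

    # Load the Trainer state written alongside a checkpoint
    # (the filename is an assumption for illustration).
    with open("trainer_state.json") as f:
        state = json.load(f)

    # Every record in this particular log carries "loss"; eval-only
    # records in other runs may not, hence the guard.
    records = [e for e in state["log_history"] if "loss" in e]
    steps = [e["step"] for e in records]
    losses = [e["loss"] for e in records]

    print(f"steps {steps[0]}..{steps[-1]} of max_steps={state['max_steps']}")
    print(f"loss: {losses[0]:.4f} -> {losses[-1]:.4f}")

The same pattern extends to `learning_rate` or `grad_norm` if those curves are of interest.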