{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3735699276208265,
  "eval_steps": 500,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009339248190520663,
      "grad_norm": 6.638877692627699,
      "learning_rate": 9.345794392523364e-07,
      "loss": 9.2917,
      "step": 1
    },
    {
      "epoch": 0.009339248190520663,
      "grad_norm": 1.1560921335705272,
      "learning_rate": 9.345794392523365e-06,
      "loss": 9.0876,
      "step": 10
    },
    {
      "epoch": 0.018678496381041326,
      "grad_norm": 0.8415132296956432,
      "learning_rate": 1.869158878504673e-05,
      "loss": 8.2164,
      "step": 20
    },
    {
      "epoch": 0.02801774457156199,
      "grad_norm": 0.45381630992958155,
      "learning_rate": 2.8037383177570094e-05,
      "loss": 7.5184,
      "step": 30
    },
    {
      "epoch": 0.03735699276208265,
      "grad_norm": 0.8400636107958425,
      "learning_rate": 3.738317757009346e-05,
      "loss": 6.6507,
      "step": 40
    },
    {
      "epoch": 0.046696240952603316,
      "grad_norm": 0.557696240829066,
      "learning_rate": 4.672897196261683e-05,
      "loss": 5.8909,
      "step": 50
    },
    {
      "epoch": 0.05603548914312398,
      "grad_norm": 0.3971996057467842,
      "learning_rate": 5.607476635514019e-05,
      "loss": 5.4127,
      "step": 60
    },
    {
      "epoch": 0.06537473733364464,
      "grad_norm": 0.2932710540265688,
      "learning_rate": 6.542056074766355e-05,
      "loss": 5.0106,
      "step": 70
    },
    {
      "epoch": 0.0747139855241653,
      "grad_norm": 0.3682690443551033,
      "learning_rate": 7.476635514018692e-05,
      "loss": 4.6042,
      "step": 80
    },
    {
      "epoch": 0.08405323371468597,
      "grad_norm": 0.3132971920011515,
      "learning_rate": 8.411214953271028e-05,
      "loss": 4.2031,
      "step": 90
    },
    {
      "epoch": 0.09339248190520663,
      "grad_norm": 0.6731868159213446,
      "learning_rate": 9.345794392523365e-05,
      "loss": 3.9423,
      "step": 100
    },
    {
      "epoch": 0.1027317300957273,
      "grad_norm": 0.27848867836763197,
      "learning_rate": 0.000102803738317757,
      "loss": 3.7157,
      "step": 110
    },
    {
      "epoch": 0.11207097828624796,
      "grad_norm": 0.24642109032991807,
      "learning_rate": 0.00011214953271028037,
      "loss": 3.4516,
      "step": 120
    },
    {
      "epoch": 0.12141022647676862,
      "grad_norm": 0.25717384664029797,
      "learning_rate": 0.00012149532710280373,
      "loss": 3.2167,
      "step": 130
    },
    {
      "epoch": 0.13074947466728928,
      "grad_norm": 0.20912922668565637,
      "learning_rate": 0.0001308411214953271,
      "loss": 3.0237,
      "step": 140
    },
    {
      "epoch": 0.14008872285780993,
      "grad_norm": 0.15805888388706113,
      "learning_rate": 0.00014018691588785047,
      "loss": 2.8529,
      "step": 150
    },
    {
      "epoch": 0.1494279710483306,
      "grad_norm": 0.23370349497479534,
      "learning_rate": 0.00014953271028037384,
      "loss": 2.7078,
      "step": 160
    },
    {
      "epoch": 0.15876721923885126,
      "grad_norm": 0.1802138633012483,
      "learning_rate": 0.0001588785046728972,
      "loss": 2.6115,
      "step": 170
    },
    {
      "epoch": 0.16810646742937194,
      "grad_norm": 0.13354347610039718,
      "learning_rate": 0.00016822429906542056,
      "loss": 2.5309,
      "step": 180
    },
    {
      "epoch": 0.17744571561989259,
      "grad_norm": 0.09414865188086892,
      "learning_rate": 0.00017757009345794393,
      "loss": 2.4452,
      "step": 190
    },
    {
      "epoch": 0.18678496381041326,
      "grad_norm": 0.08333601554768896,
      "learning_rate": 0.0001869158878504673,
      "loss": 2.3832,
      "step": 200
    },
    {
      "epoch": 0.1961242120009339,
      "grad_norm": 0.15926414699806835,
      "learning_rate": 0.00019626168224299065,
      "loss": 2.3492,
      "step": 210
    },
    {
      "epoch": 0.2054634601914546,
      "grad_norm": 0.09492820761057012,
      "learning_rate": 0.0001999989254250208,
      "loss": 2.323,
      "step": 220
    },
    {
      "epoch": 0.21480270838197524,
      "grad_norm": 0.0801349259356147,
      "learning_rate": 0.00019999235866155886,
      "loss": 2.2731,
      "step": 230
    },
    {
      "epoch": 0.22414195657249592,
      "grad_norm": 0.12210960524693895,
      "learning_rate": 0.00019997982251228469,
      "loss": 2.2433,
      "step": 240
    },
    {
      "epoch": 0.23348120476301656,
      "grad_norm": 3.14289498732125,
      "learning_rate": 0.00019996131772558666,
      "loss": 3.2769,
      "step": 250
    },
    {
      "epoch": 0.24282045295353724,
      "grad_norm": 1.632940983166179,
      "learning_rate": 0.00019993684540617132,
      "loss": 4.9343,
      "step": 260
    },
    {
      "epoch": 0.2521597011440579,
      "grad_norm": 3.4831252230225416,
      "learning_rate": 0.00019990640701499736,
      "loss": 4.2768,
      "step": 270
    },
    {
      "epoch": 0.26149894933457857,
      "grad_norm": 1.6069045920523788,
      "learning_rate": 0.00019987000436918874,
      "loss": 5.9581,
      "step": 280
    },
    {
      "epoch": 0.27083819752509924,
      "grad_norm": 0.2220907936615993,
      "learning_rate": 0.00019982763964192585,
      "loss": 3.8228,
      "step": 290
    },
    {
      "epoch": 0.28017744571561987,
      "grad_norm": 0.24737284913291765,
      "learning_rate": 0.00019977931536231596,
      "loss": 3.1413,
      "step": 300
    },
    {
      "epoch": 0.28951669390614054,
      "grad_norm": 4.010404518241152,
      "learning_rate": 0.00019972503441524224,
      "loss": 2.8432,
      "step": 310
    },
    {
      "epoch": 0.2988559420966612,
      "grad_norm": 0.1515583580811596,
      "learning_rate": 0.00019966480004119142,
      "loss": 2.7859,
      "step": 320
    },
    {
      "epoch": 0.3081951902871819,
      "grad_norm": 0.11259395750650594,
      "learning_rate": 0.00019959861583606045,
      "loss": 2.5821,
      "step": 330
    },
    {
      "epoch": 0.3175344384777025,
      "grad_norm": 0.22514797814956813,
      "learning_rate": 0.00019952648575094183,
      "loss": 2.4517,
      "step": 340
    },
    {
      "epoch": 0.3268736866682232,
      "grad_norm": 0.08040136172033542,
      "learning_rate": 0.00019944841409188767,
      "loss": 2.3794,
      "step": 350
    },
    {
      "epoch": 0.3362129348587439,
      "grad_norm": 0.054758073593565354,
      "learning_rate": 0.00019936440551965263,
      "loss": 2.3232,
      "step": 360
    },
    {
      "epoch": 0.34555218304926455,
      "grad_norm": 0.06742998909645591,
      "learning_rate": 0.00019927446504941577,
      "loss": 2.2776,
      "step": 370
    },
    {
      "epoch": 0.35489143123978517,
      "grad_norm": 0.048780907584876736,
      "learning_rate": 0.00019917859805048096,
      "loss": 2.2376,
      "step": 380
    },
    {
      "epoch": 0.36423067943030585,
      "grad_norm": 0.0475325963052214,
      "learning_rate": 0.00019907681024595663,
      "loss": 2.2191,
      "step": 390
    },
    {
      "epoch": 0.3735699276208265,
      "grad_norm": 0.054089563211590065,
      "learning_rate": 0.00019896910771241387,
      "loss": 2.1961,
      "step": 400
    }
  ],
  "logging_steps": 10,
  "max_steps": 4280,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.2388055677114778e+19,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}