{
  "best_metric": 0.7773334980010986,
  "best_model_checkpoint": "../../models/lora/codellama-7b-multitask_cot_reflection/checkpoint-200",
  "epoch": 25.0,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.62,
      "learning_rate": 2.9999999999999997e-05,
      "loss": 2.4326,
      "step": 10
    },
    {
      "epoch": 1.25,
      "learning_rate": 5.9999999999999995e-05,
      "loss": 2.3071,
      "step": 20
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.699999999999999e-05,
      "loss": 2.0883,
      "step": 30
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.000117,
      "loss": 1.8582,
      "step": 40
    },
    {
      "epoch": 3.12,
      "learning_rate": 0.00014399999999999998,
      "loss": 1.4434,
      "step": 50
    },
    {
      "epoch": 3.75,
      "learning_rate": 0.00017399999999999997,
      "loss": 1.1487,
      "step": 60
    },
    {
      "epoch": 4.38,
      "learning_rate": 0.000204,
      "loss": 1.042,
      "step": 70
    },
    {
      "epoch": 5.0,
      "learning_rate": 0.000234,
      "loss": 1.0089,
      "step": 80
    },
    {
      "epoch": 5.62,
      "learning_rate": 0.00026399999999999997,
      "loss": 0.9878,
      "step": 90
    },
    {
      "epoch": 6.25,
      "learning_rate": 0.000294,
      "loss": 0.9677,
      "step": 100
    },
    {
      "epoch": 6.88,
      "learning_rate": 0.0002936842105263158,
      "loss": 0.9404,
      "step": 110
    },
    {
      "epoch": 7.5,
      "learning_rate": 0.0002857894736842105,
      "loss": 0.9229,
      "step": 120
    },
    {
      "epoch": 8.12,
      "learning_rate": 0.00027789473684210523,
      "loss": 0.9233,
      "step": 130
    },
    {
      "epoch": 8.75,
      "learning_rate": 0.00027,
      "loss": 0.8922,
      "step": 140
    },
    {
      "epoch": 9.38,
      "learning_rate": 0.0002621052631578947,
      "loss": 0.8933,
      "step": 150
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.00025421052631578945,
      "loss": 0.8536,
      "step": 160
    },
    {
      "epoch": 10.62,
      "learning_rate": 0.00024631578947368417,
      "loss": 0.8694,
      "step": 170
    },
    {
      "epoch": 11.25,
      "learning_rate": 0.00023842105263157895,
      "loss": 0.8344,
      "step": 180
    },
    {
      "epoch": 11.88,
      "learning_rate": 0.00023052631578947364,
      "loss": 0.8342,
      "step": 190
    },
    {
      "epoch": 12.5,
      "learning_rate": 0.0002226315789473684,
      "loss": 0.8227,
      "step": 200
    },
    {
      "epoch": 12.5,
      "eval_loss": 0.7773334980010986,
      "eval_runtime": 0.9943,
      "eval_samples_per_second": 21.12,
      "eval_steps_per_second": 3.017,
      "step": 200
    },
    {
      "epoch": 13.12,
      "learning_rate": 0.00021473684210526314,
      "loss": 0.8147,
      "step": 210
    },
    {
      "epoch": 13.75,
      "learning_rate": 0.0002068421052631579,
      "loss": 0.7956,
      "step": 220
    },
    {
      "epoch": 14.38,
      "learning_rate": 0.0001989473684210526,
      "loss": 0.8037,
      "step": 230
    },
    {
      "epoch": 15.0,
      "learning_rate": 0.00019105263157894736,
      "loss": 0.7974,
      "step": 240
    },
    {
      "epoch": 15.62,
      "learning_rate": 0.00018315789473684208,
      "loss": 0.8115,
      "step": 250
    },
    {
      "epoch": 16.25,
      "learning_rate": 0.00017526315789473683,
      "loss": 0.7757,
      "step": 260
    },
    {
      "epoch": 16.88,
      "learning_rate": 0.00016736842105263155,
      "loss": 0.7783,
      "step": 270
    },
    {
      "epoch": 17.5,
      "learning_rate": 0.0001594736842105263,
      "loss": 0.755,
      "step": 280
    },
    {
      "epoch": 18.12,
      "learning_rate": 0.00015157894736842105,
      "loss": 0.7654,
      "step": 290
    },
    {
      "epoch": 18.75,
      "learning_rate": 0.00014368421052631577,
      "loss": 0.7366,
      "step": 300
    },
    {
      "epoch": 19.38,
      "learning_rate": 0.00013578947368421052,
      "loss": 0.7439,
      "step": 310
    },
    {
      "epoch": 20.0,
      "learning_rate": 0.00012789473684210524,
      "loss": 0.7442,
      "step": 320
    },
    {
      "epoch": 20.62,
      "learning_rate": 0.00011999999999999999,
      "loss": 0.75,
      "step": 330
    },
    {
      "epoch": 21.25,
      "learning_rate": 0.00011210526315789472,
      "loss": 0.7311,
      "step": 340
    },
    {
      "epoch": 21.88,
      "learning_rate": 0.00010421052631578947,
      "loss": 0.7194,
      "step": 350
    },
    {
      "epoch": 22.5,
      "learning_rate": 9.63157894736842e-05,
      "loss": 0.712,
      "step": 360
    },
    {
      "epoch": 23.12,
      "learning_rate": 8.842105263157893e-05,
      "loss": 0.7149,
      "step": 370
    },
    {
      "epoch": 23.75,
      "learning_rate": 8.052631578947368e-05,
      "loss": 0.6846,
      "step": 380
    },
    {
      "epoch": 24.38,
      "learning_rate": 7.263157894736842e-05,
      "loss": 0.7005,
      "step": 390
    },
    {
      "epoch": 25.0,
      "learning_rate": 6.473684210526315e-05,
      "loss": 0.696,
      "step": 400
    },
    {
      "epoch": 25.0,
      "eval_loss": 0.8131171464920044,
      "eval_runtime": 0.9869,
      "eval_samples_per_second": 21.28,
      "eval_steps_per_second": 3.04,
      "step": 400
    }
  ],
  "max_steps": 480,
  "num_train_epochs": 30,
  "total_flos": 5.3309496187079885e+17,
  "trial_name": null,
  "trial_params": null
}