| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.003200580371907439, | |
| "eval_steps": 500, | |
| "global_step": 150, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2045.390625, | |
| "epoch": 2.1337202479382926e-05, | |
| "grad_norm": 0.2854375348252045, | |
| "kl": 0.0009365081787109375, | |
| "learning_rate": 6.666666666666667e-08, | |
| "loss": 0.0031, | |
| "num_tokens": 145561.0, | |
| "reward": 2.3322094678878784, | |
| "reward_std": 1.2520284354686737, | |
| "rewards/accuracy_reward": 0.03125, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.3790844678878784, | |
| "step": 1 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 4.267440495876585e-05, | |
| "grad_norm": 0.3004221342795977, | |
| "kl": 0.001178741455078125, | |
| "learning_rate": 1.3333333333333334e-07, | |
| "loss": 0.0, | |
| "num_tokens": 286841.0, | |
| "reward": 1.582009196281433, | |
| "reward_std": 1.0301556289196014, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 0.6445092260837555, | |
| "step": 2 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 6.401160743814878e-05, | |
| "grad_norm": 0.2553423242073664, | |
| "kl": 0.0007991790771484375, | |
| "learning_rate": 2e-07, | |
| "loss": 0.0, | |
| "num_tokens": 429305.0, | |
| "reward": 1.5751782655715942, | |
| "reward_std": 1.1425774097442627, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 0.7158032357692719, | |
| "step": 3 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 8.53488099175317e-05, | |
| "grad_norm": 0.2370439897132965, | |
| "kl": 0.0007152557373046875, | |
| "learning_rate": 2.6666666666666667e-07, | |
| "loss": 0.0, | |
| "num_tokens": 568761.0, | |
| "reward": 1.4800153374671936, | |
| "reward_std": 1.032576322555542, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 0.6518903374671936, | |
| "step": 4 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00010668601239691465, | |
| "grad_norm": 0.27501681251951193, | |
| "kl": 0.000881195068359375, | |
| "learning_rate": 3.333333333333333e-07, | |
| "loss": 0.0, | |
| "num_tokens": 715321.0, | |
| "reward": 1.7932595014572144, | |
| "reward_std": 0.9091455042362213, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 0.8713843822479248, | |
| "step": 5 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00012802321487629756, | |
| "grad_norm": 0.22277528302667082, | |
| "kl": 0.00064849853515625, | |
| "learning_rate": 4e-07, | |
| "loss": 0.0, | |
| "num_tokens": 854969.0, | |
| "reward": 2.333535134792328, | |
| "reward_std": 1.344240427017212, | |
| "rewards/accuracy_reward": 0.140625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.1460351347923279, | |
| "step": 6 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2047.59375, | |
| "epoch": 0.0001493604173556805, | |
| "grad_norm": 0.23557466579809663, | |
| "kl": 0.000698089599609375, | |
| "learning_rate": 4.6666666666666666e-07, | |
| "loss": 0.0003, | |
| "num_tokens": 996767.0, | |
| "reward": 1.6814353466033936, | |
| "reward_std": 1.1845325827598572, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 0.7751853466033936, | |
| "step": 7 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0001706976198350634, | |
| "grad_norm": 0.2607816885759869, | |
| "kl": 0.0008945465087890625, | |
| "learning_rate": 5.333333333333333e-07, | |
| "loss": 0.0, | |
| "num_tokens": 1142847.0, | |
| "reward": 1.6424962878227234, | |
| "reward_std": 0.5981582403182983, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.96875, | |
| "rewards/tag_count_reward": 0.673746258020401, | |
| "step": 8 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1973.234375, | |
| "epoch": 0.00019203482231444635, | |
| "grad_norm": 0.261127030099532, | |
| "kl": 0.0007343292236328125, | |
| "learning_rate": 6e-07, | |
| "loss": 0.0346, | |
| "num_tokens": 1279118.0, | |
| "reward": 3.187682867050171, | |
| "reward_std": 1.5947946906089783, | |
| "rewards/accuracy_reward": 0.328125, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.5939329266548157, | |
| "step": 9 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1903.375, | |
| "epoch": 0.0002133720247938293, | |
| "grad_norm": 0.34736032542583767, | |
| "kl": 0.0011196136474609375, | |
| "learning_rate": 6.666666666666666e-07, | |
| "loss": 0.0592, | |
| "num_tokens": 1412134.0, | |
| "reward": 2.229554295539856, | |
| "reward_std": 1.2556451261043549, | |
| "rewards/accuracy_reward": 0.046875, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.2139292061328888, | |
| "step": 10 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0002347092272732122, | |
| "grad_norm": 0.26441212029443795, | |
| "kl": 0.00083160400390625, | |
| "learning_rate": 7.333333333333332e-07, | |
| "loss": 0.0, | |
| "num_tokens": 1554054.0, | |
| "reward": 1.7672640085220337, | |
| "reward_std": 0.8887456059455872, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.8141390085220337, | |
| "step": 11 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2035.671875, | |
| "epoch": 0.0002560464297525951, | |
| "grad_norm": 0.24428351731034045, | |
| "kl": 0.000823974609375, | |
| "learning_rate": 8e-07, | |
| "loss": 0.0109, | |
| "num_tokens": 1696817.0, | |
| "reward": 1.9538565278053284, | |
| "reward_std": 1.122345894575119, | |
| "rewards/accuracy_reward": 0.0625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 0.9226065576076508, | |
| "step": 12 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00027738363223197806, | |
| "grad_norm": 0.271528625280216, | |
| "kl": 0.0008182525634765625, | |
| "learning_rate": 8.666666666666667e-07, | |
| "loss": 0.0, | |
| "num_tokens": 1839505.0, | |
| "reward": 1.8388200998306274, | |
| "reward_std": 1.1881698369979858, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.84375, | |
| "rewards/tag_count_reward": 0.9950700998306274, | |
| "step": 13 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2045.5, | |
| "epoch": 0.000298720834711361, | |
| "grad_norm": 0.2414762480820206, | |
| "kl": 0.0007495880126953125, | |
| "learning_rate": 9.333333333333333e-07, | |
| "loss": 0.0027, | |
| "num_tokens": 1981969.0, | |
| "reward": 1.6887366771697998, | |
| "reward_std": 1.2837353944778442, | |
| "rewards/accuracy_reward": 0.03125, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 0.7981116473674774, | |
| "step": 14 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00032005803719074394, | |
| "grad_norm": 0.2381201921894107, | |
| "kl": 0.0006237030029296875, | |
| "learning_rate": 1e-06, | |
| "loss": 0.0, | |
| "num_tokens": 2129073.0, | |
| "reward": 1.8175826668739319, | |
| "reward_std": 0.7961941659450531, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.8644576668739319, | |
| "step": 15 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0003413952396701268, | |
| "grad_norm": 0.2802755837902431, | |
| "kl": 0.000896453857421875, | |
| "learning_rate": 9.998781585307575e-07, | |
| "loss": 0.0, | |
| "num_tokens": 2270929.0, | |
| "reward": 1.9846835136413574, | |
| "reward_std": 1.0485508143901825, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.0315584540367126, | |
| "step": 16 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2047.515625, | |
| "epoch": 0.00036273244214950976, | |
| "grad_norm": 0.2675584363328969, | |
| "kl": 0.0008106231689453125, | |
| "learning_rate": 9.99512700102336e-07, | |
| "loss": 0.0004, | |
| "num_tokens": 2411954.0, | |
| "reward": 1.7201014757156372, | |
| "reward_std": 1.0423041880130768, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 0.8138515949249268, | |
| "step": 17 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0003840696446288927, | |
| "grad_norm": 0.23989033868526222, | |
| "kl": 0.0007305145263671875, | |
| "learning_rate": 9.989038226169207e-07, | |
| "loss": 0.0, | |
| "num_tokens": 2551506.0, | |
| "reward": 1.9061667919158936, | |
| "reward_std": 0.6040446311235428, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.9530417025089264, | |
| "step": 18 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00040540684710827564, | |
| "grad_norm": 0.23074770207202952, | |
| "kl": 0.00074005126953125, | |
| "learning_rate": 9.98051855792412e-07, | |
| "loss": 0.0, | |
| "num_tokens": 2693490.0, | |
| "reward": 1.8408669233322144, | |
| "reward_std": 1.06916081905365, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 0.9658669233322144, | |
| "step": 19 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1473.109375, | |
| "epoch": 0.0004267440495876586, | |
| "grad_norm": 0.32686272681018075, | |
| "kl": 0.00066375732421875, | |
| "learning_rate": 9.969572609838744e-07, | |
| "loss": 0.0628, | |
| "num_tokens": 2796473.0, | |
| "reward": 3.2726305723190308, | |
| "reward_std": 1.5523000955581665, | |
| "rewards/accuracy_reward": 0.078125, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 2.2882553935050964, | |
| "step": 20 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2011.109375, | |
| "epoch": 0.00044808125206704147, | |
| "grad_norm": 0.2522727057188777, | |
| "kl": 0.0007610321044921875, | |
| "learning_rate": 9.956206309337066e-07, | |
| "loss": 0.0123, | |
| "num_tokens": 2936704.0, | |
| "reward": 1.9428061246871948, | |
| "reward_std": 0.7873689234256744, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.0365561842918396, | |
| "step": 21 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0004694184545464244, | |
| "grad_norm": 0.2458128336289327, | |
| "kl": 0.0007953643798828125, | |
| "learning_rate": 9.940426894506606e-07, | |
| "loss": 0.0, | |
| "num_tokens": 3078752.0, | |
| "reward": 1.8225675821304321, | |
| "reward_std": 1.2480815649032593, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 0.9475676417350769, | |
| "step": 22 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2043.765625, | |
| "epoch": 0.0004907556570258073, | |
| "grad_norm": 0.28831558421434744, | |
| "kl": 0.0009784698486328125, | |
| "learning_rate": 9.922242910178859e-07, | |
| "loss": 0.0046, | |
| "num_tokens": 3225713.0, | |
| "reward": 1.782945454120636, | |
| "reward_std": 1.0670717060565948, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 0.8454455137252808, | |
| "step": 23 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1679.90625, | |
| "epoch": 0.0005120928595051902, | |
| "grad_norm": 0.24506028826358214, | |
| "kl": 0.000652313232421875, | |
| "learning_rate": 9.901664203302124e-07, | |
| "loss": 0.0259, | |
| "num_tokens": 3343307.0, | |
| "reward": 2.1847400069236755, | |
| "reward_std": 1.6377228498458862, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.2316150665283203, | |
| "step": 24 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2043.40625, | |
| "epoch": 0.0005334300619845732, | |
| "grad_norm": 0.22920633843668548, | |
| "kl": 0.0006666183471679688, | |
| "learning_rate": 9.878701917609207e-07, | |
| "loss": 0.001, | |
| "num_tokens": 3483557.0, | |
| "reward": 1.804680585861206, | |
| "reward_std": 0.9784244000911713, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 0.929680585861206, | |
| "step": 25 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0005547672644639561, | |
| "grad_norm": 0.2301457492486728, | |
| "kl": 0.000682830810546875, | |
| "learning_rate": 9.853368487582886e-07, | |
| "loss": 0.0, | |
| "num_tokens": 3626597.0, | |
| "reward": 1.6927853226661682, | |
| "reward_std": 1.414592206478119, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 0.8334102928638458, | |
| "step": 26 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2013.421875, | |
| "epoch": 0.0005761044669433391, | |
| "grad_norm": 0.2675723979028092, | |
| "kl": 0.00096893310546875, | |
| "learning_rate": 9.825677631722435e-07, | |
| "loss": 0.0272, | |
| "num_tokens": 3765120.0, | |
| "reward": 2.001575767993927, | |
| "reward_std": 0.8465057462453842, | |
| "rewards/accuracy_reward": 0.0625, | |
| "rewards/format_reward": 0.96875, | |
| "rewards/tag_count_reward": 0.907825767993927, | |
| "step": 27 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.000597441669422722, | |
| "grad_norm": 0.2381343290893271, | |
| "kl": 0.0006809234619140625, | |
| "learning_rate": 9.795644345114794e-07, | |
| "loss": 0.0, | |
| "num_tokens": 3906464.0, | |
| "reward": 1.6514010429382324, | |
| "reward_std": 0.9460130631923676, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 0.7295260727405548, | |
| "step": 28 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0006187788719021049, | |
| "grad_norm": 0.23461358537978447, | |
| "kl": 0.0006694793701171875, | |
| "learning_rate": 9.76328489131448e-07, | |
| "loss": 0.0, | |
| "num_tokens": 4048576.0, | |
| "reward": 2.099605143070221, | |
| "reward_std": 1.140558898448944, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.1777302026748657, | |
| "step": 29 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0006401160743814879, | |
| "grad_norm": 0.26327552884400207, | |
| "kl": 0.000946044921875, | |
| "learning_rate": 9.728616793536587e-07, | |
| "loss": 0.0, | |
| "num_tokens": 4192832.0, | |
| "reward": 2.05770343542099, | |
| "reward_std": 1.1037575900554657, | |
| "rewards/accuracy_reward": 0.046875, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.0108284056186676, | |
| "step": 30 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0006614532768608708, | |
| "grad_norm": 0.2597451641361432, | |
| "kl": 0.0008487701416015625, | |
| "learning_rate": 9.69165882516764e-07, | |
| "loss": 0.0, | |
| "num_tokens": 4338560.0, | |
| "reward": 1.938770055770874, | |
| "reward_std": 1.192082703113556, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 1.1106451153755188, | |
| "step": 31 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2041.921875, | |
| "epoch": 0.0006827904793402536, | |
| "grad_norm": 0.2930055914002611, | |
| "kl": 0.0009365081787109375, | |
| "learning_rate": 9.65243099959949e-07, | |
| "loss": 0.0012, | |
| "num_tokens": 4480379.0, | |
| "reward": 1.856385350227356, | |
| "reward_std": 1.006319135427475, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 0.9345102906227112, | |
| "step": 32 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0007041276818196366, | |
| "grad_norm": 0.22103909931032698, | |
| "kl": 0.000598907470703125, | |
| "learning_rate": 9.610954559391704e-07, | |
| "loss": 0.0, | |
| "num_tokens": 4622939.0, | |
| "reward": 2.5040335655212402, | |
| "reward_std": 1.1569816768169403, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.5665335655212402, | |
| "step": 33 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2046.359375, | |
| "epoch": 0.0007254648842990195, | |
| "grad_norm": 0.22286065468459607, | |
| "kl": 0.000667572021484375, | |
| "learning_rate": 9.567251964768342e-07, | |
| "loss": 0.0013, | |
| "num_tokens": 4765778.0, | |
| "reward": 2.4069931507110596, | |
| "reward_std": 1.472485899925232, | |
| "rewards/accuracy_reward": 0.140625, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.2663681507110596, | |
| "step": 34 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0007468020867784025, | |
| "grad_norm": 0.2479007589198706, | |
| "kl": 0.000667572021484375, | |
| "learning_rate": 9.521346881455354e-07, | |
| "loss": 0.0, | |
| "num_tokens": 4910994.0, | |
| "reward": 2.049605906009674, | |
| "reward_std": 1.2689136564731598, | |
| "rewards/accuracy_reward": 0.046875, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.0808558762073517, | |
| "step": 35 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1945.53125, | |
| "epoch": 0.0007681392892577854, | |
| "grad_norm": 0.2917659018128537, | |
| "kl": 0.00090789794921875, | |
| "learning_rate": 9.473264167865171e-07, | |
| "loss": 0.0092, | |
| "num_tokens": 5046900.0, | |
| "reward": 2.4214553833007812, | |
| "reward_std": 1.148881196975708, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.5620803236961365, | |
| "step": 36 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0007894764917371683, | |
| "grad_norm": 0.282236040891347, | |
| "kl": 0.00102996826171875, | |
| "learning_rate": 9.42302986163543e-07, | |
| "loss": 0.0, | |
| "num_tokens": 5187092.0, | |
| "reward": 1.7346134781837463, | |
| "reward_std": 0.9199301600456238, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 0.7971135079860687, | |
| "step": 37 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2042.890625, | |
| "epoch": 0.0008108136942165513, | |
| "grad_norm": 0.2761074131953832, | |
| "kl": 0.0008392333984375, | |
| "learning_rate": 9.370671165529144e-07, | |
| "loss": 0.0028, | |
| "num_tokens": 5330029.0, | |
| "reward": 2.530204176902771, | |
| "reward_std": 1.2140900790691376, | |
| "rewards/accuracy_reward": 0.21875, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.170829176902771, | |
| "step": 38 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1746.09375, | |
| "epoch": 0.0008321508966959342, | |
| "grad_norm": 0.24419783770020848, | |
| "kl": 0.000598907470703125, | |
| "learning_rate": 9.316216432703916e-07, | |
| "loss": 0.0685, | |
| "num_tokens": 5454003.0, | |
| "reward": 2.502521574497223, | |
| "reward_std": 1.2263858914375305, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.596271574497223, | |
| "step": 39 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0008534880991753172, | |
| "grad_norm": 0.22662575766567117, | |
| "kl": 0.0005588531494140625, | |
| "learning_rate": 9.259695151358214e-07, | |
| "loss": 0.0, | |
| "num_tokens": 5594931.0, | |
| "reward": 2.3665153980255127, | |
| "reward_std": 1.1563751697540283, | |
| "rewards/accuracy_reward": 0.09375, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.2415153980255127, | |
| "step": 40 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0008748253016547, | |
| "grad_norm": 0.2635470392525618, | |
| "kl": 0.000873565673828125, | |
| "learning_rate": 9.20113792876298e-07, | |
| "loss": 0.0, | |
| "num_tokens": 5737107.0, | |
| "reward": 2.02409964799881, | |
| "reward_std": 1.1649736166000366, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.1334747076034546, | |
| "step": 41 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2043.5, | |
| "epoch": 0.0008961625041340829, | |
| "grad_norm": 0.2563169843221627, | |
| "kl": 0.000782012939453125, | |
| "learning_rate": 9.140576474687263e-07, | |
| "loss": 0.003, | |
| "num_tokens": 5886835.0, | |
| "reward": 2.190575957298279, | |
| "reward_std": 1.5674269199371338, | |
| "rewards/accuracy_reward": 0.109375, | |
| "rewards/format_reward": 0.84375, | |
| "rewards/tag_count_reward": 1.128075897693634, | |
| "step": 42 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0009174997066134659, | |
| "grad_norm": 0.28163754663914925, | |
| "kl": 0.00092315673828125, | |
| "learning_rate": 9.078043584226815e-07, | |
| "loss": 0.0, | |
| "num_tokens": 6037235.0, | |
| "reward": 1.8984251022338867, | |
| "reward_std": 1.0966300964355469, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.0078001618385315, | |
| "step": 43 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2019.5625, | |
| "epoch": 0.0009388369090928488, | |
| "grad_norm": 0.22931076804296466, | |
| "kl": 0.000720977783203125, | |
| "learning_rate": 9.013573120044966e-07, | |
| "loss": 0.0095, | |
| "num_tokens": 6178775.0, | |
| "reward": 1.8543309569358826, | |
| "reward_std": 1.0193661749362946, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 0.9637059569358826, | |
| "step": 44 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0009601741115722318, | |
| "grad_norm": 0.19292180517274882, | |
| "kl": 0.0005321502685546875, | |
| "learning_rate": 8.9471999940354e-07, | |
| "loss": 0.0, | |
| "num_tokens": 6319095.0, | |
| "reward": 2.5643407106399536, | |
| "reward_std": 1.7284818887710571, | |
| "rewards/accuracy_reward": 0.15625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.3455908298492432, | |
| "step": 45 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1988.765625, | |
| "epoch": 0.0009815113140516146, | |
| "grad_norm": 0.2448535186498399, | |
| "kl": 0.00070953369140625, | |
| "learning_rate": 8.878960148416747e-07, | |
| "loss": 0.026, | |
| "num_tokens": 6456104.0, | |
| "reward": 3.0129899978637695, | |
| "reward_std": 1.7822765707969666, | |
| "rewards/accuracy_reward": 0.296875, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.544240117073059, | |
| "step": 46 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2042.171875, | |
| "epoch": 0.0010028485165309976, | |
| "grad_norm": 0.27737187408641456, | |
| "kl": 0.0009708404541015625, | |
| "learning_rate": 8.808890536269229e-07, | |
| "loss": 0.0049, | |
| "num_tokens": 6597555.0, | |
| "reward": 2.284244418144226, | |
| "reward_std": 1.4300822615623474, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.377994418144226, | |
| "step": 47 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1989.5625, | |
| "epoch": 0.0010241857190103805, | |
| "grad_norm": 0.27668306340787147, | |
| "kl": 0.0009365081787109375, | |
| "learning_rate": 8.737029101523929e-07, | |
| "loss": 0.0137, | |
| "num_tokens": 6736695.0, | |
| "reward": 2.212657928466797, | |
| "reward_std": 0.7791785001754761, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.2595329284667969, | |
| "step": 48 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0010455229214897635, | |
| "grad_norm": 0.21783066479020852, | |
| "kl": 0.0006561279296875, | |
| "learning_rate": 8.663414758415478e-07, | |
| "loss": 0.0, | |
| "num_tokens": 6877367.0, | |
| "reward": 2.2547308206558228, | |
| "reward_std": 1.1983038187026978, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.3172308206558228, | |
| "step": 49 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2009.765625, | |
| "epoch": 0.0010668601239691464, | |
| "grad_norm": 0.24235044467850728, | |
| "kl": 0.0008182525634765625, | |
| "learning_rate": 8.588087370409302e-07, | |
| "loss": 0.0159, | |
| "num_tokens": 7016648.0, | |
| "reward": 2.772611379623413, | |
| "reward_std": 1.6880531311035156, | |
| "rewards/accuracy_reward": 0.25, | |
| "rewards/format_reward": 0.796875, | |
| "rewards/tag_count_reward": 1.4757365584373474, | |
| "step": 50 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2042.25, | |
| "epoch": 0.0010881973264485294, | |
| "grad_norm": 0.2433309236639273, | |
| "kl": 0.0008258819580078125, | |
| "learning_rate": 8.511087728614862e-07, | |
| "loss": 0.0023, | |
| "num_tokens": 7164056.0, | |
| "reward": 1.8680261969566345, | |
| "reward_std": 0.8950491547584534, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 0.8992761969566345, | |
| "step": 51 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0011095345289279122, | |
| "grad_norm": 0.2156516248748721, | |
| "kl": 0.0006465911865234375, | |
| "learning_rate": 8.432457529696548e-07, | |
| "loss": 0.0, | |
| "num_tokens": 7309528.0, | |
| "reward": 2.376724123954773, | |
| "reward_std": 1.2739354968070984, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.501724123954773, | |
| "step": 52 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2030.515625, | |
| "epoch": 0.0011308717314072953, | |
| "grad_norm": 0.2725527123608276, | |
| "kl": 0.0008831024169921875, | |
| "learning_rate": 8.352239353294194e-07, | |
| "loss": 0.0045, | |
| "num_tokens": 7448569.0, | |
| "reward": 2.542914628982544, | |
| "reward_std": 1.0636687576770782, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.96875, | |
| "rewards/tag_count_reward": 1.574164628982544, | |
| "step": 53 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0011522089338866781, | |
| "grad_norm": 0.21340474346529933, | |
| "kl": 0.0006866455078125, | |
| "learning_rate": 8.270476638965461e-07, | |
| "loss": 0.0, | |
| "num_tokens": 7591545.0, | |
| "reward": 2.076167106628418, | |
| "reward_std": 0.833263486623764, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.154291957616806, | |
| "step": 54 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.001173546136366061, | |
| "grad_norm": 0.25482078966184224, | |
| "kl": 0.000797271728515625, | |
| "learning_rate": 8.187213662662538e-07, | |
| "loss": 0.0, | |
| "num_tokens": 7736825.0, | |
| "reward": 1.9633585214614868, | |
| "reward_std": 0.7034492790699005, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 1.0, | |
| "rewards/tag_count_reward": 0.9633584916591644, | |
| "step": 55 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.001194883338845444, | |
| "grad_norm": 0.26106446652031223, | |
| "kl": 0.001003265380859375, | |
| "learning_rate": 8.102495512755938e-07, | |
| "loss": 0.0, | |
| "num_tokens": 7885721.0, | |
| "reward": 1.6216127276420593, | |
| "reward_std": 0.7480948567390442, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.96875, | |
| "rewards/tag_count_reward": 0.6528627574443817, | |
| "step": 56 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0012162205413248268, | |
| "grad_norm": 0.2856678726358658, | |
| "kl": 0.0009746551513671875, | |
| "learning_rate": 8.01636806561836e-07, | |
| "loss": 0.0, | |
| "num_tokens": 8026585.0, | |
| "reward": 1.956697940826416, | |
| "reward_std": 0.7752513885498047, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.0035729110240936, | |
| "step": 57 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1940.1875, | |
| "epoch": 0.0012375577438042099, | |
| "grad_norm": 0.27128271278603266, | |
| "kl": 0.0008296966552734375, | |
| "learning_rate": 7.928877960781808e-07, | |
| "loss": -0.0024, | |
| "num_tokens": 8159653.0, | |
| "reward": 2.1624690294265747, | |
| "reward_std": 0.7985830008983612, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.240594059228897, | |
| "step": 58 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0012588949462835927, | |
| "grad_norm": 0.2393949731273476, | |
| "kl": 0.000789642333984375, | |
| "learning_rate": 7.840072575681468e-07, | |
| "loss": 0.0, | |
| "num_tokens": 8303717.0, | |
| "reward": 1.9459198713302612, | |
| "reward_std": 1.2206479012966156, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 1.1177948713302612, | |
| "step": 59 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2037.5625, | |
| "epoch": 0.0012802321487629758, | |
| "grad_norm": 0.3025950046835889, | |
| "kl": 0.00112152099609375, | |
| "learning_rate": 7.75e-07, | |
| "loss": 0.0067, | |
| "num_tokens": 8445033.0, | |
| "reward": 2.0251868963241577, | |
| "reward_std": 0.7835379242897034, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.96875, | |
| "rewards/tag_count_reward": 1.05643692612648, | |
| "step": 60 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2046.140625, | |
| "epoch": 0.0013015693512423586, | |
| "grad_norm": 0.24381165261859616, | |
| "kl": 0.00078582763671875, | |
| "learning_rate": 7.658709009626109e-07, | |
| "loss": 0.001, | |
| "num_tokens": 8583666.0, | |
| "reward": 2.543883442878723, | |
| "reward_std": 1.186528593301773, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.84375, | |
| "rewards/tag_count_reward": 1.7001334428787231, | |
| "step": 61 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0013229065537217416, | |
| "grad_norm": 0.3053664407634924, | |
| "kl": 0.001110076904296875, | |
| "learning_rate": 7.566249040241553e-07, | |
| "loss": 0.0, | |
| "num_tokens": 8732274.0, | |
| "reward": 1.7126996517181396, | |
| "reward_std": 1.000561237335205, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 0.8376996517181396, | |
| "step": 62 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2022.828125, | |
| "epoch": 0.0013442437562011245, | |
| "grad_norm": 0.20644590625475823, | |
| "kl": 0.000682830810546875, | |
| "learning_rate": 7.472670160550848e-07, | |
| "loss": 0.0023, | |
| "num_tokens": 8871143.0, | |
| "reward": 2.6747394800186157, | |
| "reward_std": 1.2998624444007874, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.8153644800186157, | |
| "step": 63 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2020.296875, | |
| "epoch": 0.0013655809586805073, | |
| "grad_norm": 0.21757211966396547, | |
| "kl": 0.0006732940673828125, | |
| "learning_rate": 7.37802304516818e-07, | |
| "loss": 0.0069, | |
| "num_tokens": 9011386.0, | |
| "reward": 3.340072751045227, | |
| "reward_std": 1.4956574440002441, | |
| "rewards/accuracy_reward": 0.359375, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.730697751045227, | |
| "step": 64 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1919.8125, | |
| "epoch": 0.0013869181611598903, | |
| "grad_norm": 0.2254260907566737, | |
| "kl": 0.0006008148193359375, | |
| "learning_rate": 7.282358947176205e-07, | |
| "loss": 0.0391, | |
| "num_tokens": 9142830.0, | |
| "reward": 2.9505216479301453, | |
| "reward_std": 1.60056534409523, | |
| "rewards/accuracy_reward": 0.328125, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.4192715287208557, | |
| "step": 65 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2045.765625, | |
| "epoch": 0.0014082553636392732, | |
| "grad_norm": 0.2769049884459238, | |
| "kl": 0.001010894775390625, | |
| "learning_rate": 7.185729670371604e-07, | |
| "loss": 0.0018, | |
| "num_tokens": 9286335.0, | |
| "reward": 1.9083788394927979, | |
| "reward_std": 1.1193182468414307, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.0177539587020874, | |
| "step": 66 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2008.359375, | |
| "epoch": 0.0014295925661186562, | |
| "grad_norm": 0.2622067085298228, | |
| "kl": 0.00093841552734375, | |
| "learning_rate": 7.08818754121241e-07, | |
| "loss": 0.0105, | |
| "num_tokens": 9428662.0, | |
| "reward": 2.7499518394470215, | |
| "reward_std": 1.4287935495376587, | |
| "rewards/accuracy_reward": 0.265625, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.328076958656311, | |
| "step": 67 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.001450929768598039, | |
| "grad_norm": 0.24102425862447946, | |
| "kl": 0.000919342041015625, | |
| "learning_rate": 6.989785380482312e-07, | |
| "loss": 0.0, | |
| "num_tokens": 9576566.0, | |
| "reward": 2.014813780784607, | |
| "reward_std": 1.0817488133907318, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.092938870191574, | |
| "step": 68 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1144.953125, | |
| "epoch": 0.001472266971077422, | |
| "grad_norm": 0.33393515505001675, | |
| "kl": 0.000881195068359375, | |
| "learning_rate": 6.890576474687263e-07, | |
| "loss": -0.0248, | |
| "num_tokens": 9658739.0, | |
| "reward": 3.2838672399520874, | |
| "reward_std": 1.3060714602470398, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 2.3619922399520874, | |
| "step": 69 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.001493604173556805, | |
| "grad_norm": 0.24655547854354828, | |
| "kl": 0.0009098052978515625, | |
| "learning_rate": 6.790614547199906e-07, | |
| "loss": 0.0, | |
| "num_tokens": 9799827.0, | |
| "reward": 1.7142577767372131, | |
| "reward_std": 0.8359348177909851, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.7611327767372131, | |
| "step": 70 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1688.703125, | |
| "epoch": 0.001514941376036188, | |
| "grad_norm": 0.2903000474811399, | |
| "kl": 0.0008106231689453125, | |
| "learning_rate": 6.68995372916741e-07, | |
| "loss": 0.0328, | |
| "num_tokens": 9919808.0, | |
| "reward": 3.7349190711975098, | |
| "reward_std": 1.3884176015853882, | |
| "rewards/accuracy_reward": 0.625, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.5630440711975098, | |
| "step": 71 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2030.609375, | |
| "epoch": 0.0015362785785155708, | |
| "grad_norm": 0.23520625536771628, | |
| "kl": 0.0008068084716796875, | |
| "learning_rate": 6.588648530198504e-07, | |
| "loss": 0.0136, | |
| "num_tokens": 10061927.0, | |
| "reward": 2.674770474433899, | |
| "reward_std": 1.6050742864608765, | |
| "rewards/accuracy_reward": 0.15625, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.471645474433899, | |
| "step": 72 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0015576157809949536, | |
| "grad_norm": 0.23187823906584043, | |
| "kl": 0.0007991790771484375, | |
| "learning_rate": 6.486753808845564e-07, | |
| "loss": 0.0, | |
| "num_tokens": 10204167.0, | |
| "reward": 1.8549121618270874, | |
| "reward_std": 0.9832762628793716, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 0.9330372214317322, | |
| "step": 73 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0015789529834743367, | |
| "grad_norm": 0.2852769039521243, | |
| "kl": 0.00112152099609375, | |
| "learning_rate": 6.384324742897735e-07, | |
| "loss": 0.0, | |
| "num_tokens": 10346439.0, | |
| "reward": 1.840112566947937, | |
| "reward_std": 1.2321603894233704, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 0.980737566947937, | |
| "step": 74 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2044.03125, | |
| "epoch": 0.0016002901859537195, | |
| "grad_norm": 0.272112451374701, | |
| "kl": 0.000988006591796875, | |
| "learning_rate": 6.281416799501187e-07, | |
| "loss": 0.003, | |
| "num_tokens": 10494057.0, | |
| "reward": 1.7877224683761597, | |
| "reward_std": 0.6015221998095512, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.8345976173877716, | |
| "step": 75 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1908.125, | |
| "epoch": 0.0016216273884331026, | |
| "grad_norm": 0.2832589342425617, | |
| "kl": 0.000934600830078125, | |
| "learning_rate": 6.178085705122674e-07, | |
| "loss": 0.023, | |
| "num_tokens": 10630161.0, | |
| "reward": 2.9047216176986694, | |
| "reward_std": 1.1229093968868256, | |
| "rewards/accuracy_reward": 0.359375, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.2484715580940247, | |
| "step": 76 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1991.390625, | |
| "epoch": 0.0016429645909124854, | |
| "grad_norm": 0.2512677143163325, | |
| "kl": 0.0008907318115234375, | |
| "learning_rate": 6.074387415372676e-07, | |
| "loss": 0.011, | |
| "num_tokens": 10766346.0, | |
| "reward": 2.495735764503479, | |
| "reward_std": 0.9869028925895691, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.605110764503479, | |
| "step": 77 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2033.0625, | |
| "epoch": 0.0016643017933918685, | |
| "grad_norm": 0.2528088276687433, | |
| "kl": 0.000827789306640625, | |
| "learning_rate": 5.97037808470444e-07, | |
| "loss": -0.0006, | |
| "num_tokens": 10910606.0, | |
| "reward": 2.8697324991226196, | |
| "reward_std": 1.4734113216400146, | |
| "rewards/accuracy_reward": 0.296875, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.3853575587272644, | |
| "step": 78 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1977.484375, | |
| "epoch": 0.0016856389958712513, | |
| "grad_norm": 0.2831360789817148, | |
| "kl": 0.0010509490966796875, | |
| "learning_rate": 5.866114036005362e-07, | |
| "loss": 0.0366, | |
| "num_tokens": 11049613.0, | |
| "reward": 2.818997859954834, | |
| "reward_std": 1.7136698961257935, | |
| "rewards/accuracy_reward": NaN, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.4283727407455444, | |
| "step": 79 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0017069761983506343, | |
| "grad_norm": 0.24095973105078278, | |
| "kl": 0.0008087158203125, | |
| "learning_rate": 5.761651730097142e-07, | |
| "loss": 0.0, | |
| "num_tokens": 11192205.0, | |
| "reward": 2.0921364426612854, | |
| "reward_std": 1.2704755067825317, | |
| "rewards/accuracy_reward": 0.03125, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.1233863234519958, | |
| "step": 80 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1887.40625, | |
| "epoch": 0.0017283134008300172, | |
| "grad_norm": 0.2617304413835446, | |
| "kl": 0.000926971435546875, | |
| "learning_rate": 5.657047735161255e-07, | |
| "loss": 0.0364, | |
| "num_tokens": 11324775.0, | |
| "reward": 3.415910482406616, | |
| "reward_std": 1.824449062347412, | |
| "rewards/accuracy_reward": 0.515625, | |
| "rewards/format_reward": 0.796875, | |
| "rewards/tag_count_reward": 1.587785243988037, | |
| "step": 81 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2047.28125, | |
| "epoch": 0.0017496506033094, | |
| "grad_norm": 0.2582561415280589, | |
| "kl": 0.0010089874267578125, | |
| "learning_rate": 5.552358696106288e-07, | |
| "loss": 0.0004, | |
| "num_tokens": 11465753.0, | |
| "reward": 2.362888216972351, | |
| "reward_std": 1.2181004285812378, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.4722631573677063, | |
| "step": 82 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.001770987805788783, | |
| "grad_norm": 0.2523781224931707, | |
| "kl": 0.00079345703125, | |
| "learning_rate": 5.447641303893714e-07, | |
| "loss": 0.0, | |
| "num_tokens": 11605273.0, | |
| "reward": 1.7928955554962158, | |
| "reward_std": 1.291743278503418, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 0.964770495891571, | |
| "step": 83 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1845.25, | |
| "epoch": 0.0017923250082681659, | |
| "grad_norm": 0.29023191077913163, | |
| "kl": 0.00101470947265625, | |
| "learning_rate": 5.342952264838747e-07, | |
| "loss": 0.0271, | |
| "num_tokens": 11739369.0, | |
| "reward": 3.3112670183181763, | |
| "reward_std": 0.975414514541626, | |
| "rewards/accuracy_reward": 0.484375, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.4206420183181763, | |
| "step": 84 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1960.515625, | |
| "epoch": 0.001813662210747549, | |
| "grad_norm": 0.25663771030937327, | |
| "kl": 0.0009517669677734375, | |
| "learning_rate": 5.238348269902859e-07, | |
| "loss": 0.0024, | |
| "num_tokens": 11879114.0, | |
| "reward": 2.707318902015686, | |
| "reward_std": 1.2365654706954956, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.801068902015686, | |
| "step": 85 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2012.546875, | |
| "epoch": 0.0018349994132269318, | |
| "grad_norm": 0.25998444768041296, | |
| "kl": 0.0008792877197265625, | |
| "learning_rate": 5.133885963994639e-07, | |
| "loss": 0.012, | |
| "num_tokens": 12032493.0, | |
| "reward": 2.910939037799835, | |
| "reward_std": 1.5657430291175842, | |
| "rewards/accuracy_reward": 0.265625, | |
| "rewards/format_reward": 0.796875, | |
| "rewards/tag_count_reward": 1.5828140377998352, | |
| "step": 86 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0018563366157063148, | |
| "grad_norm": 0.25043258022994846, | |
| "kl": 0.00096893310546875, | |
| "learning_rate": 5.02962191529556e-07, | |
| "loss": 0.0, | |
| "num_tokens": 12174669.0, | |
| "reward": 2.3195923566818237, | |
| "reward_std": 1.093966394662857, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.4289673566818237, | |
| "step": 87 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0018776738181856976, | |
| "grad_norm": 0.29914837108236175, | |
| "kl": 0.001117706298828125, | |
| "learning_rate": 4.925612584627324e-07, | |
| "loss": 0.0, | |
| "num_tokens": 12314477.0, | |
| "reward": 1.970560908317566, | |
| "reward_std": 0.8368427753448486, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.017435908317566, | |
| "step": 88 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1909.484375, | |
| "epoch": 0.0018990110206650807, | |
| "grad_norm": 0.2518336087550161, | |
| "kl": 0.000774383544921875, | |
| "learning_rate": 4.821914294877326e-07, | |
| "loss": 0.0168, | |
| "num_tokens": 12445324.0, | |
| "reward": 2.5033475160598755, | |
| "reward_std": 1.1783407926559448, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.6127226948738098, | |
| "step": 89 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2045.375, | |
| "epoch": 0.0019203482231444635, | |
| "grad_norm": 0.26310664120871885, | |
| "kl": 0.00103759765625, | |
| "learning_rate": 4.7185832004988133e-07, | |
| "loss": 0.0011, | |
| "num_tokens": 12593092.0, | |
| "reward": 2.543837308883667, | |
| "reward_std": 1.3276816010475159, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.6688373684883118, | |
| "step": 90 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2047.609375, | |
| "epoch": 0.0019416854256238463, | |
| "grad_norm": 0.28938532633558406, | |
| "kl": 0.00127410888671875, | |
| "learning_rate": 4.6156752571022637e-07, | |
| "loss": 0.0003, | |
| "num_tokens": 12739083.0, | |
| "reward": 2.1860002279281616, | |
| "reward_std": 1.2386729717254639, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 1.357875257730484, | |
| "step": 91 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1900.71875, | |
| "epoch": 0.001963022628103229, | |
| "grad_norm": 0.2653562007841822, | |
| "kl": 0.0010280609130859375, | |
| "learning_rate": 4.513246191154434e-07, | |
| "loss": 0.0037, | |
| "num_tokens": 12870745.0, | |
| "reward": 2.5691369771957397, | |
| "reward_std": 0.8630341291427612, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.6160120368003845, | |
| "step": 92 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0019843598305826124, | |
| "grad_norm": 0.24320922328123415, | |
| "kl": 0.00089263916015625, | |
| "learning_rate": 4.4113514698014953e-07, | |
| "loss": 0.0, | |
| "num_tokens": 13013625.0, | |
| "reward": 1.8812434077262878, | |
| "reward_std": 1.2174347043037415, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 0.9906184077262878, | |
| "step": 93 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0020056970330619953, | |
| "grad_norm": 0.24390353582994392, | |
| "kl": 0.0007953643798828125, | |
| "learning_rate": 4.3100462708325914e-07, | |
| "loss": 0.0, | |
| "num_tokens": 13159353.0, | |
| "reward": 1.918450117111206, | |
| "reward_std": 0.8721325099468231, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 0.996575117111206, | |
| "step": 94 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002027034235541378, | |
| "grad_norm": 0.2690851353987946, | |
| "kl": 0.0008544921875, | |
| "learning_rate": 4.209385452800095e-07, | |
| "loss": 0.0, | |
| "num_tokens": 13303929.0, | |
| "reward": 1.7299691438674927, | |
| "reward_std": 0.7123595774173737, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.7768440842628479, | |
| "step": 95 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002048371438020761, | |
| "grad_norm": 0.2710088598548366, | |
| "kl": 0.0010471343994140625, | |
| "learning_rate": 4.1094235253127374e-07, | |
| "loss": 0.0, | |
| "num_tokens": 13445017.0, | |
| "reward": 1.74798583984375, | |
| "reward_std": 1.3267025351524353, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 0.8886107504367828, | |
| "step": 96 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002069708640500144, | |
| "grad_norm": 0.23651577681323582, | |
| "kl": 0.000823974609375, | |
| "learning_rate": 4.0102146195176887e-07, | |
| "loss": 0.0, | |
| "num_tokens": 13586681.0, | |
| "reward": 2.2576102018356323, | |
| "reward_std": 1.0508009791374207, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.3826101422309875, | |
| "step": 97 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2037.75, | |
| "epoch": 0.002091045842979527, | |
| "grad_norm": 0.2463373721645078, | |
| "kl": 0.0009613037109375, | |
| "learning_rate": 3.911812458787591e-07, | |
| "loss": 0.0071, | |
| "num_tokens": 13726697.0, | |
| "reward": 2.5095486640930176, | |
| "reward_std": 1.7340097427368164, | |
| "rewards/accuracy_reward": 0.109375, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.4314236640930176, | |
| "step": 98 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00211238304545891, | |
| "grad_norm": 0.2422484191159108, | |
| "kl": 0.000823974609375, | |
| "learning_rate": 3.8142703296283953e-07, | |
| "loss": 0.0, | |
| "num_tokens": 13868329.0, | |
| "reward": 1.9351916313171387, | |
| "reward_std": 1.0000105500221252, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.0289416313171387, | |
| "step": 99 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0021337202479382927, | |
| "grad_norm": 0.2664142353736605, | |
| "kl": 0.00110626220703125, | |
| "learning_rate": 3.7176410528237945e-07, | |
| "loss": 0.0, | |
| "num_tokens": 14010185.0, | |
| "reward": 2.0706847310066223, | |
| "reward_std": 1.3341472148895264, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.2113096117973328, | |
| "step": 100 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1949.21875, | |
| "epoch": 0.0021550574504176755, | |
| "grad_norm": 0.23131529768976428, | |
| "kl": 0.0008525848388671875, | |
| "learning_rate": 3.62197695483182e-07, | |
| "loss": 0.043, | |
| "num_tokens": 14145431.0, | |
| "reward": 2.7469276189804077, | |
| "reward_std": 1.427620768547058, | |
| "rewards/accuracy_reward": 0.234375, | |
| "rewards/format_reward": 0.75, | |
| "rewards/tag_count_reward": 1.528177559375763, | |
| "step": 101 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002176394652897059, | |
| "grad_norm": 0.2507023081592315, | |
| "kl": 0.00091552734375, | |
| "learning_rate": 3.5273298394491515e-07, | |
| "loss": 0.0, | |
| "num_tokens": 14289623.0, | |
| "reward": 1.7653347253799438, | |
| "reward_std": 1.1874454021453857, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 0.8903347849845886, | |
| "step": 102 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0021977318553764416, | |
| "grad_norm": 0.2745529021458805, | |
| "kl": 0.001163482666015625, | |
| "learning_rate": 3.433750959758446e-07, | |
| "loss": 0.0, | |
| "num_tokens": 14430295.0, | |
| "reward": 2.8309139013290405, | |
| "reward_std": 1.2025935649871826, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.9715389013290405, | |
| "step": 103 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0022190690578558245, | |
| "grad_norm": 0.28737578385004514, | |
| "kl": 0.001148223876953125, | |
| "learning_rate": 3.3412909903738936e-07, | |
| "loss": 0.0, | |
| "num_tokens": 14571895.0, | |
| "reward": 2.100715756416321, | |
| "reward_std": 1.0319698452949524, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 1.1475908160209656, | |
| "step": 104 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2022.671875, | |
| "epoch": 0.0022404062603352073, | |
| "grad_norm": 0.24092084749705797, | |
| "kl": 0.00098419189453125, | |
| "learning_rate": 3.250000000000001e-07, | |
| "loss": 0.0112, | |
| "num_tokens": 14710146.0, | |
| "reward": 2.2321301698684692, | |
| "reward_std": 1.0327418744564056, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.2946301698684692, | |
| "step": 105 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1879.609375, | |
| "epoch": 0.0022617434628145906, | |
| "grad_norm": 0.2548862609498936, | |
| "kl": 0.0009136199951171875, | |
| "learning_rate": 3.159927424318531e-07, | |
| "loss": 0.0319, | |
| "num_tokens": 14846281.0, | |
| "reward": 2.330931842327118, | |
| "reward_std": 1.3079878091812134, | |
| "rewards/accuracy_reward": NaN, | |
| "rewards/format_reward": 0.78125, | |
| "rewards/tag_count_reward": 1.5496819615364075, | |
| "step": 106 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0022830806652939734, | |
| "grad_norm": 0.27731203904003077, | |
| "kl": 0.0010967254638671875, | |
| "learning_rate": 3.0711220392181934e-07, | |
| "loss": 0.0, | |
| "num_tokens": 14990441.0, | |
| "reward": 1.7980546951293945, | |
| "reward_std": 0.8120542466640472, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 0.8605546951293945, | |
| "step": 107 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2022.640625, | |
| "epoch": 0.0023044178677733562, | |
| "grad_norm": 0.2522375210435353, | |
| "kl": 0.00102996826171875, | |
| "learning_rate": 2.9836319343816397e-07, | |
| "loss": 0.0187, | |
| "num_tokens": 15129458.0, | |
| "reward": 2.667497754096985, | |
| "reward_std": 1.6230178475379944, | |
| "rewards/accuracy_reward": 0.109375, | |
| "rewards/format_reward": 0.78125, | |
| "rewards/tag_count_reward": 1.6674975752830505, | |
| "step": 108 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2005.15625, | |
| "epoch": 0.002325755070252739, | |
| "grad_norm": 0.21530909512700133, | |
| "kl": 0.000850677490234375, | |
| "learning_rate": 2.897504487244061e-07, | |
| "loss": -0.0048, | |
| "num_tokens": 15265788.0, | |
| "reward": 2.6458462476730347, | |
| "reward_std": 1.383511245250702, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.77084618806839, | |
| "step": 109 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002347092272732122, | |
| "grad_norm": 0.24861834808297276, | |
| "kl": 0.0009212493896484375, | |
| "learning_rate": 2.812786337337463e-07, | |
| "loss": 0.0, | |
| "num_tokens": 15406684.0, | |
| "reward": 2.0519683957099915, | |
| "reward_std": 1.0633149147033691, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.1769684553146362, | |
| "step": 110 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002368429475211505, | |
| "grad_norm": 0.30503198554033595, | |
| "kl": 0.00147247314453125, | |
| "learning_rate": 2.729523361034538e-07, | |
| "loss": 0.0001, | |
| "num_tokens": 15550652.0, | |
| "reward": 2.0827815532684326, | |
| "reward_std": 1.0192652642726898, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.2077816128730774, | |
| "step": 111 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1871.765625, | |
| "epoch": 0.002389766677690888, | |
| "grad_norm": 0.2842623238586417, | |
| "kl": 0.0010280609130859375, | |
| "learning_rate": 2.6477606467058035e-07, | |
| "loss": 0.106, | |
| "num_tokens": 15683661.0, | |
| "reward": 1.914925754070282, | |
| "reward_std": 0.9150934815406799, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 0.9774257838726044, | |
| "step": 112 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002411103880170271, | |
| "grad_norm": 0.2285684445959697, | |
| "kl": 0.00087738037109375, | |
| "learning_rate": 2.567542470303452e-07, | |
| "loss": 0.0, | |
| "num_tokens": 15823917.0, | |
| "reward": 2.3608699440956116, | |
| "reward_std": 1.4412734508514404, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.454619973897934, | |
| "step": 113 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1838.015625, | |
| "epoch": 0.0024324410826496536, | |
| "grad_norm": 0.24180280253503963, | |
| "kl": 0.0008525848388671875, | |
| "learning_rate": 2.488912271385139e-07, | |
| "loss": -0.0088, | |
| "num_tokens": 15952078.0, | |
| "reward": 2.989068865776062, | |
| "reward_std": 1.378861427307129, | |
| "rewards/accuracy_reward": 0.125, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.879693865776062, | |
| "step": 114 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2037.203125, | |
| "epoch": 0.002453778285129037, | |
| "grad_norm": 0.2612756806308449, | |
| "kl": 0.00115203857421875, | |
| "learning_rate": 2.411912629590699e-07, | |
| "loss": 0.0038, | |
| "num_tokens": 16091291.0, | |
| "reward": 2.6733558177948, | |
| "reward_std": 1.371264487504959, | |
| "rewards/accuracy_reward": 0.234375, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.3452309966087341, | |
| "step": 115 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1934.734375, | |
| "epoch": 0.0024751154876084197, | |
| "grad_norm": 0.2904898095363512, | |
| "kl": 0.001300811767578125, | |
| "learning_rate": 2.336585241584522e-07, | |
| "loss": 0.0434, | |
| "num_tokens": 16231690.0, | |
| "reward": 3.2452234625816345, | |
| "reward_std": 1.7195310592651367, | |
| "rewards/accuracy_reward": 0.234375, | |
| "rewards/format_reward": 0.84375, | |
| "rewards/tag_count_reward": 1.9327235221862793, | |
| "step": 116 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2027.6875, | |
| "epoch": 0.0024964526900878026, | |
| "grad_norm": 0.2857729276958772, | |
| "kl": 0.001163482666015625, | |
| "learning_rate": 2.2629708984760706e-07, | |
| "loss": 0.0004, | |
| "num_tokens": 16376406.0, | |
| "reward": 2.67911034822464, | |
| "reward_std": 1.0040427446365356, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.77286034822464, | |
| "step": 117 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0025177898925671854, | |
| "grad_norm": 0.24787304560201415, | |
| "kl": 0.00091552734375, | |
| "learning_rate": 2.1911094637307714e-07, | |
| "loss": 0.0, | |
| "num_tokens": 16517462.0, | |
| "reward": 2.315044343471527, | |
| "reward_std": 1.209524244070053, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.377544343471527, | |
| "step": 118 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0025391270950465682, | |
| "grad_norm": 0.2606421089552964, | |
| "kl": 0.0010623931884765625, | |
| "learning_rate": 2.1210398515832536e-07, | |
| "loss": 0.0, | |
| "num_tokens": 16662262.0, | |
| "reward": 1.879551887512207, | |
| "reward_std": 1.109174221754074, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 0.9889269471168518, | |
| "step": 119 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2035.15625, | |
| "epoch": 0.0025604642975259515, | |
| "grad_norm": 0.26008391541111336, | |
| "kl": 0.0010547637939453125, | |
| "learning_rate": 2.0528000059645995e-07, | |
| "loss": 0.0081, | |
| "num_tokens": 16804032.0, | |
| "reward": 2.415029287338257, | |
| "reward_std": 1.1450726389884949, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.5400293469429016, | |
| "step": 120 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0025818015000053343, | |
| "grad_norm": 0.2675057780920799, | |
| "kl": 0.0011196136474609375, | |
| "learning_rate": 1.986426879955034e-07, | |
| "loss": 0.0, | |
| "num_tokens": 16947232.0, | |
| "reward": 2.0260613560676575, | |
| "reward_std": 1.0874513685703278, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.1354363560676575, | |
| "step": 121 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002603138702484717, | |
| "grad_norm": 0.3037082390905714, | |
| "kl": 0.0014190673828125, | |
| "learning_rate": 1.9219564157731844e-07, | |
| "loss": 0.0001, | |
| "num_tokens": 17087328.0, | |
| "reward": 2.1842929124832153, | |
| "reward_std": 1.4917433261871338, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.78125, | |
| "rewards/tag_count_reward": 1.4030429124832153, | |
| "step": 122 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0026244759049641, | |
| "grad_norm": 0.2489684161448546, | |
| "kl": 0.0011692047119140625, | |
| "learning_rate": 1.8594235253127372e-07, | |
| "loss": 0.0, | |
| "num_tokens": 17234688.0, | |
| "reward": 2.1522029638290405, | |
| "reward_std": 1.3411579728126526, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.796875, | |
| "rewards/tag_count_reward": 1.3553279042243958, | |
| "step": 123 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0026458131074434833, | |
| "grad_norm": 0.26782284739987977, | |
| "kl": 0.0010528564453125, | |
| "learning_rate": 1.7988620712370195e-07, | |
| "loss": 0.0, | |
| "num_tokens": 17382240.0, | |
| "reward": 2.098485767841339, | |
| "reward_std": 0.8514607399702072, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.1609857678413391, | |
| "step": 124 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1975.828125, | |
| "epoch": 0.002667150309922866, | |
| "grad_norm": 0.2219669853339219, | |
| "kl": 0.0010166168212890625, | |
| "learning_rate": 1.7403048486417868e-07, | |
| "loss": 0.02, | |
| "num_tokens": 17517269.0, | |
| "reward": 3.266877293586731, | |
| "reward_std": 1.6198533773422241, | |
| "rewards/accuracy_reward": 0.34375, | |
| "rewards/format_reward": 0.75, | |
| "rewards/tag_count_reward": 1.8293771743774414, | |
| "step": 125 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002688487512402249, | |
| "grad_norm": 0.2702594030390932, | |
| "kl": 0.001033782958984375, | |
| "learning_rate": 1.6837835672960831e-07, | |
| "loss": 0.0, | |
| "num_tokens": 17656277.0, | |
| "reward": 2.1437134742736816, | |
| "reward_std": 1.2825772166252136, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.8125, | |
| "rewards/tag_count_reward": 1.3312135338783264, | |
| "step": 126 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1957.78125, | |
| "epoch": 0.0027098247148816318, | |
| "grad_norm": 0.2608403813261371, | |
| "kl": 0.0010585784912109375, | |
| "learning_rate": 1.6293288344708566e-07, | |
| "loss": 0.0298, | |
| "num_tokens": 17793191.0, | |
| "reward": 3.3102771043777466, | |
| "reward_std": 1.5307283401489258, | |
| "rewards/accuracy_reward": 0.296875, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.810277283191681, | |
| "step": 127 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0027311619173610146, | |
| "grad_norm": 0.24075707253745235, | |
| "kl": 0.0010528564453125, | |
| "learning_rate": 1.5769701383645698e-07, | |
| "loss": 0.0, | |
| "num_tokens": 17932071.0, | |
| "reward": 2.7975616455078125, | |
| "reward_std": 1.3928316235542297, | |
| "rewards/accuracy_reward": 0.03125, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.813186526298523, | |
| "step": 128 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002752499119840398, | |
| "grad_norm": 0.25056499906388097, | |
| "kl": 0.001018524169921875, | |
| "learning_rate": 1.5267358321348285e-07, | |
| "loss": 0.0, | |
| "num_tokens": 18075975.0, | |
| "reward": 1.7318490147590637, | |
| "reward_std": 1.142630249261856, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 0.8568490147590637, | |
| "step": 129 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2032.828125, | |
| "epoch": 0.0027738363223197807, | |
| "grad_norm": 0.2477651512416495, | |
| "kl": 0.0010166168212890625, | |
| "learning_rate": 1.4786531185446452e-07, | |
| "loss": 0.0021, | |
| "num_tokens": 18216540.0, | |
| "reward": 2.5726191997528076, | |
| "reward_std": 1.1982768774032593, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.84375, | |
| "rewards/tag_count_reward": 1.7288691997528076, | |
| "step": 130 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0027951735247991635, | |
| "grad_norm": 0.2625928973873186, | |
| "kl": 0.00113677978515625, | |
| "learning_rate": 1.432748035231658e-07, | |
| "loss": 0.0, | |
| "num_tokens": 18358684.0, | |
| "reward": 2.0005903840065002, | |
| "reward_std": 1.2123122811317444, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.765625, | |
| "rewards/tag_count_reward": 1.2349653840065002, | |
| "step": 131 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0028165107272785463, | |
| "grad_norm": 0.3067420061538383, | |
| "kl": 0.001132965087890625, | |
| "learning_rate": 1.3890454406082956e-07, | |
| "loss": 0.0, | |
| "num_tokens": 18499964.0, | |
| "reward": 1.8372145891189575, | |
| "reward_std": 0.8202246427536011, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.953125, | |
| "rewards/tag_count_reward": 0.8840895891189575, | |
| "step": 132 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0028378479297579296, | |
| "grad_norm": 0.24973842820998515, | |
| "kl": 0.001068115234375, | |
| "learning_rate": 1.3475690004005097e-07, | |
| "loss": 0.0, | |
| "num_tokens": 18641692.0, | |
| "reward": 2.486280083656311, | |
| "reward_std": 1.218274474143982, | |
| "rewards/accuracy_reward": 0.046875, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.5175301134586334, | |
| "step": 133 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1920.3125, | |
| "epoch": 0.0028591851322373124, | |
| "grad_norm": 0.2624705975403559, | |
| "kl": 0.0010528564453125, | |
| "learning_rate": 1.308341174832359e-07, | |
| "loss": -0.0046, | |
| "num_tokens": 18779248.0, | |
| "reward": 2.2273890376091003, | |
| "reward_std": 1.3820355534553528, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 1.3992640972137451, | |
| "step": 134 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1740.5625, | |
| "epoch": 0.0028805223347166953, | |
| "grad_norm": 0.2666988977590925, | |
| "kl": 0.001003265380859375, | |
| "learning_rate": 1.2713832064634125e-07, | |
| "loss": -0.0169, | |
| "num_tokens": 18900148.0, | |
| "reward": 2.8538860082626343, | |
| "reward_std": 1.2738634943962097, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.9476360082626343, | |
| "step": 135 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2046.09375, | |
| "epoch": 0.002901859537196078, | |
| "grad_norm": 0.23990986289607155, | |
| "kl": 0.0009765625, | |
| "learning_rate": 1.2367151086855187e-07, | |
| "loss": 0.0007, | |
| "num_tokens": 19040858.0, | |
| "reward": 3.28359591960907, | |
| "reward_std": 2.004893660545349, | |
| "rewards/accuracy_reward": 0.265625, | |
| "rewards/format_reward": 0.875, | |
| "rewards/tag_count_reward": 1.8773459196090698, | |
| "step": 136 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002923196739675461, | |
| "grad_norm": 0.2647155262558471, | |
| "kl": 0.00112152099609375, | |
| "learning_rate": 1.2043556548852063e-07, | |
| "loss": 0.0, | |
| "num_tokens": 19182522.0, | |
| "reward": 2.4655028581619263, | |
| "reward_std": 1.1369588375091553, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.90625, | |
| "rewards/tag_count_reward": 1.5592527985572815, | |
| "step": 137 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.002944533942154844, | |
| "grad_norm": 0.24982213800072361, | |
| "kl": 0.00095367431640625, | |
| "learning_rate": 1.1743223682775649e-07, | |
| "loss": 0.0, | |
| "num_tokens": 19324026.0, | |
| "reward": 2.1995232105255127, | |
| "reward_std": 1.259451150894165, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.3401482105255127, | |
| "step": 138 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1807.625, | |
| "epoch": 0.002965871144634227, | |
| "grad_norm": 0.3059644858635862, | |
| "kl": 0.001178741455078125, | |
| "learning_rate": 1.1466315124171128e-07, | |
| "loss": 0.0359, | |
| "num_tokens": 19451778.0, | |
| "reward": 3.3477195501327515, | |
| "reward_std": 1.6818965673446655, | |
| "rewards/accuracy_reward": 0.359375, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.691469669342041, | |
| "step": 139 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.00298720834711361, | |
| "grad_norm": 0.2699481507121129, | |
| "kl": 0.001255035400390625, | |
| "learning_rate": 1.1212980823907929e-07, | |
| "loss": 0.0001, | |
| "num_tokens": 19595394.0, | |
| "reward": 2.130341053009033, | |
| "reward_std": 1.0669668912887573, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.921875, | |
| "rewards/tag_count_reward": 1.2084660530090332, | |
| "step": 140 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2021.171875, | |
| "epoch": 0.0030085455495929927, | |
| "grad_norm": 0.2856619819013544, | |
| "kl": 0.00133514404296875, | |
| "learning_rate": 1.0983357966978745e-07, | |
| "loss": 0.001, | |
| "num_tokens": 19739533.0, | |
| "reward": 2.441653609275818, | |
| "reward_std": 1.2313106060028076, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.551028549671173, | |
| "step": 141 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1618.296875, | |
| "epoch": 0.003029882752072376, | |
| "grad_norm": 0.2931928737727467, | |
| "kl": 0.001163482666015625, | |
| "learning_rate": 1.0777570898211405e-07, | |
| "loss": -0.0036, | |
| "num_tokens": 19853280.0, | |
| "reward": 2.6928197145462036, | |
| "reward_std": 1.2333460450172424, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.8021947741508484, | |
| "step": 142 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.003051219954551759, | |
| "grad_norm": 0.2674044406375496, | |
| "kl": 0.001102447509765625, | |
| "learning_rate": 1.0595731054933934e-07, | |
| "loss": 0.0, | |
| "num_tokens": 19992992.0, | |
| "reward": 2.2873085737228394, | |
| "reward_std": 1.0768440067768097, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.3185588121414185, | |
| "step": 143 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1699.796875, | |
| "epoch": 0.0030725571570311416, | |
| "grad_norm": 0.27582111849830443, | |
| "kl": 0.00107574462890625, | |
| "learning_rate": 1.0437936906629334e-07, | |
| "loss": 0.0026, | |
| "num_tokens": 20112723.0, | |
| "reward": 2.509324848651886, | |
| "reward_std": 0.9660422205924988, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 1.649949848651886, | |
| "step": 144 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2044.75, | |
| "epoch": 0.0030938943595105245, | |
| "grad_norm": 0.2318145530270087, | |
| "kl": 0.0009479522705078125, | |
| "learning_rate": 1.0304273901612565e-07, | |
| "loss": 0.0011, | |
| "num_tokens": 20254051.0, | |
| "reward": 2.513481378555298, | |
| "reward_std": 1.2940130233764648, | |
| "rewards/accuracy_reward": 0.03125, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.5603563785552979, | |
| "step": 145 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1784.703125, | |
| "epoch": 0.0031152315619899073, | |
| "grad_norm": 0.23592103326348526, | |
| "kl": 0.001026153564453125, | |
| "learning_rate": 1.0194814420758804e-07, | |
| "loss": -0.027, | |
| "num_tokens": 20378064.0, | |
| "reward": 3.284206509590149, | |
| "reward_std": 1.071185827255249, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 2.346706509590149, | |
| "step": 146 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0031365687644692906, | |
| "grad_norm": 0.2481161393733156, | |
| "kl": 0.00101470947265625, | |
| "learning_rate": 1.0109617738307911e-07, | |
| "loss": 0.0, | |
| "num_tokens": 20520752.0, | |
| "reward": 2.223458707332611, | |
| "reward_std": 1.2180058360099792, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.890625, | |
| "rewards/tag_count_reward": 1.3015836477279663, | |
| "step": 147 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.0031579059669486734, | |
| "grad_norm": 0.22727140458758907, | |
| "kl": 0.000820159912109375, | |
| "learning_rate": 1.0048729989766394e-07, | |
| "loss": 0.0, | |
| "num_tokens": 20661680.0, | |
| "reward": 1.856073021888733, | |
| "reward_std": 1.3392052054405212, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.828125, | |
| "rewards/tag_count_reward": 1.0279478430747986, | |
| "step": 148 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 1788.28125, | |
| "epoch": 0.0031792431694280562, | |
| "grad_norm": 0.26612520189625616, | |
| "kl": 0.001068115234375, | |
| "learning_rate": 1.0012184146924223e-07, | |
| "loss": 0.0012, | |
| "num_tokens": 20784290.0, | |
| "reward": 3.120908260345459, | |
| "reward_std": 1.2517384886741638, | |
| "rewards/accuracy_reward": 0.0, | |
| "rewards/format_reward": 0.859375, | |
| "rewards/tag_count_reward": 2.26153302192688, | |
| "step": 149 | |
| }, | |
| { | |
| "clip_ratio": 0.0, | |
| "completion_length": 2048.0, | |
| "epoch": 0.003200580371907439, | |
| "grad_norm": 0.2964631635956297, | |
| "kl": 0.0013275146484375, | |
| "learning_rate": 1e-07, | |
| "loss": 0.0001, | |
| "num_tokens": 20929506.0, | |
| "reward": 2.3926680088043213, | |
| "reward_std": 1.114904761314392, | |
| "rewards/accuracy_reward": 0.015625, | |
| "rewards/format_reward": 0.9375, | |
| "rewards/tag_count_reward": 1.423918068408966, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.003200580371907439, | |
| "step": 150, | |
| "total_flos": 0.0, | |
| "train_loss": 0.006486209444701672, | |
| "train_runtime": 8644.5926, | |
| "train_samples_per_second": 1.111, | |
| "train_steps_per_second": 0.017 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 150, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 10, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
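
The record above appears to be a Hugging Face Transformers `trainer_state.json` written during a TRL GRPO-style run (the `rewards/accuracy_reward`, `rewards/format_reward`, and `rewards/tag_count_reward` keys match that setup). As a minimal sketch, assuming the file is saved as `trainer_state.json` in the checkpoint directory, the per-step entries in `log_history` can be loaded and summarized as below; the file path, the `valid()` helper, and the use of `statistics.mean` are illustrative choices of mine, not anything mandated by the log itself.

```python
import json
import math
from statistics import mean

def valid(values):
    # Skip entries logged as null/NaN (e.g. step 106's accuracy_reward).
    return [v for v in values if isinstance(v, (int, float)) and not math.isnan(v)]

# Path is an assumption; point it at the checkpoint's trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records live in log_history; the final entry is the run summary
# (train_loss, train_runtime, ...) and carries no reward keys, so filter it out.
steps = [row for row in state["log_history"] if "reward" in row]

print("logged steps :", len(steps), "of", state["max_steps"])
print("mean reward  :", round(mean(valid(r["reward"] for r in steps)), 3))
for key in ("rewards/accuracy_reward",
            "rewards/format_reward",
            "rewards/tag_count_reward"):
    print(f"mean {key}: {mean(valid(r[key] for r in steps)):.3f}")
print("final lr     :", steps[-1]["learning_rate"])
```

Filtering on the presence of a `reward` key is a convenience that works for this file because only the trailing summary entry lacks it; a stricter script could instead check for the `step` and `train_loss` fields explicitly.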