Training in progress, step 1500, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:71868c70f9ff065ed3d46aa5c75810e7cd3261588f6b4897569bc4ada4740a06
 size 3443585096
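The weights file is tracked with Git LFS, so the repository stores only the three-line pointer above; the ~3.4 GB of adapter weights live in LFS storage under the new oid. A minimal sketch (plain Python, standard library only; the local path is a placeholder) for checking a downloaded file against the pointer's oid and size:

import hashlib
import os

def verify_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> bool:
    """True if the file on disk matches the LFS pointer's size and SHA-256."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# oid and size copied from the pointer in this commit; the path is a placeholder.
print(verify_lfs_pointer(
    "last-checkpoint/adapter_model.safetensors",
    "71868c70f9ff065ed3d46aa5c75810e7cd3261588f6b4897569bc4ada4740a06",
    3443585096,
))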
last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
   "best_global_step": null,
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 2.
+  "epoch": 2.419693301049233,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 1500,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
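A note on the two updated fields: "epoch" is the fractional progress implied by "global_step". From the log entries below, consecutive optimizer steps advance the epoch counter by about 0.0016142, i.e. roughly 619.5 steps per epoch; that figure is inferred, not stated anywhere in the file, so treat the following as a sanity check only (the small drift against "epoch": 2.4197 comes from the counter resetting at epoch boundaries):

# Epoch increment between steps 1426 and 1427, read off the log_history entries below.
delta = 2.301856335754641 - 2.3002421307506054  # ~0.0016142 epoch per optimizer step
steps_per_epoch = 1 / delta                     # ~619.5 (fractional is normal with gradient accumulation)
print(round(steps_per_epoch, 1), round(1500 * delta, 3))  # ~619.5, ~2.421 vs "epoch": 2.4197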
@@ -9983,6 +9983,531 @@
       "learning_rate": 0.0002,
       "loss": 1.4128,
       "step": 1425
+    },
+    {
+      "epoch": 2.3002421307506054,
+      "grad_norm": 0.5141810178756714,
+      "learning_rate": 0.0002,
+      "loss": 1.3929,
+      "step": 1426
+    },
+    {
+      "epoch": 2.301856335754641,
+      "grad_norm": 0.4419632852077484,
+      "learning_rate": 0.0002,
+      "loss": 1.3551,
+      "step": 1427
+    },
+    {
+      "epoch": 2.3034705407586764,
+      "grad_norm": 0.4233209490776062,
+      "learning_rate": 0.0002,
+      "loss": 1.2845,
+      "step": 1428
+    },
+    {
+      "epoch": 2.305084745762712,
+      "grad_norm": 0.41972845792770386,
+      "learning_rate": 0.0002,
+      "loss": 1.3905,
+      "step": 1429
+    },
+    {
+      "epoch": 2.3066989507667475,
+      "grad_norm": 0.40918785333633423,
+      "learning_rate": 0.0002,
+      "loss": 1.4356,
+      "step": 1430
+    },
+    {
+      "epoch": 2.308313155770783,
+      "grad_norm": 0.4390774071216583,
+      "learning_rate": 0.0002,
+      "loss": 1.2642,
+      "step": 1431
+    },
+    {
+      "epoch": 2.3099273607748185,
+      "grad_norm": 0.42124879360198975,
+      "learning_rate": 0.0002,
+      "loss": 1.3974,
+      "step": 1432
+    },
+    {
+      "epoch": 2.311541565778854,
+      "grad_norm": 0.418536514043808,
+      "learning_rate": 0.0002,
+      "loss": 1.3645,
+      "step": 1433
+    },
+    {
+      "epoch": 2.3131557707828896,
+      "grad_norm": 9.660236358642578,
+      "learning_rate": 0.0002,
+      "loss": 1.3999,
+      "step": 1434
+    },
+    {
+      "epoch": 2.314769975786925,
+      "grad_norm": 0.45394179224967957,
+      "learning_rate": 0.0002,
+      "loss": 1.4067,
+      "step": 1435
+    },
+    {
+      "epoch": 2.3163841807909606,
+      "grad_norm": 0.47176432609558105,
+      "learning_rate": 0.0002,
+      "loss": 1.3179,
+      "step": 1436
+    },
+    {
+      "epoch": 2.317998385794996,
+      "grad_norm": 0.4437849819660187,
+      "learning_rate": 0.0002,
+      "loss": 1.2497,
+      "step": 1437
+    },
+    {
+      "epoch": 2.3196125907990313,
+      "grad_norm": 0.43986475467681885,
+      "learning_rate": 0.0002,
+      "loss": 1.4395,
+      "step": 1438
+    },
+    {
+      "epoch": 2.3212267958030672,
+      "grad_norm": 0.6889885067939758,
+      "learning_rate": 0.0002,
+      "loss": 1.359,
+      "step": 1439
+    },
+    {
+      "epoch": 2.3228410008071023,
+      "grad_norm": 0.42875397205352783,
+      "learning_rate": 0.0002,
+      "loss": 1.3374,
+      "step": 1440
+    },
+    {
+      "epoch": 2.324455205811138,
+      "grad_norm": 0.48654845356941223,
+      "learning_rate": 0.0002,
+      "loss": 1.3067,
+      "step": 1441
+    },
+    {
+      "epoch": 2.3260694108151734,
+      "grad_norm": 0.44666141271591187,
+      "learning_rate": 0.0002,
+      "loss": 1.2824,
+      "step": 1442
+    },
+    {
+      "epoch": 2.327683615819209,
+      "grad_norm": 0.4244503676891327,
+      "learning_rate": 0.0002,
+      "loss": 1.365,
+      "step": 1443
+    },
+    {
+      "epoch": 2.3292978208232444,
+      "grad_norm": 0.41780194640159607,
+      "learning_rate": 0.0002,
+      "loss": 1.3565,
+      "step": 1444
+    },
+    {
+      "epoch": 2.33091202582728,
+      "grad_norm": 0.4327908754348755,
+      "learning_rate": 0.0002,
+      "loss": 1.2449,
+      "step": 1445
+    },
+    {
+      "epoch": 2.3325262308313155,
+      "grad_norm": 0.427132785320282,
+      "learning_rate": 0.0002,
+      "loss": 1.3742,
+      "step": 1446
+    },
+    {
+      "epoch": 2.334140435835351,
+      "grad_norm": 0.9838108420372009,
+      "learning_rate": 0.0002,
+      "loss": 1.3473,
+      "step": 1447
+    },
+    {
+      "epoch": 2.3357546408393866,
+      "grad_norm": 0.4349774718284607,
+      "learning_rate": 0.0002,
+      "loss": 1.3301,
+      "step": 1448
+    },
+    {
+      "epoch": 2.337368845843422,
+      "grad_norm": 0.4829374849796295,
+      "learning_rate": 0.0002,
+      "loss": 1.293,
+      "step": 1449
+    },
+    {
+      "epoch": 2.3389830508474576,
+      "grad_norm": 0.4401623606681824,
+      "learning_rate": 0.0002,
+      "loss": 1.2349,
+      "step": 1450
+    },
+    {
+      "epoch": 2.340597255851493,
+      "grad_norm": 0.5126479864120483,
+      "learning_rate": 0.0002,
+      "loss": 1.2948,
+      "step": 1451
+    },
+    {
+      "epoch": 2.3422114608555287,
+      "grad_norm": 0.42908668518066406,
+      "learning_rate": 0.0002,
+      "loss": 1.2782,
+      "step": 1452
+    },
+    {
+      "epoch": 2.343825665859564,
+      "grad_norm": 0.4352446496486664,
+      "learning_rate": 0.0002,
+      "loss": 1.1775,
+      "step": 1453
+    },
+    {
+      "epoch": 2.3454398708635997,
+      "grad_norm": 0.47312191128730774,
+      "learning_rate": 0.0002,
+      "loss": 1.3177,
+      "step": 1454
+    },
+    {
+      "epoch": 2.3470540758676353,
+      "grad_norm": 0.42954206466674805,
+      "learning_rate": 0.0002,
+      "loss": 1.2397,
+      "step": 1455
+    },
+    {
+      "epoch": 2.348668280871671,
+      "grad_norm": 0.46389418840408325,
+      "learning_rate": 0.0002,
+      "loss": 1.3343,
+      "step": 1456
+    },
+    {
+      "epoch": 2.3502824858757063,
+      "grad_norm": 0.43719297647476196,
+      "learning_rate": 0.0002,
+      "loss": 1.2942,
+      "step": 1457
+    },
+    {
+      "epoch": 2.351896690879742,
+      "grad_norm": 0.5461978912353516,
+      "learning_rate": 0.0002,
+      "loss": 1.3442,
+      "step": 1458
+    },
+    {
+      "epoch": 2.3535108958837774,
+      "grad_norm": 0.43839031457901,
+      "learning_rate": 0.0002,
+      "loss": 1.258,
+      "step": 1459
+    },
+    {
+      "epoch": 2.355125100887813,
+      "grad_norm": 0.4903876483440399,
+      "learning_rate": 0.0002,
+      "loss": 1.465,
+      "step": 1460
+    },
+    {
+      "epoch": 2.3567393058918484,
+      "grad_norm": 0.42305469512939453,
+      "learning_rate": 0.0002,
+      "loss": 1.3441,
+      "step": 1461
+    },
+    {
+      "epoch": 2.358353510895884,
+      "grad_norm": 0.4420433044433594,
+      "learning_rate": 0.0002,
+      "loss": 1.3811,
+      "step": 1462
+    },
+    {
+      "epoch": 2.359967715899919,
+      "grad_norm": 0.46115559339523315,
+      "learning_rate": 0.0002,
+      "loss": 1.2991,
+      "step": 1463
+    },
+    {
+      "epoch": 2.361581920903955,
+      "grad_norm": 0.4190042018890381,
+      "learning_rate": 0.0002,
+      "loss": 1.3862,
+      "step": 1464
+    },
+    {
+      "epoch": 2.36319612590799,
+      "grad_norm": 0.41592875123023987,
+      "learning_rate": 0.0002,
+      "loss": 1.2438,
+      "step": 1465
+    },
+    {
+      "epoch": 2.3648103309120256,
+      "grad_norm": 0.4431193768978119,
+      "learning_rate": 0.0002,
+      "loss": 1.3611,
+      "step": 1466
+    },
+    {
+      "epoch": 2.366424535916061,
+      "grad_norm": 0.4248901307582855,
+      "learning_rate": 0.0002,
+      "loss": 1.1827,
+      "step": 1467
+    },
+    {
+      "epoch": 2.3680387409200967,
+      "grad_norm": 0.49995511770248413,
+      "learning_rate": 0.0002,
+      "loss": 1.3848,
+      "step": 1468
+    },
+    {
+      "epoch": 2.3696529459241322,
+      "grad_norm": 0.4702857732772827,
+      "learning_rate": 0.0002,
+      "loss": 1.3926,
+      "step": 1469
+    },
+    {
+      "epoch": 2.3712671509281678,
+      "grad_norm": 0.5258844494819641,
+      "learning_rate": 0.0002,
+      "loss": 1.3391,
+      "step": 1470
+    },
+    {
+      "epoch": 2.3728813559322033,
+      "grad_norm": 0.5130214095115662,
+      "learning_rate": 0.0002,
+      "loss": 1.4088,
+      "step": 1471
+    },
+    {
+      "epoch": 2.374495560936239,
+      "grad_norm": 0.7444900274276733,
+      "learning_rate": 0.0002,
+      "loss": 1.2021,
+      "step": 1472
+    },
+    {
+      "epoch": 2.3761097659402743,
+      "grad_norm": 0.48592880368232727,
+      "learning_rate": 0.0002,
+      "loss": 1.4223,
+      "step": 1473
+    },
+    {
+      "epoch": 2.37772397094431,
+      "grad_norm": 0.6075024008750916,
+      "learning_rate": 0.0002,
+      "loss": 1.2963,
+      "step": 1474
+    },
+    {
+      "epoch": 2.3793381759483454,
+      "grad_norm": 0.434675931930542,
+      "learning_rate": 0.0002,
+      "loss": 1.3272,
+      "step": 1475
+    },
+    {
+      "epoch": 2.380952380952381,
+      "grad_norm": 0.4828976094722748,
+      "learning_rate": 0.0002,
+      "loss": 1.2819,
+      "step": 1476
+    },
+    {
+      "epoch": 2.3825665859564165,
+      "grad_norm": 0.513092041015625,
+      "learning_rate": 0.0002,
+      "loss": 1.3674,
+      "step": 1477
+    },
+    {
+      "epoch": 2.384180790960452,
+      "grad_norm": 0.42832380533218384,
+      "learning_rate": 0.0002,
+      "loss": 1.2564,
+      "step": 1478
+    },
+    {
+      "epoch": 2.3857949959644875,
+      "grad_norm": 0.4438645541667938,
+      "learning_rate": 0.0002,
+      "loss": 1.3712,
+      "step": 1479
+    },
+    {
+      "epoch": 2.387409200968523,
+      "grad_norm": 0.42463281750679016,
+      "learning_rate": 0.0002,
+      "loss": 1.2953,
+      "step": 1480
+    },
+    {
+      "epoch": 2.3890234059725586,
+      "grad_norm": 0.42697665095329285,
+      "learning_rate": 0.0002,
+      "loss": 1.313,
+      "step": 1481
+    },
+    {
+      "epoch": 2.390637610976594,
+      "grad_norm": 0.43315592408180237,
+      "learning_rate": 0.0002,
+      "loss": 1.322,
+      "step": 1482
+    },
+    {
+      "epoch": 2.3922518159806296,
+      "grad_norm": 0.4209153354167938,
+      "learning_rate": 0.0002,
+      "loss": 1.3051,
+      "step": 1483
+    },
+    {
+      "epoch": 2.393866020984665,
+      "grad_norm": 0.43778765201568604,
+      "learning_rate": 0.0002,
+      "loss": 1.4258,
+      "step": 1484
+    },
+    {
+      "epoch": 2.3954802259887007,
+      "grad_norm": 0.41469642519950867,
+      "learning_rate": 0.0002,
+      "loss": 1.398,
+      "step": 1485
+    },
+    {
+      "epoch": 2.3970944309927362,
+      "grad_norm": 0.41460326313972473,
+      "learning_rate": 0.0002,
+      "loss": 1.2997,
+      "step": 1486
+    },
+    {
+      "epoch": 2.3987086359967718,
+      "grad_norm": 0.43409091234207153,
+      "learning_rate": 0.0002,
+      "loss": 1.3761,
+      "step": 1487
+    },
+    {
+      "epoch": 2.4003228410008073,
+      "grad_norm": 0.43002137541770935,
+      "learning_rate": 0.0002,
+      "loss": 1.3879,
+      "step": 1488
+    },
+    {
+      "epoch": 2.401937046004843,
+      "grad_norm": 0.4376080632209778,
+      "learning_rate": 0.0002,
+      "loss": 1.3356,
+      "step": 1489
+    },
+    {
+      "epoch": 2.403551251008878,
+      "grad_norm": 0.4308399260044098,
+      "learning_rate": 0.0002,
+      "loss": 1.3483,
+      "step": 1490
+    },
+    {
+      "epoch": 2.405165456012914,
+      "grad_norm": 0.4664413034915924,
+      "learning_rate": 0.0002,
+      "loss": 1.356,
+      "step": 1491
+    },
+    {
+      "epoch": 2.406779661016949,
+      "grad_norm": 0.5452325940132141,
+      "learning_rate": 0.0002,
+      "loss": 1.5682,
+      "step": 1492
+    },
+    {
+      "epoch": 2.4083938660209845,
+      "grad_norm": 0.4430229365825653,
+      "learning_rate": 0.0002,
+      "loss": 1.3031,
+      "step": 1493
+    },
+    {
+      "epoch": 2.41000807102502,
+      "grad_norm": 0.429807186126709,
+      "learning_rate": 0.0002,
+      "loss": 1.339,
+      "step": 1494
+    },
+    {
+      "epoch": 2.4116222760290555,
+      "grad_norm": 0.42216193675994873,
+      "learning_rate": 0.0002,
+      "loss": 1.3242,
+      "step": 1495
+    },
+    {
+      "epoch": 2.413236481033091,
+      "grad_norm": 0.4356923997402191,
+      "learning_rate": 0.0002,
+      "loss": 1.3524,
+      "step": 1496
+    },
+    {
+      "epoch": 2.4148506860371266,
+      "grad_norm": 0.43242383003234863,
+      "learning_rate": 0.0002,
+      "loss": 1.4138,
+      "step": 1497
+    },
+    {
+      "epoch": 2.416464891041162,
+      "grad_norm": 0.4492044448852539,
+      "learning_rate": 0.0002,
+      "loss": 1.3394,
+      "step": 1498
+    },
+    {
+      "epoch": 2.4180790960451977,
+      "grad_norm": 0.40164169669151306,
+      "learning_rate": 0.0002,
+      "loss": 1.2786,
+      "step": 1499
+    },
+    {
+      "epoch": 2.419693301049233,
+      "grad_norm": 0.4147217869758606,
+      "learning_rate": 0.0002,
+      "loss": 1.2922,
+      "step": 1500
     }
   ],
   "logging_steps": 1,
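All per-step metrics live in "log_history", so the run can be inspected without any training code. A minimal sketch (standard library only, path as in this commit) that extracts the loss curve and flags gradient-norm outliers such as the 9.66 spike logged at step 1434 above:

import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
losses = [(e["step"], e["loss"]) for e in history if "loss" in e]
spikes = [(e["step"], e["grad_norm"]) for e in history if e.get("grad_norm", 0.0) > 5.0]

print("latest:", losses[-1])          # (1500, 1.2922) at this checkpoint
print("grad_norm outliers:", spikes)  # includes (1434, 9.66...) from the entries above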
@@ -9997,12 +10522,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
-  "total_flos": 9.
+  "total_flos": 9.86212666947561e+18,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
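"should_training_stop": true records that the Trainer has reached its stop condition, so step 1500 is the last checkpoint this run will write; "total_flos" (~9.86e18) is the cumulative training compute in floating-point operations. The adapter_model.safetensors name follows peft's adapter convention, so loading this checkpoint would plausibly look like the sketch below; the base model id is a placeholder, since this diff does not name it:

from peft import PeftModel
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("base-model-id")  # hypothetical: base model not named in this diff
model = PeftModel.from_pretrained(base, "last-checkpoint")    # attaches the adapter saved by this checkpoint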