Training in progress, step 350, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:90548e553606ee93637df3e130ac4564ca4705ed924489b17f8f7f603ac347d0
 size 138995824
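Each checkpoint file here is stored through Git LFS, so the diff only touches the pointer: the sha256 object id changes while the 139 MB adapter weights themselves live in LFS storage. A minimal verification sketch in Python (assuming the file has already been downloaded into a local last-checkpoint/ directory; the helper name is illustrative):

import hashlib
import os

def matches_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a local file against the oid/size recorded in its Git LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so large checkpoint files need not fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# oid and size copied from the pointer diff above.
print(matches_lfs_pointer(
    "last-checkpoint/adapter_model.safetensors",
    "90548e553606ee93637df3e130ac4564ca4705ed924489b17f8f7f603ac347d0",
    138995824,
))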
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d094018d0d7215b1bf72e226739e9c8e5d192e7ac2f271db0330cc5691cd1d9d
 size 71078228
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:807df050ec2598cccef4c1f07da0dfd76dd851deaa7ba232ce956056ad7df4b2
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f9096f15f02bac6b0fc27aa7aa4986f85d87d53fca310a75657e0015357af5c5
 size 1064
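optimizer.pt, scheduler.pt, and rng_state.pth carry the optimizer moments, learning-rate-scheduler state, and RNG snapshots that let training resume from step 350 instead of restarting. A quick inspection sketch, assuming PyTorch is installed and the checkpoint directory has been downloaded locally (weights_only=False is needed on recent torch releases because these files are pickled state dicts, not bare tensors):

import torch

# Each file is a plain pickled state dict saved by the Trainer, so torch.load suffices.
optimizer_state = torch.load("last-checkpoint/optimizer.pt", map_location="cpu", weights_only=False)
scheduler_state = torch.load("last-checkpoint/scheduler.pt", map_location="cpu", weights_only=False)
rng_state = torch.load("last-checkpoint/rng_state.pth", map_location="cpu", weights_only=False)

print(optimizer_state.keys())   # typically 'state' and 'param_groups'
print(scheduler_state)          # small dict with the scheduler's last step / last LR
print(rng_state.keys())         # typically python, numpy, cpu and cuda RNG snapshots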
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 1.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 1.392707109451294,
+  "best_model_checkpoint": "miner_id_24/checkpoint-350",
+  "epoch": 0.2367264119039567,
   "eval_steps": 50,
-  "global_step":
+  "global_step": 350,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2163,6 +2163,364 @@
       "eval_samples_per_second": 54.212,
       "eval_steps_per_second": 13.564,
       "step": 300
+    },
+    {
+      "epoch": 0.20358471423740276,
+      "grad_norm": 0.06888294219970703,
+      "learning_rate": 1.5074431737553157e-05,
+      "loss": 1.2703,
+      "step": 301
+    },
+    {
+      "epoch": 0.20426107541427121,
+      "grad_norm": 0.07071013748645782,
+      "learning_rate": 1.4787347420660541e-05,
+      "loss": 1.2927,
+      "step": 302
+    },
+    {
+      "epoch": 0.20493743659113967,
+      "grad_norm": 0.07139529287815094,
+      "learning_rate": 1.4502548002332088e-05,
+      "loss": 1.3657,
+      "step": 303
+    },
+    {
+      "epoch": 0.20561379776800812,
+      "grad_norm": 0.06573140621185303,
+      "learning_rate": 1.422005196279395e-05,
+      "loss": 1.2871,
+      "step": 304
+    },
+    {
+      "epoch": 0.20629015894487657,
+      "grad_norm": 0.06961559504270554,
+      "learning_rate": 1.3939877632809278e-05,
+      "loss": 1.3394,
+      "step": 305
+    },
+    {
+      "epoch": 0.20696652012174502,
+      "grad_norm": 0.07154294103384018,
+      "learning_rate": 1.3662043192488849e-05,
+      "loss": 1.4211,
+      "step": 306
+    },
+    {
+      "epoch": 0.20764288129861347,
+      "grad_norm": 0.0693695992231369,
+      "learning_rate": 1.338656667011134e-05,
+      "loss": 1.3076,
+      "step": 307
+    },
+    {
+      "epoch": 0.20831924247548192,
+      "grad_norm": 0.0711139366030693,
+      "learning_rate": 1.3113465940953495e-05,
+      "loss": 1.3016,
+      "step": 308
+    },
+    {
+      "epoch": 0.20899560365235034,
+      "grad_norm": 0.06786596775054932,
+      "learning_rate": 1.2842758726130283e-05,
+      "loss": 1.2759,
+      "step": 309
+    },
+    {
+      "epoch": 0.2096719648292188,
+      "grad_norm": 0.07203105837106705,
+      "learning_rate": 1.257446259144494e-05,
+      "loss": 1.2681,
+      "step": 310
+    },
+    {
+      "epoch": 0.21034832600608724,
+      "grad_norm": 0.06898628920316696,
+      "learning_rate": 1.2308594946249163e-05,
+      "loss": 1.3097,
+      "step": 311
+    },
+    {
+      "epoch": 0.2110246871829557,
+      "grad_norm": 0.07093969732522964,
+      "learning_rate": 1.204517304231343e-05,
+      "loss": 1.3423,
+      "step": 312
+    },
+    {
+      "epoch": 0.21170104835982415,
+      "grad_norm": 0.07039978355169296,
+      "learning_rate": 1.178421397270758e-05,
+      "loss": 1.3544,
+      "step": 313
+    },
+    {
+      "epoch": 0.2123774095366926,
+      "grad_norm": 0.07245506346225739,
+      "learning_rate": 1.1525734670691701e-05,
+      "loss": 1.3775,
+      "step": 314
+    },
+    {
+      "epoch": 0.21305377071356105,
+      "grad_norm": 0.0731048583984375,
+      "learning_rate": 1.1269751908617277e-05,
+      "loss": 1.4099,
+      "step": 315
+    },
+    {
+      "epoch": 0.2137301318904295,
+      "grad_norm": 0.07116963714361191,
+      "learning_rate": 1.1016282296838887e-05,
+      "loss": 1.3899,
+      "step": 316
+    },
+    {
+      "epoch": 0.21440649306729795,
+      "grad_norm": 0.07567148655653,
+      "learning_rate": 1.0765342282636416e-05,
+      "loss": 1.5033,
+      "step": 317
+    },
+    {
+      "epoch": 0.21508285424416637,
+      "grad_norm": 0.07363573461771011,
+      "learning_rate": 1.0516948149147754e-05,
+      "loss": 1.442,
+      "step": 318
+    },
+    {
+      "epoch": 0.21575921542103482,
+      "grad_norm": 0.07205386459827423,
+      "learning_rate": 1.0271116014312293e-05,
+      "loss": 1.3578,
+      "step": 319
+    },
+    {
+      "epoch": 0.21643557659790327,
+      "grad_norm": 0.07321757078170776,
+      "learning_rate": 1.0027861829824952e-05,
+      "loss": 1.4057,
+      "step": 320
+    },
+    {
+      "epoch": 0.21711193777477172,
+      "grad_norm": 0.07573118805885315,
+      "learning_rate": 9.787201380101157e-06,
+      "loss": 1.3645,
+      "step": 321
+    },
+    {
+      "epoch": 0.21778829895164017,
+      "grad_norm": 0.07296445220708847,
+      "learning_rate": 9.549150281252633e-06,
+      "loss": 1.4686,
+      "step": 322
+    },
+    {
+      "epoch": 0.21846466012850863,
+      "grad_norm": 0.0765371024608612,
+      "learning_rate": 9.313723980074018e-06,
+      "loss": 1.509,
+      "step": 323
+    },
+    {
+      "epoch": 0.21914102130537708,
+      "grad_norm": 0.0745658352971077,
+      "learning_rate": 9.080937753040646e-06,
+      "loss": 1.4014,
+      "step": 324
+    },
+    {
+      "epoch": 0.21981738248224553,
+      "grad_norm": 0.0729154720902443,
+      "learning_rate": 8.850806705317183e-06,
+      "loss": 1.3839,
+      "step": 325
+    },
+    {
+      "epoch": 0.22049374365911398,
+      "grad_norm": 0.07217688858509064,
+      "learning_rate": 8.623345769777514e-06,
+      "loss": 1.4157,
+      "step": 326
+    },
+    {
+      "epoch": 0.22117010483598243,
+      "grad_norm": 0.07794667035341263,
+      "learning_rate": 8.398569706035792e-06,
+      "loss": 1.4788,
+      "step": 327
+    },
+    {
+      "epoch": 0.22184646601285085,
+      "grad_norm": 0.07797662168741226,
+      "learning_rate": 8.176493099488663e-06,
+      "loss": 1.3679,
+      "step": 328
+    },
+    {
+      "epoch": 0.2225228271897193,
+      "grad_norm": 0.0776330828666687,
+      "learning_rate": 7.957130360368898e-06,
+      "loss": 1.4592,
+      "step": 329
+    },
+    {
+      "epoch": 0.22319918836658775,
+      "grad_norm": 0.07840722054243088,
+      "learning_rate": 7.740495722810271e-06,
+      "loss": 1.4205,
+      "step": 330
+    },
+    {
+      "epoch": 0.2238755495434562,
+      "grad_norm": 0.07938168942928314,
+      "learning_rate": 7.526603243923957e-06,
+      "loss": 1.4727,
+      "step": 331
+    },
+    {
+      "epoch": 0.22455191072032465,
+      "grad_norm": 0.07692472636699677,
+      "learning_rate": 7.315466802886401e-06,
+      "loss": 1.3764,
+      "step": 332
+    },
+    {
+      "epoch": 0.2252282718971931,
+      "grad_norm": 0.07381051033735275,
+      "learning_rate": 7.107100100038671e-06,
+      "loss": 1.3103,
+      "step": 333
+    },
+    {
+      "epoch": 0.22590463307406156,
+      "grad_norm": 0.08517421782016754,
+      "learning_rate": 6.901516655997536e-06,
+      "loss": 1.4538,
+      "step": 334
+    },
+    {
+      "epoch": 0.22658099425093,
+      "grad_norm": 0.07926001399755478,
+      "learning_rate": 6.698729810778065e-06,
+      "loss": 1.4098,
+      "step": 335
+    },
+    {
+      "epoch": 0.22725735542779846,
+      "grad_norm": 0.08531263470649719,
+      "learning_rate": 6.498752722928042e-06,
+      "loss": 1.42,
+      "step": 336
+    },
+    {
+      "epoch": 0.22793371660466688,
+      "grad_norm": 0.08142856508493423,
+      "learning_rate": 6.301598368674105e-06,
+      "loss": 1.4362,
+      "step": 337
+    },
+    {
+      "epoch": 0.22861007778153533,
+      "grad_norm": 0.08574479818344116,
+      "learning_rate": 6.107279541079769e-06,
+      "loss": 1.4158,
+      "step": 338
+    },
+    {
+      "epoch": 0.22928643895840378,
+      "grad_norm": 0.08034282922744751,
+      "learning_rate": 5.915808849215304e-06,
+      "loss": 1.3813,
+      "step": 339
+    },
+    {
+      "epoch": 0.22996280013527223,
+      "grad_norm": 0.08398088812828064,
+      "learning_rate": 5.727198717339511e-06,
+      "loss": 1.3915,
+      "step": 340
+    },
+    {
+      "epoch": 0.23063916131214068,
+      "grad_norm": 0.0855628028512001,
+      "learning_rate": 5.54146138409355e-06,
+      "loss": 1.4603,
+      "step": 341
+    },
+    {
+      "epoch": 0.23131552248900913,
+      "grad_norm": 0.09168089181184769,
+      "learning_rate": 5.358608901706802e-06,
+      "loss": 1.4113,
+      "step": 342
+    },
+    {
+      "epoch": 0.23199188366587759,
+      "grad_norm": 0.09065324068069458,
+      "learning_rate": 5.178653135214812e-06,
+      "loss": 1.3356,
+      "step": 343
+    },
+    {
+      "epoch": 0.23266824484274604,
+      "grad_norm": 0.09464474767446518,
+      "learning_rate": 5.001605761689398e-06,
+      "loss": 1.4075,
+      "step": 344
+    },
+    {
+      "epoch": 0.2333446060196145,
+      "grad_norm": 0.09780465066432953,
+      "learning_rate": 4.827478269480895e-06,
+      "loss": 1.4186,
+      "step": 345
+    },
+    {
+      "epoch": 0.2340209671964829,
+      "grad_norm": 0.09540226310491562,
+      "learning_rate": 4.65628195747273e-06,
+      "loss": 1.4061,
+      "step": 346
+    },
+    {
+      "epoch": 0.23469732837335136,
+      "grad_norm": 0.10365621000528336,
+      "learning_rate": 4.488027934348271e-06,
+      "loss": 1.3495,
+      "step": 347
+    },
+    {
+      "epoch": 0.2353736895502198,
+      "grad_norm": 0.1070394292473793,
+      "learning_rate": 4.322727117869951e-06,
+      "loss": 1.4006,
+      "step": 348
+    },
+    {
+      "epoch": 0.23605005072708826,
+      "grad_norm": 0.12799158692359924,
+      "learning_rate": 4.16039023417088e-06,
+      "loss": 1.322,
+      "step": 349
+    },
+    {
+      "epoch": 0.2367264119039567,
+      "grad_norm": 0.16463126242160797,
+      "learning_rate": 4.001027817058789e-06,
+      "loss": 1.1977,
+      "step": 350
+    },
+    {
+      "epoch": 0.2367264119039567,
+      "eval_loss": 1.392707109451294,
+      "eval_runtime": 45.9044,
+      "eval_samples_per_second": 54.243,
+      "eval_steps_per_second": 13.572,
+      "step": 350
     }
   ],
   "logging_steps": 1,
@@ -2191,7 +2549,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 2.
+  "total_flos": 2.872908867698688e+16,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
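Since trainer_state.json now carries the full log_history through step 350, the updated best_metric and the new loss entries can be read back directly. A small sketch, assuming the file has been downloaded to a local last-checkpoint/ directory:

import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print("best_metric:", state["best_metric"])                # 1.392707109451294 at this commit
print("best checkpoint:", state["best_model_checkpoint"])  # miner_id_24/checkpoint-350
print("global_step:", state["global_step"])                # 350

# Training-loss entries carry "loss"; the periodic evaluations carry "eval_loss".
train_loss = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_loss = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print("last logged training losses:", train_loss[-5:])
print("eval losses every 50 steps:", eval_loss)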