Training in progress, step 350, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:49e300df4dc067ec51336ad5347f570a4571cca24b7e139f8609330de84be8d6
 size 191968

last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8273207357219ef1d0d87b1ea98b7b9a60824b7e458cce54da2f2e0398328870
 size 253144

last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5f4bce664627ad47b11416e493e46e02899c659dc2a669218324c2708e483373
 size 14244

last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f9096f15f02bac6b0fc27aa7aa4986f85d87d53fca310a75657e0015357af5c5
 size 1064
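
The four binary files above are tracked with Git LFS, so this commit only rewrites their pointer files; each pointer records the SHA-256 digest (oid) and byte size of the tracked blob. As a minimal sketch (assuming the resolved checkpoint files have been downloaded locally, e.g. with huggingface_hub), the new oid of adapter_model.safetensors could be re-checked like this:

```python
import hashlib
from pathlib import Path

def lfs_oid(path: Path, chunk_size: int = 1 << 20) -> str:
    """Compute the SHA-256 digest that Git LFS stores as the pointer's oid."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Hypothetical local path to the downloaded checkpoint file.
path = Path("last-checkpoint/adapter_model.safetensors")
expected = "49e300df4dc067ec51336ad5347f570a4571cca24b7e139f8609330de84be8d6"
print(f"size={path.stat().st_size} oid_match={lfs_oid(path) == expected}")
```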

last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 10.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 10.325580596923828,
+  "best_model_checkpoint": "miner_id_24/checkpoint-350",
+  "epoch": 0.0513818035013029,
   "eval_steps": 50,
-  "global_step":
+  "global_step": 350,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2163,6 +2163,364 @@
       "eval_samples_per_second": 282.6,
       "eval_steps_per_second": 70.669,
       "step": 300
+    },
+    {
+      "epoch": 0.04418835101112049,
+      "grad_norm": 0.15617330372333527,
+      "learning_rate": 1.5074431737553157e-05,
+      "loss": 10.3324,
+      "step": 301
+    },
+    {
+      "epoch": 0.04433515616398136,
+      "grad_norm": 0.1019449383020401,
+      "learning_rate": 1.4787347420660541e-05,
+      "loss": 10.3353,
+      "step": 302
+    },
+    {
+      "epoch": 0.04448196131684222,
+      "grad_norm": 0.07090485841035843,
+      "learning_rate": 1.4502548002332088e-05,
+      "loss": 10.3358,
+      "step": 303
+    },
+    {
+      "epoch": 0.04462876646970309,
+      "grad_norm": 0.08353332430124283,
+      "learning_rate": 1.422005196279395e-05,
+      "loss": 10.3286,
+      "step": 304
+    },
+    {
+      "epoch": 0.04477557162256395,
+      "grad_norm": 0.06909141689538956,
+      "learning_rate": 1.3939877632809278e-05,
+      "loss": 10.3307,
+      "step": 305
+    },
+    {
+      "epoch": 0.044922376775424815,
+      "grad_norm": 0.07847987115383148,
+      "learning_rate": 1.3662043192488849e-05,
+      "loss": 10.3325,
+      "step": 306
+    },
+    {
+      "epoch": 0.045069181928285684,
+      "grad_norm": 0.08882429450750351,
+      "learning_rate": 1.338656667011134e-05,
+      "loss": 10.3254,
+      "step": 307
+    },
+    {
+      "epoch": 0.04521598708114655,
+      "grad_norm": 0.094759002327919,
+      "learning_rate": 1.3113465940953495e-05,
+      "loss": 10.3323,
+      "step": 308
+    },
+    {
+      "epoch": 0.04536279223400742,
+      "grad_norm": 0.0710742175579071,
+      "learning_rate": 1.2842758726130283e-05,
+      "loss": 10.328,
+      "step": 309
+    },
+    {
+      "epoch": 0.04550959738686828,
+      "grad_norm": 0.07474274188280106,
+      "learning_rate": 1.257446259144494e-05,
+      "loss": 10.3261,
+      "step": 310
+    },
+    {
+      "epoch": 0.04565640253972914,
+      "grad_norm": 0.092684805393219,
+      "learning_rate": 1.2308594946249163e-05,
+      "loss": 10.3258,
+      "step": 311
+    },
+    {
+      "epoch": 0.04580320769259001,
+      "grad_norm": 0.07652658969163895,
+      "learning_rate": 1.204517304231343e-05,
+      "loss": 10.3218,
+      "step": 312
+    },
+    {
+      "epoch": 0.045950012845450874,
+      "grad_norm": 0.0737638920545578,
+      "learning_rate": 1.178421397270758e-05,
+      "loss": 10.334,
+      "step": 313
+    },
+    {
+      "epoch": 0.046096817998311744,
+      "grad_norm": 0.05947414040565491,
+      "learning_rate": 1.1525734670691701e-05,
+      "loss": 10.3312,
+      "step": 314
+    },
+    {
+      "epoch": 0.04624362315117261,
+      "grad_norm": 0.08125612139701843,
+      "learning_rate": 1.1269751908617277e-05,
+      "loss": 10.3262,
+      "step": 315
+    },
+    {
+      "epoch": 0.04639042830403347,
+      "grad_norm": 0.0674387738108635,
+      "learning_rate": 1.1016282296838887e-05,
+      "loss": 10.3267,
+      "step": 316
+    },
+    {
+      "epoch": 0.04653723345689434,
+      "grad_norm": 0.06924287229776382,
+      "learning_rate": 1.0765342282636416e-05,
+      "loss": 10.3254,
+      "step": 317
+    },
+    {
+      "epoch": 0.0466840386097552,
+      "grad_norm": 0.06078397482633591,
+      "learning_rate": 1.0516948149147754e-05,
+      "loss": 10.3342,
+      "step": 318
+    },
+    {
+      "epoch": 0.046830843762616065,
+      "grad_norm": 0.06581950187683105,
+      "learning_rate": 1.0271116014312293e-05,
+      "loss": 10.3266,
+      "step": 319
+    },
+    {
+      "epoch": 0.046977648915476934,
+      "grad_norm": 0.07352259755134583,
+      "learning_rate": 1.0027861829824952e-05,
+      "loss": 10.3265,
+      "step": 320
+    },
+    {
+      "epoch": 0.0471244540683378,
+      "grad_norm": 0.06884633004665375,
+      "learning_rate": 9.787201380101157e-06,
+      "loss": 10.3221,
+      "step": 321
+    },
+    {
+      "epoch": 0.047271259221198667,
+      "grad_norm": 0.07347755134105682,
+      "learning_rate": 9.549150281252633e-06,
+      "loss": 10.3293,
+      "step": 322
+    },
+    {
+      "epoch": 0.04741806437405953,
+      "grad_norm": 0.07484126836061478,
+      "learning_rate": 9.313723980074018e-06,
+      "loss": 10.3207,
+      "step": 323
+    },
+    {
+      "epoch": 0.04756486952692039,
+      "grad_norm": 0.08332342654466629,
+      "learning_rate": 9.080937753040646e-06,
+      "loss": 10.324,
+      "step": 324
+    },
+    {
+      "epoch": 0.04771167467978126,
+      "grad_norm": 0.07034022361040115,
+      "learning_rate": 8.850806705317183e-06,
+      "loss": 10.3264,
+      "step": 325
+    },
+    {
+      "epoch": 0.047858479832642124,
+      "grad_norm": 0.07176532596349716,
+      "learning_rate": 8.623345769777514e-06,
+      "loss": 10.325,
+      "step": 326
+    },
+    {
+      "epoch": 0.048005284985502994,
+      "grad_norm": 0.1011543869972229,
+      "learning_rate": 8.398569706035792e-06,
+      "loss": 10.308,
+      "step": 327
+    },
+    {
+      "epoch": 0.04815209013836386,
+      "grad_norm": 0.07753263413906097,
+      "learning_rate": 8.176493099488663e-06,
+      "loss": 10.3269,
+      "step": 328
+    },
+    {
+      "epoch": 0.04829889529122472,
+      "grad_norm": 0.06734514981508255,
+      "learning_rate": 7.957130360368898e-06,
+      "loss": 10.3221,
+      "step": 329
+    },
+    {
+      "epoch": 0.04844570044408559,
+      "grad_norm": 0.08725865185260773,
+      "learning_rate": 7.740495722810271e-06,
+      "loss": 10.3183,
+      "step": 330
+    },
+    {
+      "epoch": 0.04859250559694645,
+      "grad_norm": 0.07323329895734787,
+      "learning_rate": 7.526603243923957e-06,
+      "loss": 10.3186,
+      "step": 331
+    },
+    {
+      "epoch": 0.04873931074980732,
+      "grad_norm": 0.0848127231001854,
+      "learning_rate": 7.315466802886401e-06,
+      "loss": 10.3222,
+      "step": 332
+    },
+    {
+      "epoch": 0.048886115902668184,
+      "grad_norm": 0.06692821532487869,
+      "learning_rate": 7.107100100038671e-06,
+      "loss": 10.3143,
+      "step": 333
+    },
+    {
+      "epoch": 0.04903292105552905,
+      "grad_norm": 0.08171255886554718,
+      "learning_rate": 6.901516655997536e-06,
+      "loss": 10.3238,
+      "step": 334
+    },
+    {
+      "epoch": 0.049179726208389916,
+      "grad_norm": 0.08207869529724121,
+      "learning_rate": 6.698729810778065e-06,
+      "loss": 10.3232,
+      "step": 335
+    },
+    {
+      "epoch": 0.04932653136125078,
+      "grad_norm": 0.09229540079832077,
+      "learning_rate": 6.498752722928042e-06,
+      "loss": 10.3215,
+      "step": 336
+    },
+    {
+      "epoch": 0.04947333651411165,
+      "grad_norm": 0.0909721627831459,
+      "learning_rate": 6.301598368674105e-06,
+      "loss": 10.3242,
+      "step": 337
+    },
+    {
+      "epoch": 0.04962014166697251,
+      "grad_norm": 0.07625994086265564,
+      "learning_rate": 6.107279541079769e-06,
+      "loss": 10.3268,
+      "step": 338
+    },
+    {
+      "epoch": 0.049766946819833374,
+      "grad_norm": 0.08614172041416168,
+      "learning_rate": 5.915808849215304e-06,
+      "loss": 10.3201,
+      "step": 339
+    },
+    {
+      "epoch": 0.049913751972694244,
+      "grad_norm": 0.08884906768798828,
+      "learning_rate": 5.727198717339511e-06,
+      "loss": 10.3232,
+      "step": 340
+    },
+    {
+      "epoch": 0.05006055712555511,
+      "grad_norm": 0.09030349552631378,
+      "learning_rate": 5.54146138409355e-06,
+      "loss": 10.3216,
+      "step": 341
+    },
+    {
+      "epoch": 0.05020736227841597,
+      "grad_norm": 0.11042314767837524,
+      "learning_rate": 5.358608901706802e-06,
+      "loss": 10.3223,
+      "step": 342
+    },
+    {
+      "epoch": 0.05035416743127684,
+      "grad_norm": 0.0956040620803833,
+      "learning_rate": 5.178653135214812e-06,
+      "loss": 10.3204,
+      "step": 343
+    },
+    {
+      "epoch": 0.0505009725841377,
+      "grad_norm": 0.10560595989227295,
+      "learning_rate": 5.001605761689398e-06,
+      "loss": 10.333,
+      "step": 344
+    },
+    {
+      "epoch": 0.05064777773699857,
+      "grad_norm": 0.10146863758563995,
+      "learning_rate": 4.827478269480895e-06,
+      "loss": 10.3263,
+      "step": 345
+    },
+    {
+      "epoch": 0.050794582889859434,
+      "grad_norm": 0.10761269927024841,
+      "learning_rate": 4.65628195747273e-06,
+      "loss": 10.3272,
+      "step": 346
+    },
+    {
+      "epoch": 0.0509413880427203,
+      "grad_norm": 0.11111512780189514,
+      "learning_rate": 4.488027934348271e-06,
+      "loss": 10.3298,
+      "step": 347
+    },
+    {
+      "epoch": 0.051088193195581166,
+      "grad_norm": 0.16782677173614502,
+      "learning_rate": 4.322727117869951e-06,
+      "loss": 10.3297,
+      "step": 348
+    },
+    {
+      "epoch": 0.05123499834844203,
+      "grad_norm": 0.13419117033481598,
+      "learning_rate": 4.16039023417088e-06,
+      "loss": 10.3217,
+      "step": 349
+    },
+    {
+      "epoch": 0.0513818035013029,
+      "grad_norm": 0.30833181738853455,
+      "learning_rate": 4.001027817058789e-06,
+      "loss": 10.3408,
+      "step": 350
+    },
+    {
+      "epoch": 0.0513818035013029,
+      "eval_loss": 10.325580596923828,
+      "eval_runtime": 40.8534,
+      "eval_samples_per_second": 280.834,
+      "eval_steps_per_second": 70.227,
+      "step": 350
     }
   ],
   "logging_steps": 1,
@@ -2191,7 +2549,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 39237944868864.0,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
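
The trainer_state.json diff above appends the per-step log entries for steps 301-350 plus the evaluation at step 350, and updates the summary fields (best_metric, best_model_checkpoint, epoch, global_step, total_flos). A small sketch (assuming a local copy of the last-checkpoint directory from this commit) that summarizes the state written here:

```python
import json
from pathlib import Path

# Hypothetical local copy of the checkpoint directory from this commit.
state = json.loads(Path("last-checkpoint/trainer_state.json").read_text())

print("global_step:", state["global_step"])            # 350
print("best_metric:", state["best_metric"])            # 10.325580596923828
print("best_checkpoint:", state["best_model_checkpoint"])

# The last log_history entry is the step-350 evaluation record.
last = state["log_history"][-1]
print("eval_loss:", last.get("eval_loss"), "at step", last.get("step"))
```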