Upload checkpoint 800

Files changed:
- README.md +3 -3
- adapter_model.safetensors +1 -1
- optimizer.pt +1 -1
- scheduler.pt +1 -1
- trainer_state.json +703 -3
README.md
CHANGED
@@ -2,7 +2,7 @@
 base_model: Qwen/Qwen2.5-3B-Instruct
 library_name: peft
 ---
-# Gradience T1 3B (Step
+# Gradience T1 3B (Step 800 Checkpoint)

 > [!NOTE]
 > Training in progress...
@@ -38,10 +38,10 @@ library_name: peft
 </head>
 <body>
 <div style="width: 100%; background-color: #e0e0e0; border-radius: 25px; overflow: hidden; margin: 20px 0;">
-<div style="height: 30px; width:
+<div style="height: 30px; width: 8.13%; background-color: #76c7c0; text-align: center; line-height: 30px; color: white; border-radius: 25px 0 0 25px;">
 <!-- 3.75% -->
 </div>
 </div>
-<p style="font-family: Arial, sans-serif; font-size: 16px;">Progress:
+<p style="font-family: Arial, sans-serif; font-size: 16px;">Progress: 800 out of 9838 steps</p>
 </body>
 </html>
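The new bar width matches the step count: 800 / 9838 ≈ 0.0813, i.e. 8.13%. The "<!-- 3.75% -->" comment is unchanged and is apparently left over from an earlier checkpoint.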
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7d662121c4c4e38fa7417857b15faabf4697bf6f421766c1af2289391ccb3d30
 size 119801528
optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6c230d8b5c1e095fc4008b79adc74d44f024c9250c191a0c398ce799413ca397
 size 61392692
scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4afe690e5607953bb2e0fc05a12430468a9b25343713c9f58a2a219031dce337
 size 1064
trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
 "best_global_step": null,
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
+"epoch": 0.16263468184590363,
 "eval_steps": 500,
-"global_step":
+"global_step": 800,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -4908,6 +4908,706 @@
 "learning_rate": 0.00018588426726329706,
 "loss": 1.26,
 "step": 700
+},
+{
+"epoch": 0.14250863996747307,
+"grad_norm": 0.12191738188266754,
+"learning_rate": 0.00018586392759076581,
+"loss": 0.9333,
+"step": 701
+},
+{
+"epoch": 0.14271193331978044,
+"grad_norm": 0.13285742700099945,
+"learning_rate": 0.00018584358791823454,
+"loss": 1.2199,
+"step": 702
+},
+{
+"epoch": 0.14291522667208784,
+"grad_norm": 0.11525557935237885,
+"learning_rate": 0.00018582324824570326,
+"loss": 1.1212,
+"step": 703
+},
+{
+"epoch": 0.1431185200243952,
+"grad_norm": 0.12379605323076248,
+"learning_rate": 0.000185802908573172,
+"loss": 0.9767,
+"step": 704
+},
+{
+"epoch": 0.14332181337670258,
+"grad_norm": 0.13637319207191467,
+"learning_rate": 0.0001857825689006407,
+"loss": 1.1399,
+"step": 705
+},
+{
+"epoch": 0.14352510672900995,
+"grad_norm": 0.12638236582279205,
+"learning_rate": 0.00018576222922810944,
+"loss": 1.2348,
+"step": 706
+},
+{
+"epoch": 0.14372840008131735,
+"grad_norm": 0.11840532720088959,
+"learning_rate": 0.00018574188955557816,
+"loss": 1.0475,
+"step": 707
+},
+{
+"epoch": 0.14393169343362472,
+"grad_norm": 0.11270745098590851,
+"learning_rate": 0.00018572154988304689,
+"loss": 1.0503,
+"step": 708
+},
+{
+"epoch": 0.1441349867859321,
+"grad_norm": 0.12445101141929626,
+"learning_rate": 0.00018570121021051564,
+"loss": 1.0658,
+"step": 709
+},
+{
+"epoch": 0.1443382801382395,
+"grad_norm": 0.11352977156639099,
+"learning_rate": 0.00018568087053798436,
+"loss": 0.9473,
+"step": 710
+},
+{
+"epoch": 0.14454157349054686,
+"grad_norm": 0.11230108141899109,
+"learning_rate": 0.00018566053086545309,
+"loss": 1.0519,
+"step": 711
+},
+{
+"epoch": 0.14474486684285423,
+"grad_norm": 0.14274398982524872,
+"learning_rate": 0.0001856401911929218,
+"loss": 1.1135,
+"step": 712
+},
+{
+"epoch": 0.14494816019516163,
+"grad_norm": 0.11553295701742172,
+"learning_rate": 0.0001856198515203905,
+"loss": 1.044,
+"step": 713
+},
+{
+"epoch": 0.145151453547469,
+"grad_norm": 0.11737996339797974,
+"learning_rate": 0.00018559951184785926,
+"loss": 1.0154,
+"step": 714
+},
+{
+"epoch": 0.14535474689977637,
+"grad_norm": 0.1481630802154541,
+"learning_rate": 0.00018557917217532798,
+"loss": 1.1544,
+"step": 715
+},
+{
+"epoch": 0.14555804025208377,
+"grad_norm": 0.12081188708543777,
+"learning_rate": 0.0001855588325027967,
+"loss": 1.0034,
+"step": 716
+},
+{
+"epoch": 0.14576133360439114,
+"grad_norm": 0.13458681106567383,
+"learning_rate": 0.00018553849283026543,
+"loss": 1.1627,
+"step": 717
+},
+{
+"epoch": 0.1459646269566985,
+"grad_norm": 0.13506878912448883,
+"learning_rate": 0.00018551815315773418,
+"loss": 1.1927,
+"step": 718
+},
+{
+"epoch": 0.14616792030900588,
+"grad_norm": 0.10834948718547821,
+"learning_rate": 0.0001854978134852029,
+"loss": 1.0943,
+"step": 719
+},
+{
+"epoch": 0.14637121366131328,
+"grad_norm": 0.13779957592487335,
+"learning_rate": 0.00018547747381267163,
+"loss": 1.2356,
+"step": 720
+},
+{
+"epoch": 0.14657450701362065,
+"grad_norm": 0.12655863165855408,
+"learning_rate": 0.00018545713414014033,
+"loss": 1.1085,
+"step": 721
+},
+{
+"epoch": 0.14677780036592802,
+"grad_norm": 0.1144525483250618,
+"learning_rate": 0.00018543679446760908,
+"loss": 1.0517,
+"step": 722
+},
+{
+"epoch": 0.14698109371823542,
+"grad_norm": 0.12001293152570724,
+"learning_rate": 0.0001854164547950778,
+"loss": 1.1439,
+"step": 723
+},
+{
+"epoch": 0.1471843870705428,
+"grad_norm": 0.12786982953548431,
+"learning_rate": 0.00018539611512254653,
+"loss": 1.1846,
+"step": 724
+},
+{
+"epoch": 0.14738768042285016,
+"grad_norm": 0.1154879704117775,
+"learning_rate": 0.00018537577545001526,
+"loss": 0.941,
+"step": 725
+},
+{
+"epoch": 0.14759097377515756,
+"grad_norm": 0.10635704547166824,
+"learning_rate": 0.000185355435777484,
+"loss": 0.915,
+"step": 726
+},
+{
+"epoch": 0.14779426712746493,
+"grad_norm": 0.11456220597028732,
+"learning_rate": 0.00018533509610495273,
+"loss": 1.0387,
+"step": 727
+},
+{
+"epoch": 0.1479975604797723,
+"grad_norm": 0.11217451840639114,
+"learning_rate": 0.00018531475643242146,
+"loss": 1.0938,
+"step": 728
+},
+{
+"epoch": 0.1482008538320797,
+"grad_norm": 0.1105191633105278,
+"learning_rate": 0.00018529441675989015,
+"loss": 1.0398,
+"step": 729
+},
+{
+"epoch": 0.14840414718438708,
+"grad_norm": 0.11848670989274979,
+"learning_rate": 0.0001852740770873589,
+"loss": 1.04,
+"step": 730
+},
+{
+"epoch": 0.14860744053669445,
+"grad_norm": 0.11965551227331161,
+"learning_rate": 0.00018525373741482763,
+"loss": 0.966,
+"step": 731
+},
+{
+"epoch": 0.14881073388900182,
+"grad_norm": 0.12252170592546463,
+"learning_rate": 0.00018523339774229635,
+"loss": 1.1997,
+"step": 732
+},
+{
+"epoch": 0.14901402724130922,
+"grad_norm": 0.11600001901388168,
+"learning_rate": 0.00018521305806976508,
+"loss": 1.2425,
+"step": 733
+},
+{
+"epoch": 0.1492173205936166,
+"grad_norm": 0.11161402612924576,
+"learning_rate": 0.00018519271839723383,
+"loss": 0.9978,
+"step": 734
+},
+{
+"epoch": 0.14942061394592396,
+"grad_norm": 0.12365563958883286,
+"learning_rate": 0.00018517237872470255,
+"loss": 0.9652,
+"step": 735
+},
+{
+"epoch": 0.14962390729823136,
+"grad_norm": 0.11252112686634064,
+"learning_rate": 0.00018515203905217128,
+"loss": 0.948,
+"step": 736
+},
+{
+"epoch": 0.14982720065053873,
+"grad_norm": 0.12211350351572037,
+"learning_rate": 0.00018513169937963998,
+"loss": 1.0636,
+"step": 737
+},
+{
+"epoch": 0.1500304940028461,
+"grad_norm": 0.13200169801712036,
+"learning_rate": 0.00018511135970710873,
+"loss": 1.158,
+"step": 738
+},
+{
+"epoch": 0.1502337873551535,
+"grad_norm": 0.11223406344652176,
+"learning_rate": 0.00018509102003457745,
+"loss": 1.1194,
+"step": 739
+},
+{
+"epoch": 0.15043708070746087,
+"grad_norm": 0.11996794492006302,
+"learning_rate": 0.00018507068036204618,
+"loss": 1.0485,
+"step": 740
+},
+{
+"epoch": 0.15064037405976824,
+"grad_norm": 0.13017338514328003,
+"learning_rate": 0.0001850503406895149,
+"loss": 1.1304,
+"step": 741
+},
+{
+"epoch": 0.15084366741207564,
+"grad_norm": 0.1273190826177597,
+"learning_rate": 0.00018503000101698365,
+"loss": 1.0937,
+"step": 742
+},
+{
+"epoch": 0.151046960764383,
+"grad_norm": 0.1322571486234665,
+"learning_rate": 0.00018500966134445238,
+"loss": 1.1364,
+"step": 743
+},
+{
+"epoch": 0.15125025411669038,
+"grad_norm": 0.12314455956220627,
+"learning_rate": 0.0001849893216719211,
+"loss": 1.0005,
+"step": 744
+},
+{
+"epoch": 0.15145354746899775,
+"grad_norm": 0.1126449927687645,
+"learning_rate": 0.0001849689819993898,
+"loss": 1.0231,
+"step": 745
+},
+{
+"epoch": 0.15165684082130515,
+"grad_norm": 0.12586358189582825,
+"learning_rate": 0.00018494864232685855,
+"loss": 1.0816,
+"step": 746
+},
+{
+"epoch": 0.15186013417361252,
+"grad_norm": 0.09933953732252121,
+"learning_rate": 0.00018492830265432727,
+"loss": 0.8666,
+"step": 747
+},
+{
+"epoch": 0.1520634275259199,
+"grad_norm": 0.12422667443752289,
+"learning_rate": 0.000184907962981796,
+"loss": 1.0502,
+"step": 748
+},
+{
+"epoch": 0.1522667208782273,
+"grad_norm": 0.12274408340454102,
+"learning_rate": 0.00018488762330926472,
+"loss": 1.1445,
+"step": 749
+},
+{
+"epoch": 0.15247001423053466,
+"grad_norm": 0.1317015141248703,
+"learning_rate": 0.00018486728363673348,
+"loss": 1.2226,
+"step": 750
+},
+{
+"epoch": 0.15267330758284203,
+"grad_norm": 0.1201949417591095,
+"learning_rate": 0.0001848469439642022,
+"loss": 0.9285,
+"step": 751
+},
+{
+"epoch": 0.15287660093514943,
+"grad_norm": 0.11115135997533798,
+"learning_rate": 0.00018482660429167092,
+"loss": 1.1262,
+"step": 752
+},
+{
+"epoch": 0.1530798942874568,
+"grad_norm": 0.11809299886226654,
+"learning_rate": 0.00018480626461913965,
+"loss": 1.0792,
+"step": 753
+},
+{
+"epoch": 0.15328318763976417,
+"grad_norm": 0.14711928367614746,
+"learning_rate": 0.00018478592494660835,
+"loss": 1.1647,
+"step": 754
+},
+{
+"epoch": 0.15348648099207157,
+"grad_norm": 0.12082501500844955,
+"learning_rate": 0.0001847655852740771,
+"loss": 1.1866,
+"step": 755
+},
+{
+"epoch": 0.15368977434437894,
+"grad_norm": 0.1093011349439621,
+"learning_rate": 0.00018474524560154582,
+"loss": 0.9978,
+"step": 756
+},
+{
+"epoch": 0.15389306769668631,
+"grad_norm": 0.11525548994541168,
+"learning_rate": 0.00018472490592901455,
+"loss": 0.9134,
+"step": 757
+},
+{
+"epoch": 0.15409636104899369,
+"grad_norm": 0.12464176118373871,
+"learning_rate": 0.00018470456625648327,
+"loss": 1.0974,
+"step": 758
+},
+{
+"epoch": 0.15429965440130108,
+"grad_norm": 0.11930055171251297,
+"learning_rate": 0.00018468422658395202,
+"loss": 0.8953,
+"step": 759
+},
+{
+"epoch": 0.15450294775360846,
+"grad_norm": 0.12347722053527832,
+"learning_rate": 0.00018466388691142075,
+"loss": 1.0212,
+"step": 760
+},
+{
+"epoch": 0.15470624110591583,
+"grad_norm": 0.1258956342935562,
+"learning_rate": 0.00018464354723888947,
+"loss": 1.2616,
+"step": 761
+},
+{
+"epoch": 0.15490953445822322,
+"grad_norm": 0.12692275643348694,
+"learning_rate": 0.00018462320756635817,
+"loss": 1.1994,
+"step": 762
+},
+{
+"epoch": 0.1551128278105306,
+"grad_norm": 0.13774073123931885,
+"learning_rate": 0.00018460286789382692,
+"loss": 1.2109,
+"step": 763
+},
+{
+"epoch": 0.15531612116283797,
+"grad_norm": 0.12587130069732666,
+"learning_rate": 0.00018458252822129564,
+"loss": 1.1059,
+"step": 764
+},
+{
+"epoch": 0.15551941451514537,
+"grad_norm": 0.13462059199810028,
+"learning_rate": 0.00018456218854876437,
+"loss": 1.0648,
+"step": 765
+},
+{
+"epoch": 0.15572270786745274,
+"grad_norm": 0.1329740285873413,
+"learning_rate": 0.0001845418488762331,
+"loss": 1.264,
+"step": 766
+},
+{
+"epoch": 0.1559260012197601,
+"grad_norm": 0.12275559455156326,
+"learning_rate": 0.00018452150920370185,
+"loss": 0.9893,
+"step": 767
+},
+{
+"epoch": 0.1561292945720675,
+"grad_norm": 0.12821702659130096,
+"learning_rate": 0.00018450116953117057,
+"loss": 1.0681,
+"step": 768
+},
+{
+"epoch": 0.15633258792437488,
+"grad_norm": 0.11758620291948318,
+"learning_rate": 0.0001844808298586393,
+"loss": 1.0476,
+"step": 769
+},
+{
+"epoch": 0.15653588127668225,
+"grad_norm": 0.11491292715072632,
+"learning_rate": 0.000184460490186108,
+"loss": 1.1428,
+"step": 770
+},
+{
+"epoch": 0.15673917462898962,
+"grad_norm": 0.12064868956804276,
+"learning_rate": 0.00018444015051357674,
+"loss": 0.9565,
+"step": 771
+},
+{
+"epoch": 0.15694246798129702,
+"grad_norm": 0.12319160997867584,
+"learning_rate": 0.00018441981084104547,
+"loss": 1.0593,
+"step": 772
+},
+{
+"epoch": 0.1571457613336044,
+"grad_norm": 0.13514620065689087,
+"learning_rate": 0.0001843994711685142,
+"loss": 1.1908,
+"step": 773
+},
+{
+"epoch": 0.15734905468591176,
+"grad_norm": 0.1343378722667694,
+"learning_rate": 0.00018437913149598292,
+"loss": 1.2193,
+"step": 774
+},
+{
+"epoch": 0.15755234803821916,
+"grad_norm": 0.13351817429065704,
+"learning_rate": 0.00018435879182345167,
+"loss": 1.1141,
+"step": 775
+},
+{
+"epoch": 0.15775564139052653,
+"grad_norm": 0.11843458563089371,
+"learning_rate": 0.0001843384521509204,
+"loss": 1.1933,
+"step": 776
+},
+{
+"epoch": 0.1579589347428339,
+"grad_norm": 0.12293927371501923,
+"learning_rate": 0.00018431811247838912,
+"loss": 1.0682,
+"step": 777
+},
+{
+"epoch": 0.1581622280951413,
+"grad_norm": 0.11566301435232162,
+"learning_rate": 0.00018429777280585781,
+"loss": 1.1093,
+"step": 778
+},
+{
+"epoch": 0.15836552144744867,
+"grad_norm": 0.11641670763492584,
+"learning_rate": 0.00018427743313332657,
+"loss": 1.2028,
+"step": 779
+},
+{
+"epoch": 0.15856881479975604,
+"grad_norm": 0.14020314812660217,
+"learning_rate": 0.0001842570934607953,
+"loss": 1.0472,
+"step": 780
+},
+{
+"epoch": 0.15877210815206344,
+"grad_norm": 0.11766766011714935,
+"learning_rate": 0.00018423675378826401,
+"loss": 0.9908,
+"step": 781
+},
+{
+"epoch": 0.1589754015043708,
+"grad_norm": 0.14530715346336365,
+"learning_rate": 0.00018421641411573274,
+"loss": 1.2046,
+"step": 782
+},
+{
+"epoch": 0.15917869485667818,
+"grad_norm": 0.12271513789892197,
+"learning_rate": 0.0001841960744432015,
+"loss": 1.1401,
+"step": 783
+},
+{
+"epoch": 0.15938198820898555,
+"grad_norm": 0.12754741311073303,
+"learning_rate": 0.00018417573477067022,
+"loss": 1.2811,
+"step": 784
+},
+{
+"epoch": 0.15958528156129295,
+"grad_norm": 0.10751698166131973,
+"learning_rate": 0.00018415539509813894,
+"loss": 0.9566,
+"step": 785
+},
+{
+"epoch": 0.15978857491360032,
+"grad_norm": 0.12434156984090805,
+"learning_rate": 0.00018413505542560764,
+"loss": 1.2307,
+"step": 786
+},
+{
+"epoch": 0.1599918682659077,
+"grad_norm": 0.1130242571234703,
+"learning_rate": 0.0001841147157530764,
+"loss": 1.0406,
+"step": 787
+},
+{
+"epoch": 0.1601951616182151,
+"grad_norm": 0.12631991505622864,
+"learning_rate": 0.0001840943760805451,
+"loss": 1.0835,
+"step": 788
+},
+{
+"epoch": 0.16039845497052246,
+"grad_norm": 0.11642556637525558,
+"learning_rate": 0.00018407403640801384,
+"loss": 0.9743,
+"step": 789
+},
+{
+"epoch": 0.16060174832282983,
+"grad_norm": 0.1119033470749855,
+"learning_rate": 0.00018405369673548256,
+"loss": 1.1377,
+"step": 790
+},
+{
+"epoch": 0.16080504167513723,
+"grad_norm": 0.14675219357013702,
+"learning_rate": 0.00018403335706295131,
+"loss": 1.2846,
+"step": 791
+},
+{
+"epoch": 0.1610083350274446,
+"grad_norm": 0.1238279864192009,
+"learning_rate": 0.00018401301739042004,
+"loss": 1.1033,
+"step": 792
+},
+{
+"epoch": 0.16121162837975198,
+"grad_norm": 0.12538330256938934,
+"learning_rate": 0.00018399267771788876,
+"loss": 1.2344,
+"step": 793
+},
+{
+"epoch": 0.16141492173205937,
+"grad_norm": 0.11384537816047668,
+"learning_rate": 0.00018397233804535746,
+"loss": 1.0143,
+"step": 794
+},
+{
+"epoch": 0.16161821508436675,
+"grad_norm": 0.1444682627916336,
+"learning_rate": 0.00018395199837282618,
+"loss": 1.2364,
+"step": 795
+},
+{
+"epoch": 0.16182150843667412,
+"grad_norm": 0.12999016046524048,
+"learning_rate": 0.00018393165870029494,
+"loss": 1.1853,
+"step": 796
+},
+{
+"epoch": 0.1620248017889815,
+"grad_norm": 0.12258971482515335,
+"learning_rate": 0.00018391131902776366,
+"loss": 1.2673,
+"step": 797
+},
+{
+"epoch": 0.16222809514128889,
+"grad_norm": 0.13033455610275269,
+"learning_rate": 0.00018389097935523238,
+"loss": 0.8922,
+"step": 798
+},
+{
+"epoch": 0.16243138849359626,
+"grad_norm": 0.14746494591236115,
+"learning_rate": 0.0001838706396827011,
+"loss": 1.2164,
+"step": 799
+},
+{
+"epoch": 0.16263468184590363,
+"grad_norm": 0.12869805097579956,
+"learning_rate": 0.00018385030001016986,
+"loss": 1.1788,
+"step": 800
 }
 ],
 "logging_steps": 1,
@@ -4927,7 +5627,7 @@
 "attributes": {}
 }
 },
-"total_flos":
+"total_flos": 4.4585745800862106e+17,
 "train_batch_size": 8,
 "trial_name": null,
 "trial_params": null
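The updated epoch and step values are mutually consistent with a two-epoch schedule: 0.16263468184590363 / 800 ≈ 1 / 4919 epoch per step, and 2 × 4919 = 9838, the total step count shown in the README progress bar.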