mveroe committed (verified)
Commit 5fd0b4e · Parent(s): 60231be

Training in progress, epoch 2, checkpoint

checkpoint-1294/config.json ADDED
@@ -0,0 +1,59 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "hidden_act": "silu",
+  "hidden_size": 1536,
+  "initializer_range": 0.02,
+  "intermediate_size": 8960,
+  "layer_types": [
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 131072,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 2,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.55.0",
+  "use_cache": true,
+  "use_mrope": false,
+  "use_sliding_window": false,
+  "vocab_size": 151667
+}
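
The config above describes a 28-layer Qwen2 causal LM (hidden size 1536, 12 attention heads with 2 KV heads, full attention in every layer, tied embeddings). As a quick sanity check, a minimal sketch like the following loads it with transformers — assuming the checkpoint-1294/ directory has been downloaded locally (the path is illustrative, not part of this commit):

```python
# Minimal sketch: load the checkpoint this config describes.
# Assumes checkpoint-1294/ is available locally and
# transformers >= 4.55.0 is installed.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("checkpoint-1294")
assert config.model_type == "qwen2"
assert config.num_hidden_layers == 28

# torch_dtype in the config is bfloat16, so load the weights to match.
model = AutoModelForCausalLM.from_pretrained(
    "checkpoint-1294",
    torch_dtype=torch.bfloat16,
)
print(sum(p.numel() for p in model.parameters()))  # roughly 1.5B parameters
```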
checkpoint-1294/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "max_new_tokens": 2048,
+  "transformers_version": "4.55.0"
+}
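
generation_config.json supplies the defaults that model.generate() picks up automatically. A minimal sketch of inspecting them, under the same local-path assumption as above:

```python
# Minimal sketch: read the generation defaults recorded above.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("checkpoint-1294")
print(gen_config.max_new_tokens)  # 2048
print(gen_config.eos_token_id)    # 151643

# model.generate(...) uses these defaults unless overridden per call.
```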
checkpoint-1294/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31ddcfdbebb3469019ef4b5bb0a0f51c61e202a22339de799543f491e985fa04
+size 3086640776
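
Note that this entry (like the binary files below) is a Git LFS pointer, not the weights themselves: the three lines record the spec version, the blob's SHA-256, and its size (~3.09 GB here). A downloaded blob can be checked against the pointer with a sketch like this — the local path is illustrative:

```python
# Minimal sketch: verify a downloaded model.safetensors against the
# oid/size recorded in the LFS pointer above.
import hashlib

EXPECTED_OID = "31ddcfdbebb3469019ef4b5bb0a0f51c61e202a22339de799543f491e985fa04"
EXPECTED_SIZE = 3086640776

sha = hashlib.sha256()
size = 0
with open("checkpoint-1294/model.safetensors", "rb") as f:
    # Hash in 1 MiB chunks to keep memory flat on a multi-GB file.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size} != {EXPECTED_SIZE}"
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("blob matches the LFS pointer")
```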
checkpoint-1294/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0215eb7a2c6faed14246bbdb4b87280b73d7b5c0123ac7cef889ad1ea16b7388
+size 6056651
checkpoint-1294/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:13605cc6778bff441a8cea8ce6c137552ec3ec41c3ee74b10e0b3a8493e3116e
+size 15365
checkpoint-1294/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0eb1f7b1acfd920c8cdef076eee48ccfec7d3ed4d8c5d83c2592fbbb4a4f9b38
+size 15429
checkpoint-1294/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5664fd5079c69dc8edcc429ded884f07d45c771f27a5a65fe1ad751348f9e1de
+size 15429
checkpoint-1294/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a3f17c4e252e5b7e99f3573c3bca992070f9ef436dd578ee9e88c333a94cef0
+size 15429
checkpoint-1294/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:079f98db65c4c0e70880e54cb476552916626dce5a194de55ac6d813e4314c93
+size 1465
checkpoint-1294/trainer_state.json ADDED
(diff too large to render)
checkpoint-1294/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9111af7da848affc882008f9b1eed603765d07a949de83eec6f87d1d825dce52
+size 5969
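
Taken together, the remaining files — optimizer.pt, scheduler.pt, trainer_state.json, training_args.bin, and one rng_state_{rank}.pth per process (four here, consistent with a 4-process run) — are what the Hugging Face Trainer writes so a run can resume mid-schedule. A minimal resume sketch, assuming the original data pipeline (not part of this commit) is recreated:

```python
# Minimal sketch: resume training from this checkpoint. Trainer restores
# model weights, optimizer and scheduler state, per-rank RNG states, and
# the global step recorded in trainer_state.json.
from transformers import AutoModelForCausalLM, Trainer, TrainingArguments

model = AutoModelForCausalLM.from_pretrained("checkpoint-1294")
args = TrainingArguments(output_dir="out")  # placeholder; the real run's
                                            # args live in training_args.bin

train_dataset = ...  # placeholder: the original dataset is not in this commit

trainer = Trainer(model=model, args=args, train_dataset=train_dataset)
trainer.train(resume_from_checkpoint="checkpoint-1294")
```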