wenhuach committed on
Commit 664cde9 · verified · 1 Parent(s): ba043c8

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "</think>": 151668,
+ "</tool_call>": 151658,
+ "</tool_response>": 151666,
+ "<think>": 151667,
+ "<tool_call>": 151657,
+ "<tool_response>": 151665,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
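
added_tokens.json pins the chat, tool, reasoning, and vision control tokens to fixed vocabulary IDs (for example `<think>` → 151667 and `<|im_start|>` → 151644). A minimal sketch of checking those mappings after download; the repo id below is a hypothetical placeholder, not part of this diff:

```python
# Hedged sketch: verify that the special tokens added above resolve to the IDs
# recorded in added_tokens.json. The repo id is a hypothetical placeholder;
# substitute the actual Hub path of this upload.
from transformers import AutoTokenizer

repo_id = "<org-or-user>/Qwen3-MoE-int4-AutoRound"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(repo_id)

for token in ["<think>", "</think>", "<tool_call>", "<|im_start|>", "<|im_end|>"]:
    # Expected IDs per the file above: 151667, 151668, 151657, 151644, 151645
    print(token, tokenizer.convert_tokens_to_ids(token))
```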
chat_template.jinja ADDED
@@ -0,0 +1,86 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0].role == 'system' %}
+ {{- messages[0].content + '\n\n' }}
+ {%- endif %}
+ {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0].role == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+ {%- for message in messages[::-1] %}
+ {%- set index = (messages|length - 1) - loop.index0 %}
+ {%- if ns.multi_step_tool and message.role == "user" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
+ {%- set ns.multi_step_tool = false %}
+ {%- set ns.last_query_index = index %}
+ {%- endif %}
+ {%- endfor %}
+ {%- for message in messages %}
+ {%- if message.content is string %}
+ {%- set content = message.content %}
+ {%- else %}
+ {%- set content = '' %}
+ {%- endif %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+ {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {%- set reasoning_content = '' %}
+ {%- if message.reasoning_content is string %}
+ {%- set reasoning_content = message.reasoning_content %}
+ {%- else %}
+ {%- if '</think>' in content %}
+ {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+ {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+ {%- endif %}
+ {%- endif %}
+ {%- if loop.index0 > ns.last_query_index %}
+ {%- if loop.last or (not loop.last and reasoning_content) %}
+ {{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- if message.tool_calls %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if (loop.first and content) or (not loop.first) %}
+ {{- '\n' }}
+ {%- endif %}
+ {%- if tool_call.function %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {%- if tool_call.arguments is string %}
+ {{- tool_call.arguments }}
+ {%- else %}
+ {{- tool_call.arguments | tojson }}
+ {%- endif %}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
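
The template above wraps each turn in `<|im_start|>`/`<|im_end|>`, re-emits `<think>` reasoning only for the assistant turns after the last user query, serializes tool calls into `<tool_call>` JSON blocks, and folds tool results into user turns as `<tool_response>`. A minimal usage sketch with the standard `apply_chat_template` API; the repo id is again a placeholder:

```python
# Hedged sketch: render a prompt with the chat template shipped in this commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("<org-or-user>/Qwen3-MoE-int4-AutoRound")  # placeholder

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What does AutoRound quantization do?"},
]

# add_generation_prompt=True appends the trailing '<|im_start|>assistant\n'
# produced by the final branch of the template.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```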
config.json ADDED
@@ -0,0 +1,1154 @@
1
+ {
2
+ "architectures": [
3
+ "Qwen3MoeForCausalLM"
4
+ ],
5
+ "attention_bias": false,
6
+ "attention_dropout": 0.0,
7
+ "bos_token_id": 151643,
8
+ "decoder_sparse_step": 1,
9
+ "eos_token_id": 151645,
10
+ "head_dim": 128,
11
+ "hidden_act": "silu",
12
+ "hidden_size": 2048,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 6144,
15
+ "max_position_embeddings": 262144,
16
+ "max_window_layers": 48,
17
+ "mlp_only_layers": [],
18
+ "model_type": "qwen3_moe",
19
+ "moe_intermediate_size": 768,
20
+ "norm_topk_prob": true,
21
+ "num_attention_heads": 32,
22
+ "num_experts": 128,
23
+ "num_experts_per_tok": 8,
24
+ "num_hidden_layers": 48,
25
+ "num_key_value_heads": 4,
26
+ "output_router_logits": false,
27
+ "quantization_config": {
28
+ "autoround_version": "0.6.1.dev",
29
+ "bits": 4,
30
+ "data_type": "int",
31
+ "extra_config": {
32
+ "model.layers.0.mlp.gate": {
33
+ "bits": 16
34
+ },
35
+ "model.layers.0.self_attn.k_proj": {
36
+ "bits": 8,
37
+ "group_size": 128
38
+ },
39
+ "model.layers.0.self_attn.o_proj": {
40
+ "bits": 8,
41
+ "group_size": 128
42
+ },
43
+ "model.layers.0.self_attn.q_proj": {
44
+ "bits": 8,
45
+ "group_size": 128
46
+ },
47
+ "model.layers.0.self_attn.v_proj": {
48
+ "bits": 8,
49
+ "group_size": 128
50
+ },
51
+ "model.layers.1.mlp.gate": {
52
+ "bits": 16
53
+ },
54
+ "model.layers.1.self_attn.k_proj": {
55
+ "bits": 8,
56
+ "group_size": 128
57
+ },
58
+ "model.layers.1.self_attn.o_proj": {
59
+ "bits": 8,
60
+ "group_size": 128
61
+ },
62
+ "model.layers.1.self_attn.q_proj": {
63
+ "bits": 8,
64
+ "group_size": 128
65
+ },
66
+ "model.layers.1.self_attn.v_proj": {
67
+ "bits": 8,
68
+ "group_size": 128
69
+ },
70
+ "model.layers.10.mlp.gate": {
71
+ "bits": 16
72
+ },
73
+ "model.layers.10.self_attn.k_proj": {
74
+ "bits": 8,
75
+ "group_size": 128
76
+ },
77
+ "model.layers.10.self_attn.o_proj": {
78
+ "bits": 8,
79
+ "group_size": 128
80
+ },
81
+ "model.layers.10.self_attn.q_proj": {
82
+ "bits": 8,
83
+ "group_size": 128
84
+ },
85
+ "model.layers.10.self_attn.v_proj": {
86
+ "bits": 8,
87
+ "group_size": 128
88
+ },
89
+ "model.layers.11.mlp.gate": {
90
+ "bits": 16
91
+ },
92
+ "model.layers.11.self_attn.k_proj": {
93
+ "bits": 8,
94
+ "group_size": 128
95
+ },
96
+ "model.layers.11.self_attn.o_proj": {
97
+ "bits": 8,
98
+ "group_size": 128
99
+ },
100
+ "model.layers.11.self_attn.q_proj": {
101
+ "bits": 8,
102
+ "group_size": 128
103
+ },
104
+ "model.layers.11.self_attn.v_proj": {
105
+ "bits": 8,
106
+ "group_size": 128
107
+ },
108
+ "model.layers.12.mlp.gate": {
109
+ "bits": 16
110
+ },
111
+ "model.layers.12.self_attn.k_proj": {
112
+ "bits": 8,
113
+ "group_size": 128
114
+ },
115
+ "model.layers.12.self_attn.o_proj": {
116
+ "bits": 8,
117
+ "group_size": 128
118
+ },
119
+ "model.layers.12.self_attn.q_proj": {
120
+ "bits": 8,
121
+ "group_size": 128
122
+ },
123
+ "model.layers.12.self_attn.v_proj": {
124
+ "bits": 8,
125
+ "group_size": 128
126
+ },
127
+ "model.layers.13.mlp.gate": {
128
+ "bits": 16
129
+ },
130
+ "model.layers.13.self_attn.k_proj": {
131
+ "bits": 8,
132
+ "group_size": 128
133
+ },
134
+ "model.layers.13.self_attn.o_proj": {
135
+ "bits": 8,
136
+ "group_size": 128
137
+ },
138
+ "model.layers.13.self_attn.q_proj": {
139
+ "bits": 8,
140
+ "group_size": 128
141
+ },
142
+ "model.layers.13.self_attn.v_proj": {
143
+ "bits": 8,
144
+ "group_size": 128
145
+ },
146
+ "model.layers.14.mlp.gate": {
147
+ "bits": 16
148
+ },
149
+ "model.layers.14.self_attn.k_proj": {
150
+ "bits": 8,
151
+ "group_size": 128
152
+ },
153
+ "model.layers.14.self_attn.o_proj": {
154
+ "bits": 8,
155
+ "group_size": 128
156
+ },
157
+ "model.layers.14.self_attn.q_proj": {
158
+ "bits": 8,
159
+ "group_size": 128
160
+ },
161
+ "model.layers.14.self_attn.v_proj": {
162
+ "bits": 8,
163
+ "group_size": 128
164
+ },
165
+ "model.layers.15.mlp.gate": {
166
+ "bits": 16
167
+ },
168
+ "model.layers.15.self_attn.k_proj": {
169
+ "bits": 8,
170
+ "group_size": 128
171
+ },
172
+ "model.layers.15.self_attn.o_proj": {
173
+ "bits": 8,
174
+ "group_size": 128
175
+ },
176
+ "model.layers.15.self_attn.q_proj": {
177
+ "bits": 8,
178
+ "group_size": 128
179
+ },
180
+ "model.layers.15.self_attn.v_proj": {
181
+ "bits": 8,
182
+ "group_size": 128
183
+ },
184
+ "model.layers.16.mlp.gate": {
185
+ "bits": 16
186
+ },
187
+ "model.layers.16.self_attn.k_proj": {
188
+ "bits": 8,
189
+ "group_size": 128
190
+ },
191
+ "model.layers.16.self_attn.o_proj": {
192
+ "bits": 8,
193
+ "group_size": 128
194
+ },
195
+ "model.layers.16.self_attn.q_proj": {
196
+ "bits": 8,
197
+ "group_size": 128
198
+ },
199
+ "model.layers.16.self_attn.v_proj": {
200
+ "bits": 8,
201
+ "group_size": 128
202
+ },
203
+ "model.layers.17.mlp.gate": {
204
+ "bits": 16
205
+ },
206
+ "model.layers.17.self_attn.k_proj": {
207
+ "bits": 8,
208
+ "group_size": 128
209
+ },
210
+ "model.layers.17.self_attn.o_proj": {
211
+ "bits": 8,
212
+ "group_size": 128
213
+ },
214
+ "model.layers.17.self_attn.q_proj": {
215
+ "bits": 8,
216
+ "group_size": 128
217
+ },
218
+ "model.layers.17.self_attn.v_proj": {
219
+ "bits": 8,
220
+ "group_size": 128
221
+ },
222
+ "model.layers.18.mlp.gate": {
223
+ "bits": 16
224
+ },
225
+ "model.layers.18.self_attn.k_proj": {
226
+ "bits": 8,
227
+ "group_size": 128
228
+ },
229
+ "model.layers.18.self_attn.o_proj": {
230
+ "bits": 8,
231
+ "group_size": 128
232
+ },
233
+ "model.layers.18.self_attn.q_proj": {
234
+ "bits": 8,
235
+ "group_size": 128
236
+ },
237
+ "model.layers.18.self_attn.v_proj": {
238
+ "bits": 8,
239
+ "group_size": 128
240
+ },
241
+ "model.layers.19.mlp.gate": {
242
+ "bits": 16
243
+ },
244
+ "model.layers.19.self_attn.k_proj": {
245
+ "bits": 8,
246
+ "group_size": 128
247
+ },
248
+ "model.layers.19.self_attn.o_proj": {
249
+ "bits": 8,
250
+ "group_size": 128
251
+ },
252
+ "model.layers.19.self_attn.q_proj": {
253
+ "bits": 8,
254
+ "group_size": 128
255
+ },
256
+ "model.layers.19.self_attn.v_proj": {
257
+ "bits": 8,
258
+ "group_size": 128
259
+ },
260
+ "model.layers.2.mlp.gate": {
261
+ "bits": 16
262
+ },
263
+ "model.layers.2.self_attn.k_proj": {
264
+ "bits": 8,
265
+ "group_size": 128
266
+ },
267
+ "model.layers.2.self_attn.o_proj": {
268
+ "bits": 8,
269
+ "group_size": 128
270
+ },
271
+ "model.layers.2.self_attn.q_proj": {
272
+ "bits": 8,
273
+ "group_size": 128
274
+ },
275
+ "model.layers.2.self_attn.v_proj": {
276
+ "bits": 8,
277
+ "group_size": 128
278
+ },
279
+ "model.layers.20.mlp.gate": {
280
+ "bits": 16
281
+ },
282
+ "model.layers.20.self_attn.k_proj": {
283
+ "bits": 8,
284
+ "group_size": 128
285
+ },
286
+ "model.layers.20.self_attn.o_proj": {
287
+ "bits": 8,
288
+ "group_size": 128
289
+ },
290
+ "model.layers.20.self_attn.q_proj": {
291
+ "bits": 8,
292
+ "group_size": 128
293
+ },
294
+ "model.layers.20.self_attn.v_proj": {
295
+ "bits": 8,
296
+ "group_size": 128
297
+ },
298
+ "model.layers.21.mlp.gate": {
299
+ "bits": 16
300
+ },
301
+ "model.layers.21.self_attn.k_proj": {
302
+ "bits": 8,
303
+ "group_size": 128
304
+ },
305
+ "model.layers.21.self_attn.o_proj": {
306
+ "bits": 8,
307
+ "group_size": 128
308
+ },
309
+ "model.layers.21.self_attn.q_proj": {
310
+ "bits": 8,
311
+ "group_size": 128
312
+ },
313
+ "model.layers.21.self_attn.v_proj": {
314
+ "bits": 8,
315
+ "group_size": 128
316
+ },
317
+ "model.layers.22.mlp.gate": {
318
+ "bits": 16
319
+ },
320
+ "model.layers.22.self_attn.k_proj": {
321
+ "bits": 8,
322
+ "group_size": 128
323
+ },
324
+ "model.layers.22.self_attn.o_proj": {
325
+ "bits": 8,
326
+ "group_size": 128
327
+ },
328
+ "model.layers.22.self_attn.q_proj": {
329
+ "bits": 8,
330
+ "group_size": 128
331
+ },
332
+ "model.layers.22.self_attn.v_proj": {
333
+ "bits": 8,
334
+ "group_size": 128
335
+ },
336
+ "model.layers.23.mlp.gate": {
337
+ "bits": 16
338
+ },
339
+ "model.layers.23.self_attn.k_proj": {
340
+ "bits": 8,
341
+ "group_size": 128
342
+ },
343
+ "model.layers.23.self_attn.o_proj": {
344
+ "bits": 8,
345
+ "group_size": 128
346
+ },
347
+ "model.layers.23.self_attn.q_proj": {
348
+ "bits": 8,
349
+ "group_size": 128
350
+ },
351
+ "model.layers.23.self_attn.v_proj": {
352
+ "bits": 8,
353
+ "group_size": 128
354
+ },
355
+ "model.layers.24.mlp.gate": {
356
+ "bits": 16
357
+ },
358
+ "model.layers.24.self_attn.k_proj": {
359
+ "bits": 8,
360
+ "group_size": 128
361
+ },
362
+ "model.layers.24.self_attn.o_proj": {
363
+ "bits": 8,
364
+ "group_size": 128
365
+ },
366
+ "model.layers.24.self_attn.q_proj": {
367
+ "bits": 8,
368
+ "group_size": 128
369
+ },
370
+ "model.layers.24.self_attn.v_proj": {
371
+ "bits": 8,
372
+ "group_size": 128
373
+ },
374
+ "model.layers.25.mlp.gate": {
375
+ "bits": 16
376
+ },
377
+ "model.layers.25.self_attn.k_proj": {
378
+ "bits": 8,
379
+ "group_size": 128
380
+ },
381
+ "model.layers.25.self_attn.o_proj": {
382
+ "bits": 8,
383
+ "group_size": 128
384
+ },
385
+ "model.layers.25.self_attn.q_proj": {
386
+ "bits": 8,
387
+ "group_size": 128
388
+ },
389
+ "model.layers.25.self_attn.v_proj": {
390
+ "bits": 8,
391
+ "group_size": 128
392
+ },
393
+ "model.layers.26.mlp.gate": {
394
+ "bits": 16
395
+ },
396
+ "model.layers.26.self_attn.k_proj": {
397
+ "bits": 8,
398
+ "group_size": 128
399
+ },
400
+ "model.layers.26.self_attn.o_proj": {
401
+ "bits": 8,
402
+ "group_size": 128
403
+ },
404
+ "model.layers.26.self_attn.q_proj": {
405
+ "bits": 8,
406
+ "group_size": 128
407
+ },
408
+ "model.layers.26.self_attn.v_proj": {
409
+ "bits": 8,
410
+ "group_size": 128
411
+ },
412
+ "model.layers.27.mlp.gate": {
413
+ "bits": 16
414
+ },
415
+ "model.layers.27.self_attn.k_proj": {
416
+ "bits": 8,
417
+ "group_size": 128
418
+ },
419
+ "model.layers.27.self_attn.o_proj": {
420
+ "bits": 8,
421
+ "group_size": 128
422
+ },
423
+ "model.layers.27.self_attn.q_proj": {
424
+ "bits": 8,
425
+ "group_size": 128
426
+ },
427
+ "model.layers.27.self_attn.v_proj": {
428
+ "bits": 8,
429
+ "group_size": 128
430
+ },
431
+ "model.layers.28.mlp.gate": {
432
+ "bits": 16
433
+ },
434
+ "model.layers.28.self_attn.k_proj": {
435
+ "bits": 8,
436
+ "group_size": 128
437
+ },
438
+ "model.layers.28.self_attn.o_proj": {
439
+ "bits": 8,
440
+ "group_size": 128
441
+ },
442
+ "model.layers.28.self_attn.q_proj": {
443
+ "bits": 8,
444
+ "group_size": 128
445
+ },
446
+ "model.layers.28.self_attn.v_proj": {
447
+ "bits": 8,
448
+ "group_size": 128
449
+ },
450
+ "model.layers.29.mlp.gate": {
451
+ "bits": 16
452
+ },
453
+ "model.layers.29.self_attn.k_proj": {
454
+ "bits": 8,
455
+ "group_size": 128
456
+ },
457
+ "model.layers.29.self_attn.o_proj": {
458
+ "bits": 8,
459
+ "group_size": 128
460
+ },
461
+ "model.layers.29.self_attn.q_proj": {
462
+ "bits": 8,
463
+ "group_size": 128
464
+ },
465
+ "model.layers.29.self_attn.v_proj": {
466
+ "bits": 8,
467
+ "group_size": 128
468
+ },
469
+ "model.layers.3.mlp.gate": {
470
+ "bits": 16
471
+ },
472
+ "model.layers.3.self_attn.k_proj": {
473
+ "bits": 8,
474
+ "group_size": 128
475
+ },
476
+ "model.layers.3.self_attn.o_proj": {
477
+ "bits": 8,
478
+ "group_size": 128
479
+ },
480
+ "model.layers.3.self_attn.q_proj": {
481
+ "bits": 8,
482
+ "group_size": 128
483
+ },
484
+ "model.layers.3.self_attn.v_proj": {
485
+ "bits": 8,
486
+ "group_size": 128
487
+ },
488
+ "model.layers.30.mlp.gate": {
489
+ "bits": 16
490
+ },
491
+ "model.layers.30.self_attn.k_proj": {
492
+ "bits": 8,
493
+ "group_size": 128
494
+ },
495
+ "model.layers.30.self_attn.o_proj": {
496
+ "bits": 8,
497
+ "group_size": 128
498
+ },
499
+ "model.layers.30.self_attn.q_proj": {
500
+ "bits": 8,
501
+ "group_size": 128
502
+ },
503
+ "model.layers.30.self_attn.v_proj": {
504
+ "bits": 8,
505
+ "group_size": 128
506
+ },
507
+ "model.layers.31.mlp.gate": {
508
+ "bits": 16
509
+ },
510
+ "model.layers.31.self_attn.k_proj": {
511
+ "bits": 8,
512
+ "group_size": 128
513
+ },
514
+ "model.layers.31.self_attn.o_proj": {
515
+ "bits": 8,
516
+ "group_size": 128
517
+ },
518
+ "model.layers.31.self_attn.q_proj": {
519
+ "bits": 8,
520
+ "group_size": 128
521
+ },
522
+ "model.layers.31.self_attn.v_proj": {
523
+ "bits": 8,
524
+ "group_size": 128
525
+ },
526
+ "model.layers.32.mlp.gate": {
527
+ "bits": 16
528
+ },
529
+ "model.layers.32.self_attn.k_proj": {
530
+ "bits": 8,
531
+ "group_size": 128
532
+ },
533
+ "model.layers.32.self_attn.o_proj": {
534
+ "bits": 8,
535
+ "group_size": 128
536
+ },
537
+ "model.layers.32.self_attn.q_proj": {
538
+ "bits": 8,
539
+ "group_size": 128
540
+ },
541
+ "model.layers.32.self_attn.v_proj": {
542
+ "bits": 8,
543
+ "group_size": 128
544
+ },
545
+ "model.layers.33.mlp.gate": {
546
+ "bits": 16
547
+ },
548
+ "model.layers.33.self_attn.k_proj": {
549
+ "bits": 8,
550
+ "group_size": 128
551
+ },
552
+ "model.layers.33.self_attn.o_proj": {
553
+ "bits": 8,
554
+ "group_size": 128
555
+ },
556
+ "model.layers.33.self_attn.q_proj": {
557
+ "bits": 8,
558
+ "group_size": 128
559
+ },
560
+ "model.layers.33.self_attn.v_proj": {
561
+ "bits": 8,
562
+ "group_size": 128
563
+ },
564
+ "model.layers.34.mlp.gate": {
565
+ "bits": 16
566
+ },
567
+ "model.layers.34.self_attn.k_proj": {
568
+ "bits": 8,
569
+ "group_size": 128
570
+ },
571
+ "model.layers.34.self_attn.o_proj": {
572
+ "bits": 8,
573
+ "group_size": 128
574
+ },
575
+ "model.layers.34.self_attn.q_proj": {
576
+ "bits": 8,
577
+ "group_size": 128
578
+ },
579
+ "model.layers.34.self_attn.v_proj": {
580
+ "bits": 8,
581
+ "group_size": 128
582
+ },
583
+ "model.layers.35.mlp.gate": {
584
+ "bits": 16
585
+ },
586
+ "model.layers.35.self_attn.k_proj": {
587
+ "bits": 8,
588
+ "group_size": 128
589
+ },
590
+ "model.layers.35.self_attn.o_proj": {
591
+ "bits": 8,
592
+ "group_size": 128
593
+ },
594
+ "model.layers.35.self_attn.q_proj": {
595
+ "bits": 8,
596
+ "group_size": 128
597
+ },
598
+ "model.layers.35.self_attn.v_proj": {
599
+ "bits": 8,
600
+ "group_size": 128
601
+ },
602
+ "model.layers.36.mlp.gate": {
603
+ "bits": 16
604
+ },
605
+ "model.layers.36.self_attn.k_proj": {
606
+ "bits": 8,
607
+ "group_size": 128
608
+ },
609
+ "model.layers.36.self_attn.o_proj": {
610
+ "bits": 8,
611
+ "group_size": 128
612
+ },
613
+ "model.layers.36.self_attn.q_proj": {
614
+ "bits": 8,
615
+ "group_size": 128
616
+ },
617
+ "model.layers.36.self_attn.v_proj": {
618
+ "bits": 8,
619
+ "group_size": 128
620
+ },
621
+ "model.layers.37.mlp.gate": {
622
+ "bits": 16
623
+ },
624
+ "model.layers.37.self_attn.k_proj": {
625
+ "bits": 8,
626
+ "group_size": 128
627
+ },
628
+ "model.layers.37.self_attn.o_proj": {
629
+ "bits": 8,
630
+ "group_size": 128
631
+ },
632
+ "model.layers.37.self_attn.q_proj": {
633
+ "bits": 8,
634
+ "group_size": 128
635
+ },
636
+ "model.layers.37.self_attn.v_proj": {
637
+ "bits": 8,
638
+ "group_size": 128
639
+ },
640
+ "model.layers.38.mlp.gate": {
641
+ "bits": 16
642
+ },
643
+ "model.layers.38.self_attn.k_proj": {
644
+ "bits": 8,
645
+ "group_size": 128
646
+ },
647
+ "model.layers.38.self_attn.o_proj": {
648
+ "bits": 8,
649
+ "group_size": 128
650
+ },
651
+ "model.layers.38.self_attn.q_proj": {
652
+ "bits": 8,
653
+ "group_size": 128
654
+ },
655
+ "model.layers.38.self_attn.v_proj": {
656
+ "bits": 8,
657
+ "group_size": 128
658
+ },
659
+ "model.layers.39.mlp.gate": {
660
+ "bits": 16
661
+ },
662
+ "model.layers.39.self_attn.k_proj": {
663
+ "bits": 8,
664
+ "group_size": 128
665
+ },
666
+ "model.layers.39.self_attn.o_proj": {
667
+ "bits": 8,
668
+ "group_size": 128
669
+ },
670
+ "model.layers.39.self_attn.q_proj": {
671
+ "bits": 8,
672
+ "group_size": 128
673
+ },
674
+ "model.layers.39.self_attn.v_proj": {
675
+ "bits": 8,
676
+ "group_size": 128
677
+ },
678
+ "model.layers.4.mlp.gate": {
679
+ "bits": 16
680
+ },
681
+ "model.layers.4.self_attn.k_proj": {
682
+ "bits": 8,
683
+ "group_size": 128
684
+ },
685
+ "model.layers.4.self_attn.o_proj": {
686
+ "bits": 8,
687
+ "group_size": 128
688
+ },
689
+ "model.layers.4.self_attn.q_proj": {
690
+ "bits": 8,
691
+ "group_size": 128
692
+ },
693
+ "model.layers.4.self_attn.v_proj": {
694
+ "bits": 8,
695
+ "group_size": 128
696
+ },
697
+ "model.layers.40.mlp.gate": {
698
+ "bits": 16
699
+ },
700
+ "model.layers.40.self_attn.k_proj": {
701
+ "bits": 8,
702
+ "group_size": 128
703
+ },
704
+ "model.layers.40.self_attn.o_proj": {
705
+ "bits": 8,
706
+ "group_size": 128
707
+ },
708
+ "model.layers.40.self_attn.q_proj": {
709
+ "bits": 8,
710
+ "group_size": 128
711
+ },
712
+ "model.layers.40.self_attn.v_proj": {
713
+ "bits": 8,
714
+ "group_size": 128
715
+ },
716
+ "model.layers.41.mlp.gate": {
717
+ "bits": 16
718
+ },
719
+ "model.layers.41.self_attn.k_proj": {
720
+ "bits": 8,
721
+ "group_size": 128
722
+ },
723
+ "model.layers.41.self_attn.o_proj": {
724
+ "bits": 8,
725
+ "group_size": 128
726
+ },
727
+ "model.layers.41.self_attn.q_proj": {
728
+ "bits": 8,
729
+ "group_size": 128
730
+ },
731
+ "model.layers.41.self_attn.v_proj": {
732
+ "bits": 8,
733
+ "group_size": 128
734
+ },
735
+ "model.layers.42.mlp.gate": {
736
+ "bits": 16
737
+ },
738
+ "model.layers.42.self_attn.k_proj": {
739
+ "bits": 8,
740
+ "group_size": 128
741
+ },
742
+ "model.layers.42.self_attn.o_proj": {
743
+ "bits": 8,
744
+ "group_size": 128
745
+ },
746
+ "model.layers.42.self_attn.q_proj": {
747
+ "bits": 8,
748
+ "group_size": 128
749
+ },
750
+ "model.layers.42.self_attn.v_proj": {
751
+ "bits": 8,
752
+ "group_size": 128
753
+ },
754
+ "model.layers.43.mlp.gate": {
755
+ "bits": 16
756
+ },
757
+ "model.layers.43.self_attn.k_proj": {
758
+ "bits": 8,
759
+ "group_size": 128
760
+ },
761
+ "model.layers.43.self_attn.o_proj": {
762
+ "bits": 8,
763
+ "group_size": 128
764
+ },
765
+ "model.layers.43.self_attn.q_proj": {
766
+ "bits": 8,
767
+ "group_size": 128
768
+ },
769
+ "model.layers.43.self_attn.v_proj": {
770
+ "bits": 8,
771
+ "group_size": 128
772
+ },
773
+ "model.layers.44.mlp.gate": {
774
+ "bits": 16
775
+ },
776
+ "model.layers.44.self_attn.k_proj": {
777
+ "bits": 8,
778
+ "group_size": 128
779
+ },
780
+ "model.layers.44.self_attn.o_proj": {
781
+ "bits": 8,
782
+ "group_size": 128
783
+ },
784
+ "model.layers.44.self_attn.q_proj": {
785
+ "bits": 8,
786
+ "group_size": 128
787
+ },
788
+ "model.layers.44.self_attn.v_proj": {
789
+ "bits": 8,
790
+ "group_size": 128
791
+ },
792
+ "model.layers.45.mlp.gate": {
793
+ "bits": 16
794
+ },
795
+ "model.layers.45.self_attn.k_proj": {
796
+ "bits": 8,
797
+ "group_size": 128
798
+ },
799
+ "model.layers.45.self_attn.o_proj": {
800
+ "bits": 8,
801
+ "group_size": 128
802
+ },
803
+ "model.layers.45.self_attn.q_proj": {
804
+ "bits": 8,
805
+ "group_size": 128
806
+ },
807
+ "model.layers.45.self_attn.v_proj": {
808
+ "bits": 8,
809
+ "group_size": 128
810
+ },
811
+ "model.layers.46.mlp.gate": {
812
+ "bits": 16
813
+ },
814
+ "model.layers.46.self_attn.k_proj": {
815
+ "bits": 8,
816
+ "group_size": 128
817
+ },
818
+ "model.layers.46.self_attn.o_proj": {
819
+ "bits": 8,
820
+ "group_size": 128
821
+ },
822
+ "model.layers.46.self_attn.q_proj": {
823
+ "bits": 8,
824
+ "group_size": 128
825
+ },
826
+ "model.layers.46.self_attn.v_proj": {
827
+ "bits": 8,
828
+ "group_size": 128
829
+ },
830
+ "model.layers.47.mlp.gate": {
831
+ "bits": 16
832
+ },
833
+ "model.layers.47.self_attn.k_proj": {
834
+ "bits": 8,
835
+ "group_size": 128
836
+ },
837
+ "model.layers.47.self_attn.o_proj": {
838
+ "bits": 8,
839
+ "group_size": 128
840
+ },
841
+ "model.layers.47.self_attn.q_proj": {
842
+ "bits": 8,
843
+ "group_size": 128
844
+ },
845
+ "model.layers.47.self_attn.v_proj": {
846
+ "bits": 8,
847
+ "group_size": 128
848
+ },
849
+ "model.layers.5.mlp.gate": {
850
+ "bits": 16
851
+ },
852
+ "model.layers.5.self_attn.k_proj": {
853
+ "bits": 8,
854
+ "group_size": 128
855
+ },
856
+ "model.layers.5.self_attn.o_proj": {
857
+ "bits": 8,
858
+ "group_size": 128
859
+ },
860
+ "model.layers.5.self_attn.q_proj": {
861
+ "bits": 8,
862
+ "group_size": 128
863
+ },
864
+ "model.layers.5.self_attn.v_proj": {
865
+ "bits": 8,
866
+ "group_size": 128
867
+ },
868
+ "model.layers.6.mlp.gate": {
869
+ "bits": 16
870
+ },
871
+ "model.layers.6.self_attn.k_proj": {
872
+ "bits": 8,
873
+ "group_size": 128
874
+ },
875
+ "model.layers.6.self_attn.o_proj": {
876
+ "bits": 8,
877
+ "group_size": 128
878
+ },
879
+ "model.layers.6.self_attn.q_proj": {
880
+ "bits": 8,
881
+ "group_size": 128
882
+ },
883
+ "model.layers.6.self_attn.v_proj": {
884
+ "bits": 8,
885
+ "group_size": 128
886
+ },
887
+ "model.layers.7.mlp.gate": {
888
+ "bits": 16
889
+ },
890
+ "model.layers.7.self_attn.k_proj": {
891
+ "bits": 8,
892
+ "group_size": 128
893
+ },
894
+ "model.layers.7.self_attn.o_proj": {
895
+ "bits": 8,
896
+ "group_size": 128
897
+ },
898
+ "model.layers.7.self_attn.q_proj": {
899
+ "bits": 8,
900
+ "group_size": 128
901
+ },
902
+ "model.layers.7.self_attn.v_proj": {
903
+ "bits": 8,
904
+ "group_size": 128
905
+ },
906
+ "model.layers.8.mlp.gate": {
907
+ "bits": 16
908
+ },
909
+ "model.layers.8.self_attn.k_proj": {
910
+ "bits": 8,
911
+ "group_size": 128
912
+ },
913
+ "model.layers.8.self_attn.o_proj": {
914
+ "bits": 8,
915
+ "group_size": 128
916
+ },
917
+ "model.layers.8.self_attn.q_proj": {
918
+ "bits": 8,
919
+ "group_size": 128
920
+ },
921
+ "model.layers.8.self_attn.v_proj": {
922
+ "bits": 8,
923
+ "group_size": 128
924
+ },
925
+ "model.layers.9.mlp.gate": {
926
+ "bits": 16
927
+ },
928
+ "model.layers.9.self_attn.k_proj": {
929
+ "bits": 8,
930
+ "group_size": 128
931
+ },
932
+ "model.layers.9.self_attn.o_proj": {
933
+ "bits": 8,
934
+ "group_size": 128
935
+ },
936
+ "model.layers.9.self_attn.q_proj": {
937
+ "bits": 8,
938
+ "group_size": 128
939
+ },
940
+ "model.layers.9.self_attn.v_proj": {
941
+ "bits": 8,
942
+ "group_size": 128
943
+ },
944
+ "model.layers.0.self_attn.qkv_proj": {
945
+ "bits": 8,
946
+ "group_size": 128
947
+ },
948
+ "model.layers.1.self_attn.qkv_proj": {
949
+ "bits": 8,
950
+ "group_size": 128
951
+ },
952
+ "model.layers.2.self_attn.qkv_proj": {
953
+ "bits": 8,
954
+ "group_size": 128
955
+ },
956
+ "model.layers.3.self_attn.qkv_proj": {
957
+ "bits": 8,
958
+ "group_size": 128
959
+ },
960
+ "model.layers.4.self_attn.qkv_proj": {
961
+ "bits": 8,
962
+ "group_size": 128
963
+ },
964
+ "model.layers.5.self_attn.qkv_proj": {
965
+ "bits": 8,
966
+ "group_size": 128
967
+ },
968
+ "model.layers.6.self_attn.qkv_proj": {
969
+ "bits": 8,
970
+ "group_size": 128
971
+ },
972
+ "model.layers.7.self_attn.qkv_proj": {
973
+ "bits": 8,
974
+ "group_size": 128
975
+ },
976
+ "model.layers.8.self_attn.qkv_proj": {
977
+ "bits": 8,
978
+ "group_size": 128
979
+ },
980
+ "model.layers.9.self_attn.qkv_proj": {
981
+ "bits": 8,
982
+ "group_size": 128
983
+ },
984
+ "model.layers.10.self_attn.qkv_proj": {
985
+ "bits": 8,
986
+ "group_size": 128
987
+ },
988
+ "model.layers.11.self_attn.qkv_proj": {
989
+ "bits": 8,
990
+ "group_size": 128
991
+ },
992
+ "model.layers.12.self_attn.qkv_proj": {
993
+ "bits": 8,
994
+ "group_size": 128
995
+ },
996
+ "model.layers.13.self_attn.qkv_proj": {
997
+ "bits": 8,
998
+ "group_size": 128
999
+ },
1000
+ "model.layers.14.self_attn.qkv_proj": {
1001
+ "bits": 8,
1002
+ "group_size": 128
1003
+ },
1004
+ "model.layers.15.self_attn.qkv_proj": {
1005
+ "bits": 8,
1006
+ "group_size": 128
1007
+ },
1008
+ "model.layers.16.self_attn.qkv_proj": {
1009
+ "bits": 8,
1010
+ "group_size": 128
1011
+ },
1012
+ "model.layers.17.self_attn.qkv_proj": {
1013
+ "bits": 8,
1014
+ "group_size": 128
1015
+ },
1016
+ "model.layers.18.self_attn.qkv_proj": {
1017
+ "bits": 8,
1018
+ "group_size": 128
1019
+ },
1020
+ "model.layers.19.self_attn.qkv_proj": {
1021
+ "bits": 8,
1022
+ "group_size": 128
1023
+ },
1024
+ "model.layers.20.self_attn.qkv_proj": {
1025
+ "bits": 8,
1026
+ "group_size": 128
1027
+ },
1028
+ "model.layers.21.self_attn.qkv_proj": {
1029
+ "bits": 8,
1030
+ "group_size": 128
1031
+ },
1032
+ "model.layers.22.self_attn.qkv_proj": {
1033
+ "bits": 8,
1034
+ "group_size": 128
1035
+ },
1036
+ "model.layers.23.self_attn.qkv_proj": {
1037
+ "bits": 8,
1038
+ "group_size": 128
1039
+ },
1040
+ "model.layers.24.self_attn.qkv_proj": {
1041
+ "bits": 8,
1042
+ "group_size": 128
1043
+ },
1044
+ "model.layers.25.self_attn.qkv_proj": {
1045
+ "bits": 8,
1046
+ "group_size": 128
1047
+ },
1048
+ "model.layers.26.self_attn.qkv_proj": {
1049
+ "bits": 8,
1050
+ "group_size": 128
1051
+ },
1052
+ "model.layers.27.self_attn.qkv_proj": {
1053
+ "bits": 8,
1054
+ "group_size": 128
1055
+ },
1056
+ "model.layers.28.self_attn.qkv_proj": {
1057
+ "bits": 8,
1058
+ "group_size": 128
1059
+ },
1060
+ "model.layers.29.self_attn.qkv_proj": {
1061
+ "bits": 8,
1062
+ "group_size": 128
1063
+ },
1064
+ "model.layers.30.self_attn.qkv_proj": {
1065
+ "bits": 8,
1066
+ "group_size": 128
1067
+ },
1068
+ "model.layers.31.self_attn.qkv_proj": {
1069
+ "bits": 8,
1070
+ "group_size": 128
1071
+ },
1072
+ "model.layers.32.self_attn.qkv_proj": {
1073
+ "bits": 8,
1074
+ "group_size": 128
1075
+ },
1076
+ "model.layers.33.self_attn.qkv_proj": {
1077
+ "bits": 8,
1078
+ "group_size": 128
1079
+ },
1080
+ "model.layers.34.self_attn.qkv_proj": {
1081
+ "bits": 8,
1082
+ "group_size": 128
1083
+ },
1084
+ "model.layers.35.self_attn.qkv_proj": {
1085
+ "bits": 8,
1086
+ "group_size": 128
1087
+ },
1088
+ "model.layers.36.self_attn.qkv_proj": {
1089
+ "bits": 8,
1090
+ "group_size": 128
1091
+ },
1092
+ "model.layers.37.self_attn.qkv_proj": {
1093
+ "bits": 8,
1094
+ "group_size": 128
1095
+ },
1096
+ "model.layers.38.self_attn.qkv_proj": {
1097
+ "bits": 8,
1098
+ "group_size": 128
1099
+ },
1100
+ "model.layers.39.self_attn.qkv_proj": {
1101
+ "bits": 8,
1102
+ "group_size": 128
1103
+ },
1104
+ "model.layers.40.self_attn.qkv_proj": {
1105
+ "bits": 8,
1106
+ "group_size": 128
1107
+ },
1108
+ "model.layers.41.self_attn.qkv_proj": {
1109
+ "bits": 8,
1110
+ "group_size": 128
1111
+ },
1112
+ "model.layers.42.self_attn.qkv_proj": {
1113
+ "bits": 8,
1114
+ "group_size": 128
1115
+ },
1116
+ "model.layers.43.self_attn.qkv_proj": {
1117
+ "bits": 8,
1118
+ "group_size": 128
1119
+ },
1120
+ "model.layers.44.self_attn.qkv_proj": {
1121
+ "bits": 8,
1122
+ "group_size": 128
1123
+ },
1124
+ "model.layers.45.self_attn.qkv_proj": {
1125
+ "bits": 8,
1126
+ "group_size": 128
1127
+ },
1128
+ "model.layers.46.self_attn.qkv_proj": {
1129
+ "bits": 8,
1130
+ "group_size": 128
1131
+ },
1132
+ "model.layers.47.self_attn.qkv_proj": {
1133
+ "bits": 8,
1134
+ "group_size": 128
1135
+ }
1136
+ },
1137
+ "group_size": 64,
1138
+ "iters": 0,
1139
+ "packing_format": "auto_round:auto_gptq",
1140
+ "quant_method": "auto-round",
1141
+ "sym": true
1142
+ },
1143
+ "rms_norm_eps": 1e-06,
1144
+ "rope_scaling": null,
1145
+ "rope_theta": 10000000,
1146
+ "router_aux_loss_coef": 0.001,
1147
+ "sliding_window": null,
1148
+ "tie_word_embeddings": false,
1149
+ "torch_dtype": "bfloat16",
1150
+ "transformers_version": "4.53.2",
1151
+ "use_cache": true,
1152
+ "use_sliding_window": false,
1153
+ "vocab_size": 151936
1154
+ }
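
config.json describes a Qwen3-MoE causal LM (48 layers, 128 experts, 8 active per token) whose `quantization_config` records an AutoRound export: 4-bit symmetric int weights at group size 64 by default, attention projections overridden to 8-bit at group size 128, and the MoE router gates (`mlp.gate`) kept at 16-bit, packed in the `auto_round:auto_gptq` format. A minimal loading sketch, assuming an environment where a backend for the `auto-round` quant method is installed; the repo id is a placeholder:

```python
# Hedged sketch: load this AutoRound int4 checkpoint through transformers.
# Assumes a backend for the "auto-round" quant_method (e.g. the auto-round
# package) is installed; the repo id is a hypothetical placeholder.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "<org-or-user>/Qwen3-MoE-int4-AutoRound"  # placeholder
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype="auto",   # follows the bfloat16 torch_dtype recorded above
    device_map="auto",    # spread the four safetensors shards across devices
)
tokenizer = AutoTokenizer.from_pretrained(repo_id)
```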
generation_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.53.2"
+ }
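
generation_config.json enables sampling by default with temperature 0.7, top-p 0.8, and top-k 20, and stops on either `<|im_end|>` (151645) or `<|endoftext|>` (151643). A short generation sketch that continues from the loading example after config.json and spells the sampling values out explicitly (they would otherwise be picked up from this file automatically):

```python
# Hedged sketch: generate with the sampling defaults recorded above.
# `model` and `tokenizer` come from the earlier loading sketch;
# the prompt text is illustrative only.
inputs = tokenizer("Give a one-line definition of mixture-of-experts.", return_tensors="pt").to(model.device)

outputs = model.generate(
    **inputs,
    max_new_tokens=128,
    do_sample=True,   # matches "do_sample": true
    temperature=0.7,
    top_p=0.8,
    top_k=20,
)
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```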
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2cae950b8d82dda8a39bf01481b0e8bf4e600f771bdcb833ca44bce51ad1ac5
+ size 5001116792
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30fbe9f584050a7a3ebcbe44e09d36c038e0e22c52328922a6d9615771fe094c
+ size 5002049024
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f1f350583d399702393514b34e89d3feb26d3cfb39dd86dd88358aa2c39a28cb
+ size 5001805208
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:868e7403e434e7cc21b90ebd102b086eb0e1ae04407649fb779620f8a70dec4a
+ size 2827264896
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
quantization_config.json ADDED
@@ -0,0 +1,924 @@
1
+ {
2
+ "bits": 4,
3
+ "group_size": 64,
4
+ "sym": true,
5
+ "data_type": "int",
6
+ "iters": 0,
7
+ "autoround_version": "0.6.1.dev",
8
+ "quant_method": "auto-round",
9
+ "packing_format": "auto_round:auto_gptq",
10
+ "extra_config": {
11
+ "model.layers.0.self_attn.q_proj": {
12
+ "bits": 8,
13
+ "group_size": 128
14
+ },
15
+ "model.layers.0.self_attn.k_proj": {
16
+ "bits": 8,
17
+ "group_size": 128
18
+ },
19
+ "model.layers.0.self_attn.v_proj": {
20
+ "bits": 8,
21
+ "group_size": 128
22
+ },
23
+ "model.layers.0.self_attn.o_proj": {
24
+ "bits": 8,
25
+ "group_size": 128
26
+ },
27
+ "model.layers.0.mlp.gate": {
28
+ "bits": 16
29
+ },
30
+ "model.layers.1.self_attn.q_proj": {
31
+ "bits": 8,
32
+ "group_size": 128
33
+ },
34
+ "model.layers.1.self_attn.k_proj": {
35
+ "bits": 8,
36
+ "group_size": 128
37
+ },
38
+ "model.layers.1.self_attn.v_proj": {
39
+ "bits": 8,
40
+ "group_size": 128
41
+ },
42
+ "model.layers.1.self_attn.o_proj": {
43
+ "bits": 8,
44
+ "group_size": 128
45
+ },
46
+ "model.layers.1.mlp.gate": {
47
+ "bits": 16
48
+ },
49
+ "model.layers.2.self_attn.q_proj": {
50
+ "bits": 8,
51
+ "group_size": 128
52
+ },
53
+ "model.layers.2.self_attn.k_proj": {
54
+ "bits": 8,
55
+ "group_size": 128
56
+ },
57
+ "model.layers.2.self_attn.v_proj": {
58
+ "bits": 8,
59
+ "group_size": 128
60
+ },
61
+ "model.layers.2.self_attn.o_proj": {
62
+ "bits": 8,
63
+ "group_size": 128
64
+ },
65
+ "model.layers.2.mlp.gate": {
66
+ "bits": 16
67
+ },
68
+ "model.layers.3.self_attn.q_proj": {
69
+ "bits": 8,
70
+ "group_size": 128
71
+ },
72
+ "model.layers.3.self_attn.k_proj": {
73
+ "bits": 8,
74
+ "group_size": 128
75
+ },
76
+ "model.layers.3.self_attn.v_proj": {
77
+ "bits": 8,
78
+ "group_size": 128
79
+ },
80
+ "model.layers.3.self_attn.o_proj": {
81
+ "bits": 8,
82
+ "group_size": 128
83
+ },
84
+ "model.layers.3.mlp.gate": {
85
+ "bits": 16
86
+ },
87
+ "model.layers.4.self_attn.q_proj": {
88
+ "bits": 8,
89
+ "group_size": 128
90
+ },
91
+ "model.layers.4.self_attn.k_proj": {
92
+ "bits": 8,
93
+ "group_size": 128
94
+ },
95
+ "model.layers.4.self_attn.v_proj": {
96
+ "bits": 8,
97
+ "group_size": 128
98
+ },
99
+ "model.layers.4.self_attn.o_proj": {
100
+ "bits": 8,
101
+ "group_size": 128
102
+ },
103
+ "model.layers.4.mlp.gate": {
104
+ "bits": 16
105
+ },
106
+ "model.layers.5.self_attn.q_proj": {
107
+ "bits": 8,
108
+ "group_size": 128
109
+ },
110
+ "model.layers.5.self_attn.k_proj": {
111
+ "bits": 8,
112
+ "group_size": 128
113
+ },
114
+ "model.layers.5.self_attn.v_proj": {
115
+ "bits": 8,
116
+ "group_size": 128
117
+ },
118
+ "model.layers.5.self_attn.o_proj": {
119
+ "bits": 8,
120
+ "group_size": 128
121
+ },
122
+ "model.layers.5.mlp.gate": {
123
+ "bits": 16
124
+ },
125
+ "model.layers.6.self_attn.q_proj": {
126
+ "bits": 8,
127
+ "group_size": 128
128
+ },
129
+ "model.layers.6.self_attn.k_proj": {
130
+ "bits": 8,
131
+ "group_size": 128
132
+ },
133
+ "model.layers.6.self_attn.v_proj": {
134
+ "bits": 8,
135
+ "group_size": 128
136
+ },
137
+ "model.layers.6.self_attn.o_proj": {
138
+ "bits": 8,
139
+ "group_size": 128
140
+ },
141
+ "model.layers.6.mlp.gate": {
142
+ "bits": 16
143
+ },
144
+ "model.layers.7.self_attn.q_proj": {
145
+ "bits": 8,
146
+ "group_size": 128
147
+ },
148
+ "model.layers.7.self_attn.k_proj": {
149
+ "bits": 8,
150
+ "group_size": 128
151
+ },
152
+ "model.layers.7.self_attn.v_proj": {
153
+ "bits": 8,
154
+ "group_size": 128
155
+ },
156
+ "model.layers.7.self_attn.o_proj": {
157
+ "bits": 8,
158
+ "group_size": 128
159
+ },
160
+ "model.layers.7.mlp.gate": {
161
+ "bits": 16
162
+ },
163
+ "model.layers.8.self_attn.q_proj": {
164
+ "bits": 8,
165
+ "group_size": 128
166
+ },
167
+ "model.layers.8.self_attn.k_proj": {
168
+ "bits": 8,
169
+ "group_size": 128
170
+ },
171
+ "model.layers.8.self_attn.v_proj": {
172
+ "bits": 8,
173
+ "group_size": 128
174
+ },
175
+ "model.layers.8.self_attn.o_proj": {
176
+ "bits": 8,
177
+ "group_size": 128
178
+ },
179
+ "model.layers.8.mlp.gate": {
180
+ "bits": 16
181
+ },
182
+ "model.layers.9.self_attn.q_proj": {
183
+ "bits": 8,
184
+ "group_size": 128
185
+ },
186
+ "model.layers.9.self_attn.k_proj": {
187
+ "bits": 8,
188
+ "group_size": 128
189
+ },
190
+ "model.layers.9.self_attn.v_proj": {
191
+ "bits": 8,
192
+ "group_size": 128
193
+ },
194
+ "model.layers.9.self_attn.o_proj": {
195
+ "bits": 8,
196
+ "group_size": 128
197
+ },
198
+ "model.layers.9.mlp.gate": {
199
+ "bits": 16
200
+ },
201
+ "model.layers.10.self_attn.q_proj": {
202
+ "bits": 8,
203
+ "group_size": 128
204
+ },
205
+ "model.layers.10.self_attn.k_proj": {
206
+ "bits": 8,
207
+ "group_size": 128
208
+ },
209
+ "model.layers.10.self_attn.v_proj": {
210
+ "bits": 8,
211
+ "group_size": 128
212
+ },
213
+ "model.layers.10.self_attn.o_proj": {
214
+ "bits": 8,
215
+ "group_size": 128
216
+ },
217
+ "model.layers.10.mlp.gate": {
218
+ "bits": 16
219
+ },
220
+ "model.layers.11.self_attn.q_proj": {
221
+ "bits": 8,
222
+ "group_size": 128
223
+ },
224
+ "model.layers.11.self_attn.k_proj": {
225
+ "bits": 8,
226
+ "group_size": 128
227
+ },
228
+ "model.layers.11.self_attn.v_proj": {
229
+ "bits": 8,
230
+ "group_size": 128
231
+ },
232
+ "model.layers.11.self_attn.o_proj": {
233
+ "bits": 8,
234
+ "group_size": 128
235
+ },
236
+ "model.layers.11.mlp.gate": {
237
+ "bits": 16
238
+ },
239
+ "model.layers.12.self_attn.q_proj": {
240
+ "bits": 8,
241
+ "group_size": 128
242
+ },
243
+ "model.layers.12.self_attn.k_proj": {
244
+ "bits": 8,
245
+ "group_size": 128
246
+ },
247
+ "model.layers.12.self_attn.v_proj": {
248
+ "bits": 8,
249
+ "group_size": 128
250
+ },
251
+ "model.layers.12.self_attn.o_proj": {
252
+ "bits": 8,
253
+ "group_size": 128
254
+ },
255
+ "model.layers.12.mlp.gate": {
256
+ "bits": 16
257
+ },
258
+ "model.layers.13.self_attn.q_proj": {
259
+ "bits": 8,
260
+ "group_size": 128
261
+ },
262
+ "model.layers.13.self_attn.k_proj": {
263
+ "bits": 8,
264
+ "group_size": 128
265
+ },
266
+ "model.layers.13.self_attn.v_proj": {
267
+ "bits": 8,
268
+ "group_size": 128
269
+ },
270
+ "model.layers.13.self_attn.o_proj": {
271
+ "bits": 8,
272
+ "group_size": 128
273
+ },
274
+ "model.layers.13.mlp.gate": {
275
+ "bits": 16
276
+ },
277
+ "model.layers.14.self_attn.q_proj": {
278
+ "bits": 8,
279
+ "group_size": 128
280
+ },
281
+ "model.layers.14.self_attn.k_proj": {
282
+ "bits": 8,
283
+ "group_size": 128
284
+ },
285
+ "model.layers.14.self_attn.v_proj": {
286
+ "bits": 8,
287
+ "group_size": 128
288
+ },
289
+ "model.layers.14.self_attn.o_proj": {
290
+ "bits": 8,
291
+ "group_size": 128
292
+ },
293
+ "model.layers.14.mlp.gate": {
294
+ "bits": 16
295
+ },
296
+ "model.layers.15.self_attn.q_proj": {
297
+ "bits": 8,
298
+ "group_size": 128
299
+ },
300
+ "model.layers.15.self_attn.k_proj": {
301
+ "bits": 8,
302
+ "group_size": 128
303
+ },
304
+ "model.layers.15.self_attn.v_proj": {
305
+ "bits": 8,
306
+ "group_size": 128
307
+ },
308
+ "model.layers.15.self_attn.o_proj": {
309
+ "bits": 8,
310
+ "group_size": 128
311
+ },
312
+ "model.layers.15.mlp.gate": {
313
+ "bits": 16
314
+ },
315
+ "model.layers.16.self_attn.q_proj": {
316
+ "bits": 8,
317
+ "group_size": 128
318
+ },
319
+ "model.layers.16.self_attn.k_proj": {
320
+ "bits": 8,
321
+ "group_size": 128
322
+ },
323
+ "model.layers.16.self_attn.v_proj": {
324
+ "bits": 8,
325
+ "group_size": 128
326
+ },
327
+ "model.layers.16.self_attn.o_proj": {
328
+ "bits": 8,
329
+ "group_size": 128
330
+ },
331
+ "model.layers.16.mlp.gate": {
332
+ "bits": 16
333
+ },
334
+ "model.layers.17.self_attn.q_proj": {
335
+ "bits": 8,
336
+ "group_size": 128
337
+ },
338
+ "model.layers.17.self_attn.k_proj": {
339
+ "bits": 8,
340
+ "group_size": 128
341
+ },
342
+ "model.layers.17.self_attn.v_proj": {
343
+ "bits": 8,
344
+ "group_size": 128
345
+ },
346
+ "model.layers.17.self_attn.o_proj": {
347
+ "bits": 8,
348
+ "group_size": 128
349
+ },
350
+ "model.layers.17.mlp.gate": {
351
+ "bits": 16
352
+ },
353
+ "model.layers.18.self_attn.q_proj": {
354
+ "bits": 8,
355
+ "group_size": 128
356
+ },
357
+ "model.layers.18.self_attn.k_proj": {
358
+ "bits": 8,
359
+ "group_size": 128
360
+ },
361
+ "model.layers.18.self_attn.v_proj": {
362
+ "bits": 8,
363
+ "group_size": 128
364
+ },
365
+ "model.layers.18.self_attn.o_proj": {
366
+ "bits": 8,
367
+ "group_size": 128
368
+ },
369
+ "model.layers.18.mlp.gate": {
370
+ "bits": 16
371
+ },
372
+ "model.layers.19.self_attn.q_proj": {
373
+ "bits": 8,
374
+ "group_size": 128
375
+ },
376
+ "model.layers.19.self_attn.k_proj": {
377
+ "bits": 8,
378
+ "group_size": 128
379
+ },
380
+ "model.layers.19.self_attn.v_proj": {
381
+ "bits": 8,
382
+ "group_size": 128
383
+ },
384
+ "model.layers.19.self_attn.o_proj": {
385
+ "bits": 8,
386
+ "group_size": 128
387
+ },
388
+ "model.layers.19.mlp.gate": {
389
+ "bits": 16
390
+ },
391
+ "model.layers.20.self_attn.q_proj": {
392
+ "bits": 8,
393
+ "group_size": 128
394
+ },
395
+ "model.layers.20.self_attn.k_proj": {
396
+ "bits": 8,
397
+ "group_size": 128
398
+ },
399
+ "model.layers.20.self_attn.v_proj": {
400
+ "bits": 8,
401
+ "group_size": 128
402
+ },
403
+ "model.layers.20.self_attn.o_proj": {
404
+ "bits": 8,
405
+ "group_size": 128
406
+ },
407
+ "model.layers.20.mlp.gate": {
408
+ "bits": 16
409
+ },
410
+ "model.layers.21.self_attn.q_proj": {
411
+ "bits": 8,
412
+ "group_size": 128
413
+ },
414
+ "model.layers.21.self_attn.k_proj": {
415
+ "bits": 8,
416
+ "group_size": 128
417
+ },
418
+ "model.layers.21.self_attn.v_proj": {
419
+ "bits": 8,
420
+ "group_size": 128
421
+ },
422
+ "model.layers.21.self_attn.o_proj": {
423
+ "bits": 8,
424
+ "group_size": 128
425
+ },
426
+ "model.layers.21.mlp.gate": {
427
+ "bits": 16
428
+ },
429
+ "model.layers.22.self_attn.q_proj": {
430
+ "bits": 8,
431
+ "group_size": 128
432
+ },
433
+ "model.layers.22.self_attn.k_proj": {
434
+ "bits": 8,
435
+ "group_size": 128
436
+ },
437
+ "model.layers.22.self_attn.v_proj": {
438
+ "bits": 8,
439
+ "group_size": 128
440
+ },
441
+ "model.layers.22.self_attn.o_proj": {
442
+ "bits": 8,
443
+ "group_size": 128
444
+ },
445
+ "model.layers.22.mlp.gate": {
446
+ "bits": 16
447
+ },
448
+ "model.layers.23.self_attn.q_proj": {
449
+ "bits": 8,
450
+ "group_size": 128
451
+ },
452
+ "model.layers.23.self_attn.k_proj": {
453
+ "bits": 8,
454
+ "group_size": 128
455
+ },
456
+ "model.layers.23.self_attn.v_proj": {
457
+ "bits": 8,
458
+ "group_size": 128
459
+ },
460
+ "model.layers.23.self_attn.o_proj": {
461
+ "bits": 8,
462
+ "group_size": 128
463
+ },
464
+ "model.layers.23.mlp.gate": {
465
+ "bits": 16
466
+ },
467
+ "model.layers.24.self_attn.q_proj": {
468
+ "bits": 8,
469
+ "group_size": 128
470
+ },
471
+ "model.layers.24.self_attn.k_proj": {
472
+ "bits": 8,
473
+ "group_size": 128
474
+ },
475
+ "model.layers.24.self_attn.v_proj": {
476
+ "bits": 8,
477
+ "group_size": 128
478
+ },
479
+ "model.layers.24.self_attn.o_proj": {
480
+ "bits": 8,
481
+ "group_size": 128
482
+ },
483
+ "model.layers.24.mlp.gate": {
484
+ "bits": 16
485
+ },
486
+ "model.layers.25.self_attn.q_proj": {
487
+ "bits": 8,
488
+ "group_size": 128
489
+ },
490
+ "model.layers.25.self_attn.k_proj": {
491
+ "bits": 8,
492
+ "group_size": 128
493
+ },
494
+ "model.layers.25.self_attn.v_proj": {
495
+ "bits": 8,
496
+ "group_size": 128
497
+ },
498
+ "model.layers.25.self_attn.o_proj": {
499
+ "bits": 8,
500
+ "group_size": 128
501
+ },
502
+ "model.layers.25.mlp.gate": {
503
+ "bits": 16
504
+ },
505
+ "model.layers.26.self_attn.q_proj": {
506
+ "bits": 8,
507
+ "group_size": 128
508
+ },
509
+ "model.layers.26.self_attn.k_proj": {
510
+ "bits": 8,
511
+ "group_size": 128
512
+ },
513
+ "model.layers.26.self_attn.v_proj": {
514
+ "bits": 8,
515
+ "group_size": 128
516
+ },
517
+ "model.layers.26.self_attn.o_proj": {
518
+ "bits": 8,
519
+ "group_size": 128
520
+ },
521
+ "model.layers.26.mlp.gate": {
522
+ "bits": 16
523
+ },
524
+ "model.layers.27.self_attn.q_proj": {
525
+ "bits": 8,
526
+ "group_size": 128
527
+ },
528
+ "model.layers.27.self_attn.k_proj": {
529
+ "bits": 8,
530
+ "group_size": 128
531
+ },
532
+ "model.layers.27.self_attn.v_proj": {
533
+ "bits": 8,
534
+ "group_size": 128
535
+ },
536
+ "model.layers.27.self_attn.o_proj": {
537
+ "bits": 8,
538
+ "group_size": 128
539
+ },
540
+ "model.layers.27.mlp.gate": {
541
+ "bits": 16
542
+ },
543
+ "model.layers.28.self_attn.q_proj": {
544
+ "bits": 8,
545
+ "group_size": 128
546
+ },
547
+ "model.layers.28.self_attn.k_proj": {
548
+ "bits": 8,
549
+ "group_size": 128
550
+ },
551
+ "model.layers.28.self_attn.v_proj": {
552
+ "bits": 8,
553
+ "group_size": 128
554
+ },
555
+ "model.layers.28.self_attn.o_proj": {
556
+ "bits": 8,
557
+ "group_size": 128
558
+ },
559
+ "model.layers.28.mlp.gate": {
560
+ "bits": 16
561
+ },
562
+ "model.layers.29.self_attn.q_proj": {
563
+ "bits": 8,
564
+ "group_size": 128
565
+ },
566
+ "model.layers.29.self_attn.k_proj": {
567
+ "bits": 8,
568
+ "group_size": 128
569
+ },
570
+ "model.layers.29.self_attn.v_proj": {
571
+ "bits": 8,
572
+ "group_size": 128
573
+ },
574
+ "model.layers.29.self_attn.o_proj": {
575
+ "bits": 8,
576
+ "group_size": 128
577
+ },
578
+ "model.layers.29.mlp.gate": {
579
+ "bits": 16
580
+ },
581
+ "model.layers.30.self_attn.q_proj": {
582
+ "bits": 8,
583
+ "group_size": 128
584
+ },
585
+ "model.layers.30.self_attn.k_proj": {
586
+ "bits": 8,
587
+ "group_size": 128
588
+ },
589
+ "model.layers.30.self_attn.v_proj": {
590
+ "bits": 8,
591
+ "group_size": 128
592
+ },
593
+ "model.layers.30.self_attn.o_proj": {
594
+ "bits": 8,
595
+ "group_size": 128
596
+ },
597
+ "model.layers.30.mlp.gate": {
598
+ "bits": 16
599
+ },
600
+ "model.layers.31.self_attn.q_proj": {
601
+ "bits": 8,
602
+ "group_size": 128
603
+ },
604
+ "model.layers.31.self_attn.k_proj": {
605
+ "bits": 8,
606
+ "group_size": 128
607
+ },
608
+ "model.layers.31.self_attn.v_proj": {
609
+ "bits": 8,
610
+ "group_size": 128
611
+ },
612
+ "model.layers.31.self_attn.o_proj": {
613
+ "bits": 8,
614
+ "group_size": 128
615
+ },
616
+ "model.layers.31.mlp.gate": {
617
+ "bits": 16
618
+ },
619
+ "model.layers.32.self_attn.q_proj": {
620
+ "bits": 8,
621
+ "group_size": 128
622
+ },
623
+ "model.layers.32.self_attn.k_proj": {
624
+ "bits": 8,
625
+ "group_size": 128
626
+ },
627
+ "model.layers.32.self_attn.v_proj": {
628
+ "bits": 8,
629
+ "group_size": 128
630
+ },
631
+ "model.layers.32.self_attn.o_proj": {
632
+ "bits": 8,
633
+ "group_size": 128
634
+ },
635
+ "model.layers.32.mlp.gate": {
636
+ "bits": 16
637
+ },
638
+ "model.layers.33.self_attn.q_proj": {
639
+ "bits": 8,
640
+ "group_size": 128
641
+ },
642
+ "model.layers.33.self_attn.k_proj": {
643
+ "bits": 8,
644
+ "group_size": 128
645
+ },
646
+ "model.layers.33.self_attn.v_proj": {
647
+ "bits": 8,
648
+ "group_size": 128
649
+ },
650
+ "model.layers.33.self_attn.o_proj": {
651
+ "bits": 8,
652
+ "group_size": 128
653
+ },
654
+ "model.layers.33.mlp.gate": {
655
+ "bits": 16
656
+ },
657
+ "model.layers.34.self_attn.q_proj": {
658
+ "bits": 8,
659
+ "group_size": 128
660
+ },
661
+ "model.layers.34.self_attn.k_proj": {
662
+ "bits": 8,
663
+ "group_size": 128
664
+ },
665
+ "model.layers.34.self_attn.v_proj": {
666
+ "bits": 8,
667
+ "group_size": 128
668
+ },
669
+ "model.layers.34.self_attn.o_proj": {
670
+ "bits": 8,
671
+ "group_size": 128
672
+ },
673
+ "model.layers.34.mlp.gate": {
674
+ "bits": 16
675
+ },
676
+ "model.layers.35.self_attn.q_proj": {
677
+ "bits": 8,
678
+ "group_size": 128
679
+ },
680
+ "model.layers.35.self_attn.k_proj": {
681
+ "bits": 8,
682
+ "group_size": 128
683
+ },
684
+ "model.layers.35.self_attn.v_proj": {
685
+ "bits": 8,
686
+ "group_size": 128
687
+ },
688
+ "model.layers.35.self_attn.o_proj": {
689
+ "bits": 8,
690
+ "group_size": 128
691
+ },
692
+ "model.layers.35.mlp.gate": {
693
+ "bits": 16
694
+ },
695
+ "model.layers.36.self_attn.q_proj": {
696
+ "bits": 8,
697
+ "group_size": 128
698
+ },
699
+ "model.layers.36.self_attn.k_proj": {
700
+ "bits": 8,
701
+ "group_size": 128
702
+ },
703
+ "model.layers.36.self_attn.v_proj": {
704
+ "bits": 8,
705
+ "group_size": 128
706
+ },
707
+ "model.layers.36.self_attn.o_proj": {
708
+ "bits": 8,
709
+ "group_size": 128
710
+ },
711
+ "model.layers.36.mlp.gate": {
712
+ "bits": 16
713
+ },
714
+ "model.layers.37.self_attn.q_proj": {
715
+ "bits": 8,
716
+ "group_size": 128
717
+ },
718
+ "model.layers.37.self_attn.k_proj": {
719
+ "bits": 8,
720
+ "group_size": 128
721
+ },
722
+ "model.layers.37.self_attn.v_proj": {
723
+ "bits": 8,
724
+ "group_size": 128
725
+ },
726
+ "model.layers.37.self_attn.o_proj": {
727
+ "bits": 8,
728
+ "group_size": 128
729
+ },
730
+ "model.layers.37.mlp.gate": {
731
+ "bits": 16
732
+ },
733
+ "model.layers.38.self_attn.q_proj": {
734
+ "bits": 8,
735
+ "group_size": 128
736
+ },
737
+ "model.layers.38.self_attn.k_proj": {
738
+ "bits": 8,
739
+ "group_size": 128
740
+ },
741
+ "model.layers.38.self_attn.v_proj": {
742
+ "bits": 8,
743
+ "group_size": 128
744
+ },
745
+ "model.layers.38.self_attn.o_proj": {
746
+ "bits": 8,
747
+ "group_size": 128
748
+ },
749
+ "model.layers.38.mlp.gate": {
750
+ "bits": 16
751
+ },
752
+ "model.layers.39.self_attn.q_proj": {
753
+ "bits": 8,
754
+ "group_size": 128
755
+ },
756
+ "model.layers.39.self_attn.k_proj": {
757
+ "bits": 8,
758
+ "group_size": 128
759
+ },
760
+ "model.layers.39.self_attn.v_proj": {
761
+ "bits": 8,
762
+ "group_size": 128
763
+ },
764
+ "model.layers.39.self_attn.o_proj": {
765
+ "bits": 8,
766
+ "group_size": 128
767
+ },
768
+ "model.layers.39.mlp.gate": {
769
+ "bits": 16
770
+ },
771
+ "model.layers.40.self_attn.q_proj": {
772
+ "bits": 8,
773
+ "group_size": 128
774
+ },
775
+ "model.layers.40.self_attn.k_proj": {
776
+ "bits": 8,
777
+ "group_size": 128
778
+ },
779
+ "model.layers.40.self_attn.v_proj": {
780
+ "bits": 8,
781
+ "group_size": 128
782
+ },
783
+ "model.layers.40.self_attn.o_proj": {
784
+ "bits": 8,
785
+ "group_size": 128
786
+ },
787
+ "model.layers.40.mlp.gate": {
788
+ "bits": 16
789
+ },
790
+ "model.layers.41.self_attn.q_proj": {
791
+ "bits": 8,
792
+ "group_size": 128
793
+ },
794
+ "model.layers.41.self_attn.k_proj": {
795
+ "bits": 8,
796
+ "group_size": 128
797
+ },
798
+ "model.layers.41.self_attn.v_proj": {
799
+ "bits": 8,
800
+ "group_size": 128
801
+ },
802
+ "model.layers.41.self_attn.o_proj": {
803
+ "bits": 8,
804
+ "group_size": 128
805
+ },
806
+ "model.layers.41.mlp.gate": {
807
+ "bits": 16
808
+ },
809
+ "model.layers.42.self_attn.q_proj": {
810
+ "bits": 8,
811
+ "group_size": 128
812
+ },
813
+ "model.layers.42.self_attn.k_proj": {
814
+ "bits": 8,
815
+ "group_size": 128
816
+ },
817
+ "model.layers.42.self_attn.v_proj": {
818
+ "bits": 8,
819
+ "group_size": 128
820
+ },
821
+ "model.layers.42.self_attn.o_proj": {
822
+ "bits": 8,
823
+ "group_size": 128
824
+ },
825
+ "model.layers.42.mlp.gate": {
826
+ "bits": 16
827
+ },
828
+ "model.layers.43.self_attn.q_proj": {
829
+ "bits": 8,
830
+ "group_size": 128
831
+ },
832
+ "model.layers.43.self_attn.k_proj": {
833
+ "bits": 8,
834
+ "group_size": 128
835
+ },
836
+ "model.layers.43.self_attn.v_proj": {
837
+ "bits": 8,
838
+ "group_size": 128
839
+ },
840
+ "model.layers.43.self_attn.o_proj": {
841
+ "bits": 8,
842
+ "group_size": 128
843
+ },
844
+ "model.layers.43.mlp.gate": {
845
+ "bits": 16
846
+ },
847
+ "model.layers.44.self_attn.q_proj": {
848
+ "bits": 8,
849
+ "group_size": 128
850
+ },
851
+ "model.layers.44.self_attn.k_proj": {
852
+ "bits": 8,
853
+ "group_size": 128
854
+ },
855
+ "model.layers.44.self_attn.v_proj": {
856
+ "bits": 8,
857
+ "group_size": 128
858
+ },
859
+ "model.layers.44.self_attn.o_proj": {
860
+ "bits": 8,
861
+ "group_size": 128
862
+ },
863
+ "model.layers.44.mlp.gate": {
864
+ "bits": 16
865
+ },
866
+ "model.layers.45.self_attn.q_proj": {
867
+ "bits": 8,
868
+ "group_size": 128
869
+ },
870
+ "model.layers.45.self_attn.k_proj": {
871
+ "bits": 8,
872
+ "group_size": 128
873
+ },
874
+ "model.layers.45.self_attn.v_proj": {
875
+ "bits": 8,
876
+ "group_size": 128
877
+ },
878
+ "model.layers.45.self_attn.o_proj": {
879
+ "bits": 8,
880
+ "group_size": 128
881
+ },
882
+ "model.layers.45.mlp.gate": {
883
+ "bits": 16
884
+ },
885
+ "model.layers.46.self_attn.q_proj": {
886
+ "bits": 8,
887
+ "group_size": 128
888
+ },
889
+ "model.layers.46.self_attn.k_proj": {
890
+ "bits": 8,
891
+ "group_size": 128
892
+ },
893
+ "model.layers.46.self_attn.v_proj": {
894
+ "bits": 8,
895
+ "group_size": 128
896
+ },
897
+ "model.layers.46.self_attn.o_proj": {
898
+ "bits": 8,
899
+ "group_size": 128
900
+ },
901
+ "model.layers.46.mlp.gate": {
902
+ "bits": 16
903
+ },
904
+ "model.layers.47.self_attn.q_proj": {
905
+ "bits": 8,
906
+ "group_size": 128
907
+ },
908
+ "model.layers.47.self_attn.k_proj": {
909
+ "bits": 8,
910
+ "group_size": 128
911
+ },
912
+ "model.layers.47.self_attn.v_proj": {
913
+ "bits": 8,
914
+ "group_size": 128
915
+ },
916
+ "model.layers.47.self_attn.o_proj": {
917
+ "bits": 8,
918
+ "group_size": 128
919
+ },
920
+ "model.layers.47.mlp.gate": {
921
+ "bits": 16
922
+ }
923
+ }
924
+ }
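The tail of the quantization config above completes a mixed-bit recipe: the attention projections (`q_proj`/`k_proj`/`v_proj`/`o_proj`) in these layers are quantized to 8 bits with `group_size` 128, while each layer's `mlp.gate` module (likely the MoE router gate) is kept at 16 bits. Below is a minimal sketch of how such per-layer overrides could be summarized; it assumes the overrides sit under `quantization_config["extra_config"]` in the repo's `config.json` (the key name is inferred from this diff, not confirmed) and that the file is available in the working directory.

```python
# Minimal sketch: summarize the per-layer overrides shown in the diff above.
# Assumption: overrides live under quantization_config["extra_config"] in a
# locally available config.json.
import json
from collections import Counter

with open("config.json") as f:
    cfg = json.load(f)

overrides = cfg.get("quantization_config", {}).get("extra_config", {})

# Count how many modules land at each (bits, group_size) combination.
summary = Counter(
    (entry.get("bits"), entry.get("group_size")) for entry in overrides.values()
)
for (bits, group_size), count in summary.items():
    print(f"bits={bits} group_size={group_size} -> {count} modules")

# Spot-check a router gate, which the diff keeps at 16 bits.
print(overrides.get("model.layers.47.mlp.gate"))
```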
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
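The special-token map above follows the Qwen chat convention: generation stops on `<|im_end|>` and padding uses `<|endoftext|>`, with thirteen additional markers registered for chat, vision, and reference spans. A quick sanity check after downloading the file (a sketch, assuming a local copy of `special_tokens_map.json`):

```python
# Minimal sketch: confirm the EOS / PAD choices declared in the file above.
import json

with open("special_tokens_map.json") as f:
    special = json.load(f)

print(special["eos_token"]["content"])            # "<|im_end|>"
print(special["pad_token"]["content"])            # "<|endoftext|>"
print(len(special["additional_special_tokens"]))  # 13 extra markers
```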
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+ size 11422654
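Note that `tokenizer.json` is tracked with Git LFS, so the diff above shows only the pointer (object hash and a size of roughly 11.4 MB), not the file contents. A plain `git clone` without LFS support would leave just this pointer text on disk; a sketch of fetching the resolved file via the Hugging Face Hub client is below. The repo id is a placeholder; substitute the actual model repository.

```python
# Minimal sketch: download the real tokenizer.json rather than the LFS pointer.
# "user/repo-id" is a placeholder for the actual model repository.
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="user/repo-id", filename="tokenizer.json")
print(path)  # local cache path to the ~11.4 MB tokenizer.json
```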
tokenizer_config.json ADDED
@@ -0,0 +1,239 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151665": {
+ "content": "<tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151666": {
+ "content": "</tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151667": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151668": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 262144,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
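With `tokenizer_config.json` in place (class `Qwen2Tokenizer`, `model_max_length` 262144, EOS `<|im_end|>`, and the tool/think markers registered in `added_tokens_decoder`), the tokenizer can be loaded directly from the uploaded repository. A minimal sketch is shown below; the repo id is a placeholder for the actual model path, and the expected output values are taken from the config above.

```python
# Minimal sketch: load the tokenizer defined by the files in this commit and
# render a chat prompt. "user/repo-id" is a placeholder for the actual repo.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/repo-id")
print(tok.eos_token, tok.model_max_length)  # expected: <|im_end|> 262144

messages = [{"role": "user", "content": "Hello!"}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```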
vocab.json ADDED
The diff for this file is too large to render. See raw diff