justinjja committed
Commit 11a6b2e · verified · Parent: 7d8799f

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "<|endofexecution|>": 151655,
+   "<|endofobservation|>": 151653,
+   "<|endofresponse|>": 151649,
+   "<|endofsystem|>": 151651,
+   "<|endoftext|>": 151643,
+   "<|endofuserprompt|>": 151647,
+   "<|execution|>": 151654,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|observation|>": 151652,
+   "<|reject-unknown|>": 151656,
+   "<|response|>": 151648,
+   "<|sec-cot|>": 151657,
+   "<|sec-end-cot|>": 151658,
+   "<|system|>": 151650,
+   "<|userprompt|>": 151646
+ }
chat_template.jinja ADDED
@@ -0,0 +1 @@
+ {% if messages[0]['role'] == 'system' %}<|system|>{{ messages[0]['content'] }}<|endofsystem|>{% set start_idx = 1 %}{% else %}<|system|>You are a helpful assistant.<|endofsystem|>{% set start_idx = 0 %}{% endif %}{% for idx in range(start_idx, messages|length) %}{% if messages[idx]['role'] == 'user' %}<|userprompt|>{{ messages[idx]['content'] }}<|endofuserprompt|>{% elif messages[idx]['role'] == 'assistant' %}<|response|>{{ messages[idx]['content'] }}<|endofresponse|>{% endif %}{% endfor %}{% if add_generation_prompt and messages[-1]['role'] == 'user' %}<|response|>{% endif %}
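
For reference, the template above injects a default system prompt when none is supplied, wraps user turns in <|userprompt|>…<|endofuserprompt|> and assistant turns in <|response|>…<|endofresponse|>, and appends a bare <|response|> as the generation prompt. A minimal rendering sketch using plain jinja2 (an assumption for illustration; transformers applies the same file through apply_chat_template):

```python
# Minimal sketch (not part of this commit): render chat_template.jinja
# directly with jinja2 to see the prompt format it produces.
from jinja2 import Template

template = Template(open("chat_template.jinja").read())
text = template.render(
    messages=[{"role": "user", "content": "Hello!"}],
    add_generation_prompt=True,
)
print(text)
# -> <|system|>You are a helpful assistant.<|endofsystem|><|userprompt|>Hello!<|endofuserprompt|><|response|>
```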
config.json ADDED
@@ -0,0 +1,320 @@
+ {
+   "architectures": [
+     "Dots1ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "eos_token_id": 151645,
+   "first_k_dense_replace": 1,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 10944,
+   "layer_types": [
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 32768,
+   "max_window_layers": 62,
+   "model_type": "dots1",
+   "moe_intermediate_size": 1408,
+   "moe_layer_freq": 1,
+   "n_group": 1,
+   "n_routed_experts": 128,
+   "n_shared_experts": 2,
+   "norm_topk_prob": true,
+   "num_attention_heads": 32,
+   "num_experts_per_tok": 6,
+   "num_hidden_layers": 62,
+   "num_key_value_heads": 32,
+   "pretraining_tp": 1,
+   "quantization_config": {
+     "config_groups": {
+       "group_0": {
+         "input_activations": null,
+         "output_activations": null,
+         "targets": [
+           "Linear"
+         ],
+         "weights": {
+           "actorder": null,
+           "block_structure": null,
+           "dynamic": false,
+           "group_size": 128,
+           "num_bits": 4,
+           "observer": "minmax",
+           "observer_kwargs": {},
+           "strategy": "group",
+           "symmetric": true,
+           "type": "int"
+         }
+       }
+     },
+     "format": "pack-quantized",
+     "global_compression_ratio": null,
+     "ignore": [
+       "model.layers.0.mlp.gate_proj",
+       "model.layers.0.mlp.down_proj",
+       "model.layers.1.mlp.shared_experts.gate_proj",
+       "model.layers.1.mlp.shared_experts.up_proj",
+       "model.layers.1.mlp.shared_experts.down_proj",
+       "model.layers.2.mlp.shared_experts.gate_proj",
+       "model.layers.2.mlp.shared_experts.up_proj",
+       "model.layers.2.mlp.shared_experts.down_proj",
+       "model.layers.3.mlp.shared_experts.gate_proj",
+       "model.layers.3.mlp.shared_experts.up_proj",
+       "model.layers.3.mlp.shared_experts.down_proj",
+       "model.layers.4.mlp.shared_experts.gate_proj",
+       "model.layers.4.mlp.shared_experts.up_proj",
+       "model.layers.4.mlp.shared_experts.down_proj",
+       "model.layers.5.mlp.shared_experts.gate_proj",
+       "model.layers.5.mlp.shared_experts.up_proj",
+       "model.layers.5.mlp.shared_experts.down_proj",
+       "model.layers.6.mlp.shared_experts.gate_proj",
+       "model.layers.6.mlp.shared_experts.up_proj",
+       "model.layers.6.mlp.shared_experts.down_proj",
+       "model.layers.7.mlp.shared_experts.gate_proj",
+       "model.layers.7.mlp.shared_experts.up_proj",
+       "model.layers.7.mlp.shared_experts.down_proj",
+       "model.layers.8.mlp.shared_experts.gate_proj",
+       "model.layers.8.mlp.shared_experts.up_proj",
+       "model.layers.8.mlp.shared_experts.down_proj",
+       "model.layers.9.mlp.shared_experts.gate_proj",
+       "model.layers.9.mlp.shared_experts.up_proj",
+       "model.layers.9.mlp.shared_experts.down_proj",
+       "model.layers.10.mlp.shared_experts.gate_proj",
+       "model.layers.10.mlp.shared_experts.up_proj",
+       "model.layers.10.mlp.shared_experts.down_proj",
+       "model.layers.11.mlp.shared_experts.gate_proj",
+       "model.layers.11.mlp.shared_experts.up_proj",
+       "model.layers.11.mlp.shared_experts.down_proj",
+       "model.layers.12.mlp.shared_experts.gate_proj",
+       "model.layers.12.mlp.shared_experts.up_proj",
+       "model.layers.12.mlp.shared_experts.down_proj",
+       "model.layers.13.mlp.shared_experts.gate_proj",
+       "model.layers.13.mlp.shared_experts.up_proj",
+       "model.layers.13.mlp.shared_experts.down_proj",
+       "model.layers.14.mlp.shared_experts.gate_proj",
+       "model.layers.14.mlp.shared_experts.up_proj",
+       "model.layers.14.mlp.shared_experts.down_proj",
+       "model.layers.15.mlp.shared_experts.gate_proj",
+       "model.layers.15.mlp.shared_experts.up_proj",
+       "model.layers.15.mlp.shared_experts.down_proj",
+       "model.layers.16.mlp.shared_experts.gate_proj",
+       "model.layers.16.mlp.shared_experts.up_proj",
+       "model.layers.16.mlp.shared_experts.down_proj",
+       "model.layers.17.mlp.shared_experts.gate_proj",
+       "model.layers.17.mlp.shared_experts.up_proj",
+       "model.layers.17.mlp.shared_experts.down_proj",
+       "model.layers.18.mlp.shared_experts.gate_proj",
+       "model.layers.18.mlp.shared_experts.up_proj",
+       "model.layers.18.mlp.shared_experts.down_proj",
+       "model.layers.19.mlp.shared_experts.gate_proj",
+       "model.layers.19.mlp.shared_experts.up_proj",
+       "model.layers.19.mlp.shared_experts.down_proj",
+       "model.layers.20.mlp.shared_experts.gate_proj",
+       "model.layers.20.mlp.shared_experts.up_proj",
+       "model.layers.20.mlp.shared_experts.down_proj",
+       "model.layers.21.mlp.shared_experts.gate_proj",
+       "model.layers.21.mlp.shared_experts.up_proj",
+       "model.layers.21.mlp.shared_experts.down_proj",
+       "model.layers.22.mlp.shared_experts.gate_proj",
+       "model.layers.22.mlp.shared_experts.up_proj",
+       "model.layers.22.mlp.shared_experts.down_proj",
+       "model.layers.23.mlp.shared_experts.gate_proj",
+       "model.layers.23.mlp.shared_experts.up_proj",
+       "model.layers.23.mlp.shared_experts.down_proj",
+       "model.layers.24.mlp.shared_experts.gate_proj",
+       "model.layers.24.mlp.shared_experts.up_proj",
+       "model.layers.24.mlp.shared_experts.down_proj",
+       "model.layers.25.mlp.shared_experts.gate_proj",
+       "model.layers.25.mlp.shared_experts.up_proj",
+       "model.layers.25.mlp.shared_experts.down_proj",
+       "model.layers.26.mlp.shared_experts.gate_proj",
+       "model.layers.26.mlp.shared_experts.up_proj",
+       "model.layers.26.mlp.shared_experts.down_proj",
+       "model.layers.27.mlp.shared_experts.gate_proj",
+       "model.layers.27.mlp.shared_experts.up_proj",
+       "model.layers.27.mlp.shared_experts.down_proj",
+       "model.layers.28.mlp.shared_experts.gate_proj",
+       "model.layers.28.mlp.shared_experts.up_proj",
+       "model.layers.28.mlp.shared_experts.down_proj",
+       "model.layers.29.mlp.shared_experts.gate_proj",
+       "model.layers.29.mlp.shared_experts.up_proj",
+       "model.layers.29.mlp.shared_experts.down_proj",
+       "model.layers.30.mlp.shared_experts.gate_proj",
+       "model.layers.30.mlp.shared_experts.up_proj",
+       "model.layers.30.mlp.shared_experts.down_proj",
+       "model.layers.31.mlp.shared_experts.gate_proj",
+       "model.layers.31.mlp.shared_experts.up_proj",
+       "model.layers.31.mlp.shared_experts.down_proj",
+       "model.layers.32.mlp.shared_experts.gate_proj",
+       "model.layers.32.mlp.shared_experts.up_proj",
+       "model.layers.32.mlp.shared_experts.down_proj",
+       "model.layers.33.mlp.shared_experts.gate_proj",
+       "model.layers.33.mlp.shared_experts.up_proj",
+       "model.layers.33.mlp.shared_experts.down_proj",
+       "model.layers.34.mlp.shared_experts.gate_proj",
+       "model.layers.34.mlp.shared_experts.up_proj",
+       "model.layers.34.mlp.shared_experts.down_proj",
+       "model.layers.35.mlp.shared_experts.gate_proj",
+       "model.layers.35.mlp.shared_experts.up_proj",
+       "model.layers.35.mlp.shared_experts.down_proj",
+       "model.layers.36.mlp.shared_experts.gate_proj",
+       "model.layers.36.mlp.shared_experts.up_proj",
+       "model.layers.36.mlp.shared_experts.down_proj",
+       "model.layers.37.mlp.shared_experts.gate_proj",
+       "model.layers.37.mlp.shared_experts.up_proj",
+       "model.layers.37.mlp.shared_experts.down_proj",
+       "model.layers.38.mlp.shared_experts.gate_proj",
+       "model.layers.38.mlp.shared_experts.up_proj",
+       "model.layers.38.mlp.shared_experts.down_proj",
+       "model.layers.39.mlp.shared_experts.gate_proj",
+       "model.layers.39.mlp.shared_experts.up_proj",
+       "model.layers.39.mlp.shared_experts.down_proj",
+       "model.layers.40.mlp.shared_experts.gate_proj",
+       "model.layers.40.mlp.shared_experts.up_proj",
+       "model.layers.40.mlp.shared_experts.down_proj",
+       "model.layers.41.mlp.shared_experts.gate_proj",
+       "model.layers.41.mlp.shared_experts.up_proj",
+       "model.layers.41.mlp.shared_experts.down_proj",
+       "model.layers.42.mlp.shared_experts.gate_proj",
+       "model.layers.42.mlp.shared_experts.up_proj",
+       "model.layers.42.mlp.shared_experts.down_proj",
+       "model.layers.43.mlp.shared_experts.gate_proj",
+       "model.layers.43.mlp.shared_experts.up_proj",
+       "model.layers.43.mlp.shared_experts.down_proj",
+       "model.layers.44.mlp.shared_experts.gate_proj",
+       "model.layers.44.mlp.shared_experts.up_proj",
+       "model.layers.44.mlp.shared_experts.down_proj",
+       "model.layers.45.mlp.shared_experts.gate_proj",
+       "model.layers.45.mlp.shared_experts.up_proj",
+       "model.layers.45.mlp.shared_experts.down_proj",
+       "model.layers.46.mlp.shared_experts.gate_proj",
+       "model.layers.46.mlp.shared_experts.up_proj",
+       "model.layers.46.mlp.shared_experts.down_proj",
+       "model.layers.47.mlp.shared_experts.gate_proj",
+       "model.layers.47.mlp.shared_experts.up_proj",
+       "model.layers.47.mlp.shared_experts.down_proj",
+       "model.layers.48.mlp.shared_experts.gate_proj",
+       "model.layers.48.mlp.shared_experts.up_proj",
+       "model.layers.48.mlp.shared_experts.down_proj",
+       "model.layers.49.mlp.shared_experts.gate_proj",
+       "model.layers.49.mlp.shared_experts.up_proj",
+       "model.layers.49.mlp.shared_experts.down_proj",
+       "model.layers.50.mlp.shared_experts.gate_proj",
+       "model.layers.50.mlp.shared_experts.up_proj",
+       "model.layers.50.mlp.shared_experts.down_proj",
+       "model.layers.51.mlp.shared_experts.gate_proj",
+       "model.layers.51.mlp.shared_experts.up_proj",
+       "model.layers.51.mlp.shared_experts.down_proj",
+       "model.layers.52.mlp.shared_experts.gate_proj",
+       "model.layers.52.mlp.shared_experts.up_proj",
+       "model.layers.52.mlp.shared_experts.down_proj",
+       "model.layers.53.mlp.shared_experts.gate_proj",
+       "model.layers.53.mlp.shared_experts.up_proj",
+       "model.layers.53.mlp.shared_experts.down_proj",
+       "model.layers.54.mlp.shared_experts.gate_proj",
+       "model.layers.54.mlp.shared_experts.up_proj",
+       "model.layers.54.mlp.shared_experts.down_proj",
+       "model.layers.55.mlp.shared_experts.gate_proj",
+       "model.layers.55.mlp.shared_experts.up_proj",
+       "model.layers.55.mlp.shared_experts.down_proj",
+       "model.layers.56.mlp.shared_experts.gate_proj",
+       "model.layers.56.mlp.shared_experts.up_proj",
+       "model.layers.56.mlp.shared_experts.down_proj",
+       "model.layers.57.mlp.shared_experts.gate_proj",
+       "model.layers.57.mlp.shared_experts.up_proj",
+       "model.layers.57.mlp.shared_experts.down_proj",
+       "model.layers.58.mlp.shared_experts.gate_proj",
+       "model.layers.58.mlp.shared_experts.up_proj",
+       "model.layers.58.mlp.shared_experts.down_proj",
+       "model.layers.59.mlp.shared_experts.gate_proj",
+       "model.layers.59.mlp.shared_experts.up_proj",
+       "model.layers.59.mlp.shared_experts.down_proj",
+       "model.layers.60.mlp.shared_experts.gate_proj",
+       "model.layers.60.mlp.shared_experts.up_proj",
+       "model.layers.60.mlp.shared_experts.down_proj",
+       "model.layers.61.mlp.shared_experts.gate_proj",
+       "model.layers.61.mlp.shared_experts.up_proj",
+       "model.layers.61.mlp.shared_experts.down_proj",
+       "lm_head"
+     ],
+     "kv_cache_scheme": null,
+     "quant_method": "compressed-tensors",
+     "quantization_status": "compressed"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000000,
+   "routed_scaling_factor": 2.5,
+   "scoring_func": "noaux_tc",
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "topk_group": 1,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.53.0.dev0",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "vocab_size": 152064
+ }
generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": 151645,
+   "top_p": 0.8,
+   "transformers_version": "4.53.0.dev0"
+ }
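
Per the quantization_config above, this checkpoint ships compressed-tensors W4A16 weights (4-bit symmetric int, group size 128) with lm_head, layer 0's dense MLP, and every shared-expert projection left unquantized, and generation_config.json defaults to nucleus sampling with top_p 0.8. A loading sketch under stated assumptions: a transformers build with Dots1 support, the compressed-tensors package installed, and a placeholder repo id:

```python
# Hedged sketch: load this W4A16 checkpoint and sample with the shipped
# generation defaults (do_sample=True, top_p=0.8). The repo id is a
# placeholder; requires `pip install compressed-tensors accelerate`.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "justinjja/Dots1-W4A16"  # placeholder id for this upload
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, device_map="auto")

inputs = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Hello!"}],
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

out = model.generate(inputs, max_new_tokens=64)
print(tokenizer.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```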
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61ff798dd409e0576d55dc88e89cdb4f00376097d439a38d0cd2c173202723fb
+ size 4998598248
model-00002-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c978ca4f52ec54c2eb6046e916e8d1952e4babda09c8937baf59b142f2bdd11
+ size 4999592368
model-00003-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6977c70a5c41a474482e648daf9563f09a8a39516fef665464013fc16068edc3
+ size 4999594576
model-00004-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33ed7ebc5bdb7aa49dddf8300e450fa1dfe7c37af1b80eeae68e69cbf9c6c839
+ size 4999597064
model-00005-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5299f245c0dc5c36c1e99d7c56ef6521b11b532c9ae2f01efd2017bc4bc3d457
+ size 4999597072
model-00006-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45e6c65ab08f4e632886c9846927e6158ee7e7ba844ce49b0ac52e8976c000e6
+ size 4999597072
model-00007-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9f825fc113facfb75653c0a7def57e80868d65a0b8dc102f70ce6ff6f635ca3
+ size 4999597064
model-00008-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c58330823786c03e90c35c7aaebde3b6983dc13d3362f5f803fe7c521f3030b
+ size 4999597072
model-00009-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:253f21bffc497d5a4a9f07729d78c75d95b278ec36eb39f27bdcc41ea8be5365
+ size 4999597072
model-00010-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed87b0a79612e02634cdb8d6bd491f142e5a0352cf752f768ee9ca39d58ee05f
+ size 4999597064
model-00011-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6857177b18e5e3cc37f2de4a85f0edfc8d144ff0a8975224314b3376e897f334
+ size 4999597072
model-00012-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7c40230584051c558c9c191036f1b1c6fd5c54c6d6b0375a7049ce0fcc3f4cc
+ size 4999597072
model-00013-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ec977324bb1be77ec4b18c4c2563fee873ea74320605da72076dc74da19979e
+ size 4999597064
model-00014-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:336c7100b3a31a7db8be8ba2e57c4781b38d4b17cdb500b6a10183cc42be3502
+ size 4999596944
model-00015-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84a9aaa1fe35518d1c112002fcc3f49ed3961b119bfda8ff72b5156f70ad9348
+ size 4988749784
model-00016-of-00016.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:146227b7b43062e716f96119aa7bdfb54f436556c1e6e8c9776f2ce875d2ffb0
+ size 3808788992
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
recipe.yaml ADDED
@@ -0,0 +1,97 @@
+ default_stage:
+   default_modifiers:
+     QuantizationModifier:
+       ignore: [model.layers.3.mlp.shared_experts.up_proj, model.layers.25.mlp.shared_experts.up_proj,
+         model.layers.8.mlp.shared_experts.gate_proj, model.layers.40.mlp.shared_experts.gate_proj,
+         model.layers.38.mlp.shared_experts.gate_proj, model.layers.20.mlp.shared_experts.down_proj,
+         model.layers.37.mlp.shared_experts.up_proj, model.layers.23.mlp.shared_experts.gate_proj,
+         model.layers.15.mlp.shared_experts.up_proj, model.layers.46.mlp.shared_experts.down_proj,
+         model.layers.18.mlp.shared_experts.gate_proj, model.layers.24.mlp.shared_experts.gate_proj,
+         model.layers.11.mlp.shared_experts.up_proj, model.layers.53.mlp.shared_experts.gate_proj,
+         model.layers.0.mlp.gate_proj, model.layers.31.mlp.shared_experts.up_proj, model.layers.43.mlp.shared_experts.up_proj,
+         model.layers.34.mlp.shared_experts.gate_proj, model.layers.11.mlp.shared_experts.down_proj,
+         model.layers.26.mlp.shared_experts.up_proj, model.layers.25.mlp.shared_experts.gate_proj,
+         model.layers.54.mlp.shared_experts.gate_proj, model.layers.51.mlp.shared_experts.down_proj,
+         model.layers.3.mlp.shared_experts.gate_proj, model.layers.27.mlp.shared_experts.up_proj,
+         model.layers.57.mlp.shared_experts.down_proj, model.layers.14.mlp.shared_experts.down_proj,
+         model.layers.53.mlp.shared_experts.up_proj, model.layers.4.mlp.shared_experts.gate_proj,
+         model.layers.22.mlp.shared_experts.down_proj, model.layers.20.mlp.shared_experts.up_proj,
+         model.layers.28.mlp.shared_experts.down_proj, model.layers.34.mlp.shared_experts.up_proj,
+         model.layers.26.mlp.shared_experts.gate_proj, model.layers.41.mlp.shared_experts.down_proj,
+         model.layers.9.mlp.shared_experts.down_proj, model.layers.49.mlp.shared_experts.down_proj,
+         model.layers.33.mlp.shared_experts.gate_proj, model.layers.17.mlp.shared_experts.gate_proj,
+         model.layers.2.mlp.shared_experts.gate_proj, model.layers.37.mlp.shared_experts.gate_proj,
+         model.layers.59.mlp.shared_experts.gate_proj, model.layers.7.mlp.shared_experts.gate_proj,
+         model.layers.21.mlp.shared_experts.gate_proj, model.layers.30.mlp.shared_experts.down_proj,
+         model.layers.38.mlp.shared_experts.up_proj, model.layers.59.mlp.shared_experts.down_proj,
+         model.layers.12.mlp.shared_experts.up_proj, model.layers.56.mlp.shared_experts.down_proj,
+         model.layers.10.mlp.shared_experts.down_proj, model.layers.37.mlp.shared_experts.down_proj,
+         model.layers.39.mlp.shared_experts.up_proj, model.layers.7.mlp.shared_experts.down_proj,
+         model.layers.42.mlp.shared_experts.down_proj, model.layers.52.mlp.shared_experts.up_proj,
+         model.layers.29.mlp.shared_experts.up_proj, model.layers.29.mlp.shared_experts.down_proj,
+         model.layers.18.mlp.shared_experts.down_proj, model.layers.31.mlp.shared_experts.gate_proj,
+         model.layers.5.mlp.shared_experts.down_proj, model.layers.35.mlp.shared_experts.up_proj,
+         model.layers.39.mlp.shared_experts.down_proj, model.layers.19.mlp.shared_experts.up_proj,
+         model.layers.15.mlp.shared_experts.gate_proj, model.layers.29.mlp.shared_experts.gate_proj,
+         model.layers.61.mlp.shared_experts.up_proj, model.layers.2.mlp.shared_experts.down_proj,
+         model.layers.23.mlp.shared_experts.up_proj, model.layers.36.mlp.shared_experts.down_proj,
+         model.layers.9.mlp.shared_experts.gate_proj, model.layers.32.mlp.shared_experts.gate_proj,
+         model.layers.48.mlp.shared_experts.up_proj, model.layers.21.mlp.shared_experts.up_proj,
+         model.layers.6.mlp.shared_experts.gate_proj, model.layers.6.mlp.shared_experts.up_proj,
+         model.layers.10.mlp.shared_experts.gate_proj, model.layers.61.mlp.shared_experts.gate_proj,
+         model.layers.57.mlp.shared_experts.gate_proj, model.layers.34.mlp.shared_experts.down_proj,
+         model.layers.49.mlp.shared_experts.up_proj, model.layers.20.mlp.shared_experts.gate_proj,
+         model.layers.12.mlp.shared_experts.down_proj, model.layers.16.mlp.shared_experts.up_proj,
+         model.layers.24.mlp.shared_experts.down_proj, model.layers.35.mlp.shared_experts.down_proj,
+         model.layers.59.mlp.shared_experts.up_proj, model.layers.14.mlp.shared_experts.up_proj,
+         model.layers.54.mlp.shared_experts.up_proj, model.layers.17.mlp.shared_experts.up_proj,
+         model.layers.51.mlp.shared_experts.gate_proj, model.layers.16.mlp.shared_experts.gate_proj,
+         model.layers.48.mlp.shared_experts.down_proj, model.layers.25.mlp.shared_experts.down_proj,
+         model.layers.58.mlp.shared_experts.down_proj, model.layers.28.mlp.shared_experts.up_proj,
+         model.layers.43.mlp.shared_experts.down_proj, model.layers.11.mlp.shared_experts.gate_proj,
+         model.layers.61.mlp.shared_experts.down_proj, model.layers.45.mlp.shared_experts.up_proj,
+         model.layers.45.mlp.shared_experts.down_proj, model.layers.60.mlp.shared_experts.up_proj,
+         model.layers.36.mlp.shared_experts.up_proj, model.layers.14.mlp.shared_experts.gate_proj,
+         model.layers.32.mlp.shared_experts.up_proj, model.layers.47.mlp.shared_experts.down_proj,
+         model.layers.47.mlp.shared_experts.up_proj, model.layers.50.mlp.shared_experts.up_proj,
+         model.layers.19.mlp.shared_experts.gate_proj, model.layers.5.mlp.shared_experts.gate_proj,
+         model.layers.53.mlp.shared_experts.down_proj, model.layers.42.mlp.shared_experts.gate_proj,
+         model.layers.43.mlp.shared_experts.gate_proj, model.layers.2.mlp.shared_experts.up_proj,
+         model.layers.1.mlp.shared_experts.gate_proj, model.layers.45.mlp.shared_experts.gate_proj,
+         model.layers.23.mlp.shared_experts.down_proj, model.layers.55.mlp.shared_experts.down_proj,
+         model.layers.31.mlp.shared_experts.down_proj, model.layers.13.mlp.shared_experts.down_proj,
+         model.layers.57.mlp.shared_experts.up_proj, model.layers.48.mlp.shared_experts.gate_proj,
+         model.layers.17.mlp.shared_experts.down_proj, model.layers.15.mlp.shared_experts.down_proj,
+         model.layers.44.mlp.shared_experts.down_proj, model.layers.4.mlp.shared_experts.up_proj,
+         model.layers.3.mlp.shared_experts.down_proj, model.layers.54.mlp.shared_experts.down_proj,
+         model.layers.50.mlp.shared_experts.gate_proj, model.layers.1.mlp.shared_experts.up_proj,
+         model.layers.41.mlp.shared_experts.gate_proj, model.layers.49.mlp.shared_experts.gate_proj,
+         model.layers.32.mlp.shared_experts.down_proj, model.layers.56.mlp.shared_experts.up_proj,
+         model.layers.9.mlp.shared_experts.up_proj, model.layers.7.mlp.shared_experts.up_proj,
+         model.layers.18.mlp.shared_experts.up_proj, model.layers.1.mlp.shared_experts.down_proj,
+         model.layers.39.mlp.shared_experts.gate_proj, model.layers.58.mlp.shared_experts.gate_proj,
+         model.layers.13.mlp.shared_experts.gate_proj, model.layers.47.mlp.shared_experts.gate_proj,
+         model.layers.52.mlp.shared_experts.gate_proj, model.layers.24.mlp.shared_experts.up_proj,
+         model.layers.26.mlp.shared_experts.down_proj, model.layers.22.mlp.shared_experts.up_proj,
+         model.layers.38.mlp.shared_experts.down_proj, model.layers.27.mlp.shared_experts.down_proj,
+         model.layers.52.mlp.shared_experts.down_proj, model.layers.6.mlp.shared_experts.down_proj,
+         model.layers.55.mlp.shared_experts.gate_proj, model.layers.27.mlp.shared_experts.gate_proj,
+         model.layers.12.mlp.shared_experts.gate_proj, lm_head, model.layers.40.mlp.shared_experts.down_proj,
+         model.layers.55.mlp.shared_experts.up_proj, model.layers.44.mlp.shared_experts.gate_proj,
+         model.layers.30.mlp.shared_experts.gate_proj, model.layers.60.mlp.shared_experts.gate_proj,
+         model.layers.22.mlp.shared_experts.gate_proj, model.layers.19.mlp.shared_experts.down_proj,
+         model.layers.50.mlp.shared_experts.down_proj, model.layers.33.mlp.shared_experts.down_proj,
+         model.layers.5.mlp.shared_experts.up_proj, model.layers.30.mlp.shared_experts.up_proj,
+         model.layers.44.mlp.shared_experts.up_proj, model.layers.58.mlp.shared_experts.up_proj,
+         model.layers.33.mlp.shared_experts.up_proj, model.layers.13.mlp.shared_experts.up_proj,
+         model.layers.46.mlp.shared_experts.gate_proj, model.layers.51.mlp.shared_experts.up_proj,
+         model.layers.35.mlp.shared_experts.gate_proj, model.layers.46.mlp.shared_experts.up_proj,
+         model.layers.56.mlp.shared_experts.gate_proj, model.layers.40.mlp.shared_experts.up_proj,
+         model.layers.28.mlp.shared_experts.gate_proj, model.layers.8.mlp.shared_experts.up_proj,
+         model.layers.60.mlp.shared_experts.down_proj, model.layers.41.mlp.shared_experts.up_proj,
+         model.layers.0.mlp.down_proj, model.layers.8.mlp.shared_experts.down_proj, model.layers.16.mlp.shared_experts.down_proj,
+         model.layers.42.mlp.shared_experts.up_proj, model.layers.10.mlp.shared_experts.up_proj,
+         model.layers.4.mlp.shared_experts.down_proj, model.layers.21.mlp.shared_experts.down_proj,
+         model.layers.36.mlp.shared_experts.gate_proj]
+       targets: [Linear]
+       scheme: W4A16
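
recipe.yaml is an llm-compressor recipe: a one-shot, data-free QuantizationModifier that applies the W4A16 scheme to every Linear module while ignoring lm_head, layer 0's dense MLP, and the shared-expert projections (the same list that lands in config.json's ignore field). A rough sketch of how such a recipe is typically applied; the base-model id is an assumption and the calls reflect recent llmcompressor releases, so treat it as illustrative rather than the uploader's exact script:

```python
# Illustrative only (not the uploader's exact script): apply a recipe like
# recipe.yaml with llm-compressor's one-shot flow. RTN-style W4A16
# quantization needs no calibration data.
from llmcompressor import oneshot
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "rednote-hilab/dots.llm1.inst",  # assumed base model
    torch_dtype="auto",
)
oneshot(model=model, recipe="recipe.yaml")
model.save_pretrained("dots1-W4A16", save_compressed=True)
```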
special_tokens_map.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|userprompt|>",
+     "<|endofuserprompt|>",
+     "<|response|>",
+     "<|endofresponse|>",
+     "<|system|>",
+     "<|endofsystem|>",
+     "<|observation|>",
+     "<|endofobservation|>",
+     "<|execution|>",
+     "<|endofexecution|>",
+     "<|reject-unknown|>",
+     "<|sec-cot|>",
+     "<|sec-end-cot|>"
+   ],
+   "eos_token": {
+     "content": "<|endofresponse|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endofresponse|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe750f7b1f42fb8f760ee7ee91fa5fb3974795b3dc14c269819b83a086f5e98d
+ size 11420764
tokenizer_config.json ADDED
@@ -0,0 +1,160 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151645": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151646": {
+       "content": "<|userprompt|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|endofuserprompt|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151648": {
+       "content": "<|response|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151649": {
+       "content": "<|endofresponse|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151650": {
+       "content": "<|system|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|endofsystem|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|observation|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|endofobservation|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|execution|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|endofexecution|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|reject-unknown|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<|sec-cot|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151658": {
+       "content": "<|sec-end-cot|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|userprompt|>",
+     "<|endofuserprompt|>",
+     "<|response|>",
+     "<|endofresponse|>",
+     "<|system|>",
+     "<|endofsystem|>",
+     "<|observation|>",
+     "<|endofobservation|>",
+     "<|execution|>",
+     "<|endofexecution|>",
+     "<|reject-unknown|>",
+     "<|sec-cot|>",
+     "<|sec-end-cot|>"
+   ],
+   "bos_token": null,
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endofresponse|>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 131072,
+   "pad_token": "<|endofresponse|>",
+   "split_special_tokens": false,
+   "tokenizer_class": "Qwen2Tokenizer",
+   "unk_token": null
+ }
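
A quick sanity check that the tokenizer wires the role markers to the ids declared in added_tokens.json (placeholder repo id again):

```python
# Sketch: confirm special-token ids match added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("justinjja/Dots1-W4A16")  # placeholder
assert tok.convert_tokens_to_ids("<|userprompt|>") == 151646
assert tok.convert_tokens_to_ids("<|response|>") == 151648
assert tok.eos_token == "<|endofresponse|>"
```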
vocab.json ADDED
The diff for this file is too large to render. See raw diff