wybertwang committed
Commit 93d4c24 · verified · 1 Parent(s): e005822

Upload folder using huggingface_hub
Qwen2.5-3B-Instruct/.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
Qwen2.5-3B-Instruct/1.json ADDED
@@ -0,0 +1,201 @@
+ "<t5_00100>": 152268,
+ "<t5_00101>": 152269,
+ "<t5_00102>": 152270,
+ "<t5_00103>": 152271,
+ "<t5_00104>": 152272,
+ "<t5_00105>": 152273,
+ "<t5_00106>": 152274,
+ "<t5_00107>": 152275,
+ "<t5_00108>": 152276,
+ "<t5_00109>": 152277,
+ "<t5_00110>": 152278,
+ "<t5_00111>": 152279,
+ "<t5_00112>": 152280,
+ "<t5_00113>": 152281,
+ "<t5_00114>": 152282,
+ "<t5_00115>": 152283,
+ "<t5_00116>": 152284,
+ "<t5_00117>": 152285,
+ "<t5_00118>": 152286,
+ "<t5_00119>": 152287,
+ "<t5_00120>": 152288,
+ "<t5_00121>": 152289,
+ "<t5_00122>": 152290,
+ "<t5_00123>": 152291,
+ "<t5_00124>": 152292,
+ "<t5_00125>": 152293,
+ "<t5_00126>": 152294,
+ "<t5_00127>": 152295,
+ "<t5_00128>": 152296,
+ "<t5_00129>": 152297,
+ "<t5_00130>": 152298,
+ "<t5_00131>": 152299,
+ "<t5_00132>": 152300,
+ "<t5_00133>": 152301,
+ "<t5_00134>": 152302,
+ "<t5_00135>": 152303,
+ "<t5_00136>": 152304,
+ "<t5_00137>": 152305,
+ "<t5_00138>": 152306,
+ "<t5_00139>": 152307,
+ "<t5_00140>": 152308,
+ "<t5_00141>": 152309,
+ "<t5_00142>": 152310,
+ "<t5_00143>": 152311,
+ "<t5_00144>": 152312,
+ "<t5_00145>": 152313,
+ "<t5_00146>": 152314,
+ "<t5_00147>": 152315,
+ "<t5_00148>": 152316,
+ "<t5_00149>": 152317,
+ "<t5_00150>": 152318,
+ "<t5_00151>": 152319,
+ "<t5_00152>": 152320,
+ "<t5_00153>": 152321,
+ "<t5_00154>": 152322,
+ "<t5_00155>": 152323,
+ "<t5_00156>": 152324,
+ "<t5_00157>": 152325,
+ "<t5_00158>": 152326,
+ "<t5_00159>": 152327,
+ "<t5_00160>": 152328,
+ "<t5_00161>": 152329,
+ "<t5_00162>": 152330,
+ "<t5_00163>": 152331,
+ "<t5_00164>": 152332,
+ "<t5_00165>": 152333,
+ "<t5_00166>": 152334,
+ "<t5_00167>": 152335,
+ "<t5_00168>": 152336,
+ "<t5_00169>": 152337,
+ "<t5_00170>": 152338,
+ "<t5_00171>": 152339,
+ "<t5_00172>": 152340,
+ "<t5_00173>": 152341,
+ "<t5_00174>": 152342,
+ "<t5_00175>": 152343,
+ "<t5_00176>": 152344,
+ "<t5_00177>": 152345,
+ "<t5_00178>": 152346,
+ "<t5_00179>": 152347,
+ "<t5_00180>": 152348,
+ "<t5_00181>": 152349,
+ "<t5_00182>": 152350,
+ "<t5_00183>": 152351,
+ "<t5_00184>": 152352,
+ "<t5_00185>": 152353,
+ "<t5_00186>": 152354,
+ "<t5_00187>": 152355,
+ "<t5_00188>": 152356,
+ "<t5_00189>": 152357,
+ "<t5_00190>": 152358,
+ "<t5_00191>": 152359,
+ "<t5_00192>": 152360,
+ "<t5_00193>": 152361,
+ "<t5_00194>": 152362,
+ "<t5_00195>": 152363,
+ "<t5_00196>": 152364,
+ "<t5_00197>": 152365,
+ "<t5_00198>": 152366,
+ "<t5_00199>": 152367,
+ "<t5_00200>": 152368,
+ "<t5_00201>": 152369,
+ "<t5_00202>": 152370,
+ "<t5_00203>": 152371,
+ "<t5_00204>": 152372,
+ "<t5_00205>": 152373,
+ "<t5_00206>": 152374,
+ "<t5_00207>": 152375,
+ "<t5_00208>": 152376,
+ "<t5_00209>": 152377,
+ "<t5_00210>": 152378,
+ "<t5_00211>": 152379,
+ "<t5_00212>": 152380,
+ "<t5_00213>": 152381,
+ "<t5_00214>": 152382,
+ "<t5_00215>": 152383,
+ "<t5_00216>": 152384,
+ "<t5_00217>": 152385,
+ "<t5_00218>": 152386,
+ "<t5_00219>": 152387,
+ "<t5_00220>": 152388,
+ "<t5_00221>": 152389,
+ "<t5_00222>": 152390,
+ "<t5_00223>": 152391,
+ "<t5_00224>": 152392,
+ "<t5_00225>": 152393,
+ "<t5_00226>": 152394,
+ "<t5_00227>": 152395,
+ "<t5_00228>": 152396,
+ "<t5_00229>": 152397,
+ "<t5_00230>": 152398,
+ "<t5_00231>": 152399,
+ "<t5_00232>": 152400,
+ "<t5_00233>": 152401,
+ "<t5_00234>": 152402,
+ "<t5_00235>": 152403,
+ "<t5_00236>": 152404,
+ "<t5_00237>": 152405,
+ "<t5_00238>": 152406,
+ "<t5_00239>": 152407,
+ "<t5_00240>": 152408,
+ "<t5_00241>": 152409,
+ "<t5_00242>": 152410,
+ "<t5_00243>": 152411,
+ "<t5_00244>": 152412,
+ "<t5_00245>": 152413,
+ "<t5_00246>": 152414,
+ "<t5_00247>": 152415,
+ "<t5_00248>": 152416,
+ "<t5_00249>": 152417,
+ "<t5_00250>": 152418,
+ "<t5_00251>": 152419,
+ "<t5_00252>": 152420,
+ "<t5_00253>": 152421,
+ "<t5_00254>": 152422,
+ "<t5_00255>": 152423,
+ "<t5_00256>": 152424,
+ "<t5_00257>": 152425,
+ "<t5_00258>": 152426,
+ "<t5_00259>": 152427,
+ "<t5_00260>": 152428,
+ "<t5_00261>": 152429,
+ "<t5_00262>": 152430,
+ "<t5_00263>": 152431,
+ "<t5_00264>": 152432,
+ "<t5_00265>": 152433,
+ "<t5_00266>": 152434,
+ "<t5_00267>": 152435,
+ "<t5_00268>": 152436,
+ "<t5_00269>": 152437,
+ "<t5_00270>": 152438,
+ "<t5_00271>": 152439,
+ "<t5_00272>": 152440,
+ "<t5_00273>": 152441,
+ "<t5_00274>": 152442,
+ "<t5_00275>": 152443,
+ "<t5_00276>": 152444,
+ "<t5_00277>": 152445,
+ "<t5_00278>": 152446,
+ "<t5_00279>": 152447,
+ "<t5_00280>": 152448,
+ "<t5_00281>": 152449,
+ "<t5_00282>": 152450,
+ "<t5_00283>": 152451,
+ "<t5_00284>": 152452,
+ "<t5_00285>": 152453,
+ "<t5_00286>": 152454,
+ "<t5_00287>": 152455,
+ "<t5_00288>": 152456,
+ "<t5_00289>": 152457,
+ "<t5_00290>": 152458,
+ "<t5_00291>": 152459,
+ "<t5_00292>": 152460,
+ "<t5_00293>": 152461,
+ "<t5_00294>": 152462,
+ "<t5_00295>": 152463,
+ "<t5_00296>": 152464,
+ "<t5_00297>": 152465,
+ "<t5_00298>": 152466,
+ "<t5_00299>": 152467,
+ "<t5_00300>": 152468,
Qwen2.5-3B-Instruct/LICENSE ADDED
@@ -0,0 +1,54 @@
+ Qwen RESEARCH LICENSE AGREEMENT
+
+ Qwen RESEARCH LICENSE AGREEMENT Release Date: September 19, 2024
+
+ By clicking to agree or by using or distributing any portion or element of the Qwen Materials, you will be deemed to have recognized and accepted the content of this Agreement, which is effective immediately.
+
+ 1. Definitions
+ a. This Qwen RESEARCH LICENSE AGREEMENT (this "Agreement") shall mean the terms and conditions for use, reproduction, distribution and modification of the Materials as defined by this Agreement.
+ b. "We" (or "Us") shall mean Alibaba Cloud.
+ c. "You" (or "Your") shall mean a natural person or legal entity exercising the rights granted by this Agreement and/or using the Materials for any purpose and in any field of use.
+ d. "Third Parties" shall mean individuals or legal entities that are not under common control with us or you.
+ e. "Qwen" shall mean the large language models, and software and algorithms, consisting of trained model weights, parameters (including optimizer states), machine-learning model code, inference-enabling code, training-enabling code, fine-tuning enabling code and other elements of the foregoing distributed by us.
+ f. "Materials" shall mean, collectively, Alibaba Cloud's proprietary Qwen and Documentation (and any portion thereof) made available under this Agreement.
+ g. "Source" form shall mean the preferred form for making modifications, including but not limited to model source code, documentation source, and configuration files.
+ h. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+ i. "Non-Commercial" shall mean for research or evaluation purposes only.
+
+ 2. Grant of Rights
+ a. You are granted a non-exclusive, worldwide, non-transferable and royalty-free limited license under Alibaba Cloud's intellectual property or other rights owned by us embodied in the Materials to use, reproduce, distribute, copy, create derivative works of, and make modifications to the Materials FOR NON-COMMERCIAL PURPOSES ONLY.
+ b. If you are commercially using the Materials, you shall request a license from us.
+
+ 3. Redistribution
+ You may distribute copies or make the Materials, or derivative works thereof, available as part of a product or service that contains any of them, with or without modifications, and in Source or Object form, provided that you meet the following conditions:
+ a. You shall give any other recipients of the Materials or derivative works a copy of this Agreement;
+ b. You shall cause any modified files to carry prominent notices stating that you changed the files;
+ c. You shall retain in all copies of the Materials that you distribute the following attribution notices within a "Notice" text file distributed as a part of such copies: "Qwen is licensed under the Qwen RESEARCH LICENSE AGREEMENT, Copyright (c) Alibaba Cloud. All Rights Reserved."; and
+ d. You may add your own copyright statement to your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of your modifications, or for any such derivative works as a whole, provided your use, reproduction, and distribution of the work otherwise complies with the terms and conditions of this Agreement.
+
+ 4. Rules of use
+ a. The Materials may be subject to export controls or restrictions in China, the United States or other countries or regions. You shall comply with applicable laws and regulations in your use of the Materials.
+ b. If you use the Materials or any outputs or results therefrom to create, train, fine-tune, or improve an AI model that is distributed or made available, you shall prominently display “Built with Qwen” or “Improved using Qwen” in the related product documentation.
+
+ 5. Intellectual Property
+ a. We retain ownership of all intellectual property rights in and to the Materials and derivatives made by or for us. Conditioned upon compliance with the terms and conditions of this Agreement, with respect to any derivative works and modifications of the Materials that are made by you, you are and will be the owner of such derivative works and modifications.
+ b. No trademark license is granted to use the trade names, trademarks, service marks, or product names of us, except as required to fulfill notice requirements under this Agreement or as required for reasonable and customary use in describing and redistributing the Materials.
+ c. If you commence a lawsuit or other proceedings (including a cross-claim or counterclaim in a lawsuit) against us or any entity alleging that the Materials or any output therefrom, or any part of the foregoing, infringe any intellectual property or other right owned or licensable by you, then all licenses granted to you under this Agreement shall terminate as of the date such lawsuit or other proceeding is commenced or brought.
+
+ 6. Disclaimer of Warranty and Limitation of Liability
+ a. We are not obligated to support, update, provide training for, or develop any further version of the Qwen Materials or to grant any license thereto.
+ b. THE MATERIALS ARE PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. WE MAKE NO WARRANTY AND ASSUME NO RESPONSIBILITY FOR THE SAFETY OR STABILITY OF THE MATERIALS AND ANY OUTPUT THEREFROM.
+ c. IN NO EVENT SHALL WE BE LIABLE TO YOU FOR ANY DAMAGES, INCLUDING, BUT NOT LIMITED TO ANY DIRECT, OR INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING FROM YOUR USE OR INABILITY TO USE THE MATERIALS OR ANY OUTPUT OF IT, NO MATTER HOW IT’S CAUSED.
+ d. You will defend, indemnify and hold harmless us from and against any claim by any third party arising out of or related to your use or distribution of the Materials.
+
+ 7. Survival and Termination.
+ a. The term of this Agreement shall commence upon your acceptance of this Agreement or access to the Materials and will continue in full force and effect until terminated in accordance with the terms and conditions herein.
+ b. We may terminate this Agreement if you breach any of the terms or conditions of this Agreement. Upon termination of this Agreement, you must delete and cease use of the Materials. Sections 6 and 8 shall survive the termination of this Agreement.
+
+ 8. Governing Law and Jurisdiction.
+ a. This Agreement and any dispute arising out of or relating to it will be governed by the laws of China, without regard to conflict of law principles, and the UN Convention on Contracts for the International Sale of Goods does not apply to this Agreement.
+ b. The People's Courts in Hangzhou City shall have exclusive jurisdiction over any dispute arising out of this Agreement.
+
+ 9. Other Terms and Conditions.
+ a. Any arrangements, understandings, or agreements regarding the Material not stated herein are separate from and independent of the terms and conditions of this Agreement. You shall request a separate license from us, if you use the Materials in ways not expressly agreed to in this Agreement.
+ b. We shall not be bound by any additional or different terms or conditions communicated by you unless expressly agreed.
Qwen2.5-3B-Instruct/README.md ADDED
@@ -0,0 +1,111 @@
+ ---
+ license: other
+ license_name: qwen-research
+ license_link: https://huggingface.co/Qwen/Qwen2.5-3B-Instruct/blob/main/LICENSE
+ language:
+ - en
+ pipeline_tag: text-generation
+ base_model: Qwen/Qwen2.5-3B
+ tags:
+ - chat
+ library_name: transformers
+ ---
+
+ # Qwen2.5-3B-Instruct
+
+ ## Introduction
+
+ Qwen2.5 is the latest series of Qwen large language models. For Qwen2.5, we release a number of base language models and instruction-tuned language models ranging from 0.5 to 72 billion parameters. Qwen2.5 brings the following improvements upon Qwen2:
+
+ - Significantly **more knowledge** and greatly improved capabilities in **coding** and **mathematics**, thanks to our specialized expert models in these domains.
+ - Significant improvements in **instruction following**, **generating long texts** (over 8K tokens), **understanding structured data** (e.g., tables), and **generating structured outputs**, especially JSON. **More resilient to the diversity of system prompts**, enhancing role-play implementation and condition-setting for chatbots.
+ - **Long-context support** up to 128K tokens, with generation of up to 8K tokens.
+ - **Multilingual support** for over 29 languages, including Chinese, English, French, Spanish, Portuguese, German, Italian, Russian, Japanese, Korean, Vietnamese, Thai, Arabic, and more.
+
+ **This repo contains the instruction-tuned 3B Qwen2.5 model**, which has the following features:
+ - Type: Causal Language Models
+ - Training Stage: Pretraining & Post-training
+ - Architecture: transformers with RoPE, SwiGLU, RMSNorm, Attention QKV bias and tied word embeddings
+ - Number of Parameters: 3.09B
+ - Number of Parameters (Non-Embedding): 2.77B
+ - Number of Layers: 36
+ - Number of Attention Heads (GQA): 16 for Q and 2 for KV
+ - Context Length: Full 32,768 tokens and generation up to 8,192 tokens
+
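+ These hyperparameters can also be read back from the checkpoint configuration; a minimal sketch:
+
+ ```python
+ from transformers import AutoConfig
+
+ cfg = AutoConfig.from_pretrained("Qwen/Qwen2.5-3B-Instruct")
+ print(cfg.num_hidden_layers)    # 36 transformer layers
+ print(cfg.num_attention_heads)  # 16 query heads
+ print(cfg.num_key_value_heads)  # 2 key/value heads (GQA)
+ print(cfg.tie_word_embeddings)  # True, i.e. tied word embeddings
+ ```
+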
+ For more details, please refer to our [blog](https://qwenlm.github.io/blog/qwen2.5/), [GitHub](https://github.com/QwenLM/Qwen2.5), and [Documentation](https://qwen.readthedocs.io/en/latest/).
+
+ ## Requirements
+
+ The code for Qwen2.5 is included in the latest Hugging Face `transformers`, and we advise you to use the latest version of `transformers`.
+
+ With `transformers<4.37.0`, you will encounter the following error:
+ ```
+ KeyError: 'qwen2'
+ ```
+
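+ A quick way to guard against this is to check the installed version before loading the model; a minimal sketch:
+
+ ```python
+ import transformers
+ from packaging import version  # packaging is a dependency of transformers
+
+ # Qwen2 support requires transformers >= 4.37.0; older versions raise KeyError: 'qwen2'
+ assert version.parse(transformers.__version__) >= version.parse("4.37.0"), \
+     f"transformers {transformers.__version__} is too old for Qwen2.5"
+ ```
+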
+ ## Quickstart
+
+ The following code snippet uses `apply_chat_template` to show you how to load the tokenizer and model and how to generate content.
+
+ ```python
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ model_name = "Qwen/Qwen2.5-3B-Instruct"
+
+ model = AutoModelForCausalLM.from_pretrained(
+     model_name,
+     torch_dtype="auto",
+     device_map="auto"
+ )
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+ prompt = "Give me a short introduction to large language models."
+ messages = [
+     {"role": "system", "content": "You are Qwen, created by Alibaba Cloud. You are a helpful assistant."},
+     {"role": "user", "content": prompt}
+ ]
+ text = tokenizer.apply_chat_template(
+     messages,
+     tokenize=False,
+     add_generation_prompt=True
+ )
+ model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
+
+ generated_ids = model.generate(
+     **model_inputs,
+     max_new_tokens=512
+ )
+ generated_ids = [
+     output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+ ]
+
+ response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
+ ```
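+
+ Note that `response` contains only the newly generated text, because the prompt tokens are sliced off each sequence before decoding. Multi-turn chat follows the same pattern; a minimal sketch that reuses the objects above (the follow-up question is illustrative):
+
+ ```python
+ # Append the model's reply and a new user turn, then rebuild the prompt with the chat template
+ messages.append({"role": "assistant", "content": response})
+ messages.append({"role": "user", "content": "Can you make it shorter?"})
+ text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+ ```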
+
+
+ ## Evaluation & Performance
+
+ Detailed evaluation results are reported in this [📑 blog](https://qwenlm.github.io/blog/qwen2.5/).
+
+ For requirements on GPU memory and the respective throughput, see results [here](https://qwen.readthedocs.io/en/latest/benchmark/speed_benchmark.html).
+
+ ## Citation
+
+ If you find our work helpful, feel free to cite us.
+
+ ```
+ @misc{qwen2.5,
+     title = {Qwen2.5: A Party of Foundation Models},
+     url = {https://qwenlm.github.io/blog/qwen2.5/},
+     author = {Qwen Team},
+     month = {September},
+     year = {2024}
+ }
+
+ @article{qwen2,
+     title={Qwen2 Technical Report},
+     author={An Yang and Baosong Yang and Binyuan Hui and Bo Zheng and Bowen Yu and Chang Zhou and Chengpeng Li and Chengyuan Li and Dayiheng Liu and Fei Huang and Guanting Dong and Haoran Wei and Huan Lin and Jialong Tang and Jialin Wang and Jian Yang and Jianhong Tu and Jianwei Zhang and Jianxin Ma and Jin Xu and Jingren Zhou and Jinze Bai and Jinzheng He and Junyang Lin and Kai Dang and Keming Lu and Keqin Chen and Kexin Yang and Mei Li and Mingfeng Xue and Na Ni and Pei Zhang and Peng Wang and Ru Peng and Rui Men and Ruize Gao and Runji Lin and Shijie Wang and Shuai Bai and Sinan Tan and Tianhang Zhu and Tianhao Li and Tianyu Liu and Wenbin Ge and Xiaodong Deng and Xiaohuan Zhou and Xingzhang Ren and Xinyu Zhang and Xipin Wei and Xuancheng Ren and Yang Fan and Yang Yao and Yichang Zhang and Yu Wan and Yunfei Chu and Yuqiong Liu and Zeyu Cui and Zhenru Zhang and Zhihao Fan},
+     journal={arXiv preprint arXiv:2407.10671},
+     year={2024}
+ }
+ ```
@@ -0,0 +1,614 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "</t5>": 151937,
3
+ "<video_padding>": 151939,
4
+ "<timestamp>": 152165,
5
+ "</timestamp>": 152164,
6
+ "<t5>": 151936,
7
+ "<t5_00000>": 151836,
8
+ "<t5_00001>": 151837,
9
+ "<t5_00002>": 151838,
10
+ "<t5_00003>": 151839,
11
+ "<t5_00004>": 151840,
12
+ "<t5_00005>": 151841,
13
+ "<t5_00006>": 151842,
14
+ "<t5_00007>": 151843,
15
+ "<t5_00008>": 151844,
16
+ "<t5_00009>": 151845,
17
+ "<t5_00010>": 151846,
18
+ "<t5_00011>": 151847,
19
+ "<t5_00012>": 151848,
20
+ "<t5_00013>": 151849,
21
+ "<t5_00014>": 151850,
22
+ "<t5_00015>": 151851,
23
+ "<t5_00016>": 151852,
24
+ "<t5_00017>": 151853,
25
+ "<t5_00018>": 151854,
26
+ "<t5_00019>": 151855,
27
+ "<t5_00020>": 151856,
28
+ "<t5_00021>": 151857,
29
+ "<t5_00022>": 151858,
30
+ "<t5_00023>": 151859,
31
+ "<t5_00024>": 151860,
32
+ "<t5_00025>": 151861,
33
+ "<t5_00026>": 151862,
34
+ "<t5_00027>": 151863,
35
+ "<t5_00028>": 151864,
36
+ "<t5_00029>": 151865,
37
+ "<t5_00030>": 151866,
38
+ "<t5_00031>": 151867,
39
+ "<t5_00032>": 151868,
40
+ "<t5_00033>": 151869,
41
+ "<t5_00034>": 151870,
42
+ "<t5_00035>": 151871,
43
+ "<t5_00036>": 151872,
44
+ "<t5_00037>": 151873,
45
+ "<t5_00038>": 151874,
46
+ "<t5_00039>": 151875,
47
+ "<t5_00040>": 151876,
48
+ "<t5_00041>": 151877,
49
+ "<t5_00042>": 151878,
50
+ "<t5_00043>": 151879,
51
+ "<t5_00044>": 151880,
52
+ "<t5_00045>": 151881,
53
+ "<t5_00046>": 151882,
54
+ "<t5_00047>": 151883,
55
+ "<t5_00048>": 151884,
56
+ "<t5_00049>": 151885,
57
+ "<t5_00050>": 151886,
58
+ "<t5_00051>": 151887,
59
+ "<t5_00052>": 151888,
60
+ "<t5_00053>": 151889,
61
+ "<t5_00054>": 151890,
62
+ "<t5_00055>": 151891,
63
+ "<t5_00056>": 151892,
64
+ "<t5_00057>": 151893,
65
+ "<t5_00058>": 151894,
66
+ "<t5_00059>": 151895,
67
+ "<t5_00060>": 151896,
68
+ "<t5_00061>": 151897,
69
+ "<t5_00062>": 151898,
70
+ "<t5_00063>": 151899,
71
+ "<t5_00064>": 151900,
72
+ "<t5_00065>": 151901,
73
+ "<t5_00066>": 151902,
74
+ "<t5_00067>": 151903,
75
+ "<t5_00068>": 151904,
76
+ "<t5_00069>": 151905,
77
+ "<t5_00070>": 151906,
78
+ "<t5_00071>": 151907,
79
+ "<t5_00072>": 151908,
80
+ "<t5_00073>": 151909,
81
+ "<t5_00074>": 151910,
82
+ "<t5_00075>": 151911,
83
+ "<t5_00076>": 151912,
84
+ "<t5_00077>": 151913,
85
+ "<t5_00078>": 151914,
86
+ "<t5_00079>": 151915,
87
+ "<t5_00080>": 151916,
88
+ "<t5_00081>": 151917,
89
+ "<t5_00082>": 151918,
90
+ "<t5_00083>": 151919,
91
+ "<t5_00084>": 151920,
92
+ "<t5_00085>": 151921,
93
+ "<t5_00086>": 151922,
94
+ "<t5_00087>": 151923,
95
+ "<t5_00088>": 151924,
96
+ "<t5_00089>": 151925,
97
+ "<t5_00090>": 151926,
98
+ "<t5_00091>": 151927,
99
+ "<t5_00092>": 151928,
100
+ "<t5_00093>": 151929,
101
+ "<t5_00094>": 151930,
102
+ "<t5_00095>": 151931,
103
+ "<t5_00096>": 151932,
104
+ "<t5_00097>": 151933,
105
+ "<t5_00098>": 151934,
106
+ "<t5_00099>": 151935,
107
+ "<aud>": 152166,
108
+ "</aud>": 152167,
109
+ "<aud_00000>": 152168,
110
+ "<aud_00001>": 152169,
111
+ "<aud_00002>": 152170,
112
+ "<aud_00003>": 152171,
113
+ "<aud_00004>": 152172,
114
+ "<aud_00005>": 152173,
115
+ "<aud_00006>": 152174,
116
+ "<aud_00007>": 152175,
117
+ "<aud_00008>": 152176,
118
+ "<aud_00009>": 152177,
119
+ "<aud_00010>": 152178,
120
+ "<aud_00011>": 152179,
121
+ "<aud_00012>": 152180,
122
+ "<aud_00013>": 152181,
123
+ "<aud_00014>": 152182,
124
+ "<aud_00015>": 152183,
125
+ "<aud_00016>": 152184,
126
+ "<aud_00017>": 152185,
127
+ "<aud_00018>": 152186,
128
+ "<aud_00019>": 152187,
129
+ "<aud_00020>": 152188,
130
+ "<aud_00021>": 152189,
131
+ "<aud_00022>": 152190,
132
+ "<aud_00023>": 152191,
133
+ "<aud_00024>": 152192,
134
+ "<aud_00025>": 152193,
135
+ "<aud_00026>": 152194,
136
+ "<aud_00027>": 152195,
137
+ "<aud_00028>": 152196,
138
+ "<aud_00029>": 152197,
139
+ "<aud_00030>": 152198,
140
+ "<aud_00031>": 152199,
141
+ "<aud_00032>": 152200,
142
+ "<aud_00033>": 152201,
143
+ "<aud_00034>": 152202,
144
+ "<aud_00035>": 152203,
145
+ "<aud_00036>": 152204,
146
+ "<aud_00037>": 152205,
147
+ "<aud_00038>": 152206,
148
+ "<aud_00039>": 152207,
149
+ "<aud_00040>": 152208,
150
+ "<aud_00041>": 152209,
151
+ "<aud_00042>": 152210,
152
+ "<aud_00043>": 152211,
153
+ "<aud_00044>": 152212,
154
+ "<aud_00045>": 152213,
155
+ "<aud_00046>": 152214,
156
+ "<aud_00047>": 152215,
157
+ "<aud_00048>": 152216,
158
+ "<aud_00049>": 152217,
159
+ "<aud_00050>": 152218,
160
+ "<aud_00051>": 152219,
161
+ "<aud_00052>": 152220,
162
+ "<aud_00053>": 152221,
163
+ "<aud_00054>": 152222,
164
+ "<aud_00055>": 152223,
165
+ "<aud_00056>": 152224,
166
+ "<aud_00057>": 152225,
167
+ "<aud_00058>": 152226,
168
+ "<aud_00059>": 152227,
169
+ "<aud_00060>": 152228,
170
+ "<aud_00061>": 152229,
171
+ "<aud_00062>": 152230,
172
+ "<aud_00063>": 152231,
173
+ "<aud_00064>": 152232,
174
+ "<aud_00065>": 152233,
175
+ "<aud_00066>": 152234,
176
+ "<aud_00067>": 152235,
177
+ "<aud_00068>": 152236,
178
+ "<aud_00069>": 152237,
179
+ "<aud_00070>": 152238,
180
+ "<aud_00071>": 152239,
181
+ "<aud_00072>": 152240,
182
+ "<aud_00073>": 152241,
183
+ "<aud_00074>": 152242,
184
+ "<aud_00075>": 152243,
185
+ "<aud_00076>": 152244,
186
+ "<aud_00077>": 152245,
187
+ "<aud_00078>": 152246,
188
+ "<aud_00079>": 152247,
189
+ "<aud_00080>": 152248,
190
+ "<aud_00081>": 152249,
191
+ "<aud_00082>": 152250,
192
+ "<aud_00083>": 152251,
193
+ "<aud_00084>": 152252,
194
+ "<aud_00085>": 152253,
195
+ "<aud_00086>": 152254,
196
+ "<aud_00087>": 152255,
197
+ "<aud_00088>": 152256,
198
+ "<aud_00089>": 152257,
199
+ "<aud_00090>": 152258,
200
+ "<aud_00091>": 152259,
201
+ "<aud_00092>": 152260,
202
+ "<aud_00093>": 152261,
203
+ "<aud_00094>": 152262,
204
+ "<aud_00095>": 152263,
205
+ "<aud_00096>": 152264,
206
+ "<aud_00097>": 152265,
207
+ "<aud_00098>": 152266,
208
+ "<aud_00099>": 152267,
209
+ "<aud_00100>": 152268,
210
+ "<aud_00101>": 152269,
211
+ "<aud_00102>": 152270,
212
+ "<aud_00103>": 152271,
213
+ "<aud_00104>": 152272,
214
+ "<aud_00105>": 152273,
215
+ "<aud_00106>": 152274,
216
+ "<aud_00107>": 152275,
217
+ "<aud_00108>": 152276,
218
+ "<aud_00109>": 152277,
219
+ "<aud_00110>": 152278,
220
+ "<aud_00111>": 152279,
221
+ "<aud_00112>": 152280,
222
+ "<aud_00113>": 152281,
223
+ "<aud_00114>": 152282,
224
+ "<aud_00115>": 152283,
225
+ "<aud_00116>": 152284,
226
+ "<aud_00117>": 152285,
227
+ "<aud_00118>": 152286,
228
+ "<aud_00119>": 152287,
229
+ "<aud_00120>": 152288,
230
+ "<aud_00121>": 152289,
231
+ "<aud_00122>": 152290,
232
+ "<aud_00123>": 152291,
233
+ "<aud_00124>": 152292,
234
+ "<aud_00125>": 152293,
235
+ "<aud_00126>": 152294,
236
+ "<aud_00127>": 152295,
237
+ "<aud_00128>": 152296,
238
+ "<aud_00129>": 152297,
239
+ "<aud_00130>": 152298,
240
+ "<aud_00131>": 152299,
241
+ "<aud_00132>": 152300,
242
+ "<aud_00133>": 152301,
243
+ "<aud_00134>": 152302,
244
+ "<aud_00135>": 152303,
245
+ "<aud_00136>": 152304,
246
+ "<aud_00137>": 152305,
247
+ "<aud_00138>": 152306,
248
+ "<aud_00139>": 152307,
249
+ "<aud_00140>": 152308,
250
+ "<aud_00141>": 152309,
251
+ "<aud_00142>": 152310,
252
+ "<aud_00143>": 152311,
253
+ "<aud_00144>": 152312,
254
+ "<aud_00145>": 152313,
255
+ "<aud_00146>": 152314,
256
+ "<aud_00147>": 152315,
257
+ "<aud_00148>": 152316,
258
+ "<aud_00149>": 152317,
259
+ "<aud_00150>": 152318,
260
+ "<aud_00151>": 152319,
261
+ "<aud_00152>": 152320,
262
+ "<aud_00153>": 152321,
263
+ "<aud_00154>": 152322,
264
+ "<aud_00155>": 152323,
265
+ "<aud_00156>": 152324,
266
+ "<aud_00157>": 152325,
267
+ "<aud_00158>": 152326,
268
+ "<aud_00159>": 152327,
269
+ "<aud_00160>": 152328,
270
+ "<aud_00161>": 152329,
271
+ "<aud_00162>": 152330,
272
+ "<aud_00163>": 152331,
273
+ "<aud_00164>": 152332,
274
+ "<aud_00165>": 152333,
275
+ "<aud_00166>": 152334,
276
+ "<aud_00167>": 152335,
277
+ "<aud_00168>": 152336,
278
+ "<aud_00169>": 152337,
279
+ "<aud_00170>": 152338,
280
+ "<aud_00171>": 152339,
281
+ "<aud_00172>": 152340,
282
+ "<aud_00173>": 152341,
283
+ "<aud_00174>": 152342,
284
+ "<aud_00175>": 152343,
285
+ "<aud_00176>": 152344,
286
+ "<aud_00177>": 152345,
287
+ "<aud_00178>": 152346,
288
+ "<aud_00179>": 152347,
289
+ "<aud_00180>": 152348,
290
+ "<aud_00181>": 152349,
291
+ "<aud_00182>": 152350,
292
+ "<aud_00183>": 152351,
293
+ "<aud_00184>": 152352,
294
+ "<aud_00185>": 152353,
295
+ "<aud_00186>": 152354,
296
+ "<aud_00187>": 152355,
297
+ "<aud_00188>": 152356,
298
+ "<aud_00189>": 152357,
299
+ "<aud_00190>": 152358,
300
+ "<aud_00191>": 152359,
301
+ "<aud_00192>": 152360,
302
+ "<aud_00193>": 152361,
303
+ "<aud_00194>": 152362,
304
+ "<aud_00195>": 152363,
305
+ "<aud_00196>": 152364,
306
+ "<aud_00197>": 152365,
307
+ "<aud_00198>": 152366,
308
+ "<aud_00199>": 152367,
309
+ "<aud_00200>": 152368,
310
+ "<aud_00201>": 152369,
311
+ "<aud_00202>": 152370,
312
+ "<aud_00203>": 152371,
313
+ "<aud_00204>": 152372,
314
+ "<aud_00205>": 152373,
315
+ "<aud_00206>": 152374,
316
+ "<aud_00207>": 152375,
317
+ "<aud_00208>": 152376,
318
+ "<aud_00209>": 152377,
319
+ "<aud_00210>": 152378,
320
+ "<aud_00211>": 152379,
321
+ "<aud_00212>": 152380,
322
+ "<aud_00213>": 152381,
323
+ "<aud_00214>": 152382,
324
+ "<aud_00215>": 152383,
325
+ "<aud_00216>": 152384,
326
+ "<aud_00217>": 152385,
327
+ "<aud_00218>": 152386,
328
+ "<aud_00219>": 152387,
329
+ "<aud_00220>": 152388,
330
+ "<aud_00221>": 152389,
331
+ "<aud_00222>": 152390,
332
+ "<aud_00223>": 152391,
333
+ "<aud_00224>": 152392,
334
+ "<aud_00225>": 152393,
335
+ "<aud_00226>": 152394,
336
+ "<aud_00227>": 152395,
337
+ "<aud_00228>": 152396,
338
+ "<aud_00229>": 152397,
339
+ "<aud_00230>": 152398,
340
+ "<aud_00231>": 152399,
341
+ "<aud_00232>": 152400,
342
+ "<aud_00233>": 152401,
343
+ "<aud_00234>": 152402,
344
+ "<aud_00235>": 152403,
345
+ "<aud_00236>": 152404,
346
+ "<aud_00237>": 152405,
347
+ "<aud_00238>": 152406,
348
+ "<aud_00239>": 152407,
349
+ "<aud_00240>": 152408,
350
+ "<aud_00241>": 152409,
351
+ "<aud_00242>": 152410,
352
+ "<aud_00243>": 152411,
353
+ "<aud_00244>": 152412,
354
+ "<aud_00245>": 152413,
355
+ "<aud_00246>": 152414,
356
+ "<aud_00247>": 152415,
357
+ "<aud_00248>": 152416,
358
+ "<aud_00249>": 152417,
359
+ "<aud_00250>": 152418,
360
+ "<aud_00251>": 152419,
361
+ "<aud_00252>": 152420,
362
+ "<aud_00253>": 152421,
363
+ "<aud_00254>": 152422,
364
+ "<aud_00255>": 152423,
365
+ "<aud_00256>": 152424,
366
+ "<aud_00257>": 152425,
367
+ "<aud_00258>": 152426,
368
+ "<aud_00259>": 152427,
369
+ "<aud_00260>": 152428,
370
+ "<aud_00261>": 152429,
371
+ "<aud_00262>": 152430,
372
+ "<aud_00263>": 152431,
373
+ "<aud_00264>": 152432,
374
+ "<aud_00265>": 152433,
375
+ "<aud_00266>": 152434,
376
+ "<aud_00267>": 152435,
377
+ "<aud_00268>": 152436,
378
+ "<aud_00269>": 152437,
379
+ "<aud_00270>": 152438,
380
+ "<aud_00271>": 152439,
381
+ "<aud_00272>": 152440,
382
+ "<aud_00273>": 152441,
383
+ "<aud_00274>": 152442,
384
+ "<aud_00275>": 152443,
385
+ "<aud_00276>": 152444,
386
+ "<aud_00277>": 152445,
387
+ "<aud_00278>": 152446,
388
+ "<aud_00279>": 152447,
389
+ "<aud_00280>": 152448,
390
+ "<aud_00281>": 152449,
391
+ "<aud_00282>": 152450,
392
+ "<aud_00283>": 152451,
393
+ "<aud_00284>": 152452,
394
+ "<aud_00285>": 152453,
395
+ "<aud_00286>": 152454,
396
+ "<aud_00287>": 152455,
397
+ "<aud_00288>": 152456,
398
+ "<aud_00289>": 152457,
399
+ "<aud_00290>": 152458,
400
+ "<aud_00291>": 152459,
401
+ "<aud_00292>": 152460,
402
+ "<aud_00293>": 152461,
403
+ "<aud_00294>": 152462,
404
+ "<aud_00295>": 152463,
405
+ "<aud_00296>": 152464,
406
+ "<aud_00297>": 152465,
407
+ "<aud_00298>": 152466,
408
+ "<aud_00299>": 152467,
409
+ "<aud_00300>": 152468,
410
+ "<aud_00301>": 152469,
411
+ "<aud_00302>": 152470,
412
+ "<aud_00303>": 152471,
413
+ "<aud_00304>": 152472,
414
+ "<aud_00305>": 152473,
415
+ "<aud_00306>": 152474,
416
+ "<aud_00307>": 152475,
417
+ "<aud_00308>": 152476,
418
+ "<aud_00309>": 152477,
419
+ "<aud_00310>": 152478,
420
+ "<aud_00311>": 152479,
421
+ "<aud_00312>": 152480,
422
+ "<aud_00313>": 152481,
423
+ "<aud_00314>": 152482,
424
+ "<aud_00315>": 152483,
425
+ "<aud_00316>": 152484,
426
+ "<aud_00317>": 152485,
427
+ "<aud_00318>": 152486,
428
+ "<aud_00319>": 152487,
429
+ "<aud_00320>": 152488,
430
+ "<aud_00321>": 152489,
431
+ "<aud_00322>": 152490,
432
+ "<aud_00323>": 152491,
433
+ "<aud_00324>": 152492,
434
+ "<aud_00325>": 152493,
435
+ "<aud_00326>": 152494,
436
+ "<aud_00327>": 152495,
437
+ "<aud_00328>": 152496,
438
+ "<aud_00329>": 152497,
439
+ "<aud_00330>": 152498,
440
+ "<aud_00331>": 152499,
441
+ "<aud_00332>": 152500,
442
+ "<aud_00333>": 152501,
443
+ "<aud_00334>": 152502,
444
+ "<aud_00335>": 152503,
445
+ "<aud_00336>": 152504,
446
+ "<aud_00337>": 152505,
447
+ "<aud_00338>": 152506,
448
+ "<aud_00339>": 152507,
449
+ "<aud_00340>": 152508,
450
+ "<aud_00341>": 152509,
451
+ "<aud_00342>": 152510,
452
+ "<aud_00343>": 152511,
453
+ "<aud_00344>": 152512,
454
+ "<aud_00345>": 152513,
455
+ "<aud_00346>": 152514,
456
+ "<aud_00347>": 152515,
457
+ "<aud_00348>": 152516,
458
+ "<aud_00349>": 152517,
459
+ "<aud_00350>": 152518,
460
+ "<aud_00351>": 152519,
461
+ "<aud_00352>": 152520,
462
+ "<aud_00353>": 152521,
463
+ "<aud_00354>": 152522,
464
+ "<aud_00355>": 152523,
465
+ "<aud_00356>": 152524,
466
+ "<aud_00357>": 152525,
467
+ "<aud_00358>": 152526,
468
+ "<aud_00359>": 152527,
469
+ "<aud_00360>": 152528,
470
+ "<aud_00361>": 152529,
471
+ "<aud_00362>": 152530,
472
+ "<aud_00363>": 152531,
473
+ "<aud_00364>": 152532,
474
+ "<aud_00365>": 152533,
475
+ "<aud_00366>": 152534,
476
+ "<aud_00367>": 152535,
477
+ "<aud_00368>": 152536,
478
+ "<aud_00369>": 152537,
479
+ "<aud_00370>": 152538,
480
+ "<aud_00371>": 152539,
481
+ "<aud_00372>": 152540,
482
+ "<aud_00373>": 152541,
483
+ "<aud_00374>": 152542,
484
+ "<aud_00375>": 152543,
485
+ "<aud_00376>": 152544,
486
+ "<aud_00377>": 152545,
487
+ "<aud_00378>": 152546,
488
+ "<aud_00379>": 152547,
489
+ "<aud_00380>": 152548,
490
+ "<aud_00381>": 152549,
491
+ "<aud_00382>": 152550,
492
+ "<aud_00383>": 152551,
493
+ "<aud_00384>": 152552,
494
+ "<aud_00385>": 152553,
495
+ "<aud_00386>": 152554,
496
+ "<aud_00387>": 152555,
497
+ "<aud_00388>": 152556,
498
+ "<aud_00389>": 152557,
499
+ "<aud_00390>": 152558,
500
+ "<aud_00391>": 152559,
501
+ "<aud_00392>": 152560,
502
+ "<aud_00393>": 152561,
503
+ "<aud_00394>": 152562,
504
+ "<aud_00395>": 152563,
505
+ "<aud_00396>": 152564,
506
+ "<aud_00397>": 152565,
507
+ "<aud_00398>": 152566,
508
+ "<aud_00399>": 152567,
509
+ "<aud_00400>": 152568,
510
+ "<aud_00401>": 152569,
511
+ "<aud_00402>": 152570,
512
+ "<aud_00403>": 152571,
513
+ "<aud_00404>": 152572,
514
+ "<aud_00405>": 152573,
515
+ "<aud_00406>": 152574,
516
+ "<aud_00407>": 152575,
517
+ "<aud_00408>": 152576,
518
+ "<aud_00409>": 152577,
519
+ "<aud_00410>": 152578,
520
+ "<aud_00411>": 152579,
521
+ "<aud_00412>": 152580,
522
+ "<aud_00413>": 152581,
523
+ "<aud_00414>": 152582,
524
+ "<aud_00415>": 152583,
525
+ "<aud_00416>": 152584,
526
+ "<aud_00417>": 152585,
527
+ "<aud_00418>": 152586,
528
+ "<aud_00419>": 152587,
529
+ "<aud_00420>": 152588,
530
+ "<aud_00421>": 152589,
531
+ "<aud_00422>": 152590,
532
+ "<aud_00423>": 152591,
533
+ "<aud_00424>": 152592,
534
+ "<aud_00425>": 152593,
535
+ "<aud_00426>": 152594,
536
+ "<aud_00427>": 152595,
537
+ "<aud_00428>": 152596,
538
+ "<aud_00429>": 152597,
539
+ "<aud_00430>": 152598,
540
+ "<aud_00431>": 152599,
541
+ "<aud_00432>": 152600,
542
+ "<aud_00433>": 152601,
543
+ "<aud_00434>": 152602,
544
+ "<aud_00435>": 152603,
545
+ "<aud_00436>": 152604,
546
+ "<aud_00437>": 152605,
547
+ "<aud_00438>": 152606,
548
+ "<aud_00439>": 152607,
549
+ "<aud_00440>": 152608,
550
+ "<aud_00441>": 152609,
551
+ "<aud_00442>": 152610,
552
+ "<aud_00443>": 152611,
553
+ "<aud_00444>": 152612,
554
+ "<aud_00445>": 152613,
555
+ "<aud_00446>": 152614,
556
+ "<aud_00447>": 152615,
557
+ "<aud_00448>": 152616,
558
+ "<aud_00449>": 152617,
559
+ "<aud_00450>": 152618,
560
+ "<aud_00451>": 152619,
561
+ "<aud_00452>": 152620,
562
+ "<aud_00453>": 152621,
563
+ "<aud_00454>": 152622,
564
+ "<aud_00455>": 152623,
565
+ "<aud_00456>": 152624,
566
+ "<aud_00457>": 152625,
567
+ "<aud_00458>": 152626,
568
+ "<aud_00459>": 152627,
569
+ "<aud_00460>": 152628,
570
+ "<aud_00461>": 152629,
571
+ "<aud_00462>": 152630,
572
+ "<aud_00463>": 152631,
573
+ "<aud_00464>": 152632,
574
+ "<aud_00465>": 152633,
575
+ "<aud_00466>": 152634,
576
+ "<aud_00467>": 152635,
577
+ "<aud_00468>": 152636,
578
+ "<aud_00469>": 152637,
579
+ "<aud_00470>": 152638,
580
+ "<aud_00471>": 152639,
581
+ "<aud_00472>": 152640,
582
+ "<aud_00473>": 152641,
583
+ "<aud_00474>": 152642,
584
+ "<aud_00475>": 152643,
585
+ "<aud_00476>": 152644,
586
+ "<aud_00477>": 152645,
587
+ "<aud_00478>": 152646,
588
+ "<aud_00479>": 152647,
589
+ "<aud_00480>": 152648,
590
+ "<aud_00481>": 152649,
591
+ "<aud_00482>": 152650,
592
+ "<aud_00483>": 152651,
593
+ "<aud_00484>": 152652,
594
+ "<aud_00485>": 152653,
595
+ "<aud_00486>": 152654,
596
+ "<aud_00487>": 152655,
597
+ "<aud_00488>": 152656,
598
+ "<aud_00489>": 152657,
599
+ "<aud_00490>": 152658,
600
+ "<aud_00491>": 152659,
601
+ "<aud_00492>": 152660,
602
+ "<aud_00493>": 152661,
603
+ "<aud_00494>": 152662,
604
+ "<aud_00495>": 152663,
605
+ "<aud_00496>": 152664,
606
+ "<aud_00497>": 152665,
607
+ "<aud_00498>": 152666,
608
+ "<aud_00499>": 152667,
609
+ "<aud_00500>": 152668,
610
+ "<|gen|>": 151938,
611
+ "<|/gen|>": 151938,
612
+ "<|think|>": 151938,
613
+ "<|/think|>": 151938
614
+ }
Qwen2.5-3B-Instruct/added_tokens_multi_audio copy.json ADDED
@@ -0,0 +1,815 @@
+ {
+ "</t5>": 151937,
+ "<video_padding>": 151939,
+ "<box_end>": 152165,
+ "<box_start>": 152164,
+ "<t5>": 151936,
+ "<t5_00000>": 151836,
+ "<t5_00001>": 151837,
+ "<t5_00002>": 151838,
+ "<t5_00003>": 151839,
+ "<t5_00004>": 151840,
+ "<t5_00005>": 151841,
+ "<t5_00006>": 151842,
+ "<t5_00007>": 151843,
+ "<t5_00008>": 151844,
+ "<t5_00009>": 151845,
+ "<t5_00010>": 151846,
+ "<t5_00011>": 151847,
+ "<t5_00012>": 151848,
+ "<t5_00013>": 151849,
+ "<t5_00014>": 151850,
+ "<t5_00015>": 151851,
+ "<t5_00016>": 151852,
+ "<t5_00017>": 151853,
+ "<t5_00018>": 151854,
+ "<t5_00019>": 151855,
+ "<t5_00020>": 151856,
+ "<t5_00021>": 151857,
+ "<t5_00022>": 151858,
+ "<t5_00023>": 151859,
+ "<t5_00024>": 151860,
+ "<t5_00025>": 151861,
+ "<t5_00026>": 151862,
+ "<t5_00027>": 151863,
+ "<t5_00028>": 151864,
+ "<t5_00029>": 151865,
+ "<t5_00030>": 151866,
+ "<t5_00031>": 151867,
+ "<t5_00032>": 151868,
+ "<t5_00033>": 151869,
+ "<t5_00034>": 151870,
+ "<t5_00035>": 151871,
+ "<t5_00036>": 151872,
+ "<t5_00037>": 151873,
+ "<t5_00038>": 151874,
+ "<t5_00039>": 151875,
+ "<t5_00040>": 151876,
+ "<t5_00041>": 151877,
+ "<t5_00042>": 151878,
+ "<t5_00043>": 151879,
+ "<t5_00044>": 151880,
+ "<t5_00045>": 151881,
+ "<t5_00046>": 151882,
+ "<t5_00047>": 151883,
+ "<t5_00048>": 151884,
+ "<t5_00049>": 151885,
+ "<t5_00050>": 151886,
+ "<t5_00051>": 151887,
+ "<t5_00052>": 151888,
+ "<t5_00053>": 151889,
+ "<t5_00054>": 151890,
+ "<t5_00055>": 151891,
+ "<t5_00056>": 151892,
+ "<t5_00057>": 151893,
+ "<t5_00058>": 151894,
+ "<t5_00059>": 151895,
+ "<t5_00060>": 151896,
+ "<t5_00061>": 151897,
+ "<t5_00062>": 151898,
+ "<t5_00063>": 151899,
+ "<t5_00064>": 151900,
+ "<t5_00065>": 151901,
+ "<t5_00066>": 151902,
+ "<t5_00067>": 151903,
+ "<t5_00068>": 151904,
+ "<t5_00069>": 151905,
+ "<t5_00070>": 151906,
+ "<t5_00071>": 151907,
+ "<t5_00072>": 151908,
+ "<t5_00073>": 151909,
+ "<t5_00074>": 151910,
+ "<t5_00075>": 151911,
+ "<t5_00076>": 151912,
+ "<t5_00077>": 151913,
+ "<t5_00078>": 151914,
+ "<t5_00079>": 151915,
+ "<t5_00080>": 151916,
+ "<t5_00081>": 151917,
+ "<t5_00082>": 151918,
+ "<t5_00083>": 151919,
+ "<t5_00084>": 151920,
+ "<t5_00085>": 151921,
+ "<t5_00086>": 151922,
+ "<t5_00087>": 151923,
+ "<t5_00088>": 151924,
+ "<t5_00089>": 151925,
+ "<t5_00090>": 151926,
+ "<t5_00091>": 151927,
+ "<t5_00092>": 151928,
+ "<t5_00093>": 151929,
+ "<t5_00094>": 151930,
+ "<t5_00095>": 151931,
+ "<t5_00096>": 151932,
+ "<t5_00097>": 151933,
+ "<t5_00098>": 151934,
+ "<t5_00099>": 151935,
+ "<aud>": 152166,
+ "</aud>": 152167,
+ "<aud_00000>": 152168,
+ "<aud_00001>": 152169,
+ "<aud_00002>": 152170,
+ "<aud_00003>": 152171,
+ "<aud_00004>": 152172,
+ "<aud_00005>": 152173,
+ "<aud_00006>": 152174,
+ "<aud_00007>": 152175,
+ "<aud_00008>": 152176,
+ "<aud_00009>": 152177,
+ "<aud_00010>": 152178,
+ "<aud_00011>": 152179,
+ "<aud_00012>": 152180,
+ "<aud_00013>": 152181,
+ "<aud_00014>": 152182,
+ "<aud_00015>": 152183,
+ "<aud_00016>": 152184,
+ "<aud_00017>": 152185,
+ "<aud_00018>": 152186,
+ "<aud_00019>": 152187,
+ "<aud_00020>": 152188,
+ "<aud_00021>": 152189,
+ "<aud_00022>": 152190,
+ "<aud_00023>": 152191,
+ "<aud_00024>": 152192,
+ "<aud_00025>": 152193,
+ "<aud_00026>": 152194,
+ "<aud_00027>": 152195,
+ "<aud_00028>": 152196,
+ "<aud_00029>": 152197,
+ "<aud_00030>": 152198,
+ "<aud_00031>": 152199,
+ "<aud_00032>": 152200,
+ "<aud_00033>": 152201,
+ "<aud_00034>": 152202,
+ "<aud_00035>": 152203,
+ "<aud_00036>": 152204,
+ "<aud_00037>": 152205,
+ "<aud_00038>": 152206,
+ "<aud_00039>": 152207,
+ "<aud_00040>": 152208,
+ "<aud_00041>": 152209,
+ "<aud_00042>": 152210,
+ "<aud_00043>": 152211,
+ "<aud_00044>": 152212,
+ "<aud_00045>": 152213,
+ "<aud_00046>": 152214,
+ "<aud_00047>": 152215,
+ "<aud_00048>": 152216,
+ "<aud_00049>": 152217,
+ "<aud_00050>": 152218,
+ "<aud_00051>": 152219,
+ "<aud_00052>": 152220,
+ "<aud_00053>": 152221,
+ "<aud_00054>": 152222,
+ "<aud_00055>": 152223,
+ "<aud_00056>": 152224,
+ "<aud_00057>": 152225,
+ "<aud_00058>": 152226,
+ "<aud_00059>": 152227,
+ "<aud_00060>": 152228,
+ "<aud_00061>": 152229,
+ "<aud_00062>": 152230,
+ "<aud_00063>": 152231,
+ "<aud_00064>": 152232,
+ "<aud_00065>": 152233,
+ "<aud_00066>": 152234,
+ "<aud_00067>": 152235,
+ "<aud_00068>": 152236,
+ "<aud_00069>": 152237,
+ "<aud_00070>": 152238,
+ "<aud_00071>": 152239,
+ "<aud_00072>": 152240,
+ "<aud_00073>": 152241,
+ "<aud_00074>": 152242,
+ "<aud_00075>": 152243,
+ "<aud_00076>": 152244,
+ "<aud_00077>": 152245,
+ "<aud_00078>": 152246,
+ "<aud_00079>": 152247,
+ "<aud_00080>": 152248,
+ "<aud_00081>": 152249,
+ "<aud_00082>": 152250,
+ "<aud_00083>": 152251,
+ "<aud_00084>": 152252,
+ "<aud_00085>": 152253,
+ "<aud_00086>": 152254,
+ "<aud_00087>": 152255,
+ "<aud_00088>": 152256,
+ "<aud_00089>": 152257,
+ "<aud_00090>": 152258,
+ "<aud_00091>": 152259,
+ "<aud_00092>": 152260,
+ "<aud_00093>": 152261,
+ "<aud_00094>": 152262,
+ "<aud_00095>": 152263,
+ "<aud_00096>": 152264,
+ "<aud_00097>": 152265,
+ "<aud_00098>": 152266,
+ "<aud_00099>": 152267,
+ "<aud_00100>": 152268,
+ "<aud_00101>": 152269,
+ "<aud_00102>": 152270,
+ "<aud_00103>": 152271,
+ "<aud_00104>": 152272,
+ "<aud_00105>": 152273,
+ "<aud_00106>": 152274,
+ "<aud_00107>": 152275,
+ "<aud_00108>": 152276,
+ "<aud_00109>": 152277,
+ "<aud_00110>": 152278,
+ "<aud_00111>": 152279,
+ "<aud_00112>": 152280,
+ "<aud_00113>": 152281,
+ "<aud_00114>": 152282,
+ "<aud_00115>": 152283,
+ "<aud_00116>": 152284,
+ "<aud_00117>": 152285,
+ "<aud_00118>": 152286,
+ "<aud_00119>": 152287,
+ "<aud_00120>": 152288,
+ "<aud_00121>": 152289,
+ "<aud_00122>": 152290,
+ "<aud_00123>": 152291,
+ "<aud_00124>": 152292,
+ "<aud_00125>": 152293,
+ "<aud_00126>": 152294,
+ "<aud_00127>": 152295,
+ "<aud_00128>": 152296,
+ "<aud_00129>": 152297,
+ "<aud_00130>": 152298,
+ "<aud_00131>": 152299,
+ "<aud_00132>": 152300,
+ "<aud_00133>": 152301,
+ "<aud_00134>": 152302,
+ "<aud_00135>": 152303,
+ "<aud_00136>": 152304,
+ "<aud_00137>": 152305,
+ "<aud_00138>": 152306,
+ "<aud_00139>": 152307,
+ "<aud_00140>": 152308,
+ "<aud_00141>": 152309,
+ "<aud_00142>": 152310,
+ "<aud_00143>": 152311,
+ "<aud_00144>": 152312,
+ "<aud_00145>": 152313,
+ "<aud_00146>": 152314,
+ "<aud_00147>": 152315,
+ "<aud_00148>": 152316,
+ "<aud_00149>": 152317,
+ "<aud_00150>": 152318,
+ "<aud_00151>": 152319,
+ "<aud_00152>": 152320,
+ "<aud_00153>": 152321,
+ "<aud_00154>": 152322,
+ "<aud_00155>": 152323,
+ "<aud_00156>": 152324,
+ "<aud_00157>": 152325,
+ "<aud_00158>": 152326,
+ "<aud_00159>": 152327,
+ "<aud_00160>": 152328,
+ "<aud_00161>": 152329,
+ "<aud_00162>": 152330,
+ "<aud_00163>": 152331,
+ "<aud_00164>": 152332,
+ "<aud_00165>": 152333,
+ "<aud_00166>": 152334,
+ "<aud_00167>": 152335,
+ "<aud_00168>": 152336,
+ "<aud_00169>": 152337,
+ "<aud_00170>": 152338,
+ "<aud_00171>": 152339,
+ "<aud_00172>": 152340,
+ "<aud_00173>": 152341,
+ "<aud_00174>": 152342,
+ "<aud_00175>": 152343,
+ "<aud_00176>": 152344,
+ "<aud_00177>": 152345,
+ "<aud_00178>": 152346,
+ "<aud_00179>": 152347,
+ "<aud_00180>": 152348,
+ "<aud_00181>": 152349,
+ "<aud_00182>": 152350,
+ "<aud_00183>": 152351,
+ "<aud_00184>": 152352,
+ "<aud_00185>": 152353,
+ "<aud_00186>": 152354,
+ "<aud_00187>": 152355,
+ "<aud_00188>": 152356,
+ "<aud_00189>": 152357,
+ "<aud_00190>": 152358,
+ "<aud_00191>": 152359,
+ "<aud_00192>": 152360,
+ "<aud_00193>": 152361,
+ "<aud_00194>": 152362,
+ "<aud_00195>": 152363,
+ "<aud_00196>": 152364,
+ "<aud_00197>": 152365,
+ "<aud_00198>": 152366,
+ "<aud_00199>": 152367,
+ "<aud_00200>": 152368,
+ "<aud_00201>": 152369,
+ "<aud_00202>": 152370,
+ "<aud_00203>": 152371,
+ "<aud_00204>": 152372,
+ "<aud_00205>": 152373,
+ "<aud_00206>": 152374,
+ "<aud_00207>": 152375,
+ "<aud_00208>": 152376,
+ "<aud_00209>": 152377,
+ "<aud_00210>": 152378,
+ "<aud_00211>": 152379,
+ "<aud_00212>": 152380,
+ "<aud_00213>": 152381,
+ "<aud_00214>": 152382,
+ "<aud_00215>": 152383,
+ "<aud_00216>": 152384,
+ "<aud_00217>": 152385,
+ "<aud_00218>": 152386,
+ "<aud_00219>": 152387,
+ "<aud_00220>": 152388,
+ "<aud_00221>": 152389,
+ "<aud_00222>": 152390,
+ "<aud_00223>": 152391,
+ "<aud_00224>": 152392,
+ "<aud_00225>": 152393,
+ "<aud_00226>": 152394,
+ "<aud_00227>": 152395,
+ "<aud_00228>": 152396,
+ "<aud_00229>": 152397,
+ "<aud_00230>": 152398,
+ "<aud_00231>": 152399,
+ "<aud_00232>": 152400,
+ "<aud_00233>": 152401,
+ "<aud_00234>": 152402,
+ "<aud_00235>": 152403,
+ "<aud_00236>": 152404,
+ "<aud_00237>": 152405,
+ "<aud_00238>": 152406,
+ "<aud_00239>": 152407,
+ "<aud_00240>": 152408,
+ "<aud_00241>": 152409,
+ "<aud_00242>": 152410,
+ "<aud_00243>": 152411,
+ "<aud_00244>": 152412,
+ "<aud_00245>": 152413,
+ "<aud_00246>": 152414,
+ "<aud_00247>": 152415,
+ "<aud_00248>": 152416,
+ "<aud_00249>": 152417,
+ "<aud_00250>": 152418,
+ "<aud_00251>": 152419,
+ "<aud_00252>": 152420,
+ "<aud_00253>": 152421,
+ "<aud_00254>": 152422,
+ "<aud_00255>": 152423,
+ "<aud_00256>": 152424,
+ "<aud_00257>": 152425,
+ "<aud_00258>": 152426,
+ "<aud_00259>": 152427,
+ "<aud_00260>": 152428,
+ "<aud_00261>": 152429,
+ "<aud_00262>": 152430,
+ "<aud_00263>": 152431,
+ "<aud_00264>": 152432,
+ "<aud_00265>": 152433,
+ "<aud_00266>": 152434,
+ "<aud_00267>": 152435,
+ "<aud_00268>": 152436,
+ "<aud_00269>": 152437,
+ "<aud_00270>": 152438,
+ "<aud_00271>": 152439,
+ "<aud_00272>": 152440,
+ "<aud_00273>": 152441,
+ "<aud_00274>": 152442,
+ "<aud_00275>": 152443,
+ "<aud_00276>": 152444,
+ "<aud_00277>": 152445,
+ "<aud_00278>": 152446,
+ "<aud_00279>": 152447,
+ "<aud_00280>": 152448,
+ "<aud_00281>": 152449,
+ "<aud_00282>": 152450,
+ "<aud_00283>": 152451,
+ "<aud_00284>": 152452,
+ "<aud_00285>": 152453,
+ "<aud_00286>": 152454,
+ "<aud_00287>": 152455,
+ "<aud_00288>": 152456,
+ "<aud_00289>": 152457,
+ "<aud_00290>": 152458,
+ "<aud_00291>": 152459,
+ "<aud_00292>": 152460,
+ "<aud_00293>": 152461,
+ "<aud_00294>": 152462,
+ "<aud_00295>": 152463,
+ "<aud_00296>": 152464,
+ "<aud_00297>": 152465,
+ "<aud_00298>": 152466,
+ "<aud_00299>": 152467,
+ "<aud_00300>": 152468,
+ "<aud_00301>": 152469,
+ "<aud_00302>": 152470,
+ "<aud_00303>": 152471,
+ "<aud_00304>": 152472,
+ "<aud_00305>": 152473,
+ "<aud_00306>": 152474,
+ "<aud_00307>": 152475,
+ "<aud_00308>": 152476,
+ "<aud_00309>": 152477,
+ "<aud_00310>": 152478,
+ "<aud_00311>": 152479,
+ "<aud_00312>": 152480,
+ "<aud_00313>": 152481,
+ "<aud_00314>": 152482,
+ "<aud_00315>": 152483,
+ "<aud_00316>": 152484,
+ "<aud_00317>": 152485,
+ "<aud_00318>": 152486,
+ "<aud_00319>": 152487,
+ "<aud_00320>": 152488,
+ "<aud_00321>": 152489,
+ "<aud_00322>": 152490,
+ "<aud_00323>": 152491,
+ "<aud_00324>": 152492,
+ "<aud_00325>": 152493,
+ "<aud_00326>": 152494,
+ "<aud_00327>": 152495,
+ "<aud_00328>": 152496,
+ "<aud_00329>": 152497,
+ "<aud_00330>": 152498,
+ "<aud_00331>": 152499,
+ "<aud_00332>": 152500,
+ "<aud_00333>": 152501,
+ "<aud_00334>": 152502,
+ "<aud_00335>": 152503,
+ "<aud_00336>": 152504,
+ "<aud_00337>": 152505,
+ "<aud_00338>": 152506,
+ "<aud_00339>": 152507,
+ "<aud_00340>": 152508,
+ "<aud_00341>": 152509,
+ "<aud_00342>": 152510,
+ "<aud_00343>": 152511,
+ "<aud_00344>": 152512,
+ "<aud_00345>": 152513,
+ "<aud_00346>": 152514,
+ "<aud_00347>": 152515,
+ "<aud_00348>": 152516,
+ "<aud_00349>": 152517,
+ "<aud_00350>": 152518,
+ "<aud_00351>": 152519,
+ "<aud_00352>": 152520,
+ "<aud_00353>": 152521,
+ "<aud_00354>": 152522,
+ "<aud_00355>": 152523,
+ "<aud_00356>": 152524,
+ "<aud_00357>": 152525,
+ "<aud_00358>": 152526,
+ "<aud_00359>": 152527,
+ "<aud_00360>": 152528,
+ "<aud_00361>": 152529,
+ "<aud_00362>": 152530,
+ "<aud_00363>": 152531,
+ "<aud_00364>": 152532,
+ "<aud_00365>": 152533,
+ "<aud_00366>": 152534,
+ "<aud_00367>": 152535,
+ "<aud_00368>": 152536,
+ "<aud_00369>": 152537,
+ "<aud_00370>": 152538,
+ "<aud_00371>": 152539,
+ "<aud_00372>": 152540,
+ "<aud_00373>": 152541,
+ "<aud_00374>": 152542,
+ "<aud_00375>": 152543,
+ "<aud_00376>": 152544,
+ "<aud_00377>": 152545,
+ "<aud_00378>": 152546,
+ "<aud_00379>": 152547,
+ "<aud_00380>": 152548,
+ "<aud_00381>": 152549,
+ "<aud_00382>": 152550,
+ "<aud_00383>": 152551,
+ "<aud_00384>": 152552,
+ "<aud_00385>": 152553,
+ "<aud_00386>": 152554,
+ "<aud_00387>": 152555,
+ "<aud_00388>": 152556,
+ "<aud_00389>": 152557,
+ "<aud_00390>": 152558,
+ "<aud_00391>": 152559,
+ "<aud_00392>": 152560,
+ "<aud_00393>": 152561,
+ "<aud_00394>": 152562,
+ "<aud_00395>": 152563,
+ "<aud_00396>": 152564,
+ "<aud_00397>": 152565,
+ "<aud_00398>": 152566,
+ "<aud_00399>": 152567,
+ "<aud_00400>": 152568,
+ "<aud_00401>": 152569,
+ "<aud_00402>": 152570,
+ "<aud_00403>": 152571,
+ "<aud_00404>": 152572,
+ "<aud_00405>": 152573,
+ "<aud_00406>": 152574,
+ "<aud_00407>": 152575,
+ "<aud_00408>": 152576,
+ "<aud_00409>": 152577,
+ "<aud_00410>": 152578,
+ "<aud_00411>": 152579,
+ "<aud_00412>": 152580,
+ "<aud_00413>": 152581,
+ "<aud_00414>": 152582,
+ "<aud_00415>": 152583,
+ "<aud_00416>": 152584,
+ "<aud_00417>": 152585,
+ "<aud_00418>": 152586,
+ "<aud_00419>": 152587,
+ "<aud_00420>": 152588,
+ "<aud_00421>": 152589,
+ "<aud_00422>": 152590,
+ "<aud_00423>": 152591,
+ "<aud_00424>": 152592,
+ "<aud_00425>": 152593,
+ "<aud_00426>": 152594,
+ "<aud_00427>": 152595,
+ "<aud_00428>": 152596,
+ "<aud_00429>": 152597,
+ "<aud_00430>": 152598,
+ "<aud_00431>": 152599,
+ "<aud_00432>": 152600,
+ "<aud_00433>": 152601,
+ "<aud_00434>": 152602,
+ "<aud_00435>": 152603,
+ "<aud_00436>": 152604,
+ "<aud_00437>": 152605,
+ "<aud_00438>": 152606,
+ "<aud_00439>": 152607,
+ "<aud_00440>": 152608,
+ "<aud_00441>": 152609,
+ "<aud_00442>": 152610,
+ "<aud_00443>": 152611,
+ "<aud_00444>": 152612,
+ "<aud_00445>": 152613,
+ "<aud_00446>": 152614,
+ "<aud_00447>": 152615,
+ "<aud_00448>": 152616,
+ "<aud_00449>": 152617,
+ "<aud_00450>": 152618,
+ "<aud_00451>": 152619,
+ "<aud_00452>": 152620,
+ "<aud_00453>": 152621,
+ "<aud_00454>": 152622,
+ "<aud_00455>": 152623,
+ "<aud_00456>": 152624,
+ "<aud_00457>": 152625,
+ "<aud_00458>": 152626,
+ "<aud_00459>": 152627,
+ "<aud_00460>": 152628,
+ "<aud_00461>": 152629,
+ "<aud_00462>": 152630,
+ "<aud_00463>": 152631,
+ "<aud_00464>": 152632,
+ "<aud_00465>": 152633,
+ "<aud_00466>": 152634,
+ "<aud_00467>": 152635,
+ "<aud_00468>": 152636,
+ "<aud_00469>": 152637,
+ "<aud_00470>": 152638,
+ "<aud_00471>": 152639,
+ "<aud_00472>": 152640,
+ "<aud_00473>": 152641,
+ "<aud_00474>": 152642,
+ "<aud_00475>": 152643,
+ "<aud_00476>": 152644,
+ "<aud_00477>": 152645,
+ "<aud_00478>": 152646,
+ "<aud_00479>": 152647,
+ "<aud_00480>": 152648,
+ "<aud_00481>": 152649,
+ "<aud_00482>": 152650,
+ "<aud_00483>": 152651,
+ "<aud_00484>": 152652,
+ "<aud_00485>": 152653,
+ "<aud_00486>": 152654,
+ "<aud_00487>": 152655,
+ "<aud_00488>": 152656,
+ "<aud_00489>": 152657,
+ "<aud_00490>": 152658,
+ "<aud_00491>": 152659,
+ "<aud_00492>": 152660,
+ "<aud_00493>": 152661,
+ "<aud_00494>": 152662,
+ "<aud_00495>": 152663,
+ "<aud_00496>": 152664,
+ "<aud_00497>": 152665,
+ "<aud_00498>": 152666,
+ "<aud_00499>": 152667,
+ "<aud_00500>": 152668,
+ "<|gen|>": 151938,
+ "<|/gen|>": 151938,
+ "<|think|>": 151938,
+ "<|/think|>": 151938,
+ "<t5_00100>": 152268,
+ "<t5_00101>": 152269,
+ "<t5_00102>": 152270,
+ "<t5_00103>": 152271,
+ "<t5_00104>": 152272,
+ "<t5_00105>": 152273,
+ "<t5_00106>": 152274,
+ "<t5_00107>": 152275,
+ "<t5_00108>": 152276,
+ "<t5_00109>": 152277,
+ "<t5_00110>": 152278,
+ "<t5_00111>": 152279,
+ "<t5_00112>": 152280,
+ "<t5_00113>": 152281,
+ "<t5_00114>": 152282,
+ "<t5_00115>": 152283,
+ "<t5_00116>": 152284,
+ "<t5_00117>": 152285,
+ "<t5_00118>": 152286,
+ "<t5_00119>": 152287,
+ "<t5_00120>": 152288,
+ "<t5_00121>": 152289,
+ "<t5_00122>": 152290,
+ "<t5_00123>": 152291,
+ "<t5_00124>": 152292,
+ "<t5_00125>": 152293,
+ "<t5_00126>": 152294,
+ "<t5_00127>": 152295,
+ "<t5_00128>": 152296,
+ "<t5_00129>": 152297,
+ "<t5_00130>": 152298,
+ "<t5_00131>": 152299,
+ "<t5_00132>": 152300,
+ "<t5_00133>": 152301,
+ "<t5_00134>": 152302,
+ "<t5_00135>": 152303,
+ "<t5_00136>": 152304,
+ "<t5_00137>": 152305,
+ "<t5_00138>": 152306,
+ "<t5_00139>": 152307,
+ "<t5_00140>": 152308,
+ "<t5_00141>": 152309,
+ "<t5_00142>": 152310,
657
+ "<t5_00143>": 152311,
658
+ "<t5_00144>": 152312,
659
+ "<t5_00145>": 152313,
660
+ "<t5_00146>": 152314,
661
+ "<t5_00147>": 152315,
662
+ "<t5_00148>": 152316,
663
+ "<t5_00149>": 152317,
664
+ "<t5_00150>": 152318,
665
+ "<t5_00151>": 152319,
666
+ "<t5_00152>": 152320,
667
+ "<t5_00153>": 152321,
668
+ "<t5_00154>": 152322,
669
+ "<t5_00155>": 152323,
670
+ "<t5_00156>": 152324,
671
+ "<t5_00157>": 152325,
672
+ "<t5_00158>": 152326,
673
+ "<t5_00159>": 152327,
674
+ "<t5_00160>": 152328,
675
+ "<t5_00161>": 152329,
676
+ "<t5_00162>": 152330,
677
+ "<t5_00163>": 152331,
678
+ "<t5_00164>": 152332,
679
+ "<t5_00165>": 152333,
680
+ "<t5_00166>": 152334,
681
+ "<t5_00167>": 152335,
682
+ "<t5_00168>": 152336,
683
+ "<t5_00169>": 152337,
684
+ "<t5_00170>": 152338,
685
+ "<t5_00171>": 152339,
686
+ "<t5_00172>": 152340,
687
+ "<t5_00173>": 152341,
688
+ "<t5_00174>": 152342,
689
+ "<t5_00175>": 152343,
690
+ "<t5_00176>": 152344,
691
+ "<t5_00177>": 152345,
692
+ "<t5_00178>": 152346,
693
+ "<t5_00179>": 152347,
694
+ "<t5_00180>": 152348,
695
+ "<t5_00181>": 152349,
696
+ "<t5_00182>": 152350,
697
+ "<t5_00183>": 152351,
698
+ "<t5_00184>": 152352,
699
+ "<t5_00185>": 152353,
700
+ "<t5_00186>": 152354,
701
+ "<t5_00187>": 152355,
702
+ "<t5_00188>": 152356,
703
+ "<t5_00189>": 152357,
704
+ "<t5_00190>": 152358,
705
+ "<t5_00191>": 152359,
706
+ "<t5_00192>": 152360,
707
+ "<t5_00193>": 152361,
708
+ "<t5_00194>": 152362,
709
+ "<t5_00195>": 152363,
710
+ "<t5_00196>": 152364,
711
+ "<t5_00197>": 152365,
712
+ "<t5_00198>": 152366,
713
+ "<t5_00199>": 152367,
714
+ "<t5_00200>": 152368,
715
+ "<t5_00201>": 152369,
716
+ "<t5_00202>": 152370,
717
+ "<t5_00203>": 152371,
718
+ "<t5_00204>": 152372,
719
+ "<t5_00205>": 152373,
720
+ "<t5_00206>": 152374,
721
+ "<t5_00207>": 152375,
722
+ "<t5_00208>": 152376,
723
+ "<t5_00209>": 152377,
724
+ "<t5_00210>": 152378,
725
+ "<t5_00211>": 152379,
726
+ "<t5_00212>": 152380,
727
+ "<t5_00213>": 152381,
728
+ "<t5_00214>": 152382,
729
+ "<t5_00215>": 152383,
730
+ "<t5_00216>": 152384,
731
+ "<t5_00217>": 152385,
732
+ "<t5_00218>": 152386,
733
+ "<t5_00219>": 152387,
734
+ "<t5_00220>": 152388,
735
+ "<t5_00221>": 152389,
736
+ "<t5_00222>": 152390,
737
+ "<t5_00223>": 152391,
738
+ "<t5_00224>": 152392,
739
+ "<t5_00225>": 152393,
740
+ "<t5_00226>": 152394,
741
+ "<t5_00227>": 152395,
742
+ "<t5_00228>": 152396,
743
+ "<t5_00229>": 152397,
744
+ "<t5_00230>": 152398,
745
+ "<t5_00231>": 152399,
746
+ "<t5_00232>": 152400,
747
+ "<t5_00233>": 152401,
748
+ "<t5_00234>": 152402,
749
+ "<t5_00235>": 152403,
750
+ "<t5_00236>": 152404,
751
+ "<t5_00237>": 152405,
752
+ "<t5_00238>": 152406,
753
+ "<t5_00239>": 152407,
754
+ "<t5_00240>": 152408,
755
+ "<t5_00241>": 152409,
756
+ "<t5_00242>": 152410,
757
+ "<t5_00243>": 152411,
758
+ "<t5_00244>": 152412,
759
+ "<t5_00245>": 152413,
760
+ "<t5_00246>": 152414,
761
+ "<t5_00247>": 152415,
762
+ "<t5_00248>": 152416,
763
+ "<t5_00249>": 152417,
764
+ "<t5_00250>": 152418,
765
+ "<t5_00251>": 152419,
766
+ "<t5_00252>": 152420,
767
+ "<t5_00253>": 152421,
768
+ "<t5_00254>": 152422,
769
+ "<t5_00255>": 152423,
770
+ "<t5_00256>": 152424,
771
+ "<t5_00257>": 152425,
772
+ "<t5_00258>": 152426,
773
+ "<t5_00259>": 152427,
774
+ "<t5_00260>": 152428,
775
+ "<t5_00261>": 152429,
776
+ "<t5_00262>": 152430,
777
+ "<t5_00263>": 152431,
778
+ "<t5_00264>": 152432,
779
+ "<t5_00265>": 152433,
780
+ "<t5_00266>": 152434,
781
+ "<t5_00267>": 152435,
782
+ "<t5_00268>": 152436,
783
+ "<t5_00269>": 152437,
784
+ "<t5_00270>": 152438,
785
+ "<t5_00271>": 152439,
786
+ "<t5_00272>": 152440,
787
+ "<t5_00273>": 152441,
788
+ "<t5_00274>": 152442,
789
+ "<t5_00275>": 152443,
790
+ "<t5_00276>": 152444,
791
+ "<t5_00277>": 152445,
792
+ "<t5_00278>": 152446,
793
+ "<t5_00279>": 152447,
794
+ "<t5_00280>": 152448,
795
+ "<t5_00281>": 152449,
796
+ "<t5_00282>": 152450,
797
+ "<t5_00283>": 152451,
798
+ "<t5_00284>": 152452,
799
+ "<t5_00285>": 152453,
800
+ "<t5_00286>": 152454,
801
+ "<t5_00287>": 152455,
802
+ "<t5_00288>": 152456,
803
+ "<t5_00289>": 152457,
804
+ "<t5_00290>": 152458,
805
+ "<t5_00291>": 152459,
806
+ "<t5_00292>": 152460,
807
+ "<t5_00293>": 152461,
808
+ "<t5_00294>": 152462,
809
+ "<t5_00295>": 152463,
810
+ "<t5_00296>": 152464,
811
+ "<t5_00297>": 152465,
812
+ "<t5_00298>": 152466,
813
+ "<t5_00299>": 152467,
814
+ "<t5_00300>": 152468
815
+ }
Qwen2.5-3B-Instruct/added_tokens_multi_audio.json ADDED
@@ -0,0 +1,815 @@
+ {
+ "</t5>": 151937,
+ "<video_padding>": 151939,
+ "<box_end>": 152165,
+ "<box_start>": 152164,
+ "<t5>": 151936,
+ "<t5_00000>": 151836,
+ "<t5_00001>": 151837,
+ "<t5_00002>": 151838,
+ "<t5_00003>": 151839,
+ "<t5_00004>": 151840,
+ "<t5_00005>": 151841,
+ "<t5_00006>": 151842,
+ "<t5_00007>": 151843,
+ "<t5_00008>": 151844,
+ "<t5_00009>": 151845,
+ "<t5_00010>": 151846,
+ "<t5_00011>": 151847,
+ "<t5_00012>": 151848,
+ "<t5_00013>": 151849,
+ "<t5_00014>": 151850,
+ "<t5_00015>": 151851,
+ "<t5_00016>": 151852,
+ "<t5_00017>": 151853,
+ "<t5_00018>": 151854,
+ "<t5_00019>": 151855,
+ "<t5_00020>": 151856,
+ "<t5_00021>": 151857,
+ "<t5_00022>": 151858,
+ "<t5_00023>": 151859,
+ "<t5_00024>": 151860,
+ "<t5_00025>": 151861,
+ "<t5_00026>": 151862,
+ "<t5_00027>": 151863,
+ "<t5_00028>": 151864,
+ "<t5_00029>": 151865,
+ "<t5_00030>": 151866,
+ "<t5_00031>": 151867,
+ "<t5_00032>": 151868,
+ "<t5_00033>": 151869,
+ "<t5_00034>": 151870,
+ "<t5_00035>": 151871,
+ "<t5_00036>": 151872,
+ "<t5_00037>": 151873,
+ "<t5_00038>": 151874,
+ "<t5_00039>": 151875,
+ "<t5_00040>": 151876,
+ "<t5_00041>": 151877,
+ "<t5_00042>": 151878,
+ "<t5_00043>": 151879,
+ "<t5_00044>": 151880,
+ "<t5_00045>": 151881,
+ "<t5_00046>": 151882,
+ "<t5_00047>": 151883,
+ "<t5_00048>": 151884,
+ "<t5_00049>": 151885,
+ "<t5_00050>": 151886,
+ "<t5_00051>": 151887,
+ "<t5_00052>": 151888,
+ "<t5_00053>": 151889,
+ "<t5_00054>": 151890,
+ "<t5_00055>": 151891,
+ "<t5_00056>": 151892,
+ "<t5_00057>": 151893,
+ "<t5_00058>": 151894,
+ "<t5_00059>": 151895,
+ "<t5_00060>": 151896,
+ "<t5_00061>": 151897,
+ "<t5_00062>": 151898,
+ "<t5_00063>": 151899,
+ "<t5_00064>": 151900,
+ "<t5_00065>": 151901,
+ "<t5_00066>": 151902,
+ "<t5_00067>": 151903,
+ "<t5_00068>": 151904,
+ "<t5_00069>": 151905,
+ "<t5_00070>": 151906,
+ "<t5_00071>": 151907,
+ "<t5_00072>": 151908,
+ "<t5_00073>": 151909,
+ "<t5_00074>": 151910,
+ "<t5_00075>": 151911,
+ "<t5_00076>": 151912,
+ "<t5_00077>": 151913,
+ "<t5_00078>": 151914,
+ "<t5_00079>": 151915,
+ "<t5_00080>": 151916,
+ "<t5_00081>": 151917,
+ "<t5_00082>": 151918,
+ "<t5_00083>": 151919,
+ "<t5_00084>": 151920,
+ "<t5_00085>": 151921,
+ "<t5_00086>": 151922,
+ "<t5_00087>": 151923,
+ "<t5_00088>": 151924,
+ "<t5_00089>": 151925,
+ "<t5_00090>": 151926,
+ "<t5_00091>": 151927,
+ "<t5_00092>": 151928,
+ "<t5_00093>": 151929,
+ "<t5_00094>": 151930,
+ "<t5_00095>": 151931,
+ "<t5_00096>": 151932,
+ "<t5_00097>": 151933,
+ "<t5_00098>": 151934,
+ "<t5_00099>": 151935,
+ "<aud>": 152166,
+ "</aud>": 152167,
+ "<aud_00000>": 152168,
+ "<aud_00001>": 152169,
+ "<aud_00002>": 152170,
+ "<aud_00003>": 152171,
+ "<aud_00004>": 152172,
+ "<aud_00005>": 152173,
+ "<aud_00006>": 152174,
+ "<aud_00007>": 152175,
+ "<aud_00008>": 152176,
+ "<aud_00009>": 152177,
+ "<aud_00010>": 152178,
+ "<aud_00011>": 152179,
+ "<aud_00012>": 152180,
+ "<aud_00013>": 152181,
+ "<aud_00014>": 152182,
+ "<aud_00015>": 152183,
+ "<aud_00016>": 152184,
+ "<aud_00017>": 152185,
+ "<aud_00018>": 152186,
+ "<aud_00019>": 152187,
+ "<aud_00020>": 152188,
+ "<aud_00021>": 152189,
+ "<aud_00022>": 152190,
+ "<aud_00023>": 152191,
+ "<aud_00024>": 152192,
+ "<aud_00025>": 152193,
+ "<aud_00026>": 152194,
+ "<aud_00027>": 152195,
+ "<aud_00028>": 152196,
+ "<aud_00029>": 152197,
+ "<aud_00030>": 152198,
+ "<aud_00031>": 152199,
+ "<aud_00032>": 152200,
+ "<aud_00033>": 152201,
+ "<aud_00034>": 152202,
+ "<aud_00035>": 152203,
+ "<aud_00036>": 152204,
+ "<aud_00037>": 152205,
+ "<aud_00038>": 152206,
+ "<aud_00039>": 152207,
+ "<aud_00040>": 152208,
+ "<aud_00041>": 152209,
+ "<aud_00042>": 152210,
+ "<aud_00043>": 152211,
+ "<aud_00044>": 152212,
+ "<aud_00045>": 152213,
+ "<aud_00046>": 152214,
+ "<aud_00047>": 152215,
+ "<aud_00048>": 152216,
+ "<aud_00049>": 152217,
+ "<aud_00050>": 152218,
+ "<aud_00051>": 152219,
+ "<aud_00052>": 152220,
+ "<aud_00053>": 152221,
+ "<aud_00054>": 152222,
+ "<aud_00055>": 152223,
+ "<aud_00056>": 152224,
+ "<aud_00057>": 152225,
+ "<aud_00058>": 152226,
+ "<aud_00059>": 152227,
+ "<aud_00060>": 152228,
+ "<aud_00061>": 152229,
+ "<aud_00062>": 152230,
+ "<aud_00063>": 152231,
+ "<aud_00064>": 152232,
+ "<aud_00065>": 152233,
+ "<aud_00066>": 152234,
+ "<aud_00067>": 152235,
+ "<aud_00068>": 152236,
+ "<aud_00069>": 152237,
+ "<aud_00070>": 152238,
+ "<aud_00071>": 152239,
+ "<aud_00072>": 152240,
+ "<aud_00073>": 152241,
+ "<aud_00074>": 152242,
+ "<aud_00075>": 152243,
+ "<aud_00076>": 152244,
+ "<aud_00077>": 152245,
+ "<aud_00078>": 152246,
+ "<aud_00079>": 152247,
+ "<aud_00080>": 152248,
+ "<aud_00081>": 152249,
+ "<aud_00082>": 152250,
+ "<aud_00083>": 152251,
+ "<aud_00084>": 152252,
+ "<aud_00085>": 152253,
+ "<aud_00086>": 152254,
+ "<aud_00087>": 152255,
+ "<aud_00088>": 152256,
+ "<aud_00089>": 152257,
+ "<aud_00090>": 152258,
+ "<aud_00091>": 152259,
+ "<aud_00092>": 152260,
+ "<aud_00093>": 152261,
+ "<aud_00094>": 152262,
+ "<aud_00095>": 152263,
+ "<aud_00096>": 152264,
+ "<aud_00097>": 152265,
+ "<aud_00098>": 152266,
+ "<aud_00099>": 152267,
+ "<aud_00100>": 152268,
+ "<aud_00101>": 152269,
+ "<aud_00102>": 152270,
+ "<aud_00103>": 152271,
+ "<aud_00104>": 152272,
+ "<aud_00105>": 152273,
+ "<aud_00106>": 152274,
+ "<aud_00107>": 152275,
+ "<aud_00108>": 152276,
+ "<aud_00109>": 152277,
+ "<aud_00110>": 152278,
+ "<aud_00111>": 152279,
+ "<aud_00112>": 152280,
+ "<aud_00113>": 152281,
+ "<aud_00114>": 152282,
+ "<aud_00115>": 152283,
+ "<aud_00116>": 152284,
+ "<aud_00117>": 152285,
+ "<aud_00118>": 152286,
+ "<aud_00119>": 152287,
+ "<aud_00120>": 152288,
+ "<aud_00121>": 152289,
+ "<aud_00122>": 152290,
+ "<aud_00123>": 152291,
+ "<aud_00124>": 152292,
+ "<aud_00125>": 152293,
+ "<aud_00126>": 152294,
+ "<aud_00127>": 152295,
+ "<aud_00128>": 152296,
+ "<aud_00129>": 152297,
+ "<aud_00130>": 152298,
+ "<aud_00131>": 152299,
+ "<aud_00132>": 152300,
+ "<aud_00133>": 152301,
+ "<aud_00134>": 152302,
+ "<aud_00135>": 152303,
+ "<aud_00136>": 152304,
+ "<aud_00137>": 152305,
+ "<aud_00138>": 152306,
+ "<aud_00139>": 152307,
+ "<aud_00140>": 152308,
+ "<aud_00141>": 152309,
+ "<aud_00142>": 152310,
+ "<aud_00143>": 152311,
+ "<aud_00144>": 152312,
+ "<aud_00145>": 152313,
+ "<aud_00146>": 152314,
+ "<aud_00147>": 152315,
+ "<aud_00148>": 152316,
+ "<aud_00149>": 152317,
+ "<aud_00150>": 152318,
+ "<aud_00151>": 152319,
+ "<aud_00152>": 152320,
+ "<aud_00153>": 152321,
+ "<aud_00154>": 152322,
+ "<aud_00155>": 152323,
+ "<aud_00156>": 152324,
+ "<aud_00157>": 152325,
+ "<aud_00158>": 152326,
+ "<aud_00159>": 152327,
+ "<aud_00160>": 152328,
+ "<aud_00161>": 152329,
+ "<aud_00162>": 152330,
+ "<aud_00163>": 152331,
+ "<aud_00164>": 152332,
+ "<aud_00165>": 152333,
+ "<aud_00166>": 152334,
+ "<aud_00167>": 152335,
+ "<aud_00168>": 152336,
+ "<aud_00169>": 152337,
+ "<aud_00170>": 152338,
+ "<aud_00171>": 152339,
+ "<aud_00172>": 152340,
+ "<aud_00173>": 152341,
+ "<aud_00174>": 152342,
+ "<aud_00175>": 152343,
+ "<aud_00176>": 152344,
+ "<aud_00177>": 152345,
+ "<aud_00178>": 152346,
+ "<aud_00179>": 152347,
+ "<aud_00180>": 152348,
+ "<aud_00181>": 152349,
+ "<aud_00182>": 152350,
+ "<aud_00183>": 152351,
+ "<aud_00184>": 152352,
+ "<aud_00185>": 152353,
+ "<aud_00186>": 152354,
+ "<aud_00187>": 152355,
+ "<aud_00188>": 152356,
+ "<aud_00189>": 152357,
+ "<aud_00190>": 152358,
+ "<aud_00191>": 152359,
+ "<aud_00192>": 152360,
+ "<aud_00193>": 152361,
+ "<aud_00194>": 152362,
+ "<aud_00195>": 152363,
+ "<aud_00196>": 152364,
+ "<aud_00197>": 152365,
+ "<aud_00198>": 152366,
+ "<aud_00199>": 152367,
+ "<aud_00200>": 152368,
+ "<aud_00201>": 152369,
+ "<aud_00202>": 152370,
+ "<aud_00203>": 152371,
+ "<aud_00204>": 152372,
+ "<aud_00205>": 152373,
+ "<aud_00206>": 152374,
+ "<aud_00207>": 152375,
+ "<aud_00208>": 152376,
+ "<aud_00209>": 152377,
+ "<aud_00210>": 152378,
+ "<aud_00211>": 152379,
+ "<aud_00212>": 152380,
+ "<aud_00213>": 152381,
+ "<aud_00214>": 152382,
+ "<aud_00215>": 152383,
+ "<aud_00216>": 152384,
+ "<aud_00217>": 152385,
+ "<aud_00218>": 152386,
+ "<aud_00219>": 152387,
+ "<aud_00220>": 152388,
+ "<aud_00221>": 152389,
+ "<aud_00222>": 152390,
+ "<aud_00223>": 152391,
+ "<aud_00224>": 152392,
+ "<aud_00225>": 152393,
+ "<aud_00226>": 152394,
+ "<aud_00227>": 152395,
+ "<aud_00228>": 152396,
+ "<aud_00229>": 152397,
+ "<aud_00230>": 152398,
+ "<aud_00231>": 152399,
+ "<aud_00232>": 152400,
+ "<aud_00233>": 152401,
+ "<aud_00234>": 152402,
+ "<aud_00235>": 152403,
+ "<aud_00236>": 152404,
+ "<aud_00237>": 152405,
+ "<aud_00238>": 152406,
+ "<aud_00239>": 152407,
+ "<aud_00240>": 152408,
+ "<aud_00241>": 152409,
+ "<aud_00242>": 152410,
+ "<aud_00243>": 152411,
+ "<aud_00244>": 152412,
+ "<aud_00245>": 152413,
+ "<aud_00246>": 152414,
+ "<aud_00247>": 152415,
+ "<aud_00248>": 152416,
+ "<aud_00249>": 152417,
+ "<aud_00250>": 152418,
+ "<aud_00251>": 152419,
+ "<aud_00252>": 152420,
+ "<aud_00253>": 152421,
+ "<aud_00254>": 152422,
+ "<aud_00255>": 152423,
+ "<aud_00256>": 152424,
+ "<aud_00257>": 152425,
+ "<aud_00258>": 152426,
+ "<aud_00259>": 152427,
+ "<aud_00260>": 152428,
+ "<aud_00261>": 152429,
+ "<aud_00262>": 152430,
+ "<aud_00263>": 152431,
+ "<aud_00264>": 152432,
+ "<aud_00265>": 152433,
+ "<aud_00266>": 152434,
+ "<aud_00267>": 152435,
+ "<aud_00268>": 152436,
+ "<aud_00269>": 152437,
+ "<aud_00270>": 152438,
+ "<aud_00271>": 152439,
+ "<aud_00272>": 152440,
+ "<aud_00273>": 152441,
+ "<aud_00274>": 152442,
+ "<aud_00275>": 152443,
+ "<aud_00276>": 152444,
+ "<aud_00277>": 152445,
+ "<aud_00278>": 152446,
+ "<aud_00279>": 152447,
+ "<aud_00280>": 152448,
+ "<aud_00281>": 152449,
+ "<aud_00282>": 152450,
+ "<aud_00283>": 152451,
+ "<aud_00284>": 152452,
+ "<aud_00285>": 152453,
+ "<aud_00286>": 152454,
+ "<aud_00287>": 152455,
+ "<aud_00288>": 152456,
+ "<aud_00289>": 152457,
+ "<aud_00290>": 152458,
+ "<aud_00291>": 152459,
+ "<aud_00292>": 152460,
+ "<aud_00293>": 152461,
+ "<aud_00294>": 152462,
+ "<aud_00295>": 152463,
+ "<aud_00296>": 152464,
+ "<aud_00297>": 152465,
+ "<aud_00298>": 152466,
+ "<aud_00299>": 152467,
+ "<aud_00300>": 152468,
+ "<aud_00301>": 152469,
+ "<aud_00302>": 152470,
+ "<aud_00303>": 152471,
+ "<aud_00304>": 152472,
+ "<aud_00305>": 152473,
+ "<aud_00306>": 152474,
+ "<aud_00307>": 152475,
+ "<aud_00308>": 152476,
+ "<aud_00309>": 152477,
+ "<aud_00310>": 152478,
+ "<aud_00311>": 152479,
+ "<aud_00312>": 152480,
+ "<aud_00313>": 152481,
+ "<aud_00314>": 152482,
+ "<aud_00315>": 152483,
+ "<aud_00316>": 152484,
+ "<aud_00317>": 152485,
+ "<aud_00318>": 152486,
+ "<aud_00319>": 152487,
+ "<aud_00320>": 152488,
+ "<aud_00321>": 152489,
+ "<aud_00322>": 152490,
+ "<aud_00323>": 152491,
+ "<aud_00324>": 152492,
+ "<aud_00325>": 152493,
+ "<aud_00326>": 152494,
+ "<aud_00327>": 152495,
+ "<aud_00328>": 152496,
+ "<aud_00329>": 152497,
+ "<aud_00330>": 152498,
+ "<aud_00331>": 152499,
+ "<aud_00332>": 152500,
+ "<aud_00333>": 152501,
+ "<aud_00334>": 152502,
+ "<aud_00335>": 152503,
+ "<aud_00336>": 152504,
+ "<aud_00337>": 152505,
+ "<aud_00338>": 152506,
+ "<aud_00339>": 152507,
+ "<aud_00340>": 152508,
+ "<aud_00341>": 152509,
+ "<aud_00342>": 152510,
+ "<aud_00343>": 152511,
+ "<aud_00344>": 152512,
+ "<aud_00345>": 152513,
+ "<aud_00346>": 152514,
+ "<aud_00347>": 152515,
+ "<aud_00348>": 152516,
+ "<aud_00349>": 152517,
+ "<aud_00350>": 152518,
+ "<aud_00351>": 152519,
+ "<aud_00352>": 152520,
+ "<aud_00353>": 152521,
+ "<aud_00354>": 152522,
+ "<aud_00355>": 152523,
+ "<aud_00356>": 152524,
+ "<aud_00357>": 152525,
+ "<aud_00358>": 152526,
+ "<aud_00359>": 152527,
+ "<aud_00360>": 152528,
+ "<aud_00361>": 152529,
+ "<aud_00362>": 152530,
+ "<aud_00363>": 152531,
+ "<aud_00364>": 152532,
+ "<aud_00365>": 152533,
+ "<aud_00366>": 152534,
+ "<aud_00367>": 152535,
+ "<aud_00368>": 152536,
+ "<aud_00369>": 152537,
+ "<aud_00370>": 152538,
+ "<aud_00371>": 152539,
+ "<aud_00372>": 152540,
+ "<aud_00373>": 152541,
+ "<aud_00374>": 152542,
+ "<aud_00375>": 152543,
+ "<aud_00376>": 152544,
+ "<aud_00377>": 152545,
+ "<aud_00378>": 152546,
+ "<aud_00379>": 152547,
+ "<aud_00380>": 152548,
+ "<aud_00381>": 152549,
+ "<aud_00382>": 152550,
+ "<aud_00383>": 152551,
+ "<aud_00384>": 152552,
+ "<aud_00385>": 152553,
+ "<aud_00386>": 152554,
+ "<aud_00387>": 152555,
+ "<aud_00388>": 152556,
+ "<aud_00389>": 152557,
+ "<aud_00390>": 152558,
+ "<aud_00391>": 152559,
+ "<aud_00392>": 152560,
+ "<aud_00393>": 152561,
+ "<aud_00394>": 152562,
+ "<aud_00395>": 152563,
+ "<aud_00396>": 152564,
+ "<aud_00397>": 152565,
+ "<aud_00398>": 152566,
+ "<aud_00399>": 152567,
+ "<aud_00400>": 152568,
+ "<aud_00401>": 152569,
+ "<aud_00402>": 152570,
+ "<aud_00403>": 152571,
+ "<aud_00404>": 152572,
+ "<aud_00405>": 152573,
+ "<aud_00406>": 152574,
+ "<aud_00407>": 152575,
+ "<aud_00408>": 152576,
+ "<aud_00409>": 152577,
+ "<aud_00410>": 152578,
+ "<aud_00411>": 152579,
+ "<aud_00412>": 152580,
+ "<aud_00413>": 152581,
+ "<aud_00414>": 152582,
+ "<aud_00415>": 152583,
+ "<aud_00416>": 152584,
+ "<aud_00417>": 152585,
+ "<aud_00418>": 152586,
+ "<aud_00419>": 152587,
+ "<aud_00420>": 152588,
+ "<aud_00421>": 152589,
+ "<aud_00422>": 152590,
+ "<aud_00423>": 152591,
+ "<aud_00424>": 152592,
+ "<aud_00425>": 152593,
+ "<aud_00426>": 152594,
+ "<aud_00427>": 152595,
+ "<aud_00428>": 152596,
+ "<aud_00429>": 152597,
+ "<aud_00430>": 152598,
+ "<aud_00431>": 152599,
+ "<aud_00432>": 152600,
+ "<aud_00433>": 152601,
+ "<aud_00434>": 152602,
+ "<aud_00435>": 152603,
+ "<aud_00436>": 152604,
+ "<aud_00437>": 152605,
+ "<aud_00438>": 152606,
+ "<aud_00439>": 152607,
+ "<aud_00440>": 152608,
+ "<aud_00441>": 152609,
+ "<aud_00442>": 152610,
+ "<aud_00443>": 152611,
+ "<aud_00444>": 152612,
+ "<aud_00445>": 152613,
+ "<aud_00446>": 152614,
+ "<aud_00447>": 152615,
+ "<aud_00448>": 152616,
+ "<aud_00449>": 152617,
+ "<aud_00450>": 152618,
+ "<aud_00451>": 152619,
+ "<aud_00452>": 152620,
+ "<aud_00453>": 152621,
+ "<aud_00454>": 152622,
+ "<aud_00455>": 152623,
+ "<aud_00456>": 152624,
+ "<aud_00457>": 152625,
+ "<aud_00458>": 152626,
+ "<aud_00459>": 152627,
+ "<aud_00460>": 152628,
+ "<aud_00461>": 152629,
+ "<aud_00462>": 152630,
+ "<aud_00463>": 152631,
+ "<aud_00464>": 152632,
+ "<aud_00465>": 152633,
+ "<aud_00466>": 152634,
+ "<aud_00467>": 152635,
+ "<aud_00468>": 152636,
+ "<aud_00469>": 152637,
+ "<aud_00470>": 152638,
+ "<aud_00471>": 152639,
+ "<aud_00472>": 152640,
+ "<aud_00473>": 152641,
+ "<aud_00474>": 152642,
+ "<aud_00475>": 152643,
+ "<aud_00476>": 152644,
+ "<aud_00477>": 152645,
+ "<aud_00478>": 152646,
+ "<aud_00479>": 152647,
+ "<aud_00480>": 152648,
+ "<aud_00481>": 152649,
+ "<aud_00482>": 152650,
+ "<aud_00483>": 152651,
+ "<aud_00484>": 152652,
+ "<aud_00485>": 152653,
+ "<aud_00486>": 152654,
+ "<aud_00487>": 152655,
+ "<aud_00488>": 152656,
+ "<aud_00489>": 152657,
+ "<aud_00490>": 152658,
+ "<aud_00491>": 152659,
+ "<aud_00492>": 152660,
+ "<aud_00493>": 152661,
+ "<aud_00494>": 152662,
+ "<aud_00495>": 152663,
+ "<aud_00496>": 152664,
+ "<aud_00497>": 152665,
+ "<aud_00498>": 152666,
+ "<aud_00499>": 152667,
+ "<aud_00500>": 152668,
+ "<|gen|>": 151938,
+ "<|/gen|>": 151938,
+ "<|think|>": 151938,
+ "<|/think|>": 151938,
+ "<t5_00100>": 152268,
+ "<t5_00101>": 152269,
+ "<t5_00102>": 152270,
+ "<t5_00103>": 152271,
+ "<t5_00104>": 152272,
+ "<t5_00105>": 152273,
+ "<t5_00106>": 152274,
+ "<t5_00107>": 152275,
+ "<t5_00108>": 152276,
+ "<t5_00109>": 152277,
+ "<t5_00110>": 152278,
+ "<t5_00111>": 152279,
+ "<t5_00112>": 152280,
+ "<t5_00113>": 152281,
+ "<t5_00114>": 152282,
+ "<t5_00115>": 152283,
+ "<t5_00116>": 152284,
+ "<t5_00117>": 152285,
+ "<t5_00118>": 152286,
+ "<t5_00119>": 152287,
+ "<t5_00120>": 152288,
+ "<t5_00121>": 152289,
+ "<t5_00122>": 152290,
+ "<t5_00123>": 152291,
+ "<t5_00124>": 152292,
+ "<t5_00125>": 152293,
+ "<t5_00126>": 152294,
+ "<t5_00127>": 152295,
+ "<t5_00128>": 152296,
+ "<t5_00129>": 152297,
+ "<t5_00130>": 152298,
+ "<t5_00131>": 152299,
+ "<t5_00132>": 152300,
+ "<t5_00133>": 152301,
+ "<t5_00134>": 152302,
+ "<t5_00135>": 152303,
+ "<t5_00136>": 152304,
+ "<t5_00137>": 152305,
+ "<t5_00138>": 152306,
+ "<t5_00139>": 152307,
+ "<t5_00140>": 152308,
+ "<t5_00141>": 152309,
+ "<t5_00142>": 152310,
+ "<t5_00143>": 152311,
+ "<t5_00144>": 152312,
+ "<t5_00145>": 152313,
+ "<t5_00146>": 152314,
+ "<t5_00147>": 152315,
+ "<t5_00148>": 152316,
+ "<t5_00149>": 152317,
+ "<t5_00150>": 152318,
+ "<t5_00151>": 152319,
+ "<t5_00152>": 152320,
+ "<t5_00153>": 152321,
+ "<t5_00154>": 152322,
+ "<t5_00155>": 152323,
+ "<t5_00156>": 152324,
+ "<t5_00157>": 152325,
+ "<t5_00158>": 152326,
+ "<t5_00159>": 152327,
+ "<t5_00160>": 152328,
+ "<t5_00161>": 152329,
+ "<t5_00162>": 152330,
+ "<t5_00163>": 152331,
+ "<t5_00164>": 152332,
+ "<t5_00165>": 152333,
+ "<t5_00166>": 152334,
+ "<t5_00167>": 152335,
+ "<t5_00168>": 152336,
+ "<t5_00169>": 152337,
+ "<t5_00170>": 152338,
+ "<t5_00171>": 152339,
+ "<t5_00172>": 152340,
+ "<t5_00173>": 152341,
+ "<t5_00174>": 152342,
+ "<t5_00175>": 152343,
+ "<t5_00176>": 152344,
+ "<t5_00177>": 152345,
+ "<t5_00178>": 152346,
+ "<t5_00179>": 152347,
+ "<t5_00180>": 152348,
+ "<t5_00181>": 152349,
+ "<t5_00182>": 152350,
+ "<t5_00183>": 152351,
+ "<t5_00184>": 152352,
+ "<t5_00185>": 152353,
+ "<t5_00186>": 152354,
+ "<t5_00187>": 152355,
+ "<t5_00188>": 152356,
+ "<t5_00189>": 152357,
+ "<t5_00190>": 152358,
+ "<t5_00191>": 152359,
+ "<t5_00192>": 152360,
+ "<t5_00193>": 152361,
+ "<t5_00194>": 152362,
+ "<t5_00195>": 152363,
+ "<t5_00196>": 152364,
+ "<t5_00197>": 152365,
+ "<t5_00198>": 152366,
+ "<t5_00199>": 152367,
+ "<t5_00200>": 152368,
+ "<t5_00201>": 152369,
+ "<t5_00202>": 152370,
+ "<t5_00203>": 152371,
+ "<t5_00204>": 152372,
+ "<t5_00205>": 152373,
+ "<t5_00206>": 152374,
+ "<t5_00207>": 152375,
+ "<t5_00208>": 152376,
+ "<t5_00209>": 152377,
+ "<t5_00210>": 152378,
+ "<t5_00211>": 152379,
+ "<t5_00212>": 152380,
+ "<t5_00213>": 152381,
+ "<t5_00214>": 152382,
+ "<t5_00215>": 152383,
+ "<t5_00216>": 152384,
+ "<t5_00217>": 152385,
+ "<t5_00218>": 152386,
+ "<t5_00219>": 152387,
+ "<t5_00220>": 152388,
+ "<t5_00221>": 152389,
+ "<t5_00222>": 152390,
+ "<t5_00223>": 152391,
+ "<t5_00224>": 152392,
+ "<t5_00225>": 152393,
+ "<t5_00226>": 152394,
+ "<t5_00227>": 152395,
+ "<t5_00228>": 152396,
+ "<t5_00229>": 152397,
+ "<t5_00230>": 152398,
+ "<t5_00231>": 152399,
+ "<t5_00232>": 152400,
+ "<t5_00233>": 152401,
+ "<t5_00234>": 152402,
+ "<t5_00235>": 152403,
+ "<t5_00236>": 152404,
+ "<t5_00237>": 152405,
+ "<t5_00238>": 152406,
+ "<t5_00239>": 152407,
+ "<t5_00240>": 152408,
+ "<t5_00241>": 152409,
+ "<t5_00242>": 152410,
+ "<t5_00243>": 152411,
+ "<t5_00244>": 152412,
+ "<t5_00245>": 152413,
+ "<t5_00246>": 152414,
+ "<t5_00247>": 152415,
+ "<t5_00248>": 152416,
+ "<t5_00249>": 152417,
+ "<t5_00250>": 152418,
+ "<t5_00251>": 152419,
+ "<t5_00252>": 152420,
+ "<t5_00253>": 152421,
+ "<t5_00254>": 152422,
+ "<t5_00255>": 152423,
+ "<t5_00256>": 152424,
+ "<t5_00257>": 152425,
+ "<t5_00258>": 152426,
+ "<t5_00259>": 152427,
+ "<t5_00260>": 152428,
+ "<t5_00261>": 152429,
+ "<t5_00262>": 152430,
+ "<t5_00263>": 152431,
+ "<t5_00264>": 152432,
+ "<t5_00265>": 152433,
+ "<t5_00266>": 152434,
+ "<t5_00267>": 152435,
+ "<t5_00268>": 152436,
+ "<t5_00269>": 152437,
+ "<t5_00270>": 152438,
+ "<t5_00271>": 152439,
+ "<t5_00272>": 152440,
+ "<t5_00273>": 152441,
+ "<t5_00274>": 152442,
+ "<t5_00275>": 152443,
+ "<t5_00276>": 152444,
+ "<t5_00277>": 152445,
+ "<t5_00278>": 152446,
+ "<t5_00279>": 152447,
+ "<t5_00280>": 152448,
+ "<t5_00281>": 152449,
+ "<t5_00282>": 152450,
+ "<t5_00283>": 152451,
+ "<t5_00284>": 152452,
+ "<t5_00285>": 152453,
+ "<t5_00286>": 152454,
+ "<t5_00287>": 152455,
+ "<t5_00288>": 152456,
+ "<t5_00289>": 152457,
+ "<t5_00290>": 152458,
+ "<t5_00291>": 152459,
+ "<t5_00292>": 152460,
+ "<t5_00293>": 152461,
+ "<t5_00294>": 152462,
+ "<t5_00295>": 152463,
+ "<t5_00296>": 152464,
+ "<t5_00297>": 152465,
+ "<t5_00298>": 152466,
+ "<t5_00299>": 152467,
+ "<t5_00300>": 152468
+ }
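A caveat before reusing the map above: it is not injective. "<|gen|>", "<|/gen|>", "<|think|>", and "<|/think|>" all share ID 151938, and "<t5_00100>" through "<t5_00300>" reuse the IDs already assigned to "<aud_00100>" through "<aud_00300>" (152268-152468), so ID-to-token lookups will collide. Below is a minimal sketch of registering these tokens with a tokenizer, assuming the standard transformers API; the local checkpoint path and the choice to resize embeddings are illustrative, not something this repository prescribes.

import json
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the added-token map shipped in this upload (path is illustrative).
with open("Qwen2.5-3B-Instruct/added_tokens_multi_audio.json") as f:
    added = json.load(f)

tokenizer = AutoTokenizer.from_pretrained("Qwen2.5-3B-Instruct")
# add_tokens assigns fresh sequential IDs rather than honoring the IDs stored
# in the JSON, so add tokens in ID order if the mapping above matters; note
# that duplicate IDs in the file cannot be reproduced this way at all.
tokenizer.add_tokens(sorted(added, key=added.get), special_tokens=True)

model = AutoModelForCausalLM.from_pretrained("Qwen2.5-3B-Instruct")
# Grow the embedding matrix so the newly added IDs have rows to look up.
model.resize_token_embeddings(len(tokenizer))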
Qwen2.5-3B-Instruct/config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_theta": 1000000.0,
+ "sliding_window": 32768,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
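The fields above fix the attention geometry. The quick derivation below is a sketch using only values listed in this config file (it is not code from the repository): head dimension, grouped-query-attention ratio, and the bfloat16 KV-cache cost per token.

# Derived quantities from config.json; pure arithmetic on the listed values.
hidden_size = 2048
num_attention_heads = 16
num_key_value_heads = 2
num_hidden_layers = 36

head_dim = hidden_size // num_attention_heads            # 128
gqa_group = num_attention_heads // num_key_value_heads   # 8 query heads per KV head
# KV cache per token in bfloat16 (2 bytes): K and V tensors for every layer.
kv_bytes_per_token = 2 * num_hidden_layers * num_key_value_heads * head_dim * 2
print(head_dim, gqa_group, kv_bytes_per_token)           # 128 8 36864 (36 KiB/token)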
Qwen2.5-3B-Instruct/generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "pad_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_p": 0.8,
+ "top_k": 20,
+ "transformers_version": "4.37.0"
+ }
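transformers loads this file automatically, so generate() already applies these sampling defaults; the hedged sketch below (checkpoint path illustrative) just spells them out as explicit keyword arguments to make them visible at the call site.

from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen2.5-3B-Instruct")
model = AutoModelForCausalLM.from_pretrained("Qwen2.5-3B-Instruct")

inputs = tokenizer("Hello", return_tensors="pt")
# These keyword arguments mirror generation_config.json one-to-one.
out = model.generate(
    **inputs,
    do_sample=True,
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
    eos_token_id=[151645, 151643],
    pad_token_id=151643,
)
print(tokenizer.decode(out[0], skip_special_tokens=True))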
Qwen2.5-3B-Instruct/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
Qwen2.5-3B-Instruct/model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67347b23fb4165b652eb6611f5e1f2a06dfcddba8e909df1b2b0b1857bee06c2
+ size 3968658944
Qwen2.5-3B-Instruct/model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a40d941d0e7e0b966ad8b62bb6d6b7c88cce1299197b599d9d0a4ce59aabfc1d
+ size 2203268048
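The two entries above are git-lfs pointer files (spec v1): the weight shards themselves live in LFS storage, identified only by SHA-256 digest and byte size. A stdlib-only sketch for verifying a downloaded shard against its pointer (path illustrative; digest and size taken from the pointer above):

import hashlib

expected = "a40d941d0e7e0b966ad8b62bb6d6b7c88cce1299197b599d9d0a4ce59aabfc1d"
h = hashlib.sha256()
# Stream in 1 MiB chunks so the ~2.2 GB shard is never held in memory.
with open("Qwen2.5-3B-Instruct/model-00002-of-00002.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == expected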
Qwen2.5-3B-Instruct/model.safetensors.index.json ADDED
@@ -0,0 +1,441 @@
+ {
+ "metadata": {
+ "total_size": 6171877376
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
303
+ "model.layers.30.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
304
+ "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
305
+ "model.layers.30.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
306
+ "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
307
+ "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
308
+ "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
309
+ "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
310
+ "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
311
+ "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
312
+ "model.layers.31.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
313
+ "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
314
+ "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
315
+ "model.layers.31.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
316
+ "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
317
+ "model.layers.31.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
318
+ "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
319
+ "model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
320
+ "model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
321
+ "model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
322
+ "model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
323
+ "model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
324
+ "model.layers.32.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
325
+ "model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
326
+ "model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
327
+ "model.layers.32.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
328
+ "model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
329
+ "model.layers.32.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
330
+ "model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
331
+ "model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
332
+ "model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
333
+ "model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
334
+ "model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
335
+ "model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
336
+ "model.layers.33.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
337
+ "model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
338
+ "model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
339
+ "model.layers.33.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
340
+ "model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
341
+ "model.layers.33.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
342
+ "model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
343
+ "model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
344
+ "model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
345
+ "model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
346
+ "model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
347
+ "model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
348
+ "model.layers.34.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
349
+ "model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
350
+ "model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
351
+ "model.layers.34.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
352
+ "model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
353
+ "model.layers.34.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
354
+ "model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
355
+ "model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
356
+ "model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
357
+ "model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
358
+ "model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
359
+ "model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
360
+ "model.layers.35.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
361
+ "model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
362
+ "model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
363
+ "model.layers.35.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
364
+ "model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
365
+ "model.layers.35.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
366
+ "model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
367
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
368
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
369
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
370
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
371
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
372
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
373
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
374
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
375
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
376
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
377
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
378
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
379
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
380
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
381
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
382
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
383
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
384
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
385
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
386
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
387
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
388
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
389
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
390
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
391
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
392
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
393
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
394
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
395
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
396
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
397
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
398
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
399
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
400
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
401
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
402
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
403
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
404
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
405
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
406
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
407
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
408
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
409
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
410
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
411
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
412
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
413
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
414
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
415
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
416
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
417
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
418
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
419
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
420
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
421
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
422
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
423
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
424
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
425
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
426
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
427
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
428
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
429
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
430
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
431
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
432
+ "model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
433
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
434
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
435
+ "model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
436
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
437
+ "model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
438
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
439
+ "model.norm.weight": "model-00002-of-00002.safetensors"
440
+ }
441
+ }
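The `weight_map` above is how sharded checkpoints are resolved: every parameter name points at the shard file that stores it, so in this split layers 3-9 sit in `model-00001-of-00002.safetensors` while layers 24-35 and the final `model.norm.weight` sit in the second shard. A minimal sketch of reading one tensor through this index, assuming a local checkout directory `Qwen2.5-3B-Instruct/` holding the index next to both shard files:

```python
import json
from pathlib import Path

from safetensors import safe_open

# Assumed local path; adjust to wherever this upload was cloned.
repo_dir = Path("Qwen2.5-3B-Instruct")

# weight_map: parameter name -> shard filename, exactly as in the diff above.
index = json.loads((repo_dir / "model.safetensors.index.json").read_text())
weight_map = index["weight_map"]

name = "model.layers.30.mlp.down_proj.weight"
shard = weight_map[name]  # "model-00002-of-00002.safetensors" per the map

# Open only the shard that holds this tensor instead of loading everything.
with safe_open(str(repo_dir / shard), framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape), "from", shard)
```

In practice `transformers.AutoModelForCausalLM.from_pretrained` performs this resolution automatically; the manual route is mainly useful for inspecting or patching individual tensors.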
Qwen2.5-3B-Instruct/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
Qwen2.5-3B-Instruct/tokenizer_config.json ADDED
@@ -0,0 +1,207 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
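The `chat_template` above is a Jinja template that `transformers` applies to a message list to produce Qwen's ChatML wire format (`<|im_start|>role\n...<|im_end|>`), including the tool-calling branches; note that `eos_token` is `<|im_end|>` rather than `<|endoftext|>`, which is reused as the `pad_token`. A minimal sketch of rendering it, assuming the files are fetched from the `Qwen/Qwen2.5-3B-Instruct` Hub repo (or a local copy of this upload):

```python
from transformers import AutoTokenizer

# Loads tokenizer.json and the tokenizer_config.json shown above.
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-3B-Instruct")

messages = [
    {"role": "system", "content": "You are a terse assistant."},
    {"role": "user", "content": "Say hello."},
]

# Renders the Jinja chat_template; add_generation_prompt=True appends the
# trailing "<|im_start|>assistant\n" so the model continues as the assistant.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
print("eos token:", tokenizer.eos_token)  # "<|im_end|>", per the config
```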
Qwen2.5-3B-Instruct/vocab.json ADDED
The diff for this file is too large to render. See raw diff
Qwen2.5-3B-Instruct/vocab_old.json ADDED
The diff for this file is too large to render. See raw diff