Upload folder using huggingface_hub

- .gitattributes +1 -0
- added_tokens.json +1054 -0
- chat_template.jinja +120 -0
- config.json +66 -0
- generation_config.json +13 -0
- merges.txt +0 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +721 -0
- processor_config.json +84 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +0 -0
- vocab.json +0 -0
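
As the commit title says, this upload was produced with huggingface_hub. A minimal sketch of the equivalent call, assuming a local checkpoint folder and a hypothetical repo id (neither is taken from this commit):

from huggingface_hub import HfApi

api = HfApi()
# Uploads every file in the folder as a single commit; files matched by
# .gitattributes LFS rules (e.g. *.safetensors, tokenizer.json) are stored
# via Git LFS automatically.
api.upload_folder(
    repo_id="your-org/your-model",   # hypothetical
    folder_path="./checkpoint",      # hypothetical local path
    commit_message="Upload folder using huggingface_hub",
)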
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json
ADDED
@@ -0,0 +1,1054 @@
+{
+  "</think>": 151668,
+  "</tool_call>": 151658,
+  "</tool_response>": 151666,
+  "<mt_0000>": 151670,
+  "<mt_0001>": 151671,
+  "<mt_0002>": 151672,
+  [... "<mt_0003>" through "<mt_1021>" continue one per line, each id one higher than the last ("<mt_NNNN>" maps to 151670 + NNNN) ...]
+  "<mt_1022>": 152692,
+  "<mt_1023>": 152693,
+  "<mt_end>": 152694,
+  "<mt_start>": 151669,
+  "<think>": 151667,
+  "<tool_call>": 151657,
+  "<tool_response>": 151665,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
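
The map above extends the base Qwen vocabulary with 1024 contiguous <mt_NNNN> tokens bracketed by <mt_start> and <mt_end>, plus the usual Qwen chat, tool, FIM, and vision specials. A minimal sketch of checking those ids with transformers; the repo id is hypothetical:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")

# The mt block occupies a contiguous id range: <mt_start>, then
# <mt_0000>..<mt_1023>, then <mt_end>.
print(tok.convert_tokens_to_ids("<mt_start>"))  # 151669
print(tok.convert_tokens_to_ids("<mt_0000>"))   # 151670
print(tok.convert_tokens_to_ids("<mt_1023>"))   # 152693
print(tok.convert_tokens_to_ids("<mt_end>"))    # 152694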
chat_template.jinja
ADDED
@@ -0,0 +1,120 @@
+{%- if tools %}
+    {{- '<|im_start|>system\n' }}
+    {%- if messages[0].role == 'system' %}
+        {%- if messages[0].content is string %}
+            {{- messages[0].content }}
+        {%- else %}
+            {%- for content in messages[0].content %}
+                {%- if 'text' in content %}
+                    {{- content.text }}
+                {%- endif %}
+            {%- endfor %}
+        {%- endif %}
+        {{- '\n\n' }}
+    {%- endif %}
+    {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+    {%- for tool in tools %}
+        {{- "\n" }}
+        {{- tool | tojson }}
+    {%- endfor %}
+    {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+{%- else %}
+    {%- if messages[0].role == 'system' %}
+        {{- '<|im_start|>system\n' }}
+        {%- if messages[0].content is string %}
+            {{- messages[0].content }}
+        {%- else %}
+            {%- for content in messages[0].content %}
+                {%- if 'text' in content %}
+                    {{- content.text }}
+                {%- endif %}
+            {%- endfor %}
+        {%- endif %}
+        {{- '<|im_end|>\n' }}
+    {%- endif %}
+{%- endif %}
+{%- set image_count = namespace(value=0) %}
+{%- set video_count = namespace(value=0) %}
+{%- for message in messages %}
+    {%- if message.role == "user" %}
+        {{- '<|im_start|>' + message.role + '\n' }}
+        {%- if message.content is string %}
+            {{- message.content }}
+        {%- else %}
+            {%- for content in message.content %}
+                {%- if content.type == 'image' or 'image' in content or 'image_url' in content %}
+                    {%- set image_count.value = image_count.value + 1 %}
+                    {%- if add_vision_id %}Picture {{ image_count.value }}: {% endif -%}
+                    <|vision_start|><|image_pad|><|vision_end|>
+                {%- elif content.type == 'video' or 'video' in content %}
+                    {%- set video_count.value = video_count.value + 1 %}
+                    {%- if add_vision_id %}Video {{ video_count.value }}: {% endif -%}
+                    <|vision_start|><|video_pad|><|vision_end|>
+                {%- elif 'text' in content %}
+                    {{- content.text }}
+                {%- endif %}
+            {%- endfor %}
+        {%- endif %}
+        {{- '<|im_end|>\n' }}
+    {%- elif message.role == "assistant" %}
+        {{- '<|im_start|>' + message.role + '\n' }}
+        {%- if message.content is string %}
+            {{- message.content }}
+        {%- else %}
+            {%- for content_item in message.content %}
+                {%- if 'text' in content_item %}
+                    {{- content_item.text }}
+                {%- endif %}
+            {%- endfor %}
+        {%- endif %}
+        {%- if message.tool_calls %}
+            {%- for tool_call in message.tool_calls %}
+                {%- if (loop.first and message.content) or (not loop.first) %}
+                    {{- '\n' }}
+                {%- endif %}
+                {%- if tool_call.function %}
+                    {%- set tool_call = tool_call.function %}
+                {%- endif %}
+                {{- '<tool_call>\n{"name": "' }}
+                {{- tool_call.name }}
+                {{- '", "arguments": ' }}
+                {%- if tool_call.arguments is string %}
+                    {{- tool_call.arguments }}
+                {%- else %}
+                    {{- tool_call.arguments | tojson }}
+                {%- endif %}
+                {{- '}\n</tool_call>' }}
+            {%- endfor %}
+        {%- endif %}
+        {{- '<|im_end|>\n' }}
+    {%- elif message.role == "tool" %}
+        {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+            {{- '<|im_start|>user' }}
+        {%- endif %}
+        {{- '\n<tool_response>\n' }}
+        {%- if message.content is string %}
+            {{- message.content }}
+        {%- else %}
+            {%- for content in message.content %}
+                {%- if content.type == 'image' or 'image' in content or 'image_url' in content %}
+                    {%- set image_count.value = image_count.value + 1 %}
+                    {%- if add_vision_id %}Picture {{ image_count.value }}: {% endif -%}
+                    <|vision_start|><|image_pad|><|vision_end|>
+                {%- elif content.type == 'video' or 'video' in content %}
+                    {%- set video_count.value = video_count.value + 1 %}
+                    {%- if add_vision_id %}Video {{ video_count.value }}: {% endif -%}
+                    <|vision_start|><|video_pad|><|vision_end|>
+                {%- elif 'text' in content %}
+                    {{- content.text }}
+                {%- endif %}
+            {%- endfor %}
+        {%- endif %}
+        {{- '\n</tool_response>' }}
+        {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+            {{- '<|im_end|>\n' }}
+        {%- endif %}
+    {%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+    {{- '<|im_start|>assistant\n' }}
+{%- endif %}
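
This is the standard Qwen ChatML-style template: it wraps each turn in <|im_start|>/<|im_end|>, injects tool signatures into the system block when tools are passed, replaces image and video content parts with <|vision_start|>...<|vision_end|> spans, and folds tool responses into user turns. A minimal rendering sketch; the repo id is hypothetical:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")
messages = [
    {"role": "user", "content": [
        {"type": "image", "image": "photo.jpg"},
        {"type": "text", "text": "Describe this picture."},
    ]},
]
# tokenize=False returns the rendered string; add_generation_prompt=True
# appends '<|im_start|>assistant\n' per the template's final branch.
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))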
config.json
ADDED
@@ -0,0 +1,66 @@
+{
+  "architectures": [
+    "Qwen3VLForConditionalGeneration"
+  ],
+  "dtype": "bfloat16",
+  "image_token_id": 151655,
+  "model_type": "qwen3_vl",
+  "text_config": {
+    "attention_bias": false,
+    "attention_dropout": 0.0,
+    "bos_token_id": 151643,
+    "dtype": "bfloat16",
+    "eos_token_id": 151645,
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 2560,
+    "initializer_range": 0.02,
+    "intermediate_size": 9728,
+    "max_position_embeddings": 262144,
+    "model_type": "qwen3_vl_text",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 36,
+    "num_key_value_heads": 8,
+    "rms_norm_eps": 1e-06,
+    "rope_parameters": {
+      "mrope_interleaved": true,
+      "mrope_section": [
+        24,
+        20,
+        20
+      ],
+      "rope_theta": 5000000,
+      "rope_type": "default"
+    },
+    "rope_theta": 5000000,
+    "tie_word_embeddings": true,
+    "use_cache": true,
+    "vocab_size": 152695
+  },
+  "tie_word_embeddings": true,
+  "transformers_version": "5.0.0.dev0",
+  "video_token_id": 151656,
+  "vision_config": {
+    "deepstack_visual_indexes": [
+      5,
+      11,
+      17
+    ],
+    "depth": 24,
+    "dtype": "bfloat16",
+    "hidden_act": "gelu_pytorch_tanh",
+    "hidden_size": 1024,
+    "in_channels": 3,
+    "initializer_range": 0.02,
+    "intermediate_size": 4096,
+    "model_type": "qwen3_vl",
+    "num_heads": 16,
+    "num_position_embeddings": 2304,
+    "out_hidden_size": 2560,
+    "patch_size": 16,
+    "spatial_merge_size": 2,
+    "temporal_patch_size": 2
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652
+}
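
The config describes a qwen3_vl checkpoint: a 36-layer, 2560-wide text decoder with grouped-query attention (32 query heads sharing 8 key/value heads, head_dim 128), interleaved M-RoPE, and a 24-layer vision tower whose patch features are merged 2x2 before projection to the text width. A minimal sketch of reading these fields; the repo id is hypothetical, and qwen3_vl support requires a transformers at least as new as the 5.0.0.dev0 recorded above:

from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("your-org/your-model")
text, vis = cfg.text_config, cfg.vision_config
# Grouped-query attention: 32 query heads, 8 KV heads, 128-dim heads.
print(text.num_hidden_layers, text.num_attention_heads, text.num_key_value_heads)  # 36 32 8
# Each image token covers patch_size * spatial_merge_size = 32 pixels per side.
print(vis.patch_size * vis.spatial_merge_size)  # 32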
generation_config.json
ADDED
@@ -0,0 +1,13 @@
+{
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
+  "transformers_version": "5.0.0.dev0"
+}
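
These defaults are picked up automatically by generate(). A minimal end-to-end sketch, assuming a hypothetical repo id and that this checkpoint loads through the image-text-to-text auto class:

import torch
from transformers import AutoModelForImageTextToText, AutoProcessor

processor = AutoProcessor.from_pretrained("your-org/your-model")
model = AutoModelForImageTextToText.from_pretrained(
    "your-org/your-model", torch_dtype=torch.bfloat16
)
inputs = processor(text="Hello", return_tensors="pt")
# Sampling settings (do_sample=True, temperature=0.7, top_p=0.8, top_k=20)
# come from generation_config.json unless overridden here.
out = model.generate(**inputs, max_new_tokens=32)
print(processor.decode(out[0], skip_special_tokens=True))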
merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
model-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c018865139807c983929b2cda251257a970d922765dd1d69faf0a5ffbcf216b9
+size 4994383960
model-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ef6977f8be72357dcdd5e762837e2b616c5d16e276cbe394eba908a34dc59c1
+size 3885221448
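
Both .safetensors entries are Git LFS pointer files: the diff records only the pointer (spec version, sha256, byte size) while the roughly 8.9 GB of weights live in LFS storage. A minimal sketch of verifying a downloaded shard against its pointer; the local path is assumed:

import hashlib
import os

def verify(path, expected_sha256, expected_size):
    # Stream the file in 1 MiB chunks and compare hash and size to the pointer.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_sha256 and os.path.getsize(path) == expected_size

print(verify("model-00001-of-00002.safetensors",
             "c018865139807c983929b2cda251257a970d922765dd1d69faf0a5ffbcf216b9",
             4994383960))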
model.safetensors.index.json
ADDED
@@ -0,0 +1,721 @@
+{
+  "metadata": {
+    "total_parameters": 4439758848,
+    "total_size": 8879517696
+  },
+  "weight_map": {
+    "model.language_model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.language_model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 49 |
+
"model.language_model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 50 |
+
"model.language_model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 51 |
+
"model.language_model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 52 |
+
"model.language_model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 53 |
+
"model.language_model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 54 |
+
"model.language_model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 55 |
+
"model.language_model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 56 |
+
"model.language_model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 57 |
+
"model.language_model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 58 |
+
"model.language_model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 59 |
+
"model.language_model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 60 |
+
"model.language_model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 61 |
+
"model.language_model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 62 |
+
"model.language_model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 63 |
+
"model.language_model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 64 |
+
"model.language_model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 65 |
+
"model.language_model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 66 |
+
"model.language_model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 67 |
+
"model.language_model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 68 |
+
"model.language_model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 69 |
+
"model.language_model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 70 |
+
"model.language_model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 71 |
+
"model.language_model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 72 |
+
"model.language_model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 73 |
+
"model.language_model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 74 |
+
"model.language_model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 75 |
+
"model.language_model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 76 |
+
"model.language_model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 77 |
+
"model.language_model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 78 |
+
"model.language_model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 79 |
+
"model.language_model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 80 |
+
"model.language_model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 81 |
+
"model.language_model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 82 |
+
"model.language_model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 83 |
+
"model.language_model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 84 |
+
"model.language_model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 85 |
+
"model.language_model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 86 |
+
"model.language_model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 87 |
+
"model.language_model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 88 |
+
"model.language_model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 89 |
+
"model.language_model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 90 |
+
"model.language_model.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 91 |
+
"model.language_model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 92 |
+
"model.language_model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 93 |
+
"model.language_model.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 94 |
+
"model.language_model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 95 |
+
"model.language_model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 96 |
+
"model.language_model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 97 |
+
"model.language_model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 98 |
+
"model.language_model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 99 |
+
"model.language_model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 100 |
+
"model.language_model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 101 |
+
"model.language_model.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 102 |
+
"model.language_model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 103 |
+
"model.language_model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 104 |
+
"model.language_model.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 105 |
+
"model.language_model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 106 |
+
"model.language_model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 107 |
+
"model.language_model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 108 |
+
"model.language_model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 109 |
+
"model.language_model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 110 |
+
"model.language_model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 111 |
+
"model.language_model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 112 |
+
"model.language_model.layers.17.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 113 |
+
"model.language_model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 114 |
+
"model.language_model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 115 |
+
"model.language_model.layers.17.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 116 |
+
"model.language_model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 117 |
+
"model.language_model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 118 |
+
"model.language_model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 119 |
+
"model.language_model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 120 |
+
"model.language_model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 121 |
+
"model.language_model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 122 |
+
"model.language_model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 123 |
+
"model.language_model.layers.18.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 124 |
+
"model.language_model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 125 |
+
"model.language_model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 126 |
+
"model.language_model.layers.18.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 127 |
+
"model.language_model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 128 |
+
"model.language_model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 129 |
+
"model.language_model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 130 |
+
"model.language_model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 131 |
+
"model.language_model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 132 |
+
"model.language_model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 133 |
+
"model.language_model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 134 |
+
"model.language_model.layers.19.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 135 |
+
"model.language_model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 136 |
+
"model.language_model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 137 |
+
"model.language_model.layers.19.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 138 |
+
"model.language_model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 139 |
+
"model.language_model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 140 |
+
"model.language_model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 141 |
+
"model.language_model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 142 |
+
"model.language_model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 143 |
+
"model.language_model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 144 |
+
"model.language_model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 145 |
+
"model.language_model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 146 |
+
"model.language_model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 147 |
+
"model.language_model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 148 |
+
"model.language_model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 149 |
+
"model.language_model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 150 |
+
"model.language_model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 151 |
+
"model.language_model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 152 |
+
"model.language_model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 153 |
+
"model.language_model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 154 |
+
"model.language_model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 155 |
+
"model.language_model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 156 |
+
"model.language_model.layers.20.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 157 |
+
"model.language_model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 158 |
+
"model.language_model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 159 |
+
"model.language_model.layers.20.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 160 |
+
"model.language_model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 161 |
+
"model.language_model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 162 |
+
"model.language_model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 163 |
+
"model.language_model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 164 |
+
"model.language_model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 165 |
+
"model.language_model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 166 |
+
"model.language_model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 167 |
+
"model.language_model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 168 |
+
"model.language_model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 169 |
+
"model.language_model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 170 |
+
"model.language_model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 171 |
+
"model.language_model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 172 |
+
"model.language_model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 173 |
+
"model.language_model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 174 |
+
"model.language_model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 175 |
+
"model.language_model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 176 |
+
"model.language_model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 177 |
+
"model.language_model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 178 |
+
"model.language_model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 179 |
+
"model.language_model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 180 |
+
"model.language_model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 181 |
+
"model.language_model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 182 |
+
"model.language_model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 183 |
+
"model.language_model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 184 |
+
"model.language_model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 185 |
+
"model.language_model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 186 |
+
"model.language_model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 187 |
+
"model.language_model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 188 |
+
"model.language_model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 189 |
+
"model.language_model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 190 |
+
"model.language_model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 191 |
+
"model.language_model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 192 |
+
"model.language_model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 193 |
+
"model.language_model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 194 |
+
"model.language_model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 195 |
+
"model.language_model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 196 |
+
"model.language_model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 197 |
+
"model.language_model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 198 |
+
"model.language_model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 199 |
+
"model.language_model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 200 |
+
"model.language_model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 201 |
+
"model.language_model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 202 |
+
"model.language_model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 203 |
+
"model.language_model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 204 |
+
"model.language_model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 205 |
+
"model.language_model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 206 |
+
"model.language_model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 207 |
+
"model.language_model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 208 |
+
"model.language_model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 209 |
+
"model.language_model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 210 |
+
"model.language_model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 211 |
+
"model.language_model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 212 |
+
"model.language_model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 213 |
+
"model.language_model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 214 |
+
"model.language_model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 215 |
+
"model.language_model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 216 |
+
"model.language_model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 217 |
+
"model.language_model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 218 |
+
"model.language_model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 219 |
+
"model.language_model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 220 |
+
"model.language_model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 221 |
+
"model.language_model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 222 |
+
"model.language_model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 223 |
+
"model.language_model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 224 |
+
"model.language_model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 225 |
+
"model.language_model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 226 |
+
"model.language_model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 227 |
+
"model.language_model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 228 |
+
"model.language_model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 229 |
+
"model.language_model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 230 |
+
"model.language_model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 231 |
+
"model.language_model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 232 |
+
"model.language_model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 233 |
+
"model.language_model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 234 |
+
"model.language_model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 235 |
+
"model.language_model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 236 |
+
"model.language_model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 237 |
+
"model.language_model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 238 |
+
"model.language_model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 239 |
+
"model.language_model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 240 |
+
"model.language_model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 241 |
+
"model.language_model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 242 |
+
"model.language_model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 243 |
+
"model.language_model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 244 |
+
"model.language_model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 245 |
+
"model.language_model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 246 |
+
"model.language_model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 247 |
+
"model.language_model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 248 |
+
"model.language_model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 249 |
+
"model.language_model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 250 |
+
"model.language_model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 251 |
+
"model.language_model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 252 |
+
"model.language_model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 253 |
+
"model.language_model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 254 |
+
"model.language_model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 255 |
+
"model.language_model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 256 |
+
"model.language_model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 257 |
+
"model.language_model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 258 |
+
"model.language_model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 259 |
+
"model.language_model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 260 |
+
"model.language_model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 261 |
+
"model.language_model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 262 |
+
"model.language_model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 263 |
+
"model.language_model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 264 |
+
"model.language_model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 265 |
+
"model.language_model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 266 |
+
"model.language_model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 267 |
+
"model.language_model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 268 |
+
"model.language_model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 269 |
+
"model.language_model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 270 |
+
"model.language_model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 271 |
+
"model.language_model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 272 |
+
"model.language_model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 273 |
+
"model.language_model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 274 |
+
"model.language_model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 275 |
+
"model.language_model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 276 |
+
"model.language_model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 277 |
+
"model.language_model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 278 |
+
"model.language_model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 279 |
+
"model.language_model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 280 |
+
"model.language_model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 281 |
+
"model.language_model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 282 |
+
"model.language_model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 283 |
+
"model.language_model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 284 |
+
"model.language_model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 285 |
+
"model.language_model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 286 |
+
"model.language_model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 287 |
+
"model.language_model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 288 |
+
"model.language_model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 289 |
+
"model.language_model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 290 |
+
"model.language_model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 291 |
+
"model.language_model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 292 |
+
"model.language_model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 293 |
+
"model.language_model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 294 |
+
"model.language_model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 295 |
+
"model.language_model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 296 |
+
"model.language_model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 297 |
+
"model.language_model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 298 |
+
"model.language_model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 299 |
+
"model.language_model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 300 |
+
"model.language_model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 301 |
+
"model.language_model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 302 |
+
"model.language_model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 303 |
+
"model.language_model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 304 |
+
"model.language_model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 305 |
+
"model.language_model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 306 |
+
"model.language_model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 307 |
+
"model.language_model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 308 |
+
"model.language_model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 309 |
+
"model.language_model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 310 |
+
"model.language_model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 311 |
+
"model.language_model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 312 |
+
"model.language_model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 313 |
+
"model.language_model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 314 |
+
"model.language_model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 315 |
+
"model.language_model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 316 |
+
"model.language_model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 317 |
+
"model.language_model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 318 |
+
"model.language_model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 319 |
+
"model.language_model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 320 |
+
"model.language_model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 321 |
+
"model.language_model.layers.34.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 322 |
+
"model.language_model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 323 |
+
"model.language_model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 324 |
+
"model.language_model.layers.34.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 325 |
+
"model.language_model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 326 |
+
"model.language_model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 327 |
+
"model.language_model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 328 |
+
"model.language_model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 329 |
+
"model.language_model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 330 |
+
"model.language_model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 331 |
+
"model.language_model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 332 |
+
"model.language_model.layers.35.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
|
| 333 |
+
"model.language_model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 334 |
+
"model.language_model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 335 |
+
"model.language_model.layers.35.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
|
| 336 |
+
"model.language_model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 337 |
+
"model.language_model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 338 |
+
"model.language_model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 339 |
+
"model.language_model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 340 |
+
"model.language_model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 341 |
+
"model.language_model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 342 |
+
"model.language_model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 343 |
+
"model.language_model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 344 |
+
"model.language_model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 345 |
+
"model.language_model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 346 |
+
"model.language_model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 347 |
+
"model.language_model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 348 |
+
"model.language_model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 349 |
+
"model.language_model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 350 |
+
"model.language_model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 351 |
+
"model.language_model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 352 |
+
"model.language_model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 353 |
+
"model.language_model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 354 |
+
"model.language_model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 355 |
+
"model.language_model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 356 |
+
"model.language_model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 357 |
+
"model.language_model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 358 |
+
"model.language_model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 359 |
+
"model.language_model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 360 |
+
"model.language_model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 361 |
+
"model.language_model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 362 |
+
"model.language_model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 363 |
+
"model.language_model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 364 |
+
"model.language_model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 365 |
+
"model.language_model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 366 |
+
"model.language_model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 367 |
+
"model.language_model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 368 |
+
"model.language_model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 369 |
+
"model.language_model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 370 |
+
"model.language_model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 371 |
+
"model.language_model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 372 |
+
"model.language_model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 373 |
+
"model.language_model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 374 |
+
"model.language_model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 375 |
+
"model.language_model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 376 |
+
"model.language_model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 377 |
+
"model.language_model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 378 |
+
"model.language_model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 379 |
+
"model.language_model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 380 |
+
"model.language_model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 381 |
+
"model.language_model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 382 |
+
"model.language_model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 383 |
+
"model.language_model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 384 |
+
"model.language_model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 385 |
+
"model.language_model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 386 |
+
"model.language_model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 387 |
+
"model.language_model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 388 |
+
"model.language_model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 389 |
+
"model.language_model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 390 |
+
"model.language_model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 391 |
+
"model.language_model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 392 |
+
"model.language_model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 393 |
+
"model.language_model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 394 |
+
"model.language_model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 395 |
+
"model.language_model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 396 |
+
"model.language_model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 397 |
+
"model.language_model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 398 |
+
"model.language_model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 399 |
+
"model.language_model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 400 |
+
"model.language_model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 401 |
+
"model.language_model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 402 |
+
"model.language_model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 403 |
+
"model.language_model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 404 |
+
"model.language_model.norm.weight": "model-00002-of-00002.safetensors",
|
| 405 |
+
"model.visual.blocks.0.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 406 |
+
"model.visual.blocks.0.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 407 |
+
"model.visual.blocks.0.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 408 |
+
"model.visual.blocks.0.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 409 |
+
"model.visual.blocks.0.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 410 |
+
"model.visual.blocks.0.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 411 |
+
"model.visual.blocks.0.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 412 |
+
"model.visual.blocks.0.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 413 |
+
"model.visual.blocks.0.norm1.bias": "model-00001-of-00002.safetensors",
|
| 414 |
+
"model.visual.blocks.0.norm1.weight": "model-00001-of-00002.safetensors",
|
| 415 |
+
"model.visual.blocks.0.norm2.bias": "model-00001-of-00002.safetensors",
|
| 416 |
+
"model.visual.blocks.0.norm2.weight": "model-00001-of-00002.safetensors",
|
| 417 |
+
"model.visual.blocks.1.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 418 |
+
"model.visual.blocks.1.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 419 |
+
"model.visual.blocks.1.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 420 |
+
"model.visual.blocks.1.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 421 |
+
"model.visual.blocks.1.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 422 |
+
"model.visual.blocks.1.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 423 |
+
"model.visual.blocks.1.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 424 |
+
"model.visual.blocks.1.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 425 |
+
"model.visual.blocks.1.norm1.bias": "model-00001-of-00002.safetensors",
|
| 426 |
+
"model.visual.blocks.1.norm1.weight": "model-00001-of-00002.safetensors",
|
| 427 |
+
"model.visual.blocks.1.norm2.bias": "model-00001-of-00002.safetensors",
|
| 428 |
+
"model.visual.blocks.1.norm2.weight": "model-00001-of-00002.safetensors",
|
| 429 |
+
"model.visual.blocks.10.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 430 |
+
"model.visual.blocks.10.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 431 |
+
"model.visual.blocks.10.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 432 |
+
"model.visual.blocks.10.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 433 |
+
"model.visual.blocks.10.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 434 |
+
"model.visual.blocks.10.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 435 |
+
"model.visual.blocks.10.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 436 |
+
"model.visual.blocks.10.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 437 |
+
"model.visual.blocks.10.norm1.bias": "model-00001-of-00002.safetensors",
|
| 438 |
+
"model.visual.blocks.10.norm1.weight": "model-00001-of-00002.safetensors",
|
| 439 |
+
"model.visual.blocks.10.norm2.bias": "model-00001-of-00002.safetensors",
|
| 440 |
+
"model.visual.blocks.10.norm2.weight": "model-00001-of-00002.safetensors",
|
| 441 |
+
"model.visual.blocks.11.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 442 |
+
"model.visual.blocks.11.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 443 |
+
"model.visual.blocks.11.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 444 |
+
"model.visual.blocks.11.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 445 |
+
"model.visual.blocks.11.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 446 |
+
"model.visual.blocks.11.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 447 |
+
"model.visual.blocks.11.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 448 |
+
"model.visual.blocks.11.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 449 |
+
"model.visual.blocks.11.norm1.bias": "model-00001-of-00002.safetensors",
|
| 450 |
+
"model.visual.blocks.11.norm1.weight": "model-00001-of-00002.safetensors",
|
| 451 |
+
"model.visual.blocks.11.norm2.bias": "model-00001-of-00002.safetensors",
|
| 452 |
+
"model.visual.blocks.11.norm2.weight": "model-00001-of-00002.safetensors",
|
| 453 |
+
"model.visual.blocks.12.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 454 |
+
"model.visual.blocks.12.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 455 |
+
"model.visual.blocks.12.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 456 |
+
"model.visual.blocks.12.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 457 |
+
"model.visual.blocks.12.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 458 |
+
"model.visual.blocks.12.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 459 |
+
"model.visual.blocks.12.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 460 |
+
"model.visual.blocks.12.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 461 |
+
"model.visual.blocks.12.norm1.bias": "model-00001-of-00002.safetensors",
|
| 462 |
+
"model.visual.blocks.12.norm1.weight": "model-00001-of-00002.safetensors",
|
| 463 |
+
"model.visual.blocks.12.norm2.bias": "model-00001-of-00002.safetensors",
|
| 464 |
+
"model.visual.blocks.12.norm2.weight": "model-00001-of-00002.safetensors",
|
| 465 |
+
"model.visual.blocks.13.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 466 |
+
"model.visual.blocks.13.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 467 |
+
"model.visual.blocks.13.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 468 |
+
"model.visual.blocks.13.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 469 |
+
"model.visual.blocks.13.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 470 |
+
"model.visual.blocks.13.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 471 |
+
"model.visual.blocks.13.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 472 |
+
"model.visual.blocks.13.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 473 |
+
"model.visual.blocks.13.norm1.bias": "model-00001-of-00002.safetensors",
|
| 474 |
+
"model.visual.blocks.13.norm1.weight": "model-00001-of-00002.safetensors",
|
| 475 |
+
"model.visual.blocks.13.norm2.bias": "model-00001-of-00002.safetensors",
|
| 476 |
+
"model.visual.blocks.13.norm2.weight": "model-00001-of-00002.safetensors",
|
| 477 |
+
"model.visual.blocks.14.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 478 |
+
"model.visual.blocks.14.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 479 |
+
"model.visual.blocks.14.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 480 |
+
"model.visual.blocks.14.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 481 |
+
"model.visual.blocks.14.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 482 |
+
"model.visual.blocks.14.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 483 |
+
"model.visual.blocks.14.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 484 |
+
"model.visual.blocks.14.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 485 |
+
"model.visual.blocks.14.norm1.bias": "model-00001-of-00002.safetensors",
|
| 486 |
+
"model.visual.blocks.14.norm1.weight": "model-00001-of-00002.safetensors",
|
| 487 |
+
"model.visual.blocks.14.norm2.bias": "model-00001-of-00002.safetensors",
|
| 488 |
+
"model.visual.blocks.14.norm2.weight": "model-00001-of-00002.safetensors",
|
| 489 |
+
"model.visual.blocks.15.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 490 |
+
"model.visual.blocks.15.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 491 |
+
"model.visual.blocks.15.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 492 |
+
"model.visual.blocks.15.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 493 |
+
"model.visual.blocks.15.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 494 |
+
"model.visual.blocks.15.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 495 |
+
"model.visual.blocks.15.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 496 |
+
"model.visual.blocks.15.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 497 |
+
"model.visual.blocks.15.norm1.bias": "model-00001-of-00002.safetensors",
|
| 498 |
+
"model.visual.blocks.15.norm1.weight": "model-00001-of-00002.safetensors",
|
| 499 |
+
"model.visual.blocks.15.norm2.bias": "model-00001-of-00002.safetensors",
|
| 500 |
+
"model.visual.blocks.15.norm2.weight": "model-00001-of-00002.safetensors",
|
| 501 |
+
"model.visual.blocks.16.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 502 |
+
"model.visual.blocks.16.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 503 |
+
"model.visual.blocks.16.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 504 |
+
"model.visual.blocks.16.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 505 |
+
"model.visual.blocks.16.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 506 |
+
"model.visual.blocks.16.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 507 |
+
"model.visual.blocks.16.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 508 |
+
"model.visual.blocks.16.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 509 |
+
"model.visual.blocks.16.norm1.bias": "model-00001-of-00002.safetensors",
|
| 510 |
+
"model.visual.blocks.16.norm1.weight": "model-00001-of-00002.safetensors",
|
| 511 |
+
"model.visual.blocks.16.norm2.bias": "model-00001-of-00002.safetensors",
|
| 512 |
+
"model.visual.blocks.16.norm2.weight": "model-00001-of-00002.safetensors",
|
| 513 |
+
"model.visual.blocks.17.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 514 |
+
"model.visual.blocks.17.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 515 |
+
"model.visual.blocks.17.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 516 |
+
"model.visual.blocks.17.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 517 |
+
"model.visual.blocks.17.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 518 |
+
"model.visual.blocks.17.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 519 |
+
"model.visual.blocks.17.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 520 |
+
"model.visual.blocks.17.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 521 |
+
"model.visual.blocks.17.norm1.bias": "model-00001-of-00002.safetensors",
|
| 522 |
+
"model.visual.blocks.17.norm1.weight": "model-00001-of-00002.safetensors",
|
| 523 |
+
"model.visual.blocks.17.norm2.bias": "model-00001-of-00002.safetensors",
|
| 524 |
+
"model.visual.blocks.17.norm2.weight": "model-00001-of-00002.safetensors",
|
| 525 |
+
"model.visual.blocks.18.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 526 |
+
"model.visual.blocks.18.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 527 |
+
"model.visual.blocks.18.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 528 |
+
"model.visual.blocks.18.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 529 |
+
"model.visual.blocks.18.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 530 |
+
"model.visual.blocks.18.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 531 |
+
"model.visual.blocks.18.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 532 |
+
"model.visual.blocks.18.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 533 |
+
"model.visual.blocks.18.norm1.bias": "model-00001-of-00002.safetensors",
|
| 534 |
+
"model.visual.blocks.18.norm1.weight": "model-00001-of-00002.safetensors",
|
| 535 |
+
"model.visual.blocks.18.norm2.bias": "model-00001-of-00002.safetensors",
|
| 536 |
+
"model.visual.blocks.18.norm2.weight": "model-00001-of-00002.safetensors",
|
| 537 |
+
"model.visual.blocks.19.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 538 |
+
"model.visual.blocks.19.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 539 |
+
"model.visual.blocks.19.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 540 |
+
"model.visual.blocks.19.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 541 |
+
"model.visual.blocks.19.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 542 |
+
"model.visual.blocks.19.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 543 |
+
"model.visual.blocks.19.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 544 |
+
"model.visual.blocks.19.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 545 |
+
"model.visual.blocks.19.norm1.bias": "model-00001-of-00002.safetensors",
|
| 546 |
+
"model.visual.blocks.19.norm1.weight": "model-00001-of-00002.safetensors",
|
| 547 |
+
"model.visual.blocks.19.norm2.bias": "model-00001-of-00002.safetensors",
|
| 548 |
+
"model.visual.blocks.19.norm2.weight": "model-00001-of-00002.safetensors",
|
| 549 |
+
"model.visual.blocks.2.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 550 |
+
"model.visual.blocks.2.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 551 |
+
"model.visual.blocks.2.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 552 |
+
"model.visual.blocks.2.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 553 |
+
"model.visual.blocks.2.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 554 |
+
"model.visual.blocks.2.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 555 |
+
"model.visual.blocks.2.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 556 |
+
"model.visual.blocks.2.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 557 |
+
"model.visual.blocks.2.norm1.bias": "model-00001-of-00002.safetensors",
|
| 558 |
+
"model.visual.blocks.2.norm1.weight": "model-00001-of-00002.safetensors",
|
| 559 |
+
"model.visual.blocks.2.norm2.bias": "model-00001-of-00002.safetensors",
|
| 560 |
+
"model.visual.blocks.2.norm2.weight": "model-00001-of-00002.safetensors",
|
| 561 |
+
"model.visual.blocks.20.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 562 |
+
"model.visual.blocks.20.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 563 |
+
"model.visual.blocks.20.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 564 |
+
"model.visual.blocks.20.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 565 |
+
"model.visual.blocks.20.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 566 |
+
"model.visual.blocks.20.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 567 |
+
"model.visual.blocks.20.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 568 |
+
"model.visual.blocks.20.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 569 |
+
"model.visual.blocks.20.norm1.bias": "model-00001-of-00002.safetensors",
|
| 570 |
+
"model.visual.blocks.20.norm1.weight": "model-00001-of-00002.safetensors",
|
| 571 |
+
"model.visual.blocks.20.norm2.bias": "model-00001-of-00002.safetensors",
|
| 572 |
+
"model.visual.blocks.20.norm2.weight": "model-00001-of-00002.safetensors",
|
| 573 |
+
"model.visual.blocks.21.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 574 |
+
"model.visual.blocks.21.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 575 |
+
"model.visual.blocks.21.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 576 |
+
"model.visual.blocks.21.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 577 |
+
"model.visual.blocks.21.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 578 |
+
"model.visual.blocks.21.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 579 |
+
"model.visual.blocks.21.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 580 |
+
"model.visual.blocks.21.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 581 |
+
"model.visual.blocks.21.norm1.bias": "model-00001-of-00002.safetensors",
|
| 582 |
+
"model.visual.blocks.21.norm1.weight": "model-00001-of-00002.safetensors",
|
| 583 |
+
"model.visual.blocks.21.norm2.bias": "model-00001-of-00002.safetensors",
|
| 584 |
+
"model.visual.blocks.21.norm2.weight": "model-00001-of-00002.safetensors",
|
| 585 |
+
"model.visual.blocks.22.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 586 |
+
"model.visual.blocks.22.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 587 |
+
"model.visual.blocks.22.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 588 |
+
"model.visual.blocks.22.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 589 |
+
"model.visual.blocks.22.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 590 |
+
"model.visual.blocks.22.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 591 |
+
"model.visual.blocks.22.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 592 |
+
"model.visual.blocks.22.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 593 |
+
"model.visual.blocks.22.norm1.bias": "model-00001-of-00002.safetensors",
|
| 594 |
+
"model.visual.blocks.22.norm1.weight": "model-00001-of-00002.safetensors",
|
| 595 |
+
"model.visual.blocks.22.norm2.bias": "model-00001-of-00002.safetensors",
|
| 596 |
+
"model.visual.blocks.22.norm2.weight": "model-00001-of-00002.safetensors",
|
| 597 |
+
"model.visual.blocks.23.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 598 |
+
"model.visual.blocks.23.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 599 |
+
"model.visual.blocks.23.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 600 |
+
"model.visual.blocks.23.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 601 |
+
"model.visual.blocks.23.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 602 |
+
"model.visual.blocks.23.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 603 |
+
"model.visual.blocks.23.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 604 |
+
"model.visual.blocks.23.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 605 |
+
"model.visual.blocks.23.norm1.bias": "model-00001-of-00002.safetensors",
|
| 606 |
+
"model.visual.blocks.23.norm1.weight": "model-00001-of-00002.safetensors",
|
| 607 |
+
"model.visual.blocks.23.norm2.bias": "model-00001-of-00002.safetensors",
|
| 608 |
+
"model.visual.blocks.23.norm2.weight": "model-00001-of-00002.safetensors",
|
| 609 |
+
"model.visual.blocks.3.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 610 |
+
"model.visual.blocks.3.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 611 |
+
"model.visual.blocks.3.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 612 |
+
"model.visual.blocks.3.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 613 |
+
"model.visual.blocks.3.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 614 |
+
"model.visual.blocks.3.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 615 |
+
"model.visual.blocks.3.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 616 |
+
"model.visual.blocks.3.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 617 |
+
"model.visual.blocks.3.norm1.bias": "model-00001-of-00002.safetensors",
|
| 618 |
+
"model.visual.blocks.3.norm1.weight": "model-00001-of-00002.safetensors",
|
| 619 |
+
"model.visual.blocks.3.norm2.bias": "model-00001-of-00002.safetensors",
|
| 620 |
+
"model.visual.blocks.3.norm2.weight": "model-00001-of-00002.safetensors",
|
| 621 |
+
"model.visual.blocks.4.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 622 |
+
"model.visual.blocks.4.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 623 |
+
"model.visual.blocks.4.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 624 |
+
"model.visual.blocks.4.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 625 |
+
"model.visual.blocks.4.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 626 |
+
"model.visual.blocks.4.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 627 |
+
"model.visual.blocks.4.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 628 |
+
"model.visual.blocks.4.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 629 |
+
"model.visual.blocks.4.norm1.bias": "model-00001-of-00002.safetensors",
|
| 630 |
+
"model.visual.blocks.4.norm1.weight": "model-00001-of-00002.safetensors",
|
| 631 |
+
"model.visual.blocks.4.norm2.bias": "model-00001-of-00002.safetensors",
|
| 632 |
+
"model.visual.blocks.4.norm2.weight": "model-00001-of-00002.safetensors",
|
| 633 |
+
"model.visual.blocks.5.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 634 |
+
"model.visual.blocks.5.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 635 |
+
"model.visual.blocks.5.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 636 |
+
"model.visual.blocks.5.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 637 |
+
"model.visual.blocks.5.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 638 |
+
"model.visual.blocks.5.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 639 |
+
"model.visual.blocks.5.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 640 |
+
"model.visual.blocks.5.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 641 |
+
"model.visual.blocks.5.norm1.bias": "model-00001-of-00002.safetensors",
|
| 642 |
+
"model.visual.blocks.5.norm1.weight": "model-00001-of-00002.safetensors",
|
| 643 |
+
"model.visual.blocks.5.norm2.bias": "model-00001-of-00002.safetensors",
|
| 644 |
+
"model.visual.blocks.5.norm2.weight": "model-00001-of-00002.safetensors",
|
| 645 |
+
"model.visual.blocks.6.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 646 |
+
"model.visual.blocks.6.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 647 |
+
"model.visual.blocks.6.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 648 |
+
"model.visual.blocks.6.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 649 |
+
"model.visual.blocks.6.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 650 |
+
"model.visual.blocks.6.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 651 |
+
"model.visual.blocks.6.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 652 |
+
"model.visual.blocks.6.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 653 |
+
"model.visual.blocks.6.norm1.bias": "model-00001-of-00002.safetensors",
|
| 654 |
+
"model.visual.blocks.6.norm1.weight": "model-00001-of-00002.safetensors",
|
| 655 |
+
"model.visual.blocks.6.norm2.bias": "model-00001-of-00002.safetensors",
|
| 656 |
+
"model.visual.blocks.6.norm2.weight": "model-00001-of-00002.safetensors",
|
| 657 |
+
"model.visual.blocks.7.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 658 |
+
"model.visual.blocks.7.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 659 |
+
"model.visual.blocks.7.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 660 |
+
"model.visual.blocks.7.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 661 |
+
"model.visual.blocks.7.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 662 |
+
"model.visual.blocks.7.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 663 |
+
"model.visual.blocks.7.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 664 |
+
"model.visual.blocks.7.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 665 |
+
"model.visual.blocks.7.norm1.bias": "model-00001-of-00002.safetensors",
|
| 666 |
+
"model.visual.blocks.7.norm1.weight": "model-00001-of-00002.safetensors",
|
| 667 |
+
"model.visual.blocks.7.norm2.bias": "model-00001-of-00002.safetensors",
|
| 668 |
+
"model.visual.blocks.7.norm2.weight": "model-00001-of-00002.safetensors",
|
| 669 |
+
"model.visual.blocks.8.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 670 |
+
"model.visual.blocks.8.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 671 |
+
"model.visual.blocks.8.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 672 |
+
"model.visual.blocks.8.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 673 |
+
"model.visual.blocks.8.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 674 |
+
"model.visual.blocks.8.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 675 |
+
"model.visual.blocks.8.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 676 |
+
"model.visual.blocks.8.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 677 |
+
"model.visual.blocks.8.norm1.bias": "model-00001-of-00002.safetensors",
|
| 678 |
+
"model.visual.blocks.8.norm1.weight": "model-00001-of-00002.safetensors",
|
| 679 |
+
"model.visual.blocks.8.norm2.bias": "model-00001-of-00002.safetensors",
|
| 680 |
+
"model.visual.blocks.8.norm2.weight": "model-00001-of-00002.safetensors",
|
| 681 |
+
"model.visual.blocks.9.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 682 |
+
"model.visual.blocks.9.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 683 |
+
"model.visual.blocks.9.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 684 |
+
"model.visual.blocks.9.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 685 |
+
"model.visual.blocks.9.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 686 |
+
"model.visual.blocks.9.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 687 |
+
"model.visual.blocks.9.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 688 |
+
"model.visual.blocks.9.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 689 |
+
"model.visual.blocks.9.norm1.bias": "model-00001-of-00002.safetensors",
|
| 690 |
+
"model.visual.blocks.9.norm1.weight": "model-00001-of-00002.safetensors",
|
| 691 |
+
"model.visual.blocks.9.norm2.bias": "model-00001-of-00002.safetensors",
|
| 692 |
+
"model.visual.blocks.9.norm2.weight": "model-00001-of-00002.safetensors",
|
| 693 |
+
"model.visual.deepstack_merger_list.0.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 694 |
+
"model.visual.deepstack_merger_list.0.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 695 |
+
"model.visual.deepstack_merger_list.0.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 696 |
+
"model.visual.deepstack_merger_list.0.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 697 |
+
"model.visual.deepstack_merger_list.0.norm.bias": "model-00001-of-00002.safetensors",
|
| 698 |
+
"model.visual.deepstack_merger_list.0.norm.weight": "model-00001-of-00002.safetensors",
|
| 699 |
+
"model.visual.deepstack_merger_list.1.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 700 |
+
"model.visual.deepstack_merger_list.1.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 701 |
+
"model.visual.deepstack_merger_list.1.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 702 |
+
"model.visual.deepstack_merger_list.1.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 703 |
+
"model.visual.deepstack_merger_list.1.norm.bias": "model-00001-of-00002.safetensors",
|
| 704 |
+
"model.visual.deepstack_merger_list.1.norm.weight": "model-00001-of-00002.safetensors",
|
| 705 |
+
"model.visual.deepstack_merger_list.2.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 706 |
+
"model.visual.deepstack_merger_list.2.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 707 |
+
"model.visual.deepstack_merger_list.2.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 708 |
+
"model.visual.deepstack_merger_list.2.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 709 |
+
"model.visual.deepstack_merger_list.2.norm.bias": "model-00001-of-00002.safetensors",
|
| 710 |
+
"model.visual.deepstack_merger_list.2.norm.weight": "model-00001-of-00002.safetensors",
|
| 711 |
+
"model.visual.merger.linear_fc1.bias": "model-00001-of-00002.safetensors",
|
| 712 |
+
"model.visual.merger.linear_fc1.weight": "model-00001-of-00002.safetensors",
|
| 713 |
+
"model.visual.merger.linear_fc2.bias": "model-00001-of-00002.safetensors",
|
| 714 |
+
"model.visual.merger.linear_fc2.weight": "model-00001-of-00002.safetensors",
|
| 715 |
+
"model.visual.merger.norm.bias": "model-00001-of-00002.safetensors",
|
| 716 |
+
"model.visual.merger.norm.weight": "model-00001-of-00002.safetensors",
|
| 717 |
+
"model.visual.patch_embed.proj.bias": "model-00001-of-00002.safetensors",
|
| 718 |
+
"model.visual.patch_embed.proj.weight": "model-00001-of-00002.safetensors",
|
| 719 |
+
"model.visual.pos_embed.weight": "model-00001-of-00002.safetensors"
|
| 720 |
+
}
|
| 721 |
+
}
|
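Note: every vision-tower tensor in the tail of the weight map above resolves to the first shard, model-00001-of-00002.safetensors. As a minimal sketch of how the index is consumed (assuming the standard Hugging Face sharded-checkpoint layout with a top-level "weight_map" object, and that the safetensors package is installed), resolving a tensor name to its shard looks like this:

import json
from safetensors import safe_open

# Load the shard index added above and pick one vision tensor by name.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.visual.patch_embed.proj.weight"
shard = index["weight_map"][name]  # -> "model-00001-of-00002.safetensors"

# Open only that shard and fetch the single tensor lazily.
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))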
processor_config.json
ADDED
@@ -0,0 +1,84 @@
+{
+  "image_processor": {
+    "crop_size": null,
+    "data_format": "channels_first",
+    "device": null,
+    "disable_grouping": null,
+    "do_center_crop": null,
+    "do_convert_rgb": true,
+    "do_normalize": true,
+    "do_pad": null,
+    "do_rescale": true,
+    "do_resize": true,
+    "image_mean": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "image_processor_type": "Qwen2VLImageProcessorFast",
+    "image_std": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "input_data_format": null,
+    "max_pixels": null,
+    "merge_size": 2,
+    "min_pixels": null,
+    "pad_size": null,
+    "patch_size": 16,
+    "processor_class": "Qwen3VLProcessor",
+    "resample": 3,
+    "rescale_factor": 0.00392156862745098,
+    "return_tensors": null,
+    "size": {
+      "longest_edge": 16777216,
+      "shortest_edge": 65536
+    },
+    "temporal_patch_size": 2
+  },
+  "processor_class": "Qwen3VLProcessor",
+  "video_processor": {
+    "crop_size": null,
+    "data_format": "channels_first",
+    "default_to_square": true,
+    "device": null,
+    "do_center_crop": null,
+    "do_convert_rgb": true,
+    "do_normalize": true,
+    "do_pad": null,
+    "do_rescale": true,
+    "do_resize": true,
+    "do_sample_frames": true,
+    "fps": 2,
+    "image_mean": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "image_std": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "input_data_format": null,
+    "max_frames": 768,
+    "merge_size": 2,
+    "min_frames": 4,
+    "num_frames": null,
+    "pad_size": null,
+    "patch_size": 16,
+    "processor_class": "Qwen3VLProcessor",
+    "resample": 3,
+    "rescale_factor": 0.00392156862745098,
+    "return_metadata": false,
+    "return_tensors": null,
+    "size": {
+      "longest_edge": 25165824,
+      "shortest_edge": 4096
+    },
+    "temporal_patch_size": 2,
+    "video_metadata": null,
+    "video_processor_type": "Qwen3VLVideoProcessor"
+  }
+}
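The geometry fields above determine how many vision tokens an input produces: patches of patch_size 16 are fused merge_size x merge_size (2x2) into one token, and videos additionally pair frames via temporal_patch_size 2. The size bounds (shortest_edge 65536 = 256x256, longest_edge 16777216 = 4096x4096) appear to act as total-pixel-area limits in the Qwen-style smart-resize scheme; that reading is an assumption carried over from the Qwen2-VL processor family. A back-of-the-envelope token count under those assumptions:

PATCH = 16   # "patch_size" from the config above
MERGE = 2    # "merge_size": a 2x2 grid of patches becomes one token

def image_tokens(height: int, width: int) -> int:
    # Assumes height/width were already resized to multiples of PATCH * MERGE,
    # as the processor's resize step would guarantee.
    grid_h, grid_w = height // PATCH, width // PATCH
    return (grid_h * grid_w) // (MERGE * MERGE)

print(image_tokens(1024, 1024))  # 4096 patches -> 1024 vision tokens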
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
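These mappings make <|im_end|> the end-of-sequence token and <|endoftext|> the padding token, with the remaining entries reserved for chat and vision markup. A quick sanity check, with "path/to/checkpoint" a placeholder for wherever this repo is cloned or downloaded:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkpoint")
print(tok.eos_token)  # <|im_end|>
print(tok.pad_token)  # <|endoftext|>

# A Qwen-style chat turn delimited by the declared markers; image content
# is slotted between <|vision_start|> ... <|vision_end|> as pad tokens.
prompt = ("<|im_start|>user\n"
          "<|vision_start|><|image_pad|><|vision_end|>Describe the image.<|im_end|>\n"
          "<|im_start|>assistant\n")
print(len(tok(prompt).input_ids))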
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:088e02e542b29ae2c94d508f916ed71239a71da5752956ce0a95d39a6f41d429
+size 11614516
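The three lines above are a Git LFS pointer, not the tokenizer itself: the real ~11.6 MB tokenizer.json lives in LFS storage and is materialized on checkout or download. A small sketch to verify a local copy against the recorded digest and size:

import hashlib
import os

path = "tokenizer.json"  # local copy after `git lfs pull` or a hub download
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

# Compare against the oid and size recorded in the pointer above.
assert os.path.getsize(path) == 11614516
assert digest == "088e02e542b29ae2c94d508f916ed71239a71da5752956ce0a95d39a6f41d429"
print("LFS pointer matches the downloaded file")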
tokenizer_config.json
ADDED
The diff for this file is too large to render. See raw diff.

vocab.json
ADDED
The diff for this file is too large to render. See raw diff.
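Taken together, processor_config.json, the tokenizer files, and special_tokens_map.json are what AutoProcessor assembles into the single Qwen3VLProcessor declared above. A minimal load, again with "path/to/checkpoint" as a placeholder:

from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("path/to/checkpoint")
print(type(processor).__name__)  # expected: Qwen3VLProcessor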