{
"version": 2,
"weight_format": "mxtq",
"profile": "JANGTQ_K",
"source_model": {
"name": "MiniMax-M2.7",
"org": "MiniMaxAI",
"architecture": "minimax_m2"
},
"mxtq_seed": 42,
"mxtq_bits": {
"routed_expert": {
"gate_proj": 2,
"down_proj": 4,
"up_proj": 2
},
"attention": 8,
"shared_expert": 8,
"embed_tokens": 8,
"lm_head": 8,
"norms_router_biases": 16
},
"quantization": {
"method": "affine+mxtq",
"group_size": 64,
"bits_default": 4
},
"capabilities": {
"reasoning_parser": "qwen3",
"tool_parser": "minimax",
"think_in_template": true,
"supports_tools": true,
"supports_thinking": true,
"family": "minimax_m2",
"modality": "text",
"cache_type": "kv"
},
"routed_expert_layout": "prestacked"
}