{
  "architectures": [
    "MiniMindOmni"
  ],
  "audio_hidden_size": 512,
  "audio_ids": [
    16
  ],
  "audio_pad_token": 2049,
  "audio_special_token": "<|audio_pad|>",
  "audio_spk_token": 2051,
  "audio_stop_token": 2050,
  "audio_vocab_size": 2112,
  "auto_map": {
    "AutoConfig": "model_omni.OmniConfig",
    "AutoModelForCausalLM": "model_omni.MiniMindOmni"
  },
  "bos_token_id": 1,
  "bridge_layer": 3,
  "dropout": 0.0,
  "dtype": "bfloat16",
  "eos_token_id": 2,
  "flash_attn": true,
  "head_dim": 96,
  "hidden_act": "silu",
  "hidden_size": 768,
  "image_hidden_size": 768,
  "image_ids": [
    12
  ],
  "image_special_token": "<|image_pad|>",
  "image_token_len": 64,
  "inference_rope_scaling": false,
  "intermediate_size": 2432,
  "max_position_embeddings": 32768,
  "model_type": "minimind-o",
  "moe_intermediate_size": 2432,
  "norm_topk_prob": true,
  "num_attention_heads": 8,
  "num_experts": 4,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 8,
  "num_key_value_heads": 4,
  "num_talker_hidden_layers": 4,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "router_aux_loss_coef": 0.0005,
  "spk_emb_size": 192,
  "talker_hidden_size": 768,
  "think_end_ids": [
    26,
    234,
    234
  ],
  "transformers_version": "4.57.6",
  "use_moe": false,
  "vocab_size": 6400
}