{ "architectures": [ "HybridQwen3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "bmojo_config": { "head_dim": 128, "hidden_size": 4096, "num_heads": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-06, "ssm_mixer": "gka", "tie_attn_weights": true, "window_size": 2048 }, "bos_token_id": 151643, "dtype": "bfloat16", "eos_token_id": 151645, "gka_config": { "bp_lambda": true, "chunk_size": 64, "conv_size": 4, "gla_rescale": true, "head_dim": 128, "hidden_size": 4096, "norm_eps": 1e-06, "num_iter": 30, "num_k_heads": 8, "num_q_heads": 32, "num_v_heads": 8, "ridge_strength": 0.02, "solver_type": "chebyshev", "use_alpha_connection": true, "use_beta_gate": true, "use_forgetting_gate": true, "use_forgetting_gate_kk": true, "use_gate": true, "use_v_conv": true }, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "hybrid_override_pattern": "*-BMF-BMF-BMF-BMF-BMF-BMF-*-*-BMF-BMF-BMF-*-*-*-*-*-*-*-*-*-*-*-*-*-*-BMF-BMF-BMF-*-BMF-BMF-BMF-BMF-BMF-BMF", "initializer_range": 0.02, "intermediate_size": 12288, "layer_types": [ "*", "BMF", "BMF", "BMF", "BMF", "BMF", "BMF", "*", "*", "BMF", "BMF", "BMF", "*", "*", "*", "*", "*", "*", "*", "*", "*", "*", "*", "*", "*", "*", "BMF", "BMF", "BMF", "*", "BMF", "BMF", "BMF", "BMF", "BMF", "BMF" ], "max_position_embeddings": 131072, "max_window_layers": 36, "model_type": "hybrid_qwen3", "num_attention_heads": 32, "num_hidden_layers": 36, "num_key_value_heads": 8, "pad_token_id": null, "rms_norm_eps": 1e-06, "rope_parameters": { "rope_theta": 5000000, "rope_type": "default" }, "sliding_window": null, "tie_word_embeddings": false, "transformers_version": "5.3.0", "use_cache": false, "use_sliding_window": false, "vocab_size": 151936 }