{
  "_name_or_path": "v2ray/dbrx-base-fixed",
  "architectures": [
    "DbrxForCausalLM"
  ],
  "attn_config": {
    "clip_qkv": 8,
    "kv_n_heads": 8,
    "model_type": "",
    "rope_theta": 500000
  },
  "auto_map": {
    "AutoConfig": "configuration_dbrx.DbrxConfig",
    "AutoModelForCausalLM": "modeling_dbrx.DbrxForCausalLM"
  },
  "d_model": 6144,
  "emb_pdrop": 0.0,
  "ffn_config": {
    "ffn_hidden_size": 10752,
    "model_type": "",
    "moe_jitter_eps": 0.01,
    "moe_loss_weight": 0.05,
    "moe_num_experts": 16,
    "moe_top_k": 4
  },
  "initializer_range": 0.02,
  "max_seq_len": 32768,
  "model_type": "dbrx",
  "n_heads": 48,
  "n_layers": 40,
  "output_router_logits": false,
  "resid_pdrop": 0.0,
  "router_aux_loss_coef": 0.05,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.39.1",
  "use_cache": true,
  "vocab_size": 100352
}
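A minimal loading sketch for this config, assuming the repo id in "_name_or_path" above ("v2ray/dbrx-base-fixed") is reachable on the Hugging Face Hub. Because "auto_map" points at custom configuration_dbrx/modeling_dbrx modules shipped with the repo rather than classes built into transformers, trust_remote_code=True is required for the Auto classes to resolve them.

from transformers import AutoConfig

# Resolve the custom DbrxConfig class via the "auto_map" entry above;
# trust_remote_code=True allows transformers to import configuration_dbrx.py
# from the repo.
config = AutoConfig.from_pretrained(
    "v2ray/dbrx-base-fixed",
    trust_remote_code=True,
)

# Top-level fields from the JSON are exposed as attributes on the config.
print(config.d_model)      # 6144
print(config.n_layers)     # 40
print(config.max_seq_len)  # 32768
print(config.vocab_size)   # 100352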
|
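A back-of-the-envelope parameter count from the values above, as a sketch rather than an exact figure. It assumes gated (GLU-style) experts with three d_model x ffn_hidden_size matrices each, bias-free projections, grouped-query attention with head_dim = d_model / n_heads = 128, and ignores norm and router weights as negligible. The result lands close to DBRX's published 132B total / 36B active parameters.

# Rough parameter count derived from the config fields above.
# Assumptions (not stated in the JSON itself): GLU-style experts with three
# weight matrices each (w1, v1, w2), no biases, norms/router ignored.
d_model, n_layers, n_heads = 6144, 40, 48
kv_n_heads, ffn_hidden, n_experts, top_k = 8, 10752, 16, 4
vocab = 100352

head_dim = d_model // n_heads                  # 6144 / 48 = 128
attn = d_model * (n_heads * head_dim)          # Q projection
attn += d_model * (2 * kv_n_heads * head_dim)  # K and V (grouped-query, 8 heads)
attn += (n_heads * head_dim) * d_model         # output projection

expert = 3 * d_model * ffn_hidden              # w1, v1, w2 per expert
ffn_total = n_experts * expert                 # all 16 experts stored
ffn_active = top_k * expert                    # only top-4 experts run per token

emb = 2 * vocab * d_model                      # untied: "tie_word_embeddings": false

total = n_layers * (attn + ffn_total) + emb
active = n_layers * (attn + ffn_active) + emb
print(f"total  ~= {total / 1e9:.0f}B")   # ~132B
print(f"active ~= {active / 1e9:.0f}B")  # ~36B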
|