yury-zyphra committed
Commit e9cdaa6 · verified · 1 Parent(s): 9fe44ea

Update config.json

Files changed (1):
  1. config.json +2 -2
config.json CHANGED
@@ -10,12 +10,12 @@
  "bias_activation_fusion": true,
  "bos_token_id": 2,
  "cca": true,
- "cca_num_q_heads": 8,
  "dtype": "bfloat16",
  "eos_token_id": 106,
  "ffn_hidden_size": 4096,
  "gated_linear_unit": true,
  "hidden_size": 2048,
+ "head_dim": 128,
  "kv_channels": 128,
  "lm_head_bias": false,
  "mamba_cache_dtype": "float32",
@@ -24,7 +24,7 @@
  "moe_router_topk": 1,
  "norm_epsilon": 1e-05,
  "normalization": "RMSNorm",
- "num_attention_heads": 16,
+ "num_attention_heads": 8,
  "num_experts": 16,
  "num_hidden_layers": 80,
  "num_key_value_heads": 2,