File size: 529 Bytes
{
"architectures": [
"LlamaEdgeForCausalLM"
],
"auto_map": {
"AutoConfig": "configuration_llama_edge.LlamaEdgeConfig",
"AutoModel": "modeling_llama_edge.LlamaEdgeForCausalLM"
},
"dim": 4096,
"dtype": "float32",
"ffn_dim_multiplier": 1.3,
"intermediate_size": 14336,
"max_seq_len": 8192,
"model_type": "llama_edge",
"multiple_of": 256,
"n_heads": 32,
"n_kv_heads": 8,
"n_layers": 32,
"norm_eps": 1e-05,
"rope_theta": 500000.0,
"transformers_version": "4.57.3",
"vocab_size": 9942
}
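Because "auto_map" points at custom configuration_llama_edge / modeling_llama_edge modules shipped in the repository, loading this config through transformers requires trust_remote_code=True. A minimal sketch of such a load follows; the repository id "org/llama-edge" is hypothetical and stands in for wherever this config.json actually lives.

    from transformers import AutoConfig, AutoModel

    # Hypothetical repository id; replace with the actual model repo.
    repo_id = "org/llama-edge"

    # trust_remote_code=True is needed because "auto_map" resolves the
    # config and model classes from the repo's custom Python files.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)

    print(config.model_type)  # "llama_edge"
    print(config.dim, config.n_layers, config.n_heads, config.n_kv_heads)

If the custom code sizes its feed-forward layer the way the Llama reference implementation does (an assumption, since the modeling file is not shown here), intermediate_size is consistent with dim, ffn_dim_multiplier, and multiple_of: int(1.3 * int(2 * 4 * 4096 / 3)) = 14198, rounded up to the next multiple of 256, gives 14336, matching the stored value.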