{ "architectures": [ "LlamaEdgeForCausalLM" ], "auto_map": { "AutoConfig": "configuration_llama_edge.LlamaEdgeConfig", "AutoModel": "modeling_llama_edge.LlamaEdgeForCausalLM" }, "dim": 4096, "dtype": "float32", "ffn_dim_multiplier": 1.3, "intermediate_size": 14336, "max_seq_len": 8192, "model_type": "llama_edge", "multiple_of": 256, "n_heads": 32, "n_kv_heads": 8, "n_layers": 32, "norm_eps": 1e-05, "rope_theta": 500000.0, "transformers_version": "4.57.3", "vocab_size": 9942 }