{
  "architectures": ["BVVForCausalLM"],
  "auto_map": {
    "AutoConfig": "model_n_embed_1024_n_layer_32.BVVConfig",
    "AutoModel": "model_n_embed_1024_n_layer_32.BVVForCausalLM",
    "AutoModelForCausalLM": "model_n_embed_1024_n_layer_32.BVVForCausalLM"
  },
  "model_type": "model_n_embed_1024_n_layer_32",
  "vocab_size": 65536,
  "block_size": 1024,
  "n_embd": 1024,
  "d_model": 1024,
  "n_layer": 32,
  "n_head": 32,
  "pad_id": 57344,
  "pad_token_id": 57344,
  "bos_token": "",
  "eos_token": "",
  "unk_token": "",
  "pad_token": "",
  "torch_dtype": "float32"
}