{
"_name_or_path": "FOR_JOSEPH/NRJ-BASE-125K",
"activation": "relu",
"alpha": 1.0,
"architectures": [
"BertEnergyModelForMaskedLM"
],
"attention_probs_dropout_prob": 0.0,
"auto_map": {
"AutoConfig": "hf_configuration.BertEnergyConfig",
"AutoModelForMaskedLM": "mlm.BertEnergyModelForMaskedLM"
},
"beta": null,
"bias": true,
"compile": true,
"embedding_dim": 768,
"hidden_dropout_prob": 0.1,
"hidden_size": 3072,
"initializer_hopfield_range": 0.002,
"initializer_range": 0.02,
"intermediate_size": 12288,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert_energy",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 3,
"path": null,
"positional": true,
"share_layers": true,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.49.0",
"vocab_size": 30000
}