{
  "vocab_size": 8197,
  "max_seq_len": 512,
  "n_layers": 6,
  "n_heads": 8,
  "d_model": 512,
  "d_ff": 2048,
  "dropout": 0.25,
  "pad_id": 8196,
  "bias": false,
  "variant": "adhd",
  "transformers_version": "5.5.4",
  "architectures": null,
  "output_hidden_states": false,
  "return_dict": true,
  "dtype": null,
  "chunk_size_feed_forward": 0,
  "is_encoder_decoder": false,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1"
  },
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1
  },
  "problem_type": null,
  "_name_or_path": "",
  "pad_token_id": 8196,
  "auto_map": {
    "AutoConfig": "configuration_interpgpt.InterpGPTConfig",
    "AutoModel": "modeling_interpgpt.InterpGPTModel"
  },
  "model_type": "interpgpt",
  "output_attentions": false
}
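
Because `model_type` is the custom `"interpgpt"` rather than a built-in transformers architecture, the `auto_map` entries point the loader at `InterpGPTConfig` and `InterpGPTModel`, defined in `configuration_interpgpt.py` and `modeling_interpgpt.py` in the same repository. Below is a minimal loading sketch, assuming those files ship alongside this `config.json`; the repo id `your-org/interpgpt` is a placeholder, not from the source:

```python
from transformers import AutoConfig, AutoModel

# trust_remote_code=True is required: "interpgpt" is not a built-in
# architecture, so transformers executes the repo's own
# configuration_interpgpt.py / modeling_interpgpt.py (per auto_map).
# "your-org/interpgpt" is a placeholder repo id.
config = AutoConfig.from_pretrained("your-org/interpgpt", trust_remote_code=True)

# Sanity-check the geometry implied by the config: 8 heads over a
# 512-dim residual stream gives 64 dims per head; d_ff is the usual 4x.
assert config.d_model % config.n_heads == 0   # 512 / 8 = 64 per head
assert config.d_ff == 4 * config.d_model     # 2048 = 4 * 512
assert config.pad_id == config.vocab_size - 1 # pad is the last token id

model = AutoModel.from_pretrained("your-org/interpgpt", trust_remote_code=True)
```

Note that both `pad_id` (the model's own field) and the standard `pad_token_id` are set to 8196, so code that reads either attribute sees the same padding token.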