{
  "architectures": [
    "GPJTGPT2ModelForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_gpjtgpt2.GPJTGPT2Config",
    "AutoModel": "modeling_gpjtgpt2.GPJTGPT2Model",
    "AutoModelForCausalLM": "modeling_gpjtgpt2.GPJTGPT2ModelForCausalLM"
  },
  "cfg": {
    "context_length": 1024,
    "drop_rate": 0.1,
    "emb_dim": 768,
    "n_heads": 12,
    "n_layers": 12,
    "qkv_bias": false,
    "vocab_size": 50257
  },
  "dtype": "float32",
  "model_type": "gpjtgpt2",
  "num_hidden_layers": 12,
  "transformers_version": "4.57.6",
  "use_cache": false
}
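
The "auto_map" entries point the Auto classes at custom code shipped alongside this file (configuration_gpjtgpt2.py and modeling_gpjtgpt2.py). A hedged sketch of what the config class in configuration_gpjtgpt2.py might look like follows, assuming the nested "cfg" dict is stored verbatim as an attribute; the actual class in the repo may differ.

from transformers import PretrainedConfig

class GPJTGPT2Config(PretrainedConfig):
    model_type = "gpjtgpt2"  # must match "model_type" in config.json

    def __init__(self, cfg=None, num_hidden_layers=12, use_cache=False, **kwargs):
        # Defaults mirror the "cfg" block in config.json above (assumed, not confirmed).
        self.cfg = cfg if cfg is not None else {
            "context_length": 1024,
            "drop_rate": 0.1,
            "emb_dim": 768,
            "n_heads": 12,
            "n_layers": 12,
            "qkv_bias": False,
            "vocab_size": 50257,
        }
        self.num_hidden_layers = num_hidden_layers
        self.use_cache = use_cache
        super().__init__(**kwargs)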
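
Because the model lives in custom files referenced via "auto_map" rather than in the transformers library itself, loading requires trust_remote_code=True. A minimal loading sketch, assuming a placeholder repo id "your-user/gpjtgpt2" (substitute the actual model repo or local path):

from transformers import AutoConfig, AutoModelForCausalLM

# trust_remote_code=True lets transformers import the repo's custom
# configuration_gpjtgpt2.py / modeling_gpjtgpt2.py modules.
config = AutoConfig.from_pretrained("your-user/gpjtgpt2", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("your-user/gpjtgpt2", trust_remote_code=True)

# Unknown top-level keys in config.json become attributes on the config object,
# so the nested hyperparameter block is reachable as config.cfg.
print(config.cfg["emb_dim"])  # 768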
|
|