{
  "_name_or_path": "state-spaces/mamba-790m-hf",
  "architectures": [
    "MambaForTokenClassification"
  ],
  "bos_token_id": 0,
  "conv_kernel": 4,
| "d_model": 2048, |
| "eos_token_id": 0, |
| "expand": 2, |
| "fused_add_norm": true, |
| "hidden_act": "silu", |
| "hidden_size": 1536, |
| "id2label": { |
| "0": "B-LOC", |
| "1": "B-ORG", |
| "2": "B-PER", |
| "3": "I-LOC", |
| "4": "I-ORG", |
| "5": "I-PER", |
| "6": "O" |
| }, |
| "initializer_range": 0.1, |
| "intermediate_size": 3072, |
| "label2id": { |
| "B-LOC": 0, |
| "B-ORG": 1, |
| "B-PER": 2, |
| "I-LOC": 3, |
| "I-ORG": 4, |
| "I-PER": 5, |
| "O": 6 |
| }, |
| "layer_norm_epsilon": 1e-05, |
| "model_type": "mamba", |
| "n_layer": 48, |
| "num_hidden_layers": 48, |
| "pad_token_id": 0, |
| "pad_vocab_size_multiple": 8, |
| "rescale_prenorm_residual": false, |
| "residual_in_fp32": true, |
| "rms_norm": true, |
| "ssm_cfg": {}, |
| "state_size": 16, |
| "time_step_floor": 0.0001, |
| "time_step_init_scheme": "random", |
| "time_step_max": 0.1, |
| "time_step_min": 0.001, |
| "time_step_rank": 96, |
| "time_step_scale": 1.0, |
| "torch_dtype": "float32", |
| "transformers_version": "4.46.0", |
| "use_bias": false, |
| "use_cache": true, |
| "use_conv_bias": true, |
| "use_mambapy": false, |
| "vocab_size": 50280 |
| } |
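
This config describes a 7-label BIO NER head (see id2label/label2id) on the mamba-790m backbone: intermediate_size 3072 is expand x hidden_size (2 x 1536), and time_step_rank 96 is hidden_size / 16. Below is a minimal usage sketch. It assumes the checkpoint pairs the stock transformers MambaModel backbone with a simple linear per-token classifier; the MambaForTokenClassification class in the sketch is a hypothetical stand-in for whatever custom head produced this config, not a class shipped by transformers 4.46.

# Minimal sketch, assuming a linear head over the MambaModel backbone.
# The MambaForTokenClassification class here is hypothetical.
import torch
from torch import nn
from transformers import AutoTokenizer, MambaConfig, MambaModel

class MambaForTokenClassification(nn.Module):
    def __init__(self, config: MambaConfig):
        super().__init__()
        self.backbone = MambaModel(config)
        # hidden_size=1536 -> 7 BIO labels, per the id2label map above
        self.classifier = nn.Linear(config.hidden_size, len(config.id2label))

    def forward(self, input_ids):
        hidden = self.backbone(input_ids=input_ids).last_hidden_state
        return self.classifier(hidden)  # (batch, seq_len, num_labels)

config = MambaConfig.from_pretrained(".")  # directory holding this config.json
tokenizer = AutoTokenizer.from_pretrained("state-spaces/mamba-790m-hf")
model = MambaForTokenClassification(config).eval()  # head is randomly initialized here

inputs = tokenizer("Ada Lovelace lived in London", return_tensors="pt")
with torch.no_grad():
    logits = model(inputs["input_ids"])
tags = [config.id2label[i] for i in logits.argmax(dim=-1)[0].tolist()]
print(list(zip(tokenizer.convert_ids_to_tokens(inputs["input_ids"][0]), tags)))

In practice the fine-tuned weights would be loaded through the original class that produced this checkpoint (for example via from_pretrained with custom code), rather than the randomly initialized sketch above.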
|
|