{
  "activation": "gelu",
  "architectures": [
    "DistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": null,
  "dim": 768,
  "dropout": 0.1,
  "dtype": "float32",
  "eos_token_id": null,
  "hidden_dim": 3072,
  "id2label": {
    "0": "math.AC",
    "1": "cs.CV",
    "2": "cs.AI",
    "3": "cs.SY",
    "4": "math.GR",
    "5": "cs.CE",
    "6": "cs.PL",
    "7": "cs.IT",
    "8": "cs.DS",
    "9": "cs.NE",
    "10": "math.ST"
  },
  "initializer_range": 0.02,
  "label2id": {
    "cs.AI": 2,
    "cs.CE": 5,
    "cs.CV": 1,
    "cs.DS": 8,
    "cs.IT": 7,
    "cs.NE": 9,
    "cs.PL": 6,
    "cs.SY": 3,
    "math.AC": 0,
    "math.GR": 4,
    "math.ST": 10
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "problem_type": "single_label_classification",
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "tie_word_embeddings": true,
  "transformers_version": "5.0.0",
  "use_cache": false,
  "vocab_size": 30522
}