{
  "_name_or_path": "/media/tuat-ai/New Volume/cywork/Koelectra-nsmc/train/sullivan/small/v1/checkpoint-7800/",
| "_num_labels": 2, |
| "architectures": [ |
| "ElectraForSequenceClassification" |
| ], |
| "attention_probs_dropout_prob": 0.1, |
| "classifier_dropout": null, |
| "embedding_size": 128, |
| "hidden_act": "gelu", |
| "hidden_dropout_prob": 0.1, |
| "hidden_size": 256, |
| "id2label": { |
| "0": "quota", |
| "1": "\uae30\ubd80 \uc694\uccad", |
| "2": "\uac8c\uc2dc\uae00", |
| "3": "\ub178\ud2b8", |
| "4": "\uc774\ubbf8\uc9c0", |
| "5": "\uc804\uccb4", |
| "6": "\ucd2c\uc601", |
| "7": "\ubb38\uc11c\uc778\uc2dd", |
| "8": "\ube5b \ubc1d\uae30", |
| "9": "\ub354\ubcf4\uae30", |
| "10": "\ubb38\uc790\uc2a4\uce94", |
| "11": "AI\ubaa8\ub4dc", |
| "12": "\uc0c9\uc0c1\uc778\uc2dd", |
| "13": "\ud604\uc7ac\ubaa8\ub4dc", |
| "14": "\uc774\ubbf8\uc9c0\ubb18\uc0ac", |
| "15": "\ubb3c\uac74\ucc3e\uae30", |
| "16": "\uc5bc\uad74\uc778\uc2dd", |
| "17": "\ub9ac\uc5bc\uc544\uc774\uc988", |
| "18": "\uc637\uc778\uc2dd", |
| "19": "\uc9c0\ud3d0\uc778\uc2dd", |
| "20": "\ub3cb\ubcf4\uae30", |
| "21": "\ubb38\uc790\uc778\uc2dd" |
| }, |
| "initializer_range": 0.02, |
| "intermediate_size": 1024, |
| "label2id": { |
| "AI\ubaa8\ub4dc": 11, |
| "quota": 0, |
| "\uac8c\uc2dc\uae00": 2, |
| "\uae30\ubd80 \uc694\uccad": 1, |
| "\ub178\ud2b8": 3, |
| "\ub354\ubcf4\uae30": 9, |
| "\ub3cb\ubcf4\uae30": 20, |
| "\ub9ac\uc5bc\uc544\uc774\uc988": 17, |
| "\ubb38\uc11c\uc778\uc2dd": 7, |
| "\ubb38\uc790\uc2a4\uce94": 10, |
| "\ubb38\uc790\uc778\uc2dd": 21, |
| "\ubb3c\uac74\ucc3e\uae30": 15, |
| "\ube5b \ubc1d\uae30": 8, |
| "\uc0c9\uc0c1\uc778\uc2dd": 12, |
| "\uc5bc\uad74\uc778\uc2dd": 16, |
| "\uc637\uc778\uc2dd": 18, |
| "\uc774\ubbf8\uc9c0": 4, |
| "\uc774\ubbf8\uc9c0\ubb18\uc0ac": 14, |
| "\uc804\uccb4": 5, |
| "\uc9c0\ud3d0\uc778\uc2dd": 19, |
| "\ucd2c\uc601": 6, |
| "\ud604\uc7ac\ubaa8\ub4dc": 13 |
| }, |
| "layer_norm_eps": 1e-12, |
| "max_position_embeddings": 512, |
| "model_type": "electra", |
| "num_attention_heads": 4, |
| "num_hidden_layers": 12, |
| "output_past": true, |
| "pad_token_id": 0, |
| "position_embedding_type": "absolute", |
| "problem_type": "single_label_classification", |
| "summary_activation": "gelu", |
| "summary_last_dropout": 0.1, |
| "summary_type": "first", |
| "summary_use_proj": true, |
| "torch_dtype": "float32", |
| "transformers_version": "4.26.1", |
| "type_vocab_size": 2, |
| "use_cache": true, |
| "vocab_size": 32200 |
| } |
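
This is the Hugging Face `config.json` of a KoELECTRA-small checkpoint fine-tuned for single-label sequence classification over the 22 intent labels listed in `id2label`. A minimal inference sketch follows, assuming the checkpoint directory named in `_name_or_path` also contains the saved weights and tokenizer files; the `model_dir` path and the Korean example utterance are illustrative placeholders, not part of the config.

```python
# Minimal usage sketch for this config (assumptions: weights and tokenizer
# are saved alongside it in the same checkpoint directory).
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_dir = "./checkpoint-7800"  # hypothetical local path to the checkpoint

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForSequenceClassification.from_pretrained(model_dir)
model.eval()

text = "지폐를 인식해 줘"  # hypothetical Korean command to classify
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 22): one logit per intent label

pred_id = logits.argmax(dim=-1).item()
print(model.config.id2label[pred_id])  # maps the index back to the label string
```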
|
|