"""
HuggingFace PretrainedConfig for InterpGPT / TaskGPT.
Mirrors gpt_model.GPTConfig but subclasses transformers.PretrainedConfig
so `AutoConfig` / `AutoModel.from_pretrained(..., trust_remote_code=True)` work.
"""
from transformers import PretrainedConfig
class InterpGPTConfig(PretrainedConfig):
    """Configuration for InterpGPT / TaskGPT models.

    Mirrors the fields of ``gpt_model.GPTConfig`` while subclassing
    ``transformers.PretrainedConfig`` so the model loads through
    ``AutoConfig`` / ``AutoModel.from_pretrained(..., trust_remote_code=True)``.
    """

    # Registry key used by the transformers Auto* machinery.
    model_type = "interpgpt"

    def __init__(
        self,
        vocab_size: int = 8192,
        max_seq_len: int = 512,
        n_layers: int = 6,
        n_heads: int = 8,
        d_model: int = 512,
        d_ff: int = 2048,
        dropout: float = 0.1,
        pad_id: int = 0,
        bias: bool = False,
        variant: str = "standard",
        **kwargs,
    ):
        """Record architecture hyperparameters, forward the rest upstream.

        All parameters are plain architecture knobs stored as same-named
        attributes; ``kwargs`` is passed through to ``PretrainedConfig``.
        """
        architecture = {
            "vocab_size": vocab_size,
            "max_seq_len": max_seq_len,
            "n_layers": n_layers,
            "n_heads": n_heads,
            "d_model": d_model,
            "d_ff": d_ff,
            "dropout": dropout,
            "pad_id": pad_id,
            "bias": bias,
            "variant": variant,
        }
        for attr_name, attr_value in architecture.items():
            setattr(self, attr_name, attr_value)
        # pad_token_id is always derived from pad_id; discard any
        # caller-supplied value so the two can never disagree.
        kwargs.pop("pad_token_id", None)
        super().__init__(pad_token_id=pad_id, **kwargs)