{
  "tokenizer_class": "SeqCondTokenizer",
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_seqcond.SeqCondTokenizer",
      null
    ]
  },
  "model_max_length": 4096,
  "eos_token": "<|im_end|>",
  "bos_token": null,
  "unk_token": null,
  "pad_token": "<|im_end|>",
  "additional_special_tokens": [
    "<|im_start|>",
    "<|think_start|>",
    "<|think_end|>"
  ]
}