{ "add_prefix_space": false, "backend": "tokenizers", "bos_token": "", "eos_token": "", "errors": "replace", "full_tokenizer_file": null, "is_local": true, "model_max_length": 1000000000000000019884624838656, "pad_token": "", "sep_token": "", "tokenizer_class": "GPT2Tokenizer", "unk_token": "<|UNKNOWN|>" }