{
  "added_tokens_decoder": {
    "6": {
      "content": "!",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "7": {
      "content": "*",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "8": {
      "content": "/",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "9": {
      "content": "@",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "auto_map": {
    "AutoTokenizer": [
      "tokenizers.DNATokenizer",
      null
    ]
  },
  "bos_token": "@",
  "clean_up_tokenization_spaces": true,
  "eos_token": "*",
  "model_max_length": 2048,
  "pad_token": "!",
  "sep_token": "/",
  "tokenizer_class": "DNATokenizer"
}