# File size: 203 Bytes
from transformers import BertTokenizerFast
from .tokenization import NewTokenizer
class NewTokenizerFast(BertTokenizerFast):
    """Fast (Rust-backed) tokenizer paired with the slow ``NewTokenizer``.

    Subclasses ``BertTokenizerFast`` without overriding any behavior; it only
    declares the two class attributes below.
    """

    # Slow counterpart class, used by transformers when a non-fast tokenizer
    # is needed (e.g. conversion between slow and fast implementations).
    slow_tokenizer_class = NewTokenizer
    # NOTE(review): presumably a marker flag checked by an external consumer
    # (e.g. custom-class registration tests) — its semantics are not visible
    # from this file; confirm against the code that reads it.
    special_attribute_present = True
from transformers import BertTokenizerFast
from .tokenization import NewTokenizer
class NewTokenizerFast(BertTokenizerFast):
    """Fast (Rust-backed) tokenizer paired with the slow ``NewTokenizer``.

    Subclasses ``BertTokenizerFast`` without overriding any behavior; it only
    declares the two class attributes below.

    NOTE(review): this class duplicates an identical definition earlier in the
    dump — likely an artifact of the extraction, not two distinct classes.
    """

    # Slow counterpart class, used by transformers when a non-fast tokenizer
    # is needed (e.g. conversion between slow and fast implementations).
    slow_tokenizer_class = NewTokenizer
    # NOTE(review): presumably a marker flag checked by an external consumer
    # (e.g. custom-class registration tests) — its semantics are not visible
    # from this file; confirm against the code that reads it.
    special_attribute_present = True