# test_dynamic_tokenizer / tokenization_fast.py
from transformers import BertTokenizerFast

from .tokenization import NewTokenizer


class NewTokenizerFast(BertTokenizerFast):
    # Custom fast tokenizer paired with the slow NewTokenizer from tokenization.py.
    # special_attribute_present marks that this custom code was actually loaded.
    slow_tokenizer_class = NewTokenizer
    special_attribute_present = True
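

# --- Usage sketch (not part of the original file) ---------------------------
# Minimal example of how a tokenizer defined by custom code like this is
# typically loaded: AutoTokenizer.from_pretrained with trust_remote_code=True.
# It assumes the hosting repo's tokenizer_config.json registers NewTokenizer /
# NewTokenizerFast (e.g. via an auto_map entry); the repo id below is purely
# illustrative.
if __name__ == "__main__":
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained(
        "hf-internal-testing/test_dynamic_tokenizer",  # hypothetical repo id
        trust_remote_code=True,
    )
    # With the default use_fast=True, the class above should be returned.
    print(type(tok).__name__)
    print(tok.special_attribute_present)  # True, set on the class above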