master
/ transformers / utils / dummy_sentencepiece_and_tokenizers_objects.py

dummy_sentencepiece_and_tokenizers_objects.py @3c11360 raw · history · blame

1
2
3
4
5
6
7
8
9
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_backends


# Placeholder for the real converter registry; set to None because the
# "sentencepiece" and "tokenizers" backends required to populate it are not
# installed (this whole module is the dummy fallback for that situation).
SLOW_TO_FAST_CONVERTERS = None


def convert_slow_tokenizer(*args, **kwargs):
    """Dummy stand-in for the real ``convert_slow_tokenizer``.

    Any call immediately delegates to ``requires_backends``, which raises an
    informative error telling the user to install the missing backends.
    """
    needed_backends = ["sentencepiece", "tokenizers"]
    requires_backends(convert_slow_tokenizer, needed_backends)