diff --git a/bindings/python/py_src/tokenizers/__init__.pyi b/bindings/python/py_src/tokenizers/__init__.pyi
index 4f396ae5..b73c838c 100644
--- a/bindings/python/py_src/tokenizers/__init__.pyi
+++ b/bindings/python/py_src/tokenizers/__init__.pyi
@@ -28,13 +28,14 @@ class AddedToken:
         normalized (:obj:`bool`, defaults to :obj:`True` with :meth:`~tokenizers.Tokenizer.add_tokens` and :obj:`False` with :meth:`~tokenizers.Tokenizer.add_special_tokens`):
             Defines whether this token should match against the normalized version of the input
             text. For example, with the added token ``"yesterday"``, and a normalizer in charge of
-            lowercasing the text, the token could be extracted from the input ``"I saw a lion
-            Yesterday"``.
+            lowercasing the text, the token could be extract from the input ``"I saw a lion
+            Yesterday"``.
         special (:obj:`bool`, defaults to :obj:`False` with :meth:`~tokenizers.Tokenizer.add_tokens` and :obj:`False` with :meth:`~tokenizers.Tokenizer.add_special_tokens`):
             Defines whether this token should be skipped when decoding.
+
     """
 
-    def __init__(self, content, single_word=False, lstrip=False, rstrip=False, normalized=True):
+    def __init__(self, content, single_word=False, lstrip=False, rstrip=False, normalized=True, special=False):
         pass
     @property
     def content(self):
@@ -65,7 +66,7 @@ class AddedToken:
        """
        Get the value of the :obj:`single_word` option
        """
-        pass
+        pass
     @property
     def special(self):
         """
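
A minimal usage sketch of the updated stub signature (not part of the patch itself): it assumes a tokenizers build that already exposes the `special` keyword shown above, and the BPE model and Lowercase normalizer are illustrative choices only.

    from tokenizers import Tokenizer, AddedToken
    from tokenizers.models import BPE
    from tokenizers.normalizers import Lowercase

    tokenizer = Tokenizer(BPE())
    tokenizer.normalizer = Lowercase()

    # normalized=True: the token is matched against the normalized (here lowercased)
    # text, so "yesterday" can also be found in "I saw a lion Yesterday".
    tokenizer.add_tokens([AddedToken("yesterday", normalized=True)])

    # special=True (the new keyword in __init__): the token is skipped when decoding.
    tokenizer.add_special_tokens([AddedToken("[PAD]", special=True)])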