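.. Shared cross-reference entities for the tokenizers documentation.
   Each ``entities`` block below (presumably handled by a project-specific
   Sphinx directive) maps a common entity name to the language-specific
   reference used when building the python, rust, or node flavor of the docs.
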
.. entities:: python
    :global:

    class
        class
    classmethod
        class method
    Tokenizer
        :class:`~tokenizers.Tokenizer`
    Tokenizer.train
        :meth:`~tokenizers.Tokenizer.train`
    Tokenizer.save
        :meth:`~tokenizers.Tokenizer.save`
    Tokenizer.from_file
        :meth:`~tokenizers.Tokenizer.from_file`
    Tokenizer.encode
        :meth:`~tokenizers.Tokenizer.encode`
    Tokenizer.encode_batch
        :meth:`~tokenizers.Tokenizer.encode_batch`
    Tokenizer.token_to_id
        :meth:`~tokenizers.Tokenizer.token_to_id`
    Tokenizer.enable_padding
        :meth:`~tokenizers.Tokenizer.enable_padding`
    Encoding
        :class:`~tokenizers.Encoding`
    TemplateProcessing
        :class:`~tokenizers.processors.TemplateProcessing`
    Normalizer
        :class:`~tokenizers.normalizers.Normalizer`
    normalizers.Sequence
        :class:`~tokenizers.normalizers.Sequence`

.. entities:: rust
    :global:

    class
        struct
    classmethod
        static method
    Tokenizer
        :rust:struct:`~tokenizers::tokenizer::Tokenizer`
    Tokenizer.train
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::train`
    Tokenizer.save
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::save`
    Tokenizer.from_file
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::from_file`
    Tokenizer.encode
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::encode`
    Tokenizer.encode_batch
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::encode_batch`
    Tokenizer.token_to_id
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::token_to_id`
    Tokenizer.enable_padding
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::enable_padding`
    Encoding
        :rust:struct:`~tokenizers::tokenizer::Encoding`
    TemplateProcessing
        :rust:struct:`~tokenizers::processors::template::TemplateProcessing`
    Normalizer
        :rust:trait:`~tokenizers::tokenizer::Normalizer`
    normalizers.Sequence
        :rust:struct:`~tokenizers::normalizers::utils::Sequence`

.. entities:: node
    :global:

    class
        class
    classmethod
        static method
    Tokenizer
        :obj:`Tokenizer`
    Tokenizer.train
        :obj:`Tokenizer.train()`
    Tokenizer.save
        :obj:`Tokenizer.save()`
    Tokenizer.from_file
        :obj:`Tokenizer.fromFile()`
    Tokenizer.encode
        :obj:`Tokenizer.encode()`
    Tokenizer.encode_batch
        :obj:`Tokenizer.encodeBatch()`
    Tokenizer.token_to_id
        :obj:`Tokenizer.tokenToId()`
    Tokenizer.enable_padding
        :obj:`Tokenizer.setPadding()`
    Encoding
        :obj:`Encoding`
    TemplateProcessing
        :obj:`TemplateProcessing`
    Normalizer
        :obj:`Normalizer`
    normalizers.Sequence
        :obj:`Sequence`