Mirror of https://github.com/mii443/tokenizers.git
Synced 2025-08-22 16:25:30 +00:00
Doc - Extract global entities to their own file
docs/source/entities.inc (new file, 98 additions)
@@ -0,0 +1,98 @@
.. entities:: python

    :global:

    class
        class
    classmethod
        class method
    Tokenizer
        :class:`~tokenizers.Tokenizer`
    Tokenizer.train
        :meth:`~tokenizers.Tokenizer.train`
    Tokenizer.save
        :meth:`~tokenizers.Tokenizer.save`
    Tokenizer.from_file
        :meth:`~tokenizers.Tokenizer.from_file`
    Tokenizer.encode
        :meth:`~tokenizers.Tokenizer.encode`
    Tokenizer.encode_batch
        :meth:`~tokenizers.Tokenizer.encode_batch`
    Tokenizer.token_to_id
        :meth:`~tokenizers.Tokenizer.token_to_id`
    Tokenizer.enable_padding
        :meth:`~tokenizers.Tokenizer.enable_padding`
    Encoding
        :class:`~tokenizers.Encoding`
    TemplateProcessing
        :class:`~tokenizers.processors.TemplateProcessing`
    Normalizer
        :class:`~tokenizers.normalizers.Normalizer`
    normalizers.Sequence
        :class:`~tokenizers.normalizers.Sequence`

.. entities:: rust

    :global:

    class
        struct
    classmethod
        static method
    Tokenizer
        :rust:struct:`~tokenizers::tokenizer::Tokenizer`
    Tokenizer.train
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::train`
    Tokenizer.save
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::save`
    Tokenizer.from_file
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::from_file`
    Tokenizer.encode
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::encode`
    Tokenizer.encode_batch
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::encode_batch`
    Tokenizer.token_to_id
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::token_to_id`
    Tokenizer.enable_padding
        :rust:meth:`~tokenizers::tokenizer::Tokenizer::enable_padding`
    Encoding
        :rust:struct:`~tokenizers::tokenizer::Encoding`
    TemplateProcessing
        :rust:struct:`~tokenizers::processors::template::TemplateProcessing`
    Normalizer
        :rust:trait:`~tokenizers::tokenizer::Normalizer`
    normalizers.Sequence
        :rust:struct:`~tokenizers::normalizers::utils::Sequence`

.. entities:: node

    :global:

    class
        class
    classmethod
        static method
    Tokenizer
        :obj:`Tokenizer`
    Tokenizer.train
        :obj:`Tokenizer.train()`
    Tokenizer.save
        :obj:`Tokenizer.save()`
    Tokenizer.from_file
        :obj:`Tokenizer.fromFile()`
    Tokenizer.encode
        :obj:`Tokenizer.encode()`
    Tokenizer.encode_batch
        :obj:`Tokenizer.encodeBatch()`
    Tokenizer.token_to_id
        :obj:`Tokenizer.tokenToId()`
    Tokenizer.enable_padding
        :obj:`Tokenizer.setPadding()`
    Encoding
        :obj:`Encoding`
    TemplateProcessing
        :obj:`TemplateProcessing`
    Normalizer
        :obj:`Normalizer`
    normalizers.Sequence
        :obj:`Sequence`
@@ -38,61 +38,4 @@ Main features:
    api/reference
 
-.. entities:: python
-
-    :global:
-
-    class
-        class
-    classmethod
-        class method
-    Tokenizer
-        :class:`~tokenizers.Tokenizer`
-    Tokenizer.train
-        :meth:`~tokenizers.Tokenizer.train`
-    Tokenizer.save
-        :meth:`~tokenizers.Tokenizer.save`
-    Tokenizer.from_file
-        :meth:`~tokenizers.Tokenizer.from_file`
-    Tokenizer.encode
-        :meth:`~tokenizers.Tokenizer.encode`
-    Tokenizer.encode_batch
-        :meth:`~tokenizers.Tokenizer.encode_batch`
-    Tokenizer.token_to_id
-        :meth:`~tokenizers.Tokenizer.token_to_id`
-    Tokenizer.enable_padding
-        :meth:`~tokenizers.Tokenizer.enable_padding`
-    Encoding
-        :class:`~tokenizers.Encoding`
-    TemplateProcessing
-        :class:`~tokenizers.processors.TemplateProcessing`
-
-.. entities:: rust
-
-    :global:
-
-    class
-        struct
-    classmethod
-        static method
-    Tokenizer
-        :rust:struct:`~tokenizers::tokenizer::Tokenizer`
-    Tokenizer.train
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::train`
-    Tokenizer.save
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::save`
-    Tokenizer.from_file
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::from_file`
-    Tokenizer.encode
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::encode`
-    Tokenizer.encode_batch
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::encode_batch`
-    Tokenizer.token_to_id
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::token_to_id`
-    Tokenizer.enable_padding
-        :rust:meth:`~tokenizers::tokenizer::Tokenizer::enable_padding`
-    Encoding
-        :rust:struct:`~tokenizers::tokenizer::Encoding`
-    TemplateProcessing
-        :rust:struct:`~tokenizers::processors::template::TemplateProcessing`
+.. include:: entities.inc
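
For context, here is a sketch of how a documentation page might consume the shared file after this change. The ".. include:: entities.inc" line is taken from the diff above; the ":entity:" role name and the surrounding sentence are assumptions about the project's custom Sphinx extension (only the "entities" directive itself is visible in this commit), not confirmed usage.

.. include:: entities.inc

.. Hypothetical prose in a docs page; the "entity" role is assumed to be
   provided by the same extension that implements the "entities" directive.

Train the tokenizer with :entity:`Tokenizer.train`, save it with
:entity:`Tokenizer.save`, and reload it later via :entity:`Tokenizer.from_file`.

Under that assumption, building with the python block active would resolve these references to :meth:`~tokenizers.Tokenizer.train`, :meth:`~tokenizers.Tokenizer.save`, and :meth:`~tokenizers.Tokenizer.from_file`; with the rust or node block active, to the corresponding entries listed above, so one source produces language-specific docs.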