Fix hashlink ids

Mishig Davaadorj
2022-04-18 12:13:40 +02:00
parent f6ba840e3e
commit 5c97125d22
10 changed files with 48 additions and 48 deletions

@@ -2,7 +2,7 @@
 <tokenizerslangcontent>
 <python>
-## AddedToken[[tokenizers.AddedToken]]
+## AddedToken
 [[autodoc]] tokenizers.AddedToken
     - content
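
The hunk above only adjusts the heading's explicit `[[tokenizers.AddedToken]]` anchor (the hashlink id this commit fixes); the documented class is untouched. For orientation, a minimal sketch of how `AddedToken` is typically used, assuming the `tokenizers` Python package is installed (the token strings below are invented):

```python
from tokenizers import Tokenizer, AddedToken
from tokenizers.models import BPE

tok = Tokenizer(BPE(unk_token="[UNK]"))
# AddedToken controls how an extra token is matched during encoding
tok.add_tokens([AddedToken("<ent>", single_word=True, lstrip=False, rstrip=False)])
tok.add_special_tokens([AddedToken("[PAD]", normalized=False)])
```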

@@ -2,23 +2,23 @@
 <tokenizerslangcontent>
 <python>
-## BPEDecoder[[tokenizers.decoders.BPEDecoder]]
+## BPEDecoder
 [[autodoc]] tokenizers.decoders.BPEDecoder
-## ByteLevel[[tokenizers.decoders.ByteLevel]]
+## ByteLevel
 [[autodoc]] tokenizers.decoders.ByteLevel
-## CTC[[tokenizers.decoders.CTC]]
+## CTC
 [[autodoc]] tokenizers.decoders.CTC
-## Metaspace[[tokenizers.decoders.Metaspace]]
+## Metaspace
 [[autodoc]] tokenizers.decoders.Metaspace
-## WordPiece[[tokenizers.decoders.WordPiece]]
+## WordPiece
 [[autodoc]] tokenizers.decoders.WordPiece
 </python>
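
A short sketch of attaching one of the decoders listed above to a tokenizer, assuming the `tokenizers` Python package and network access for the example checkpoint (the checkpoint name is only an illustration):

```python
from tokenizers import Tokenizer, decoders

tok = Tokenizer.from_pretrained("bert-base-uncased")  # example checkpoint
# The WordPiece decoder merges "##"-prefixed word pieces back into whole words
tok.decoder = decoders.WordPiece(prefix="##")
ids = tok.encode("Decoders undo the tokenization").ids
print(tok.decode(ids))
```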

@@ -2,7 +2,7 @@
 <tokenizerslangcontent>
 <python>
-## Encoding[[tokenizers.Encoding]]
+## Encoding
 [[autodoc]] tokenizers.Encoding
     - all
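
`Encoding` is the object returned by `Tokenizer.encode`; a small sketch of the fields this page documents (the checkpoint name is just an example and needs network access):

```python
from tokenizers import Tokenizer

tok = Tokenizer.from_pretrained("bert-base-uncased")  # example checkpoint
enc = tok.encode("Hello, world!")
print(enc.tokens)          # sub-token strings
print(enc.ids)             # vocabulary ids
print(enc.offsets)         # (start, end) character spans in the input
print(enc.attention_mask)  # 1 for real tokens, 0 for padding
```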

@@ -2,23 +2,23 @@
 <tokenizerslangcontent>
 <python>
-## BPE[[tokenizers.models.BPE]]
+## BPE
 [[autodoc]] tokenizers.models.BPE
-## Model[[tokenizers.models.Model]]
+## Model
 [[autodoc]] tokenizers.models.Model
-## Unigram[[tokenizers.models.Unigram]]
+## Unigram
 [[autodoc]] tokenizers.models.Unigram
-## WordLevel[[tokenizers.models.WordLevel]]
+## WordLevel
 [[autodoc]] tokenizers.models.WordLevel
-## WordPiece[[tokenizers.models.WordPiece]]
+## WordPiece
 [[autodoc]] tokenizers.models.WordPiece
 </python>
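
The model classes above are the trainable core of a tokenizer. A minimal sketch of instantiating them (nothing is trained yet, and the unknown-token string is an arbitrary choice):

```python
from tokenizers import Tokenizer
from tokenizers.models import BPE, WordPiece

# A Tokenizer wraps exactly one model; both of these start with an empty vocabulary
bpe_tok = Tokenizer(BPE(unk_token="[UNK]"))
wordpiece_tok = Tokenizer(WordPiece(unk_token="[UNK]"))
```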

@@ -2,55 +2,55 @@
 <tokenizerslangcontent>
 <python>
-## BertNormalizer[[tokenizers.normalizers.BertNormalizer]]
+## BertNormalizer
 [[autodoc]] tokenizers.normalizers.BertNormalizer
-## Lowercase[[tokenizers.normalizers.Lowercase]]
+## Lowercase
 [[autodoc]] tokenizers.normalizers.Lowercase
-## NFC[[tokenizers.normalizers.NFC]]
+## NFC
 [[autodoc]] tokenizers.normalizers.NFC
-## NFD[[tokenizers.normalizers.NFD]]
+## NFD
 [[autodoc]] tokenizers.normalizers.NFD
-## NFKC[[tokenizers.normalizers.NFKC]]
+## NFKC
 [[autodoc]] tokenizers.normalizers.NFKC
-## NFKD[[tokenizers.normalizers.NFKD]]
+## NFKD
 [[autodoc]] tokenizers.normalizers.NFKD
-## Nmt[[tokenizers.normalizers.Nmt]]
+## Nmt
 [[autodoc]] tokenizers.normalizers.Nmt
-## Normalizer[[tokenizers.normalizers.Normalizer]]
+## Normalizer
 [[autodoc]] tokenizers.normalizers.Normalizer
-## Precompiled[[tokenizers.normalizers.Precompiled]]
+## Precompiled
 [[autodoc]] tokenizers.normalizers.Precompiled
-## Replace[[tokenizers.normalizers.Replace]]
+## Replace
 [[autodoc]] tokenizers.normalizers.Replace
-## Sequence[[tokenizers.normalizers.Sequence]]
+## Sequence
 [[autodoc]] tokenizers.normalizers.Sequence
-## Strip[[tokenizers.normalizers.Strip]]
+## Strip
 [[autodoc]] tokenizers.normalizers.Strip
-## StripAccents[[tokenizers.normalizers.StripAccents]]
+## StripAccents
 [[autodoc]] tokenizers.normalizers.StripAccents
 </python>
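
A quick sketch of composing the normalizers listed above, mirroring the library's pipeline example (assuming the `tokenizers` Python package):

```python
from tokenizers import normalizers
from tokenizers.normalizers import NFD, Lowercase, StripAccents

# Decompose unicode, lowercase, then drop the accent marks
norm = normalizers.Sequence([NFD(), Lowercase(), StripAccents()])
print(norm.normalize_str("Héllò hôw are ü?"))  # expected: "hello how are u?"
```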

@@ -2,19 +2,19 @@
 <tokenizerslangcontent>
 <python>
-## BertProcessing[[tokenizers.processors.BertProcessing]]
+## BertProcessing
 [[autodoc]] tokenizers.processors.BertProcessing
-## ByteLevel[[tokenizers.processors.ByteLevel]]
+## ByteLevel
 [[autodoc]] tokenizers.processors.ByteLevel
-## RobertaProcessing[[tokenizers.processors.RobertaProcessing]]
+## RobertaProcessing
 [[autodoc]] tokenizers.processors.RobertaProcessing
-## TemplateProcessing[[tokenizers.processors.TemplateProcessing]]
+## TemplateProcessing
 [[autodoc]] tokenizers.processors.TemplateProcessing
 </python>
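
A sketch of `TemplateProcessing`, the post-processor most often shown in the docs (the special-token ids below are placeholders; a real tokenizer would supply its own):

```python
from tokenizers import Tokenizer
from tokenizers.models import WordPiece
from tokenizers.processors import TemplateProcessing

tok = Tokenizer(WordPiece(unk_token="[UNK]"))
# Wrap single sentences as "[CLS] ... [SEP]" and give pairs segment ids 0/1
tok.post_processor = TemplateProcessing(
    single="[CLS] $A [SEP]",
    pair="[CLS] $A [SEP] $B:1 [SEP]:1",
    special_tokens=[("[CLS]", 1), ("[SEP]", 2)],
)
```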

@@ -2,51 +2,51 @@
 <tokenizerslangcontent>
 <python>
-## BertPreTokenizer[[tokenizers.pre_tokenizers.BertPreTokenizer]]
+## BertPreTokenizer
 [[autodoc]] tokenizers.pre_tokenizers.BertPreTokenizer
-## ByteLevel[[tokenizers.pre_tokenizers.ByteLevel]]
+## ByteLevel
 [[autodoc]] tokenizers.pre_tokenizers.ByteLevel
-## CharDelimiterSplit[[tokenizers.pre_tokenizers.CharDelimiterSplit]]
+## CharDelimiterSplit
 [[autodoc]] tokenizers.pre_tokenizers.CharDelimiterSplit
-## Digits[[tokenizers.pre_tokenizers.Digits]]
+## Digits
 [[autodoc]] tokenizers.pre_tokenizers.Digits
-## Metaspace[[tokenizers.pre_tokenizers.Metaspace]]
+## Metaspace
 [[autodoc]] tokenizers.pre_tokenizers.Metaspace
-## PreTokenizer[[tokenizers.pre_tokenizers.PreTokenizer]]
+## PreTokenizer
 [[autodoc]] tokenizers.pre_tokenizers.PreTokenizer
-## Punctuation[[tokenizers.pre_tokenizers.Punctuation]]
+## Punctuation
 [[autodoc]] tokenizers.pre_tokenizers.Punctuation
-## Sequence[[tokenizers.pre_tokenizers.Sequence]]
+## Sequence
 [[autodoc]] tokenizers.pre_tokenizers.Sequence
-## Split[[tokenizers.pre_tokenizers.Split]]
+## Split
 [[autodoc]] tokenizers.pre_tokenizers.Split
-## UnicodeScripts[[tokenizers.pre_tokenizers.UnicodeScripts]]
+## UnicodeScripts
 [[autodoc]] tokenizers.pre_tokenizers.UnicodeScripts
-## Whitespace[[tokenizers.pre_tokenizers.Whitespace]]
+## Whitespace
 [[autodoc]] tokenizers.pre_tokenizers.Whitespace
-## WhitespaceSplit[[tokenizers.pre_tokenizers.WhitespaceSplit]]
+## WhitespaceSplit
 [[autodoc]] tokenizers.pre_tokenizers.WhitespaceSplit
 </python>
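
A sketch combining two of the pre-tokenizers listed above (the sample sentence is arbitrary):

```python
from tokenizers import pre_tokenizers
from tokenizers.pre_tokenizers import Whitespace, Digits

# Split on whitespace/punctuation, then isolate each digit, keeping character offsets
pre = pre_tokenizers.Sequence([Whitespace(), Digits(individual_digits=True)])
print(pre.pre_tokenize_str("Call 911 now"))
# roughly: [("Call", (0, 4)), ("9", (5, 6)), ("1", (6, 7)), ("1", (7, 8)), ("now", (9, 12))]
```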

@@ -2,7 +2,7 @@
 <tokenizerslangcontent>
 <python>
-## Tokenizer[[tokenizers.Tokenizer]]
+## Tokenizer
 [[autodoc]] tokenizers.Tokenizer
     - all
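
`Tokenizer` ties all of the pieces together; a minimal round-trip sketch (the checkpoint name is only an example and needs network access):

```python
from tokenizers import Tokenizer

tok = Tokenizer.from_pretrained("bert-base-uncased")  # example checkpoint
enc = tok.encode("Using a Tokenizer is straightforward")
print(enc.tokens)
print(tok.decode(enc.ids))
tok.save("tokenizer.json")  # single-file JSON serialization
```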

@@ -2,19 +2,19 @@
 <tokenizerslangcontent>
 <python>
-## BpeTrainer[[tokenizers.trainers.BpeTrainer]]
+## BpeTrainer
 [[autodoc]] tokenizers.trainers.BpeTrainer
-## UnigramTrainer[[tokenizers.trainers.UnigramTrainer]]
+## UnigramTrainer
 [[autodoc]] tokenizers.trainers.UnigramTrainer
-## WordLevelTrainer[[tokenizers.trainers.WordLevelTrainer]]
+## WordLevelTrainer
 [[autodoc]] tokenizers.trainers.WordLevelTrainer
-## WordPieceTrainer[[tokenizers.trainers.WordPieceTrainer]]
+## WordPieceTrainer
 [[autodoc]] tokenizers.trainers.WordPieceTrainer
 </python>
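
A compressed training sketch using one of the trainers above, close to the library's quicktour (the corpus path is hypothetical):

```python
from tokenizers import Tokenizer
from tokenizers.models import BPE
from tokenizers.pre_tokenizers import Whitespace
from tokenizers.trainers import BpeTrainer

tok = Tokenizer(BPE(unk_token="[UNK]"))
tok.pre_tokenizer = Whitespace()
trainer = BpeTrainer(vocab_size=30000, special_tokens=["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]"])
tok.train(files=["my_corpus.txt"], trainer=trainer)  # hypothetical corpus file
```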

@@ -2,11 +2,11 @@
 <tokenizerslangcontent>
 <python>
-## Annotation[[tokenizers.tools.Annotation]]
+## Annotation
 [[autodoc]] tokenizers.tools.Annotation
-## EncodingVisualizer[[tokenizers.tools.EncodingVisualizer]]
+## EncodingVisualizer
 [[autodoc]] tokenizers.tools.EncodingVisualizer
     - __call__
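
Finally, a sketch of the visualizer documented on this page; it is intended for notebooks, and the annotation span and label below are invented (treat the exact keyword names as an assumption):

```python
from tokenizers import Tokenizer
from tokenizers.tools import Annotation, EncodingVisualizer

tok = Tokenizer.from_pretrained("bert-base-uncased")  # example checkpoint
viz = EncodingVisualizer(tok)
annotations = [Annotation(start=0, end=5, label="greeting")]  # highlights "Hello"
viz("Hello world, rendered as an HTML view in a notebook", annotations=annotations)
```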