<!-- Mirrored from https://github.com/mii443/tokenizers.git (synced 2025-08-22 16:25:30 +00:00) -->
# Encoding
<tokenizerslangcontent>
<python>
## Encoding
[[autodoc]] tokenizers.Encoding
    - all
    - attention_mask
    - ids
    - n_sequences
    - offsets
    - overflowing
    - sequence_ids
    - special_tokens_mask
    - tokens
    - type_ids
    - word_ids
    - words
</python>
<rust>
The Rust API Reference is available directly on the [Docs.rs](https://docs.rs/tokenizers/latest/tokenizers/) website.
</rust>
<node>
The node API has not been documented yet.
</node>
</tokenizerslangcontent>