Python - Bindings for WhitespaceSplit
@@ -37,6 +37,7 @@ fn pre_tokenizers(_py: Python, m: &PyModule) -> PyResult<()> {
     m.add_class::<pre_tokenizers::PreTokenizer>()?;
     m.add_class::<pre_tokenizers::ByteLevel>()?;
     m.add_class::<pre_tokenizers::Whitespace>()?;
+    m.add_class::<pre_tokenizers::WhitespaceSplit>()?;
     m.add_class::<pre_tokenizers::BertPreTokenizer>()?;
     m.add_class::<pre_tokenizers::Metaspace>()?;
     Ok(())
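For context, here is a minimal sketch of what a PyO3 class such as `WhitespaceSplit` could look like so that the `m.add_class::<pre_tokenizers::WhitespaceSplit>()?` call added above has a type to register. This is an assumption for illustration only, not the repository's actual wrapper: the real binding wraps the underlying Rust `tokenizers` pre-tokenizer, while the struct body and constructor below are placeholders.

use pyo3::prelude::*;

// Hypothetical sketch: the real binding wraps the Rust `tokenizers`
// WhitespaceSplit pre-tokenizer; the empty struct here is a placeholder.
#[pyclass]
pub struct WhitespaceSplit {}

#[pymethods]
impl WhitespaceSplit {
    // Expose a no-argument constructor to Python (`WhitespaceSplit()`).
    #[new]
    fn new() -> Self {
        WhitespaceSplit {}
    }
}

// Module initializer matching the function shown in the diff hunk above.
#[pymodule]
fn pre_tokenizers(_py: Python, m: &PyModule) -> PyResult<()> {
    m.add_class::<WhitespaceSplit>()?;
    Ok(())
}

With a registration like this, the class becomes importable from the Python side of the bindings (e.g. `from tokenizers.pre_tokenizers import WhitespaceSplit`), mirroring how the other pre-tokenizers in the hunk are exposed.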