Mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-23 00:35:35 +00:00)
Fix strip python type (#1602)
* update
* the fix
* Revert "update"
  This reverts commit 4c2f32f116479b0ec8ccd7c832f86cbc8787d8a9.
* add a test and rebase
* style
* oups
@@ -65,7 +65,7 @@ impl PyNormalizer {
                     Py::new(py, (PyBertNormalizer {}, base))?.into_py(py)
                 }
                 NormalizerWrapper::StripNormalizer(_) => {
-                    Py::new(py, (PyBertNormalizer {}, base))?.into_py(py)
+                    Py::new(py, (PyStrip {}, base))?.into_py(py)
                 }
                 NormalizerWrapper::Prepend(_) => Py::new(py, (PyPrepend {}, base))?.into_py(py),
                 NormalizerWrapper::ByteLevel(_) => {
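With this change, a deserialized Strip normalizer is exposed as the Strip subtype on the Python side instead of BertNormalizer. A minimal sketch of how the behavior can be checked from Python (an illustration only, not the test added in this commit; the round-trip through to_str/from_str is an assumption):

from tokenizers import Tokenizer
from tokenizers.models import BPE
from tokenizers.normalizers import Strip

# Build a tokenizer whose normalizer is Strip, then round-trip it through
# JSON so the Rust side has to re-wrap it as a Python normalizer subtype.
tok = Tokenizer(BPE())
tok.normalizer = Strip()
reloaded = Tokenizer.from_str(tok.to_str())

# Before this fix the reloaded normalizer was reported as BertNormalizer.
assert type(reloaded.normalizer).__name__ == "Strip"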