mirror of
https://github.com/mii443/tokenizers.git
synced 2025-08-22 16:25:30 +00:00
pyo3 v0.18 migration (#1173)
* pyo v0.18 migration * Fix formatting issues of black
This commit is contained in:
@@ -24,7 +24,7 @@ class JiebaPreTokenizer:
         # Just an odd example...
         splits = []
         last = 0
-        for (i, char) in enumerate(str(normalized_string)):
+        for i, char in enumerate(str(normalized_string)):
             if char.isnumeric() and int(char) % 2 == 1:
                 splits.append(normalized_string[last:i])
                 last = i
Reference in New Issue
Block a user