Mirror of https://github.com/mii443/tokenizers.git, synced 2025-12-03 11:18:29 +00:00
fix more tests
@@ -226,7 +226,7 @@ impl PyBpeTrainer {
                     if let Ok(content) = token.extract::<String>() {
                         Ok(tk::tokenizer::AddedToken::from(content, true))
                     } else if let Ok(mut token) = token.extract::<PyRefMut<PyAddedToken>>() {
-                        token.special = true;
+                        token.special = false;
                         Ok(token.get_token())
                     } else {
                         Err(exceptions::PyTypeError::new_err(
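
For context, this setter is what runs when special_tokens is assigned on a trainer from Python: plain strings and AddedToken objects are both accepted and converted into AddedToken values on the Rust side. A minimal usage sketch, not part of this commit; it assumes the tokenizers Python bindings built from this branch, and the token contents and vocab size are placeholders:

    from tokenizers import Tokenizer, AddedToken
    from tokenizers.models import BPE
    from tokenizers.pre_tokenizers import Whitespace
    from tokenizers.trainers import BpeTrainer

    # special_tokens accepts plain strings as well as AddedToken instances;
    # the setter shown in the hunk above converts each entry to an AddedToken.
    trainer = BpeTrainer(
        vocab_size=1000,
        special_tokens=["[UNK]", AddedToken("[PAD]", lstrip=False, rstrip=False)],
    )

    tokenizer = Tokenizer(BPE(unk_token="[UNK]"))
    tokenizer.pre_tokenizer = Whitespace()
    tokenizer.train_from_iterator(["hello world", "hello there"], trainer=trainer)
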
@@ -19,7 +19,7 @@ class TestAddedToken:
         assert type(added_token) == AddedToken
         assert str(added_token) == "<mask>"
         assert (
-            repr(added_token) == 'AddedToken("<mask>", rstrip=False, lstrip=False, single_word=False, normalized=True)'
+            repr(added_token) == 'AddedToken("<mask>", rstrip=False, lstrip=False, single_word=False, normalized=True, special=False)'
         )
         assert added_token.rstrip == False
         assert added_token.lstrip == False
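
The updated assertion reflects the special flag now appearing in the Python AddedToken repr. A quick sketch of what the test expects; the commented output is the test's expectation, assuming a build that includes this change:

    from tokenizers import AddedToken

    added_token = AddedToken("<mask>")
    print(repr(added_token))
    # Per the updated test, this should print:
    # AddedToken("<mask>", rstrip=False, lstrip=False, single_word=False, normalized=True, special=False)
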