mirror of https://github.com/mii443/tokenizers.git
synced 2025-08-22 16:25:30 +00:00

#[allow(dead_code)] // Suppress the "method is never used" warning
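For context, a minimal standalone sketch of what the attribute in this commit does (hypothetical `Vocabulary` type, not the repo's own code): `#[allow(dead_code)]` silences the compiler's dead-code lint on an item that is deliberately kept even though nothing in the crate calls it.

    struct Vocabulary {
        tokens: Vec<String>,
    }

    impl Vocabulary {
        // Nothing below calls `len`; without this attribute, `cargo build`
        // emits: warning: method `len` is never used.
        #[allow(dead_code)]
        fn len(&self) -> usize {
            self.tokens.len()
        }
    }

    fn main() {
        let v = Vocabulary { tokens: vec!["<mask>".to_string()] };
        // Uses the field directly, so the struct itself is not dead code.
        println!("{} token(s) stored", v.tokens.len());
    }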
@@ -180,8 +180,8 @@ impl AddedVocabulary {
             split_normalized_trie: (normalized_trie, vec![]),
         }
     }

     /// Size of the additional vocabulary
+    #[allow(dead_code)] // Suppress the "method is never used" warning
     pub fn len(&self) -> usize {
         self.added_tokens_map.len()
     }
@@ -585,7 +585,9 @@ mod tests {
             ),
             1
         );
-        assert_eq!(vocab.len(), 1);
+
+        let vocab_len: usize = vocab.len();
+        assert_eq!(vocab_len, 1);

         // Does not add multiple time the same token
         assert_eq!(
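A hedged reading of the hunk above (my inference; the commit message does not say): binding the result before asserting gives `len` an ordinary call site with an explicit `usize` annotation, which both exercises the method and documents its return type. A standalone illustration with a plain Vec:

    fn main() {
        let vocab = vec!["<mask>"];
        // Direct form and the two-step form used in the test are equivalent;
        // the explicit binding spells out that `len` returns `usize`.
        assert_eq!(vocab.len(), 1);
        let vocab_len: usize = vocab.len();
        assert_eq!(vocab_len, 1);
    }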
@@ -685,7 +687,7 @@ mod tests {
         assert_eq!(token.content, "hey"); // Token was already there

         token.special = true;
-        assert_eq!(token.special, true); // Token was already there
+        assert!(token.special); // Token was already there
     }

     #[test]
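The assertion rewrite in the hunk above follows common Rust style (it matches what clippy's `bool_assert_comparison` lint suggests, assuming that is what motivated it): comparing a `bool` against `true` inside `assert_eq!` is redundant. A minimal illustration:

    fn main() {
        let special = true;
        assert_eq!(special, true); // compiles, but the comparison against `true` is redundant
        assert!(special);          // idiomatic equivalent
    }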
@@ -821,6 +823,8 @@ mod tests {
         let mut vocab = AddedVocabulary::new();
         let normalizer = Lowercase;

+        assert_eq!(vocab.len(), 0);
+
         vocab.add_tokens(
             &[AddedToken::from("<mask>", false).single_word(true)],
             &model,