Mirror of https://github.com/mii443/tokenizers.git, synced 2025-08-22 16:25:30 +00:00
clippy
@@ -261,7 +261,7 @@ impl AddedVocabulary {
                 .values()
                 .cloned()
                 .max()
-                .map_or(model.get_vocab_size() as u32, |max| max.clone() + 1)
+                .map_or(model.get_vocab_size() as u32, |max| max + 1)
         };
         // Make sure we modify the previous entry
         self.added_tokens_map
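For context, the change addresses clippy's `clone_on_copy` lint: calling `.clone()` on a `Copy` type such as `u32` is redundant, because the value is copied implicitly. Below is a minimal sketch of the pattern, using a hypothetical map in place of the real `added_tokens_map` (the names and values are illustrative, not taken from the commit):

use std::collections::HashMap;

fn main() {
    // Hypothetical stand-in for the added-tokens map: token content -> id.
    let added_tokens_map: HashMap<&str, u32> = HashMap::from([("<pad>", 0), ("<eos>", 1)]);

    // Pick the next free id: one past the current maximum, or 0 if the map is
    // empty (the real code falls back to `model.get_vocab_size() as u32`).
    let next_id = added_tokens_map
        .values()
        .cloned()
        .max()
        // Before the fix this read `|max| max.clone() + 1`; since `max` is a
        // `u32`, a `Copy` type, the `.clone()` was redundant and clippy flagged it.
        .map_or(0, |max| max + 1);

    assert_eq!(next_id, 2);
}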