Mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-23 00:35:35 +00:00)
Commit: fmt
@@ -675,10 +675,9 @@ mod tests {
         assert_eq!(vocab.get_vocab()["another_two"], 4); // Token idx not changed
 
         // Just checking that we can set the content of the string in rust
-        let mut token:AddedToken = AddedToken::from("Hey", false);
+        let mut token: AddedToken = AddedToken::from("Hey", false);
         token.content = "hey".to_string();
         assert_eq!(token.content, "hey"); // Token was already there
-
     }
 
     #[test]
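For context, a self-contained sketch of what this test exercises, assuming only the public API visible in the hunk above (AddedToken::from and the public content field):

    use tokenizers::AddedToken;

    fn main() {
        // The second argument marks the token as non-special.
        let mut token: AddedToken = AddedToken::from("Hey", false);
        // `content` is a public field, so it can be rewritten in place.
        token.content = "hey".to_string();
        assert_eq!(token.content, "hey");
    }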
@@ -660,8 +660,8 @@ where
     }
 
-    /// Get the added vocabulary only
+    /// Get the added tokens decoder
     /// Get the added tokens decoder
    pub fn get_added_tokens_decoder(&self) -> &HashMap<u32, AddedToken> {
         self.added_vocabulary.get_vocab_r()
     }
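A minimal usage sketch for the method above, assuming the signature shown in this hunk (a borrowed HashMap<u32, AddedToken>) and AddedToken's public content field; list_added_tokens is a hypothetical helper, not part of the crate:

    use std::collections::HashMap;
    use tokenizers::AddedToken;

    // Given the id -> AddedToken map returned by get_added_tokens_decoder(),
    // collect the added tokens sorted by id.
    fn list_added_tokens(decoder: &HashMap<u32, AddedToken>) -> Vec<(u32, String)> {
        let mut entries: Vec<(u32, String)> = decoder
            .iter()
            .map(|(id, token)| (*id, token.content.clone()))
            .collect();
        entries.sort_by_key(|&(id, _)| id);
        entries
    }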