From 2dca47681038fc69f29d18801ea61e4e085138c0 Mon Sep 17 00:00:00 2001
From: Arthur Zucker
Date: Fri, 1 Sep 2023 18:48:50 +0000
Subject: [PATCH] fix some tests

---
 tokenizers/src/tokenizer/added_vocabulary.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tokenizers/src/tokenizer/added_vocabulary.rs b/tokenizers/src/tokenizer/added_vocabulary.rs
index 84d12021..01ec59e6 100644
--- a/tokenizers/src/tokenizer/added_vocabulary.rs
+++ b/tokenizers/src/tokenizer/added_vocabulary.rs
@@ -607,7 +607,7 @@ mod tests {
         assert_eq!(vocab.add_tokens(&[added_token.clone()], &model, normalizer),1);
         assert_eq!(vocab.len(), 3);
 
-        assert_eq!(vocab.get_vocab_r()[&1], added_token);
+        assert_eq!(vocab.get_vocab_r()[&0], added_token);
     }
 
     #[test]
@@ -652,8 +652,8 @@ mod tests {
             (3,AddedToken::from("added_token_2", true)),
             (0,AddedToken::from("test", true)),
         ]));
-
-        assert!(!vocab.added_tokens_map.contains_key("test"));
+        assert!(vocab.added_tokens_map.contains_key("test"));
+        assert!(vocab.added_tokens_map_r.contains_key(&0));
     }
 
     #[test]