Mirror of https://github.com/mii443/tokenizers.git (synced 2025-12-06 04:38:23 +00:00)
Fixing convert/check scripts.
@@ -397,7 +397,7 @@ def check(pretrained, filename):
         tok_total_time += tok - trans

         if ids != tok_ids:
-            if check_details(line, ids, tok_ids, tokenizer, transformer_tokenizer):
+            if check_details(line, ids, tok_ids, transformer_tokenizer, tokenizer):
                 continue
         assert ids == tok_ids, f"Error in line {i}: {line} {ids} != {tok_ids}"
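
For context, this hunk sits inside the comparison loop of the check script: each input line is encoded with both the original transformers tokenizer and the converted tokenizers tokenizer, the encoding times are accumulated, and check_details is only consulted when the two id sequences disagree. The commit swaps the last two arguments so that the reference (transformers) tokenizer is passed before the converted one, presumably matching the parameter order check_details expects. The sketch below is a reconstruction from the identifiers visible in the hunk, not the repository's actual code; how the two tokenizers are loaded, the tokenizer file path, and the body of check_details are assumptions.

# Hedged sketch of the loop this hunk lives in, reconstructed from the
# identifiers visible in the diff (ids, tok_ids, trans, tok, tok_total_time,
# check_details). How the tokenizers are loaded and what check_details does
# internally are assumptions, not taken from the repository.
import time

from tokenizers import Tokenizer
from transformers import AutoTokenizer


def check_details(line, ids, tok_ids, reference_tokenizer, tokenizer):
    # Hypothetical stand-in for the script's helper: report where the two id
    # sequences first diverge and return True if the mismatch is acceptable.
    for pos, (a, b) in enumerate(zip(ids, tok_ids)):
        if a != b:
            print(f"First divergence at position {pos}: {a} != {b} in {line!r}")
            break
    return False


def check(pretrained, filename):
    # Assumption: the reference tokenizer comes from transformers and the
    # tokenizer under test is a converted `tokenizers` Tokenizer saved to disk.
    transformer_tokenizer = AutoTokenizer.from_pretrained(pretrained)
    tokenizer = Tokenizer.from_file(f"{pretrained}.json")  # assumed output path

    trans_total_time = 0.0
    tok_total_time = 0.0
    with open(filename, encoding="utf-8") as f:
        for i, line in enumerate(f):
            start = time.time()
            ids = transformer_tokenizer.encode(line)      # reference ids
            trans = time.time()
            tok_ids = tokenizer.encode(line).ids          # converted tokenizer's ids
            tok = time.time()

            trans_total_time += trans - start
            tok_total_time += tok - trans

            if ids != tok_ids:
                # After this commit: reference tokenizer first, converted one second.
                if check_details(line, ids, tok_ids, transformer_tokenizer, tokenizer):
                    continue
            assert ids == tok_ids, f"Error in line {i}: {line} {ids} != {tok_ids}"

    print(f"transformers: {trans_total_time:.3f}s, tokenizers: {tok_total_time:.3f}s")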