mirror of
https://github.com/mii443/tokenizers.git
synced 2025-12-03 11:18:29 +00:00
Fixed BPE.read_files -> BPE.read_file in SentencePieceBPETokenizer
This commit is contained in:
Committed by: Anthony MOI
Parent: a2289d49b4
Commit: f7c61c267a
@@ -49,7 +49,7 @@ class SentencePieceBPETokenizer(BaseTokenizer):

     @staticmethod
     def from_file(vocab_filename: str, merges_filename: str, **kwargs):
-        vocab, merges = BPE.read_files(vocab_filename, merges_filename)
+        vocab, merges = BPE.read_file(vocab_filename, merges_filename)
         return SentencePieceBPETokenizer(vocab, merges, **kwargs)

     def train(
Reference in New Issue
Block a user