mirror of
https://github.com/mii443/tokenizers.git
synced 2025-12-08 13:48:19 +00:00
Python - hotfix BertWordPieceTokenizer decoder
This commit is contained in:
@@ -50,7 +50,7 @@ class BertWordPieceTokenizer(BaseTokenizer):
         tokenizer.post_processor = BertProcessing(
             (sep_token, sep_token_id), (cls_token, cls_token_id)
         )
-        tokenizer.decoders = decoders.WordPiece(prefix=wordpieces_prefix)
+        tokenizer.decoder = decoders.WordPiece(prefix=wordpieces_prefix)

         parameters = {
             "model": "BertWordPiece",
Reference in New Issue
Block a user