Mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-22 16:25:30 +00:00)
revise type specification (#1289)
@@ -16,7 +16,7 @@ class SentencePieceUnigramTokenizer(BaseTokenizer):
     def __init__(
         self,
-        vocab: Optional[str] = None,
+        vocab: Optional[List[Tuple[str, float]]] = None,
         replacement: str = "▁",
         add_prefix_space: bool = True,
     ):
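The revised annotation means the vocabulary is passed as (token, score) pairs rather than a path string. A minimal usage sketch under that reading; the example tokens and scores below are hypothetical, not taken from the commit:

from tokenizers import SentencePieceUnigramTokenizer

# Hypothetical (token, score) pairs matching the revised type
# Optional[List[Tuple[str, float]]]; scores are Unigram log-probabilities.
vocab = [("<unk>", -10.0), ("▁hello", -1.5), ("▁world", -2.0)]
tokenizer = SentencePieceUnigramTokenizer(vocab=vocab)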