Mirror of https://github.com/mii443/tokenizers.git (synced 2025-12-05 04:08:22 +00:00)
Removing forgotten places.
committed by Anthony MOI
parent 857948e5b8
commit 76b86f6901
@@ -3,7 +3,6 @@ from .. import pre_tokenizers
 PreTokenizer = pre_tokenizers.PreTokenizer
 ByteLevel = pre_tokenizers.ByteLevel
 Whitespace = pre_tokenizers.Whitespace
-Deduplication = pre_tokenizers.Deduplication
 Punctuation = pre_tokenizers.Punctuation
 Sequence = pre_tokenizers.Sequence
 WhitespaceSplit = pre_tokenizers.WhitespaceSplit
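
For context, these assignments re-export the Rust-backed pre-tokenizer classes at the Python package level. A minimal usage sketch of the classes that survive this commit, assuming the published tokenizers Python package is installed (Deduplication is the line removed above):

    from tokenizers import pre_tokenizers

    # Whitespace splits on runs of word characters and on punctuation.
    ws = pre_tokenizers.Whitespace()
    print(ws.pre_tokenize_str("Hello, world!"))
    # [('Hello', (0, 5)), (',', (5, 6)), ('world', (7, 12)), ('!', (12, 13))]

    # Sequence chains pre-tokenizers in order: whitespace split, then punctuation split.
    seq = pre_tokenizers.Sequence(
        [pre_tokenizers.WhitespaceSplit(), pre_tokenizers.Punctuation()]
    )
    print(seq.pre_tokenize_str("don't stop"))
    # [('don', (0, 3)), ("'", (3, 4)), ('t', (4, 5)), ('stop', (6, 10))]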