Python - Adding tokenizers classes - WIP

Anthony MOI
2020-01-07 16:20:20 -05:00
parent 6294d342d5
commit 63063118df
6 changed files with 220 additions and 1 deletion


@@ -20,7 +20,8 @@ setup(
         "tokenizers.normalizers",
         "tokenizers.pre_tokenizers",
         "tokenizers.processors",
-        "tokenizers.trainers"
+        "tokenizers.trainers",
+        "tokenizers.implementations",
     ],
     package_data = {
         'tokenizers': [ 'py.typed', '__init__.pyi' ],
@@ -30,6 +31,7 @@ setup(
         'tokenizers.pre_tokenizers': [ 'py.typed', '__init__.pyi' ],
         'tokenizers.processors': [ 'py.typed', '__init__.pyi' ],
         'tokenizers.trainers': [ 'py.typed', '__init__.pyi' ],
+        'tokenizers.implementations': [ 'py.typed' ],
     },
     zip_safe=False,
 )
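
Note on the change above: setuptools only installs the subpackages explicitly listed in packages, and package_data attaches non-code files to each of them; the PEP 561 py.typed marker tells type checkers that a package ships type information, and the __init__.pyi stubs describe the compiled extension modules. Below is a minimal, self-contained sketch of the same pattern, using a hypothetical mypkg.implementations layout rather than the real tokenizers project:

# sketch_setup.py - illustrative only; package names are hypothetical.
from setuptools import setup

setup(
    name="mypkg",
    version="0.1.0",
    packages=[
        "mypkg",
        "mypkg.implementations",  # a new subpackage must be listed here to be installed
    ],
    package_data={
        # PEP 561 marker so mypy/pyright pick up the package's type information;
        # .pyi stubs are shipped for packages backed by a compiled extension.
        "mypkg": ["py.typed", "__init__.pyi"],
        "mypkg.implementations": ["py.typed"],
    },
    zip_safe=False,  # PEP 561 recommends this so py.typed stays readable on disk
)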