mirror of
https://github.com/mii443/tokenizers.git
synced 2025-08-23 00:35:35 +00:00
Fixing test dependency.
This commit is contained in:
committed by
Anthony MOI
parent
ee3860c029
commit
7b5c2b92c6
@ -59,6 +59,15 @@ def openai_files(data_dir):
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def train_files(data_dir):
    """Session-scoped fixture mapping dataset names to locally downloaded training files.

    Currently provides the "wagahaiwa" corpus, fetched (and cached by the
    `download` helper) from the tokenizers GCS bucket.
    """
    wagahaiwa_url = (
        "https://storage.googleapis.com/tokenizers/unigram_wagahaiwa_nekodearu.txt"
    )
    return {"wagahaiwa": download(wagahaiwa_url)}
|
||||
|
||||
|
||||
def multiprocessing_with_parallelism(tokenizer, enabled: bool):
|
||||
"""
|
||||
This helper can be used to test that disabling parallelism avoids dead locks when the
|
||||
|
Reference in New Issue
Block a user