mirror of
https://github.com/mii443/tokenizers.git
synced 2025-12-07 21:28:19 +00:00
Updating python formatting. (#1079)
* Updating python formatting. * Forgot gh action. * Skipping isort to prevent circular imports. * Updating stub. * Removing `isort` (it contradicts `stub.py`). * Fixing weird stub black/isort disagreement.
This commit is contained in:
@@ -1,8 +1,9 @@
|
||||
import pytest
|
||||
|
||||
from ..utils import data_dir, bert_files, multiprocessing_with_parallelism
|
||||
from tokenizers import BertWordPieceTokenizer
|
||||
|
||||
from ..utils import bert_files, data_dir, multiprocessing_with_parallelism
|
||||
|
||||
|
||||
class TestBertWordPieceTokenizer:
|
||||
def test_basic_encode(self, bert_files):
|
||||
|
||||
Reference in New Issue
Block a user