Doc - Fixes some CI failures

This commit is contained in:
Anthony MOI
2020-11-02 10:14:34 -05:00
committed by Anthony MOI
parent 324aa2930a
commit d788a950ac
4 changed files with 10 additions and 11 deletions

View File

@ -26,8 +26,6 @@ function deploy_doc(){
rsync -zvr --delete build/html/ "$HOST_NAME:$DOC_PATH/$LANG/$2"
cp -r build/html/_static .
done
elif [ "$2" == "latest" ]; then
push_version $1 $2 $3
elif [ "$4" != "override" ] && ssh "$HOST_NAME" "[ -d $DOC_PATH/$3/$2 ]"; then
echo "Directory" $2 "already exists"
rsync -zvr --delete _static/ "$HOST_NAME:$DOC_PATH/$3/$2/_static"
@ -40,14 +38,14 @@ function deploy_doc(){
deploy_doc "$GITHUB_SHA" master
# Rust versions
deploy_doc "$GITHUB_SHA" latest rust
deploy_doc "$GITHUB_SHA" latest rust override
# Node versions
deploy_doc "$GITHUB_SHA" latest node
deploy_doc "$GITHUB_SHA" latest node override
# Python versions
deploy_doc "$GITHUB_SHA" v0.9.0 python override
deploy_doc "$GITHUB_SHA" v0.9.1 python override
deploy_doc "$GITHUB_SHA" v0.9.2 python override
deploy_doc "$GITHUB_SHA" v0.9.3 python override
deploy_doc "$GITHUB_SHA" latest python
deploy_doc "$GITHUB_SHA" latest python override

View File

@ -76,4 +76,4 @@ jobs:
- name: Run JS tests
working-directory: ./bindings/node
run: npm test
run: make test

View File

@ -2,11 +2,12 @@ from ..utils import data_dir, doc_wiki_tokenizer, doc_pipeline_bert_tokenizer
from tokenizers import Tokenizer
def print(*args, **kwargs):
pass
class TestPipeline:
def test_pipeline(self, doc_wiki_tokenizer):
def print(*args, **kwargs):
pass
try:
# START reload_tokenizer
from tokenizers import Tokenizer
@ -142,7 +143,7 @@ class TestPipeline:
bert_tokenizer.save("data/bert-wiki.json")
# END bert_train_tokenizer
def test_bert_example(self):
def test_bert_example(self, doc_pipeline_bert_tokenizer):
try:
bert_tokenizer = Tokenizer.from_file("data/bert-wiki.json")
except Exception:

View File

@ -93,7 +93,7 @@ fn quicktour_slow_train() -> tokenizers::Result<()> {
Ok(())
}
#[allow(unused_imports)]
#[allow(unused_imports, clippy::type_complexity)]
fn quicktour_get_tokenizer_trainer() -> tokenizers::Result<(
TokenizerImpl<
BPE,