tokenizer.save has the wrong arguments compared to the documentation (#901)

* `tokenizer.save` has the wrong arguments compared to the documentation

* Fixing the documentation of the `save` function.

Co-authored-by: Nicolas Patry <patry.nicolas@protonmail.com>
Author: Thomas Wang
Date: 2022-02-15 17:55:55 +01:00
Committed by: GitHub
Parent: 448054f3c7
Commit: 88d718207a
4 changed files with 29 additions and 5 deletions
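
For context, the fix is that `save` takes the destination path as its first positional argument, with `pretty` as an optional flag controlling JSON formatting. A minimal usage sketch (the model choice and file name are illustrative, not part of this commit):

from tokenizers import Tokenizer
from tokenizers.models import BPE

# Build a tokenizer around an empty BPE model just to have something to save.
tokenizer = Tokenizer(BPE())

# `path` is required; `pretty` defaults to True and pretty-prints the JSON output.
tokenizer.save("tokenizer.json", pretty=True)

# The previously documented form, tokenizer.save(pretty=True), omits the
# required positional `path` argument and fails with a TypeError.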

Changed file 1 of 4:

@@ -1135,6 +1135,12 @@ dependencies = [
  "vcpkg",
 ]
 
+[[package]]
+name = "paste"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5"
+
 [[package]]
 name = "percent-encoding"
 version = "2.1.0"
@@ -1174,6 +1180,10 @@ dependencies = [
  "unicode-xid",
 ]
 
+[[package]]
+name = "proc_macros"
+version = "0.1.0"
+
 [[package]]
 name = "quote"
 version = "1.0.10"
@@ -1668,7 +1678,7 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
 
 [[package]]
 name = "tokenizers"
-version = "0.11.1"
+version = "0.11.2"
 dependencies = [
  "aho-corasick",
  "cached-path",
@@ -1681,6 +1691,8 @@ dependencies = [
  "lazy_static",
  "log",
  "onig",
+ "paste",
+ "proc_macros",
  "rand 0.7.3",
  "rayon",
  "rayon-cond",

Changed file 2 of 4:

@@ -1158,6 +1158,12 @@ dependencies = [
  "proc-macro-hack",
 ]
 
+[[package]]
+name = "paste"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5"
+
 [[package]]
 name = "paste-impl"
 version = "0.1.18"
@@ -1212,6 +1218,10 @@ dependencies = [
  "unicode-xid",
 ]
 
+[[package]]
+name = "proc_macros"
+version = "0.1.0"
+
 [[package]]
 name = "pyo3"
 version = "0.12.4"
@@ -1223,7 +1233,7 @@ dependencies = [
  "inventory",
  "libc",
  "parking_lot",
- "paste",
+ "paste 0.1.18",
  "pyo3cls",
  "unindent",
 ]
@@ -1743,7 +1753,7 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
 
 [[package]]
 name = "tokenizers"
-version = "0.11.1"
+version = "0.11.2"
 dependencies = [
  "aho-corasick",
  "cached-path",
@@ -1756,6 +1766,8 @@ dependencies = [
  "lazy_static",
  "log",
  "onig",
+ "paste 1.0.6",
+ "proc_macros",
  "rand 0.7.3",
  "rayon",
  "rayon-cond",

Changed file 3 of 4:

@@ -1012,7 +1012,7 @@ class Tokenizer:
         The `optional` :class:`~tokenizers.pre_tokenizers.PreTokenizer` in use by the Tokenizer
         """
         pass
-    def save(self, pretty=True):
+    def save(self, path, pretty=True):
         """
         Save the :class:`~tokenizers.Tokenizer` to the file at the given path.
 
Changed file 4 of 4:

@@ -604,7 +604,7 @@ impl PyTokenizer {
     ///     pretty (:obj:`bool`, defaults to :obj:`True`):
     ///         Whether the JSON file should be pretty formatted.
     #[args(pretty = true)]
-    #[text_signature = "(self, pretty=True)"]
+    #[text_signature = "(self, path, pretty=True)"]
     fn save(&self, path: &str, pretty: bool) -> PyResult<()> {
         ToPyResult(self.tokenizer.save(path, pretty)).into()
     }
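
The `#[text_signature]` attribute is what PyO3 uses to populate the method's `__text_signature__`, which Python's introspection tools read. A quick sanity check after this change (a sketch, not part of the commit; the expected output assumes the corrected attribute shown above):

import inspect
from tokenizers import Tokenizer

# With the corrected text_signature, the reported parameters now include `path`.
print(inspect.signature(Tokenizer.save))
# expected: (self, path, pretty=True)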