Clippy fixes. (#846)

* Clippy fixes.

* Drop support for Python 3.6

* Remove other 3.6

* Re-enabling caches for build (5h+ seems too long and the issue seems solved):
  https://github.com/actions/virtual-environments/issues/572

* `npm audit fix`.

* Fix yaml?

* Pyarrow issue fixed: https://github.com/huggingface/datasets/pull/2268

* Installing dev libraries.

* Install python dev elsewhere?

* Typo.

* No sudo.

* ...

* Testing the GH again.

* Maybe v2 will fix it?

* Fixing tests on MacOS Python 3.8+

Nicolas Patry, 2021-12-15 15:55:48 +01:00, committed by GitHub
parent 1dc19e0dd4, commit c1100ec542
23 changed files with 365 additions and 402 deletions


@@ -51,7 +51,7 @@ impl PyEncoding {
         let data = serde_json::to_string(&self.encoding).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to pickle Encoding: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(PyBytes::new(py, data.as_bytes()).to_object(py))
@@ -63,7 +63,7 @@ impl PyEncoding {
         self.encoding = serde_json::from_slice(s.as_bytes()).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to unpickle Encoding: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(())

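These hunks, and the matching ones in the model, pre-tokenizer and post-processor bindings below, drop a redundant `.to_string()` on the error value: `format!("{}", e)` already formats `e` through its `Display` impl, so converting it to a `String` first only allocates an intermediate string. This is what Clippy flags here (most likely via the `to_string_in_format_args` lint). A minimal standalone sketch of the pattern, not taken from this PR:

```rust
use std::fmt;

// A stand-in error type; serde_json::Error plays this role in the real code.
#[derive(Debug)]
struct PickleError(String);

impl fmt::Display for PickleError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    let e = PickleError("unexpected end of input".into());

    // Before: `.to_string()` builds an intermediate String that `format!`
    // then formats again through `Display`.
    let noisy = format!("Error while attempting to pickle Encoding: {}", e.to_string());

    // After: pass the value directly; `{}` already uses the `Display` impl.
    let clean = format!("Error while attempting to pickle Encoding: {}", e);

    assert_eq!(noisy, clean);
}
```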

@@ -103,7 +103,7 @@ impl PyModel {
         let data = serde_json::to_string(&self.model).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to pickle Model: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(PyBytes::new(py, data.as_bytes()).to_object(py))
@@ -115,7 +115,7 @@ impl PyModel {
         self.model = serde_json::from_slice(s.as_bytes()).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to unpickle Model: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(())


@@ -112,7 +112,7 @@ impl PyPreTokenizer {
         let data = serde_json::to_string(&self.pretok).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to pickle PreTokenizer: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(PyBytes::new(py, data.as_bytes()).to_object(py))


@@ -71,7 +71,7 @@ impl PyPostProcessor {
         let data = serde_json::to_string(self.processor.as_ref()).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to pickle PostProcessor: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(PyBytes::new(py, data.as_bytes()).to_object(py))
@@ -83,7 +83,7 @@ impl PyPostProcessor {
         self.processor = serde_json::from_slice(s.as_bytes()).map_err(|e| {
             exceptions::PyException::new_err(format!(
                 "Error while attempting to unpickle PostProcessor: {}",
-                e.to_string()
+                e
             ))
         })?;
         Ok(())


@@ -559,7 +559,7 @@ impl PyNormalizedStringRefMut {
     fn filter(&mut self, func: &PyAny) -> PyResult<()> {
         self.inner
-            .map_mut(|mut n| filter(&mut n, func))
+            .map_mut(|n| filter(n, func))
             .ok_or_else(PyNormalizedStringRefMut::destroyed_error)??;
         Ok(())
     }
@@ -573,7 +573,7 @@ impl PyNormalizedStringRefMut {
     fn map(&mut self, func: &PyAny) -> PyResult<()> {
         self.inner
-            .map_mut(|mut n| map(&mut n, func))
+            .map_mut(|n| map(n, func))
             .ok_or_else(PyNormalizedStringRefMut::destroyed_error)??;
         Ok(())
     }

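The change in this file is also reference hygiene: `map_mut` already hands the closure a mutable reference, so re-borrowing it with `&mut` (and the `mut` binding that requires) is redundant; Clippy reports this as a needless borrow. A minimal sketch under assumed types, not the actual tokenizers API:

```rust
fn shout(s: &mut String) {
    s.make_ascii_uppercase();
}

// Stand-in for a helper that hands its closure a mutable reference.
fn apply<F: FnOnce(&mut String)>(value: &mut String, f: F) {
    f(value);
}

fn main() {
    let mut text = String::from("hello");

    // Before (what Clippy flags): `n` is already `&mut String`, so `&mut n`
    // creates a `&mut &mut String` that only works through an extra deref,
    // and it forces the binding to be declared `mut`.
    apply(&mut text, |mut n| shout(&mut n));

    // After: pass the reference straight through.
    apply(&mut text, |n| shout(n));

    assert_eq!(text, "HELLO");
}
```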

@@ -98,6 +98,10 @@ def doc_pipeline_bert_tokenizer(data_dir):
     )
+# On MacOS Python 3.8+ the default was modified to `spawn`, we need `fork` in tests.
+mp.set_start_method("fork")
 def multiprocessing_with_parallelism(tokenizer, enabled: bool):
     """
     This helper can be used to test that disabling parallelism avoids dead locks when the