Remove Tokenizer::normalize

This is a legacy function that no longer really makes sense and is getting difficult to maintain, so we remove it.
Anthony MOI
2020-08-18 17:52:25 -04:00
committed by Anthony MOI
parent 18e3799b1d
commit 504d8c85d8
8 changed files with 25 additions and 104 deletions


@@ -509,15 +509,6 @@ impl PyTokenizer {
         })
     }
 
-    fn normalize(&self, sentence: &str) -> PyResult<String> {
-        ToPyResult(
-            self.tokenizer
-                .normalize(sentence)
-                .map(|s| s.get().to_owned()),
-        )
-        .into()
-    }
-
     /// Input can be:
     ///     encode("A single sequence")
     ///     encode("A sequence", "And its pair")