Handle special tokens

This commit is contained in:
Anthony MOI
2019-12-19 19:48:16 -05:00
parent 7f032b62df
commit a8d68d516d
2 changed files with 24 additions and 0 deletions

View File

@@ -233,6 +233,10 @@ impl Tokenizer {
Ok(self.tokenizer.add_tokens(&tokens))
}
fn add_special_tokens(&mut self, tokens: Vec<&str>) -> PyResult<usize> {
Ok(self.tokenizer.add_special_tokens(&tokens))
}
fn train(&mut self, trainer: &Trainer, files: Vec<String>) -> PyResult<()> {
trainer.trainer.execute(|trainer| {
if let Err(e) = self.tokenizer.train(trainer, files) {