mirror of
https://github.com/mii443/tokenizers.git
synced 2025-09-01 14:59:20 +00:00
Python - Fix cases where a str was expected instead of an AddedToken
This commit is contained in:
@ -3,6 +3,7 @@ extern crate tokenizers as tk;
|
||||
use pyo3::exceptions;
|
||||
use pyo3::prelude::*;
|
||||
use pyo3::types::*;
|
||||
use pyo3::PyObjectProtocol;
|
||||
|
||||
use super::decoders::Decoder;
|
||||
use super::encoding::Encoding;
|
||||
@ -45,6 +46,19 @@ impl AddedToken {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
#[pyproto]
|
||||
impl PyObjectProtocol for AddedToken {
|
||||
fn __str__(&'p self) -> PyResult<&'p str> {
|
||||
Ok(&self.token.content)
|
||||
}
|
||||
|
||||
fn __repr__(&self) -> PyResult<String> {
|
||||
Ok(format!(
|
||||
"AddedToken(\"{}\", rstrip={}, lstrip={}, single_word={})",
|
||||
self.token.content, self.token.rstrip, self.token.lstrip, self.token.single_word
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[pyclass(dict)]
|
||||
pub struct Tokenizer {
|
||||
|
Reference in New Issue
Block a user