mirror of https://github.com/mii443/tokenizers.git
make content writable in python
@@ -55,6 +55,8 @@ use crate::utils::{MaybeSizedIterator, PyBufferedIterator};
 ///         text. For example, with the added token ``"yesterday"``, and a normalizer in charge of
 ///         lowercasing the text, the token could be extract from the input ``"I saw a lion
 ///         Yesterday"``.
+///     special (:obj:`bool`, defaults to :obj:`False` with :meth:`~tokenizers.Tokenizer.add_tokens` and :obj:`False` with :meth:`~tokenizers.Tokenizer.add_special_tokens`):
+///         Defines whether this token should be skipped when decoding.
 ///
 #[pyclass(dict, module = "tokenizers", name = "AddedToken")]
 pub struct PyAddedToken {
@@ -179,6 +181,12 @@ impl PyAddedToken {
         &self.content
     }
 
+    /// Set the content of this :obj:`AddedToken`
+    #[setter]
+    fn set_content(&mut self, content: String) {
+        self.content = content;
+    }
+
     /// Get the value of the :obj:`rstrip` option
     #[getter]
     fn get_rstrip(&self) -> bool {