mirror of
https://github.com/mii443/tokenizers.git
synced 2025-09-01 23:09:34 +00:00
Handle vocab size with added tokens
This commit is contained in:
@@ -36,9 +36,8 @@ impl Tokenizer {
         }
     }
 
-    #[getter]
-    fn get_vocab_size(&self) -> usize {
-        self.tokenizer.get_vocab_size()
+    fn get_vocab_size(&self, with_added_tokens: bool) -> usize {
+        self.tokenizer.get_vocab_size(with_added_tokens)
     }
 
     fn with_model(&mut self, model: &mut Model) -> PyResult<()> {
|
Reference in New Issue
Block a user