Mirror of https://github.com/mii443/tokenizers.git, synced 2025-09-01 23:09:34 +00:00
Python - expose get_vocab on Tokenizer
@@ -4,6 +4,7 @@ use pyo3::exceptions;
 use pyo3::prelude::*;
 use pyo3::types::*;
 use pyo3::PyObjectProtocol;
+use std::collections::HashMap;
 
 use super::decoders::Decoder;
 use super::encoding::Encoding;
@@ -87,20 +88,13 @@ impl Tokenizer {
             .map_or(0, |p| p.as_ref().added_tokens(is_pair)))
     }
 
-    #[args(kwargs = "**")]
-    fn get_vocab_size(&self, kwargs: Option<&PyDict>) -> PyResult<usize> {
-        let mut with_added_tokens = true;
-
-        if let Some(kwargs) = kwargs {
-            for (key, value) in kwargs {
-                let key: &str = key.extract()?;
-                match key {
-                    "with_added_tokens" => with_added_tokens = value.extract()?,
-                    _ => println!("Ignored unknown kwarg option {}", key),
-                }
-            }
-        }
+    #[args(with_added_tokens = true)]
+    fn get_vocab(&self, with_added_tokens: bool) -> PyResult<HashMap<String, u32>> {
+        Ok(self.tokenizer.get_vocab(with_added_tokens))
+    }
 
+    #[args(with_added_tokens = true)]
+    fn get_vocab_size(&self, with_added_tokens: bool) -> PyResult<usize> {
         Ok(self.tokenizer.get_vocab_size(with_added_tokens))
     }
 
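For context, a minimal sketch of how the newly exposed methods would be called from Python. This is not part of the commit: it assumes a recent release of the `tokenizers` package where an empty BPE model can be constructed directly, and the `[CUSTOM]` added token is purely illustrative.

from tokenizers import Tokenizer
from tokenizers.models import BPE

# Assumption: real use would load or train a model; an empty BPE model is
# constructed here only to have a Tokenizer instance to call into.
tokenizer = Tokenizer(BPE())
tokenizer.add_tokens(["[CUSTOM]"])  # one added token on top of the (empty) model vocab

# Both methods default to with_added_tokens=True, mirroring
# #[args(with_added_tokens = true)] on the Rust side.
vocab = tokenizer.get_vocab()                              # token -> id mapping, added tokens included
print(tokenizer.get_vocab_size())                          # counts added tokens too
print(tokenizer.get_vocab_size(with_added_tokens=False))   # base model vocabulary only
print(vocab)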