mirror of
https://github.com/mii443/tokenizers.git
synced 2025-08-23 00:35:35 +00:00
Python - Add Encoding length
This commit is contained in:
@@ -4,7 +4,7 @@ use crate::error::PyError;
|
||||
use pyo3::exceptions;
|
||||
use pyo3::prelude::*;
|
||||
use pyo3::types::*;
|
||||
use pyo3::{PyMappingProtocol, PyObjectProtocol};
|
||||
use pyo3::{PyMappingProtocol, PyObjectProtocol, PySequenceProtocol};
|
||||
use tk::tokenizer::PaddingDirection;
|
||||
|
||||
fn get_range(item: PyObject, max_len: usize) -> PyResult<std::ops::Range<usize>> {
|
||||
@@ -133,6 +133,13 @@ impl PyObjectProtocol for Encoding {
|
||||
}
|
||||
}
|
||||
|
||||
#[pyproto]
|
||||
impl PySequenceProtocol for Encoding {
|
||||
fn __len__(self) -> PyResult<usize> {
|
||||
Ok(self.encoding.get_ids().len())
|
||||
}
|
||||
}
|
||||
|
||||
#[pymethods]
|
||||
impl Encoding {
|
||||
#[getter]
|
||||
|
Reference in New Issue
Block a user