mirror of
https://github.com/mii443/tokenizers.git
synced 2025-08-23 00:35:35 +00:00
* Bump pyo3 dependency versions * Fix deprecation warnings from pyo3 --------- Co-authored-by: Mike Lui <mikelui@meta.com>
24 lines
559 B
Rust
24 lines
559 B
Rust
use onig::Regex;
|
|
use pyo3::exceptions;
|
|
use pyo3::prelude::*;
|
|
|
|
/// Instantiate a new Regex with the given pattern
// Python-visible wrapper around an Oniguruma `Regex`, exposed to Python
// as `tokenizers.Regex`.
#[pyclass(module = "tokenizers", name = "Regex")]
pub struct PyRegex {
    // The compiled Oniguruma regex built from `pattern`.
    pub inner: Regex,
    // The original pattern source string, retained verbatim alongside the
    // compiled form (its consumers are elsewhere in the crate — not visible
    // in this chunk).
    pub pattern: String,
}
|
|
|
|
#[pymethods]
|
|
impl PyRegex {
|
|
#[new]
|
|
#[pyo3(text_signature = "(self, pattern)")]
|
|
fn new(s: &str) -> PyResult<Self> {
|
|
Ok(Self {
|
|
inner: Regex::new(s)
|
|
.map_err(|e| exceptions::PyException::new_err(e.description().to_owned()))?,
|
|
pattern: s.to_owned(),
|
|
})
|
|
}
|
|
}
|