extern crate tokenizers as tk;

use pyo3::prelude::*;

/// Python-visible wrapper around the whitespace tokenizer from the core crate.
#[pyclass]
struct WhitespaceTokenizer {}

#[pymethods]
impl WhitespaceTokenizer {
    /// Tokenize `s` with the core crate's whitespace tokenizer and return the tokens.
    #[staticmethod]
    fn tokenize(s: String) -> PyResult<Vec<String>> {
        Ok(tk::WhitespaceTokenizer::tokenize(&s))
    }
}

/// Entry point of the `tokenizers` Python extension module: registers the class.
#[pymodule]
fn tokenizers(py: Python, m: &PyModule) -> PyResult<()> {
    m.add_class::<WhitespaceTokenizer>()?;
    Ok(())
}
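For context, here is a minimal standalone sketch of what a whitespace tokenizer in the spirit of `tk::WhitespaceTokenizer::tokenize` could look like on the Rust side. This is an illustrative assumption, not the actual implementation from the core `tokenizers` crate, whose code is not shown in this file.

// Hypothetical sketch only: splits on Unicode whitespace and returns owned tokens.
// The real tk::WhitespaceTokenizer lives in the core crate and may differ.
pub struct SketchWhitespaceTokenizer;

impl SketchWhitespaceTokenizer {
    /// Split the input on whitespace and collect the pieces as owned strings.
    pub fn tokenize(s: &str) -> Vec<String> {
        s.split_whitespace().map(str::to_owned).collect()
    }
}

fn main() {
    let tokens = SketchWhitespaceTokenizer::tokenize("Hello rusty world!");
    assert_eq!(tokens, vec!["Hello", "rusty", "world!"]);
}

Once the extension is built (for example with setuptools-rust or maturin, neither of which is shown here), the binding above would be reachable from Python as `tokenizers.WhitespaceTokenizer.tokenize(...)`.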