* Upgrade to PyO3 0.22

* Fix Python stubs

* Remove the name arg from the PyModel::save Python signature

---------

Co-authored-by: Dimitris Iliopoulos <diliopoulos@fb.com>
Author: Dimitris Iliopoulos
Authored: 2024-11-01 05:17:23 -04:00
Committed by: GitHub
Parent: 41e0eaa561
Commit: 6ade8c2d21

11 changed files with 51 additions and 51 deletions
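
Most of the hunks below are the mechanical part of the PyO3 0.21 → 0.22 upgrade: `FromPyObject` implementations move from the GIL-ref `extract(&PyAny)` method to `extract_bound(&Bound<'_, PyAny>)`. A minimal, self-contained sketch of that pattern, using a made-up `Meters` wrapper type that is not part of tokenizers:

```rust
use pyo3::prelude::*;

// Hypothetical wrapper type used only to illustrate the migration.
struct Meters(f64);

// PyO3 0.21 (GIL-ref API), shown for comparison:
//
//     impl FromPyObject<'_> for Meters {
//         fn extract(ob: &PyAny) -> PyResult<Self> {
//             Ok(Meters(ob.extract::<f64>()?))
//         }
//     }

// PyO3 0.22 (Bound API): implement `extract_bound` against `Bound<'_, PyAny>`.
impl FromPyObject<'_> for Meters {
    fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult<Self> {
        Ok(Meters(ob.extract::<f64>()?))
    }
}
```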

View File

@@ -14,8 +14,8 @@ serde = { version = "1.0", features = [ "rc", "derive" ]}
 serde_json = "1.0"
 libc = "0.2"
 env_logger = "0.11"
-pyo3 = { version = "0.21" }
+pyo3 = { version = "0.22", features = ["py-clone"] }
-numpy = "0.21"
+numpy = "0.22"
 ndarray = "0.15"
 itertools = "0.12"
@@ -24,7 +24,7 @@ path = "../../tokenizers"
 [dev-dependencies]
 tempfile = "3.10"
-pyo3 = { version = "0.21", features = ["auto-initialize"] }
+pyo3 = { version = "0.22", features = ["auto-initialize", "py-clone"] }

 [features]
 defaut = ["pyo3/extension-module"]
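
A note on the new `py-clone` feature (background, not part of the diff): as of PyO3 0.22, `Py<T>` and `PyObject` only implement `Clone` when the `py-clone` feature is enabled, so crates that derive `Clone` on structs holding Python objects, as these bindings do, have to opt in. A minimal sketch under that assumption:

```rust
use pyo3::prelude::*;

// With PyO3 0.22 this derive only compiles when the crate enables the
// `py-clone` feature, which restores `impl Clone for Py<T>`.
// (Illustrative type, not taken from this repository.)
#[derive(Clone)]
struct HoldsPyObject {
    inner: PyObject, // PyObject = Py<PyAny>
}
```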

View File

@@ -88,9 +88,9 @@ impl PyDecoder {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                self.decoder = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                self.decoder = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle Decoder: {}",
                         e
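
The same `__setstate__` change repeats in most of the files below: instead of extracting the deprecated GIL-ref `&PyBytes` and calling `.as_bytes()`, the pickled payload is extracted directly as a `&[u8]` slice and handed to `serde_json::from_slice`. A standalone sketch of the pattern, using a hypothetical `PickleMe` class rather than the real tokenizers types:

```rust
use pyo3::exceptions;
use pyo3::prelude::*;

// Hypothetical pickle-able wrapper, mirroring the pattern in this commit.
#[pyclass]
struct PickleMe {
    value: String,
}

#[pymethods]
impl PickleMe {
    fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
        // PyO3 0.22: extract the bytes payload as a plain slice; no GIL-ref
        // `&PyBytes` and no `.as_bytes()` call needed.
        let bytes: &[u8] = state.extract(py)?;
        self.value = String::from_utf8(bytes.to_vec())
            .map_err(|e| exceptions::PyValueError::new_err(e.to_string()))?;
        Ok(())
    }
}
```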

View File

@@ -41,9 +41,9 @@ impl PyEncoding {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                self.encoding = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                self.encoding = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle Encoding: {}",
                         e

View File

@@ -109,9 +109,9 @@ impl PyModel {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                self.model = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                self.model = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle Model: {}",
                         e
@@ -181,7 +181,7 @@ impl PyModel {
     ///
     /// Returns:
     ///     :obj:`List[str]`: The list of saved files
-    #[pyo3(text_signature = "(self, folder, prefix)")]
+    #[pyo3(signature = (folder, prefix=None, name=None), text_signature = "(self, folder, prefix)")]
     fn save<'a>(
         &self,
         py: Python<'_>,
@@ -835,7 +835,7 @@ pub struct PyUnigram {}
 #[pymethods]
 impl PyUnigram {
     #[new]
-    #[pyo3(text_signature = "(self, vocab, unk_id, byte_fallback)")]
+    #[pyo3(signature = (vocab=None, unk_id=None, byte_fallback=None), text_signature = "(self, vocab, unk_id, byte_fallback)")]
     fn new(
         vocab: Option<Vec<(String, f64)>>,
         unk_id: Option<usize>,
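
The two signature hunks above follow from another 0.22 change (my reading, not stated in the commit message): PyO3 0.22 deprecates treating trailing `Option<T>` parameters as implicitly optional, so the defaults now have to be spelled out in an explicit `#[pyo3(signature = ...)]` attribute. A hypothetical free function showing the shape of the fix:

```rust
use pyo3::prelude::*;

// PyO3 0.22 warns when trailing `Option<T>` arguments rely on an implicit
// default; the signature attribute makes the default explicit.
// (`save_stub` is a made-up function, not part of tokenizers.)
#[pyfunction]
#[pyo3(signature = (folder, prefix=None))]
fn save_stub(folder: String, prefix: Option<String>) -> Vec<String> {
    let prefix = prefix.unwrap_or_default();
    vec![format!("{folder}/{prefix}vocab.json")]
}
```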

View File

@@ -118,9 +118,9 @@ impl PyNormalizer {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                self.normalizer = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                self.normalizer = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle Normalizer: {}",
                         e

View File

@@ -122,9 +122,9 @@ impl PyPreTokenizer {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                let unpickled = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                let unpickled = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle PreTokenizer: {}",
                         e

View File

@@ -82,9 +82,9 @@ impl PyPostProcessor {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                self.processor = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                self.processor = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle PostProcessor: {}",
                         e
@@ -272,7 +272,7 @@ impl From<PySpecialToken> for SpecialToken {
 }

 impl FromPyObject<'_> for PySpecialToken {
-    fn extract(ob: &PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult<Self> {
         if let Ok(v) = ob.extract::<(String, u32)>() {
             Ok(Self(v.into()))
         } else if let Ok(v) = ob.extract::<(u32, String)>() {
@@ -312,7 +312,7 @@ impl From<PyTemplate> for Template {
 }

 impl FromPyObject<'_> for PyTemplate {
-    fn extract(ob: &PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult<Self> {
         if let Ok(s) = ob.extract::<&str>() {
             Ok(Self(
                 s.try_into().map_err(exceptions::PyValueError::new_err)?,

View File

@@ -2,7 +2,7 @@ use serde::Serialize;
 use std::collections::{hash_map::DefaultHasher, HashMap};
 use std::hash::{Hash, Hasher};

-use numpy::{npyffi, PyArray1};
+use numpy::{npyffi, PyArray1, PyArrayMethods};
 use pyo3::class::basic::CompareOp;
 use pyo3::exceptions;
 use pyo3::intern;
@@ -156,7 +156,7 @@ impl PyAddedToken {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyDict>(py) {
+        match state.downcast_bound::<PyDict>(py) {
             Ok(state) => {
                 for (key, value) in state {
                     let key: &str = key.extract()?;
@@ -172,7 +172,7 @@ impl PyAddedToken {
                 }
                 Ok(())
             }
-            Err(e) => Err(e),
+            Err(e) => Err(e.into()),
         }
     }
@@ -263,10 +263,10 @@ impl PyAddedToken {
 struct TextInputSequence<'s>(tk::InputSequence<'s>);
 impl<'s> FromPyObject<'s> for TextInputSequence<'s> {
-    fn extract(ob: &'s PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'s, PyAny>) -> PyResult<Self> {
         let err = exceptions::PyTypeError::new_err("TextInputSequence must be str");
-        if let Ok(s) = ob.downcast::<PyString>() {
-            Ok(Self(s.to_string_lossy().into()))
+        if let Ok(s) = ob.extract::<String>() {
+            Ok(Self(s.into()))
         } else {
             Err(err)
         }
@@ -280,7 +280,7 @@ impl<'s> From<TextInputSequence<'s>> for tk::InputSequence<'s> {
 struct PyArrayUnicode(Vec<String>);
 impl FromPyObject<'_> for PyArrayUnicode {
-    fn extract(ob: &PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult<Self> {
         // SAFETY Making sure the pointer is a valid numpy array requires calling numpy C code
         if unsafe { npyffi::PyArray_Check(ob.py(), ob.as_ptr()) } == 0 {
             return Err(exceptions::PyTypeError::new_err("Expected an np.array"));
@@ -291,8 +291,8 @@ impl FromPyObject<'_> for PyArrayUnicode {
         let desc = (*arr).descr;
         (
             (*desc).type_num,
-            (*desc).elsize as usize,
+            npyffi::PyDataType_ELSIZE(ob.py(), desc) as usize,
-            (*desc).alignment as usize,
+            npyffi::PyDataType_ALIGNMENT(ob.py(), desc) as usize,
             (*arr).data,
             (*arr).nd,
             (*arr).flags,
@@ -347,7 +347,7 @@ impl From<PyArrayUnicode> for tk::InputSequence<'_> {
 struct PyArrayStr(Vec<String>);
 impl FromPyObject<'_> for PyArrayStr {
-    fn extract(ob: &PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'_, PyAny>) -> PyResult<Self> {
         let array = ob.downcast::<PyArray1<PyObject>>()?;
         let seq = array
             .readonly()
@@ -370,7 +370,7 @@ impl From<PyArrayStr> for tk::InputSequence<'_> {
 struct PreTokenizedInputSequence<'s>(tk::InputSequence<'s>);
 impl<'s> FromPyObject<'s> for PreTokenizedInputSequence<'s> {
-    fn extract(ob: &'s PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'s, PyAny>) -> PyResult<Self> {
         if let Ok(seq) = ob.extract::<PyArrayUnicode>() {
             return Ok(Self(seq.into()));
         }
@@ -400,17 +400,17 @@ impl<'s> From<PreTokenizedInputSequence<'s>> for tk::InputSequence<'s> {
 struct TextEncodeInput<'s>(tk::EncodeInput<'s>);
 impl<'s> FromPyObject<'s> for TextEncodeInput<'s> {
-    fn extract(ob: &'s PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'s, PyAny>) -> PyResult<Self> {
         if let Ok(i) = ob.extract::<TextInputSequence>() {
             return Ok(Self(i.into()));
         }
         if let Ok((i1, i2)) = ob.extract::<(TextInputSequence, TextInputSequence)>() {
             return Ok(Self((i1, i2).into()));
         }
-        if let Ok(arr) = ob.extract::<Vec<&PyAny>>() {
+        if let Ok(arr) = ob.downcast::<PyList>() {
             if arr.len() == 2 {
-                let first = arr[0].extract::<TextInputSequence>()?;
+                let first = arr.get_item(0)?.extract::<TextInputSequence>()?;
-                let second = arr[1].extract::<TextInputSequence>()?;
+                let second = arr.get_item(1)?.extract::<TextInputSequence>()?;
                 return Ok(Self((first, second).into()));
             }
         }
@@ -426,7 +426,7 @@ impl<'s> From<TextEncodeInput<'s>> for tk::tokenizer::EncodeInput<'s> {
 }
 struct PreTokenizedEncodeInput<'s>(tk::EncodeInput<'s>);
 impl<'s> FromPyObject<'s> for PreTokenizedEncodeInput<'s> {
-    fn extract(ob: &'s PyAny) -> PyResult<Self> {
+    fn extract_bound(ob: &Bound<'s, PyAny>) -> PyResult<Self> {
         if let Ok(i) = ob.extract::<PreTokenizedInputSequence>() {
             return Ok(Self(i.into()));
         }
@@ -434,10 +434,10 @@ impl<'s> FromPyObject<'s> for PreTokenizedEncodeInput<'s> {
         {
             return Ok(Self((i1, i2).into()));
         }
-        if let Ok(arr) = ob.extract::<Vec<&PyAny>>() {
+        if let Ok(arr) = ob.downcast::<PyList>() {
             if arr.len() == 2 {
-                let first = arr[0].extract::<PreTokenizedInputSequence>()?;
+                let first = arr.get_item(0)?.extract::<PreTokenizedInputSequence>()?;
-                let second = arr[1].extract::<PreTokenizedInputSequence>()?;
+                let second = arr.get_item(1)?.extract::<PreTokenizedInputSequence>()?;
                 return Ok(Self((first, second).into()));
             }
         }
@@ -498,9 +498,9 @@ impl PyTokenizer {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                self.tokenizer = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                self.tokenizer = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle Tokenizer: {}",
                         e
@@ -1030,7 +1030,7 @@ impl PyTokenizer {
     fn encode_batch(
         &self,
         py: Python<'_>,
-        input: Vec<&PyAny>,
+        input: Bound<'_, PyList>,
         is_pretokenized: bool,
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
@@ -1091,7 +1091,7 @@ impl PyTokenizer {
     fn encode_batch_fast(
         &self,
         py: Python<'_>,
-        input: Vec<&PyAny>,
+        input: Bound<'_, PyList>,
         is_pretokenized: bool,
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
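
With the GIL-ref API gone, argument types like `Vec<&PyAny>` are also replaced by `Bound` smart pointers; `encode_batch` and `encode_batch_fast` now receive the batch as a typed `Bound<'_, PyList>`. A small sketch of a `#[pymethods]` entry point written in that style (the `LenCounter` class is invented for illustration):

```rust
use pyo3::prelude::*;
use pyo3::types::PyList;

// Made-up class showing a method that takes its input as a Bound PyList,
// mirroring the new encode_batch signature shape.
#[pyclass]
struct LenCounter;

#[pymethods]
impl LenCounter {
    fn lengths(&self, input: Bound<'_, PyList>) -> PyResult<Vec<usize>> {
        input
            .iter()
            .map(|item| item.len()) // length of each element, e.g. a str
            .collect()
    }
}
```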

View File

@@ -55,9 +55,9 @@ impl PyTrainer {
     }

     fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> {
-        match state.extract::<&PyBytes>(py) {
+        match state.extract::<&[u8]>(py) {
             Ok(s) => {
-                let unpickled = serde_json::from_slice(s.as_bytes()).map_err(|e| {
+                let unpickled = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
                         "Error while attempting to unpickle PyTrainer: {}",
                         e

View File

@@ -60,7 +60,7 @@ pub enum PyRange<'s> {
     #[pyo3(annotation = "Tuple[uint, uint]")]
     Range(usize, usize),
     #[pyo3(annotation = "slice")]
-    Slice(&'s PySlice),
+    Slice(Bound<'s, PySlice>),
 }
 impl PyRange<'_> {
     pub fn to_range(&self, max_len: usize) -> PyResult<std::ops::Range<usize>> {
@@ -83,7 +83,7 @@ impl PyRange<'_> {
             }
             PyRange::Range(s, e) => Ok(*s..*e),
             PyRange::Slice(s) => {
-                let r = s.indices(max_len as std::os::raw::c_long)?;
+                let r = s.indices(max_len.try_into()?)?;
                 Ok(r.start as usize..r.stop as usize)
             }
         }
@@ -94,7 +94,7 @@ impl PyRange<'_> {
 pub struct PySplitDelimiterBehavior(pub SplitDelimiterBehavior);
 impl FromPyObject<'_> for PySplitDelimiterBehavior {
-    fn extract(obj: &PyAny) -> PyResult<Self> {
+    fn extract_bound(obj: &Bound<'_, PyAny>) -> PyResult<Self> {
         let s = obj.extract::<&str>()?;
         Ok(Self(match s {

View File

@@ -56,7 +56,7 @@ fn tokenize(pretok: &mut PreTokenizedString, func: &Bound<'_, PyAny>) -> PyResul
     ToPyResult(pretok.tokenize(|normalized| {
         let output = func.call((normalized.get(),), None)?;
         Ok(output
-            .extract::<&PyList>()?
+            .extract::<Bound<PyList>>()?
             .into_iter()
             .map(|obj| Ok(Token::from(obj.extract::<PyToken>()?)))
             .collect::<PyResult<Vec<_>>>()?)
@@ -69,7 +69,7 @@ fn tokenize(pretok: &mut PreTokenizedString, func: &Bound<'_, PyAny>) -> PyResul
 #[derive(Clone)]
 pub struct PyOffsetReferential(OffsetReferential);
 impl FromPyObject<'_> for PyOffsetReferential {
-    fn extract(obj: &PyAny) -> PyResult<Self> {
+    fn extract_bound(obj: &Bound<'_, PyAny>) -> PyResult<Self> {
         let s = obj.extract::<&str>()?;
         Ok(Self(match s {
@@ -85,7 +85,7 @@ impl FromPyObject<'_> for PyOffsetReferential {
 #[derive(Clone)]
 pub struct PyOffsetType(OffsetType);
 impl FromPyObject<'_> for PyOffsetType {
-    fn extract(obj: &PyAny) -> PyResult<Self> {
+    fn extract_bound(obj: &Bound<'_, PyAny>) -> PyResult<Self> {
         let s = obj.extract::<&str>()?;
         Ok(Self(match s {