Mirror of https://github.com/mii443/tokenizers.git, synced 2025-08-23 00:35:35 +00:00
New clippy comments?
@@ -181,10 +181,9 @@ impl PyEncoding {
                 }
             }
         }

-        Ok(self
-            .encoding
-            .pad(length, pad_id, pad_type_id, pad_token, direction))
+        self.encoding
+            .pad(length, pad_id, pad_type_id, pad_token, direction);
+        Ok(())
     }

     #[args(kwargs = "**")]
@@ -200,7 +199,7 @@ impl PyEncoding {
                 }
             }
         }

-        Ok(self.encoding.truncate(max_length, stride))
+        self.encoding.truncate(max_length, stride);
+        Ok(())
     }
 }
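Both hunks make the same change: `Encoding::pad` and `Encoding::truncate` return `()`, so wrapping the call directly in `Ok(...)` passes a unit value as an argument, which newer clippy releases flag (presumably the `clippy::unit_arg` lint; the commit title "New clippy comments?" does not name it). The sketch below shows the before/after shape of the fix with placeholder types; only the method names `pad`/`truncate` and the `self.encoding` field mirror the diff, the rest is illustrative and not the actual tokenizers code.

// Minimal sketch of the pattern fixed in both hunks; stand-in types only.

struct Encoding;

impl Encoding {
    // Mutating methods that return `()`, like `Encoding::pad` / `Encoding::truncate`.
    fn pad(&mut self, _length: usize) {}
    fn truncate(&mut self, _max_length: usize, _stride: usize) {}
}

struct PyEncoding {
    encoding: Encoding,
}

impl PyEncoding {
    // Before: the unit return value of `pad` is passed straight into `Ok(...)`,
    // which clippy warns about (a `()` value used as a function argument).
    #[allow(clippy::unit_arg)]
    fn pad_before(&mut self, length: usize) -> Result<(), String> {
        Ok(self.encoding.pad(length))
    }

    // After: run the side effect as its own statement, then return `Ok(())`.
    fn pad_after(&mut self, length: usize) -> Result<(), String> {
        self.encoding.pad(length);
        Ok(())
    }

    // The truncate hunk is the same change in one-line form.
    fn truncate_after(&mut self, max_length: usize, stride: usize) -> Result<(), String> {
        self.encoding.truncate(max_length, stride);
        Ok(())
    }
}

fn main() {
    let mut e = PyEncoding { encoding: Encoding };
    e.pad_before(4).unwrap();
    e.pad_after(4).unwrap();
    e.truncate_after(8, 0).unwrap();
}

Separating the side effect from the `Ok(())` return keeps the unit value out of the call expression, which also reads more clearly when the method call spans several lines, as it does for `pad` here.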