Mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-23 00:35:35 +00:00)
Update Python bindings with new interface
@@ -75,11 +75,7 @@ impl Encoding {
     #[args(growing_offsets = true)]
     fn merge(encodings: Vec<PyRef<Encoding>>, growing_offsets: bool) -> Encoding {
         tk::tokenizer::Encoding::merge(
-            encodings
-                .into_iter()
-                .map(|e| e.encoding.clone())
-                .collect::<Vec<_>>()
-                .as_slice(),
+            encodings.into_iter().map(|e| e.encoding.clone()),
             growing_offsets,
         )
         .into()
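The removed lines collected the encodings into a Vec and handed merge a slice; the new call site passes the iterator directly, which implies tk::tokenizer::Encoding::merge now accepts some IntoIterator of Encoding values. A self-contained sketch of that style of API, using a stand-in Encoding type rather than the real tokenizers struct (the crate's actual bounds and merge logic may differ):

// Stand-in sketch of an iterator-based merge, assuming the new interface
// takes any IntoIterator of encodings instead of a slice. `Encoding` here
// is a toy type, not the real tokenizers struct.
#[derive(Clone, Debug, Default)]
struct Encoding {
    ids: Vec<u32>,
    offsets: Vec<(usize, usize)>,
}

impl Encoding {
    fn merge<I>(encodings: I, growing_offsets: bool) -> Encoding
    where
        I: IntoIterator<Item = Encoding>,
    {
        let mut merged = Encoding::default();
        for e in encodings {
            // When growing_offsets is set, shift each sub-encoding's offsets
            // so they continue from the end of what was merged so far.
            let shift = if growing_offsets {
                merged.offsets.last().map(|&(_, end)| end).unwrap_or(0)
            } else {
                0
            };
            merged.ids.extend(e.ids);
            merged
                .offsets
                .extend(e.offsets.into_iter().map(|(s, t)| (s + shift, t + shift)));
        }
        merged
    }
}

fn main() {
    let a = Encoding { ids: vec![1, 2], offsets: vec![(0, 1), (1, 2)] };
    let b = Encoding { ids: vec![3], offsets: vec![(0, 1)] };
    // The iterator is passed directly; no collect()/as_slice() round trip.
    let merged = Encoding::merge([a, b], true);
    println!("{:?}", merged);
}

The practical upshot of the interface change mirrors the diff above: callers no longer need the intermediate collect::<Vec<_>>() and as_slice() step before calling merge.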