mirror of
https://github.com/mii443/tokenizers.git
synced 2025-08-23 00:35:35 +00:00
New clippy comments?
This commit is contained in:
@@ -170,9 +170,7 @@ impl PySequence {
         for n in normalizers.iter() {
             let normalizer: PyRef<PyNormalizer> = n.extract()?;
             match &normalizer.normalizer {
-                PyNormalizerWrapper::Sequence(inner) => {
-                    sequence.extend(inner.iter().map(|i| i.clone()))
-                }
+                PyNormalizerWrapper::Sequence(inner) => sequence.extend(inner.iter().cloned()),
                 PyNormalizerWrapper::Wrapped(inner) => sequence.push(inner.clone()),
             }
         }
|
Reference in New Issue
Block a user