Mirror of https://github.com/mii443/tokenizers.git
Using clippy 1.67 (#1167)
@ -322,11 +322,7 @@ impl AddedVocabulary {
|
|||||||
/// This method returns a list "splits", each of them being a pair of Offsets
|
/// This method returns a list "splits", each of them being a pair of Offsets
|
||||||
/// and an optional ID if it is an AddedToken.
|
/// and an optional ID if it is an AddedToken.
|
||||||
/// The list of splits cover the entire input string.
|
/// The list of splits cover the entire input string.
|
||||||
fn find_matches<'a>(
|
fn find_matches(&self, sentence: &str, split_re: &MatchingSet) -> Vec<(Option<u32>, Offsets)> {
|
||||||
&self,
|
|
||||||
sentence: &str,
|
|
||||||
split_re: &'a MatchingSet,
|
|
||||||
) -> Vec<(Option<u32>, Offsets)> {
|
|
||||||
if sentence.is_empty() {
|
if sentence.is_empty() {
|
||||||
return vec![(None, (0, 0))];
|
return vec![(None, (0, 0))];
|
||||||
}
|
}
|
||||||
|
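
The hunk above drops a named lifetime that serves no purpose: `'a` appears on only one parameter and never in the return type, so plain elision yields the same signature, and the now-shorter signature also fits on one line. This is consistent with clippy's `needless_lifetimes` lint. Below is a minimal, self-contained sketch of the pattern; the `Matcher` type and the `find_before`/`find_after` names are hypothetical stand-ins for the crate's `MatchingSet` and `find_matches`, used here only for illustration.

    // Hypothetical stand-in for the crate's MatchingSet; illustration only.
    struct Matcher;

    // Before: the named lifetime ties `matcher` to nothing in the return
    // type, so clippy flags it as `clippy::needless_lifetimes`.
    #[allow(clippy::needless_lifetimes)]
    fn find_before<'a>(sentence: &str, _matcher: &'a Matcher) -> Vec<(Option<u32>, (usize, usize))> {
        if sentence.is_empty() {
            return vec![(None, (0, 0))];
        }
        Vec::new()
    }

    // After: identical behavior with the lifetime elided, matching the
    // shape of the change in the commit.
    fn find_after(sentence: &str, _matcher: &Matcher) -> Vec<(Option<u32>, (usize, usize))> {
        if sentence.is_empty() {
            return vec![(None, (0, 0))];
        }
        Vec::new()
    }

    fn main() {
        let m = Matcher;
        // Both signatures compile to the same behavior.
        assert_eq!(find_before("", &m), find_after("", &m));
    }

Because the lifetime was never observable to callers, the change is purely cosmetic: no call site or borrow-checking behavior differs between the two forms.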