Mirror of https://github.com/mii443/tokenizers.git,
synced 2025-08-22 16:25:30 +00:00.
Commit: "clean derive partial eq"
This commit is contained in:
@ -11,7 +11,7 @@ use std::collections::{HashMap, HashSet};
|
||||
/// like:
|
||||
/// - Whether they should only match single words
|
||||
/// - Whether to include any whitespace on its left or right
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct AddedToken {
|
||||
/// The content of the added token
|
||||
pub content: String,
|
||||
@ -91,17 +91,6 @@ impl std::hash::Hash for AddedToken {
|
||||
self.content.hash(state);
|
||||
}
|
||||
}
|
||||
impl std::cmp::PartialEq for AddedToken {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.content == other.content
|
||||
&& self.special == other.special
|
||||
&& self.lstrip == other.lstrip
|
||||
&& self.rstrip == other.rstrip
|
||||
&& self.normalized == other.normalized
|
||||
&& self.single_word == other.single_word
|
||||
}
|
||||
}
|
||||
// Marker impl: `PartialEq` above is a total equivalence relation (plain
// field-by-field comparison, no NaN-like partial cases), so `Eq` is sound.
impl std::cmp::Eq for AddedToken {}
|
||||
|
||||
/// An Aho-Corasick automaton paired with a `Vec<u32>` of per-pattern values
/// (presumably the token ids matched by each pattern — verify against usage).
type MatchingSet = (AhoCorasick, Vec<u32>);
|
||||
|
||||
|
Reference in New Issue
Block a user