diff --git a/bindings/python/src/tokenizer.rs b/bindings/python/src/tokenizer.rs
index a491a036..0e1c62ec 100644
--- a/bindings/python/src/tokenizer.rs
+++ b/bindings/python/src/tokenizer.rs
@@ -663,16 +663,19 @@ impl PyTokenizer {
     }
 
     /// Get the underlying vocabulary
-    /// 
+    ///
     /// Returns:
     ///     :obj:`Dict[int, AddedToken]`: The vocabulary
    #[pyo3(signature = ())]
    #[pyo3(text_signature = "(self)")]
     fn get_added_tokens_decoder(&self) -> HashMap<u32, PyAddedToken> {
-        self.tokenizer.get_added_tokens_decoder().into_iter().map(|(key, value)| (key, value.into())).collect()
+        self.tokenizer
+            .get_added_tokens_decoder()
+            .into_iter()
+            .map(|(key, value)| (key, value.into()))
+            .collect()
     }
-
 
     /// Get the size of the underlying vocabulary
     ///
     /// Args: