mirror of https://github.com/mii443/tokenizers.git
Cargo.toml fix number of keywords
@@ -7,7 +7,7 @@ homepage = "https://github.com/huggingface/tokenizers"
 repository = "https://github.com/huggingface/tokenizers"
 documentation = "https://docs.rs/tokenizers/"
 license = "Apache-2.0"
-keywords = ["text", "tokenizer", "tokenization", "NLP", "huggingface", "BPE", "WordPiece"]
+keywords = ["tokenizer", "NLP", "huggingface", "BPE", "WordPiece"]
 readme = "./README.md"
 description = """
 Provides an implementation of today's most used tokenizers,
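For context not stated on the page itself: crates.io accepts at most five keywords per crate, which is presumably the limit this commit brings the manifest back under by trimming the list from seven entries to five. Reassembled from the hunk above, the affected slice of the [package] section reads as follows after the commit (the description string is cut off in the diff, so only its first line is shown and its closing quotes are omitted):

homepage = "https://github.com/huggingface/tokenizers"
repository = "https://github.com/huggingface/tokenizers"
documentation = "https://docs.rs/tokenizers/"
license = "Apache-2.0"
keywords = ["tokenizer", "NLP", "huggingface", "BPE", "WordPiece"]
readme = "./README.md"
description = """
Provides an implementation of today's most used tokenizers,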