Bump version for release
bindings/python/Cargo.lock (generated)
@@ -555,7 +555,7 @@ dependencies = [
 
 [[package]]
 name = "tokenizers"
-version = "0.0.11"
+version = "0.0.12"
 dependencies = [
  "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "indicatif 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -571,10 +571,10 @@ dependencies = [
 
 [[package]]
 name = "tokenizers-python"
-version = "0.0.11"
+version = "0.0.12"
 dependencies = [
  "pyo3 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokenizers 0.0.11",
+ "tokenizers 0.0.12",
 ]
 
 [[package]]
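The lockfile is generated, so a bump like this one typically edits only the two Cargo.toml files by hand and refreshes Cargo.lock with cargo (e.g. cargo update -p tokenizers); the hunks above show the result: both [[package]] entries and the "tokenizers 0.0.11" dependency string inside tokenizers-python move to 0.0.12 together. A minimal consistency check over the refreshed lockfile, as a sketch and not part of the repo, assuming Python 3.11+ for the standard-library tomllib parser:

# Sketch, not part of the repo: parse the regenerated lockfile and
# confirm both package entries landed on the same new version.
# Assumes Python 3.11+ (tomllib) and running from the repo root.
import tomllib

NEW = "0.0.12"

with open("bindings/python/Cargo.lock", "rb") as f:
    lock = tomllib.load(f)

versions = {pkg["name"]: pkg["version"] for pkg in lock["package"]}
assert versions["tokenizers"] == NEW, versions
assert versions["tokenizers-python"] == NEW, versions

# In this old lockfile format, dependencies are recorded as plain
# "name version (source)" strings, as the hunks above show.
bindings = next(p for p in lock["package"] if p["name"] == "tokenizers-python")
assert any(d.startswith(f"tokenizers {NEW}") for d in bindings.get("dependencies", []))
print(f"Cargo.lock is consistent at {NEW}")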

bindings/python/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "tokenizers-python"
-version = "0.0.11"
+version = "0.0.12"
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"
 

bindings/python/setup.py
@@ -3,7 +3,7 @@ from setuptools_rust import Binding, RustExtension
 
 setup(
     name="tokenizers",
-    version="0.0.11",
+    version="0.0.12",
     description="Fast and Customizable Tokenizers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",

bindings/python/tokenizers/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.0.11"
+__version__ = "0.0.12"
 
 from .tokenizers import Tokenizer
 from .tokenizers import decoders
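Since the Python module exposes its version as __version__, the bump gives a one-line smoke test after building and installing the new wheel. A minimal sketch, assuming the 0.0.12 wheel is installed in the current environment:

# Minimal post-release smoke test (a sketch, not part of the repo):
# the installed wheel should report the bumped version, and the
# re-exports shown in the hunk above should import cleanly.
import tokenizers
from tokenizers import Tokenizer, decoders

assert tokenizers.__version__ == "0.0.12", tokenizers.__version__
print("tokenizers", tokenizers.__version__, "imported OK")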

tokenizers/Cargo.toml
@@ -2,7 +2,7 @@
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"
 name = "tokenizers"
-version = "0.0.11"
+version = "0.0.12"
 homepage = "https://github.com/huggingface/tokenizers"
 repository = "https://github.com/huggingface/tokenizers"
 documentation = "https://docs.rs/tokenizers/"
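All five files above carry the same version string, and missing one of them is the classic failure mode of a manual bump. A hypothetical pre-tag check (the script and the idea of running it from the repo root are assumptions, not part of this commit) that flags any file still on the old version:

# Hypothetical pre-tag check, not part of the repo: grep the
# version-bearing files touched by this commit for the old string.
from pathlib import Path

OLD, NEW = "0.0.11", "0.0.12"
FILES = [  # paths as they appear in the hunks above
    "bindings/python/Cargo.lock",
    "bindings/python/Cargo.toml",
    "bindings/python/setup.py",
    "bindings/python/tokenizers/__init__.py",
    "tokenizers/Cargo.toml",
]

stale = [f for f in FILES if OLD in Path(f).read_text(encoding="utf-8")]
if stale:
    raise SystemExit(f"still on {OLD}: {', '.join(stale)}")
print(f"all version strings read {NEW}")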