Python - Update CHANGELOG and bump to 0.9.2 for release

Anthony MOI
2020-10-15 10:14:58 -04:00
parent 2ccd16bf5c
commit 91f602f744
5 changed files with 10 additions and 4 deletions

@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.9.2]
+### Fixed
+- [#464] Fix a problem with RobertaProcessing being deserialized as BertProcessing
+
 ## [0.9.1]
 ### Fixed
@@ -243,6 +248,7 @@ delimiter (Works like `.split(delimiter)`)
 - Fix a bug with the IDs associated with added tokens.
 - Fix a bug that was causing crashes in Python 3.5
 
+[#464]: https://github.com/huggingface/tokenizers/pull/464
 [#459]: https://github.com/huggingface/tokenizers/pull/459
 [#420]: https://github.com/huggingface/tokenizers/pull/420
 [#417]: https://github.com/huggingface/tokenizers/pull/417
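The [#464] entry concerns the post-processor type surviving a serialization round trip. A minimal sketch of how one might check this behaviour with the Python bindings, assuming the public tokenizers API; the empty BPE model and the special-token IDs below are hypothetical, for illustration only:

from tokenizers import Tokenizer
from tokenizers.models import BPE
from tokenizers.processors import RobertaProcessing

# Hypothetical setup: an empty BPE model with a RoBERTa-style post-processor.
tokenizer = Tokenizer(BPE())
tokenizer.post_processor = RobertaProcessing(sep=("</s>", 2), cls=("<s>", 0))

# Round-trip through the JSON representation (the same format used by
# Tokenizer.save / Tokenizer.from_file).
reloaded = Tokenizer.from_str(tokenizer.to_str())

# Before 0.9.2 the reloaded post-processor could come back as BertProcessing;
# with the #464 fix it should remain a RobertaProcessing.
print(type(reloaded.post_processor).__name__)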

@@ -893,7 +893,7 @@ dependencies = [
 [[package]]
 name = "tokenizers-python"
-version = "0.9.1"
+version = "0.9.2"
 dependencies = [
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.79 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1,6 +1,6 @@
 [package]
 name = "tokenizers-python"
-version = "0.9.1"
+version = "0.9.2"
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"

@@ -1,4 +1,4 @@
-__version__ = "0.9.1"
+__version__ = "0.9.2"
 from typing import Tuple, Union, Tuple, List
 from enum import Enum

@@ -6,7 +6,7 @@ extras["testing"] = ["pytest"]
 setup(
     name="tokenizers",
-    version="0.9.1",
+    version="0.9.2",
     description="Fast and Customizable Tokenizers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",