Mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-22 16:25:30 +00:00)

Commit: Python - Update CHANGELOG and bump to 0.9.2 for release
bindings/python/CHANGELOG.md

@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.9.2]
+
+### Fixed
+
+- [#464] Fix a problem with RobertaProcessing being deserialized as BertProcessing
+
 ## [0.9.1]
 
 ### Fixed
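The fix referenced above ([#464]) concerns the serialization round trip introduced in 0.9: a tokenizer saved with a `RobertaProcessing` post-processor could come back as `BertProcessing` when reloaded. A minimal sketch of that round trip, assuming the 0.9-era Python API (`Tokenizer.save`, `Tokenizer.from_file`, `to_str`); the empty `BPE` model and file name are illustrative, not part of this commit:

```python
import json

from tokenizers import Tokenizer
from tokenizers.models import BPE
from tokenizers.processors import RobertaProcessing

# Empty BPE model, used here only to carry the post-processor.
tok = Tokenizer(BPE())
tok.post_processor = RobertaProcessing(sep=("</s>", 2), cls=("<s>", 0))
tok.save("roberta-tok.json")

# Reload and re-serialize: before the fix the post-processor could silently
# deserialize as BertProcessing; after #464 its type survives the round trip.
reloaded = Tokenizer.from_file("roberta-tok.json")
data = json.loads(reloaded.to_str())
assert data["post_processor"]["type"] == "RobertaProcessing"
```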
@@ -243,6 +248,7 @@ delimiter (Works like `.split(delimiter)`)
 - Fix a bug with the IDs associated with added tokens.
 - Fix a bug that was causing crashes in Python 3.5
 
+[#464]: https://github.com/huggingface/tokenizers/pull/464
 [#459]: https://github.com/huggingface/tokenizers/pull/459
 [#420]: https://github.com/huggingface/tokenizers/pull/420
 [#417]: https://github.com/huggingface/tokenizers/pull/417
bindings/python/Cargo.lock (generated, 2 changes)
@@ -893,7 +893,7 @@ dependencies = [
 
 [[package]]
 name = "tokenizers-python"
-version = "0.9.1"
+version = "0.9.2"
 dependencies = [
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.79 (registry+https://github.com/rust-lang/crates.io-index)",
bindings/python/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "tokenizers-python"
-version = "0.9.1"
+version = "0.9.2"
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"
bindings/python/py_src/tokenizers/__init__.py

@@ -1,4 +1,4 @@
-__version__ = "0.9.1"
+__version__ = "0.9.2"
 
 from typing import Tuple, Union, Tuple, List
 from enum import Enum
bindings/python/setup.py

@@ -6,7 +6,7 @@ extras["testing"] = ["pytest"]
 
 setup(
     name="tokenizers",
-    version="0.9.1",
+    version="0.9.2",
     description="Fast and Customizable Tokenizers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
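Once released, the bump can be sanity-checked from an installed wheel; a trivial check (assuming `pip install tokenizers==0.9.2`):

```python
import tokenizers

# Version string bumped by this commit in __init__.py and setup.py.
assert tokenizers.__version__ == "0.9.2"
```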