Preparing for 0.11.6 release.
bindings/node/package.json
@@ -1,6 +1,6 @@
 {
   "name": "tokenizers",
-  "version": "0.8.2",
+  "version": "0.8.3",
   "description": "",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
bindings/python/CHANGELOG.md
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.11.6]
+
+- [#919] Fixing single_word AddedToken. (regression from 0.11.2)
+- [#916] Deserializing faster `added_tokens` by loading them in batch.
+
 ## [0.11.5]
 
 - [#895] Build `python 3.10` wheels.
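The [#919] entry concerns the `single_word` flag on `AddedToken`, which restricts an added token to matching only at word boundaries; 0.11.2 had regressed that behavior. A minimal sketch of the Python API in question (the one-entry `WordLevel` vocab and the sample strings are illustrative, not from the commit):

from tokenizers import Tokenizer, AddedToken
from tokenizers.models import WordLevel
from tokenizers.pre_tokenizers import Whitespace

# Throwaway tokenizer with a one-entry vocab, just to exercise AddedToken.
tokenizer = Tokenizer(WordLevel({"[UNK]": 0}, unk_token="[UNK]"))
tokenizer.pre_tokenizer = Whitespace()

# single_word=True: "dog" may only match as a standalone word.
tokenizer.add_tokens([AddedToken("dog", single_word=True)])

print(tokenizer.encode("dog barks").tokens)  # expected: ['dog', '[UNK]']
print(tokenizer.encode("dogma").tokens)      # expected: ['[UNK]'], no match inside a word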
bindings/python/CHANGELOG.md
@@ -355,6 +360,8 @@ delimiter (Works like `.split(delimiter)`)
 - Fix a bug that was causing crashes in Python 3.5
 
 
+[#919]: https://github.com/huggingface/tokenizers/pull/919
+[#916]: https://github.com/huggingface/tokenizers/pull/916
 [#895]: https://github.com/huggingface/tokenizers/pull/895
 [#884]: https://github.com/huggingface/tokenizers/pull/884
 [#882]: https://github.com/huggingface/tokenizers/pull/882
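The [#916] change speeds up deserialization of tokenizers that carry many `added_tokens` by inserting them into the vocabulary in one batch instead of one at a time. A rough way to observe the load path it optimizes (the file name and token count below are arbitrary, and the `WordLevel` setup is only a stand-in):

import time
from tokenizers import Tokenizer, AddedToken
from tokenizers.models import WordLevel

# Build a tokenizer whose JSON carries a large added_tokens section.
tok = Tokenizer(WordLevel({"[UNK]": 0}, unk_token="[UNK]"))
tok.add_tokens([AddedToken(f"[SPECIAL_{i}]") for i in range(5000)])
tok.save("many_added_tokens.json")

# Tokenizer.from_file is the load path that the batching speeds up.
start = time.perf_counter()
Tokenizer.from_file("many_added_tokens.json")
print(f"loaded in {time.perf_counter() - start:.3f}s")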
bindings/python/py_src/tokenizers/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.11.0"
+__version__ = "0.11.6"
 
 from typing import Tuple, Union, Tuple, List
 from enum import Enum
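This is the version string the package reports at runtime, so after installing this release a quick check looks like:

import tokenizers

print(tokenizers.__version__)  # "0.11.6" once this release is installed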
bindings/python/setup.py
@@ -7,7 +7,7 @@ extras["docs"] = ["sphinx", "sphinx_rtd_theme", "setuptools_rust"]
 
 setup(
     name="tokenizers",
-    version="0.11.5",
+    version="0.11.6",
     description="Fast and Customizable Tokenizers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
tokenizers/CHANGELOG.md
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.11.3]
+
+- [#919] Fixing single_word AddedToken. (regression from 0.11.2)
+- [#916] Deserializing faster `added_tokens` by loading them in batch.
+
 ## [0.11.2]
 
 - [#884] Fixing bad deserialization following inclusion of a default for Punctuation
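[#884], cited by the 0.11.2 entry above, fixed deserialization after a default `behavior` value was added to the `Punctuation` pre-tokenizer. A sketch of the save/load round-trip that was affected, via the Python bindings (the `WordLevel` setup is illustrative; any model would do):

from tokenizers import Tokenizer
from tokenizers.models import WordLevel
from tokenizers.pre_tokenizers import Punctuation

# The pre-tokenizer default is the point here, not the model.
tok = Tokenizer(WordLevel({"[UNK]": 0}, unk_token="[UNK]"))
tok.pre_tokenizer = Punctuation()  # `behavior` left at its default

# Round-tripping through JSON was the step the fix repaired.
restored = Tokenizer.from_str(tok.to_str())
print(restored.pre_tokenizer.pre_tokenize_str("Hello, world!"))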
tokenizers/CHANGELOG.md
@@ -135,6 +140,8 @@ advised, but that's not the question)
   split up in multiple bytes
 - [#174]: The `LongestFirst` truncation strategy had a bug
 
+[#919]: https://github.com/huggingface/tokenizers/pull/919
+[#916]: https://github.com/huggingface/tokenizers/pull/916
 [#884]: https://github.com/huggingface/tokenizers/pull/884
 [#882]: https://github.com/huggingface/tokenizers/pull/882
 [#868]: https://github.com/huggingface/tokenizers/pull/868
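For context on the [#174] entry above: `LongestFirst` is the truncation strategy that, for a pair of sequences, repeatedly trims whichever one is currently longer until the pair fits `max_length`. A usage sketch of the corresponding Python API (the tokenizer file path and inputs are placeholders):

from tokenizers import Tokenizer

# Placeholder path: any serialized tokenizer will do.
tokenizer = Tokenizer.from_file("tokenizer.json")

# longest_first trims the longer of the two sequences step by step.
tokenizer.enable_truncation(max_length=8, strategy="longest_first")
enc = tokenizer.encode("a rather long first sequence", "short second")
print(len(enc.ids))  # no more than 8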
tokenizers/Cargo.toml
@@ -2,7 +2,7 @@
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"
 name = "tokenizers"
-version = "0.11.2"
+version = "0.11.3"
 homepage = "https://github.com/huggingface/tokenizers"
 repository = "https://github.com/huggingface/tokenizers"
 documentation = "https://docs.rs/tokenizers/"