mirror of https://github.com/mii443/tokenizers.git
synced 2025-08-22 16:25:30 +00:00
New version. (#1082)
* New version. The actual release will happen *before* PyO3 0.17.2, because the tests were run before then.
* manylinux2014 is now necessary with Rust 1.64 (see the note after the workflow diff below).
.github/workflows/python-release.yml
@@ -16,8 +16,8 @@ jobs:
   create_wheels_manylinux:
     runs-on: ubuntu-latest
-    name: Create wheels for manylinux2010
-    container: quay.io/pypa/manylinux2010_x86_64
+    name: Create wheels for manylinux2014
+    container: quay.io/pypa/manylinux2014_x86_64
     steps:
       # v1 is required when using manylinux2010
       - uses: actions/checkout@v1
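Note: wheels built in the manylinux2014 container carry a manylinux2014 platform tag. A minimal sketch, not part of the commit, of how one could confirm that tag, assuming the third-party `packaging` library is installed and using a hypothetical wheel filename:

# Parse a wheel filename and print its name, version, and platform tags.
from packaging.utils import parse_wheel_filename

name, version, build, tags = parse_wheel_filename(
    "tokenizers-0.13.2-cp310-cp310-manylinux2014_x86_64.whl"  # hypothetical filename
)
print(name, version)                # tokenizers 0.13.2
print([t.platform for t in tags])   # ['manylinux2014_x86_64']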
bindings/node/CHANGELOG.md

@@ -1,3 +1,7 @@
+## [0.13.1]
+
+- [#1072] Fixing Roberta type ids.
+
 ## [0.13.0]

 - [#1008] `Decoder` is now a composable trait, but without being backward incompatible
@@ -165,6 +169,7 @@ The files must now be provided first when calling `tokenizer.train(files, trainer)`
 - Actually add special tokens in tokenizers implementations ([acef252](https://github.com/huggingface/tokenizers/commit/acef252dacc43adc414175cfc325668ad1488753))

+[#1072]: https://github.com/huggingface/tokenizers/pull/1072
 [#956]: https://github.com/huggingface/tokenizers/pull/956
 [#1008]: https://github.com/huggingface/tokenizers/pull/1008
 [#1009]: https://github.com/huggingface/tokenizers/pull/1009
bindings/node/native/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "node"
-version = "0.13.1"
+version = "0.13.2"
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 license = "Apache-2.0"
 build = "build.rs"
bindings/node/package.json

@@ -1,6 +1,6 @@
 {
   "name": "tokenizers",
-  "version": "0.13.1",
+  "version": "0.13.2",
   "description": "",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
bindings/python/CHANGELOG.md

@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.13.1]
+
+- [#1072] Fixing Roberta type ids.
+
 ## [0.13.0]

 - [#956] PyO3 version upgrade
@@ -386,6 +390,7 @@ delimiter (Works like `.split(delimiter)`)
 - Fix a bug that was causing crashes in Python 3.5

+[#1072]: https://github.com/huggingface/tokenizers/pull/1072
 [#956]: https://github.com/huggingface/tokenizers/pull/956
 [#1008]: https://github.com/huggingface/tokenizers/pull/1008
 [#1009]: https://github.com/huggingface/tokenizers/pull/1009
bindings/python/py_src/tokenizers/__init__.py

@@ -1,4 +1,4 @@
-__version__ = "0.13.1.dev0"
+__version__ = "0.13.2.dev0"

 from enum import Enum
 from typing import List, Tuple, Union
bindings/python/setup.py

@@ -9,7 +9,7 @@ extras["dev"] = extras["testing"]

 setup(
     name="tokenizers",
-    version="0.13.1.dev0",
+    version="0.13.2.dev0",
     description="Fast and Customizable Tokenizers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
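A quick sanity check, not part of the commit: after installing from this tree, the Python bindings report the bumped version string at runtime.

# Print the version string set in __init__.py above.
import tokenizers

print(tokenizers.__version__)  # expected: 0.13.2.dev0 for a dev checkout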
tokenizers/CHANGELOG.md

@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.13.1]
+
+- [#1072] Fixing Roberta type ids.
+
 ## [0.13.0]

 - [#1009] `unstable_wasm` feature to support building on Wasm (it's unstable !)
@@ -166,6 +170,7 @@ split up in multiple bytes
 - [#174]: The `LongestFirst` truncation strategy had a bug

+[#1072]: https://github.com/huggingface/tokenizers/pull/1072
 [#956]: https://github.com/huggingface/tokenizers/pull/956
 [#1008]: https://github.com/huggingface/tokenizers/pull/1008
 [#1009]: https://github.com/huggingface/tokenizers/pull/1009
tokenizers/Cargo.toml

@@ -2,7 +2,7 @@
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"
 name = "tokenizers"
-version = "0.13.1"
+version = "0.13.2"
 homepage = "https://github.com/huggingface/tokenizers"
 repository = "https://github.com/huggingface/tokenizers"
 documentation = "https://docs.rs/tokenizers/"