Mirror of https://github.com/mii443/tokenizers.git (synced 2025-12-07 13:18:31 +00:00)
Python release CI (#2)

bindings/python/MANIFEST.in (new file, 4 lines added)
@@ -0,0 +1,4 @@
+include Cargo.toml
+recursive-include src *
+recursive-include tokenizers-lib *
+recursive-exclude tokenizers-lib/target *

bindings/python/build-sdist.sh (new file, 9 lines added)
@@ -0,0 +1,9 @@
+#!/bin/bash
+set -ex
+
+# Create a symlink for tokenizers-lib
+ln -sf ../../tokenizers tokenizers-lib
+# Modify cargo.toml to include this symlink
+sed -i 's/\.\.\/\.\.\/tokenizers/\.\/tokenizers-lib/' Cargo.toml
+# Build the source distribution
+python setup.py sdist
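
A rough usage sketch, not part of the commit: assuming the script is run from bindings/python with the sibling tokenizers crate checked out (note that it rewrites Cargo.toml in place), the source distribution can be built and inspected like this:

cd bindings/python
bash build-sdist.sh
# The MANIFEST.in rules above should pull Cargo.toml, src/ and the
# symlinked tokenizers-lib (minus its target/ directory) into the tarball:
tar -tzf dist/tokenizers-*.tar.gz | head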

Wheel-building script (file path not shown in this excerpt):

@@ -4,9 +4,7 @@ set -ex
 curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain nightly-2019-11-01 -y
 export PATH="$HOME/.cargo/bin:$PATH"

 cd /io/bindings/python

-for PYBIN in /opt/python/{cp35-cp35m,cp36-cp36m,cp37-cp37m}/bin; do
+for PYBIN in /opt/python/{cp35-cp35m,cp36-cp36m,cp37-cp37m,cp38-cp38}/bin; do
     export PYTHON_SYS_EXECUTABLE="$PYBIN/python"

     "${PYBIN}/pip" install -U setuptools-rust

@@ -16,3 +14,10 @@ done
 for whl in dist/*.whl; do
     auditwheel repair "$whl" -w dist/
 done
+
+# Keep only manylinux wheels
+rm dist/*-linux_*
+
+# Upload wheels
+/opt/python/cp37-cp37m/bin/pip install -U awscli
+/opt/python/cp37-cp37m/bin/python -m awscli s3 sync --exact-timestamps ./dist "s3://tokenizers-releases/python/$GITHUB_SHA"
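
This script is evidently meant to run inside a manylinux container with the repository mounted at /io. A minimal invocation sketch; the image tag and the script name build-wheels.sh are assumptions, not shown in this diff:

# Run the wheel build in a manylinux container; GITHUB_SHA and the AWS
# credentials are passed through from the CI environment.
docker run --rm \
    -v "$(pwd)":/io \
    -e GITHUB_SHA -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY \
    quay.io/pypa/manylinux1_x86_64 \
    bash /io/bindings/python/build-wheels.sh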

setup.py (file path not shown in this excerpt):

@@ -5,6 +5,9 @@ setup(
     name="tokenizers",
     version="0.0.2",
     description="Fast and Customizable Tokenizers",
+    long_description=open("README.md", "r", encoding="utf-8").read(),
+    long_description_content_type="text/markdown",
+    keywords="NLP tokenizer BPE transformer deep learning",
     author="Anthony MOI",
     author_email="anthony@huggingface.co",
     url="https://github.com/huggingface/tokenizers",
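
Since the new metadata points PyPI at a Markdown long_description, a quick way to confirm it will render, a suggestion rather than part of this commit, is to run twine's metadata check on the built artifacts:

cd bindings/python
pip install -U twine
python setup.py sdist
twine check dist/*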