mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-23 00:35:35 +00:00)
Python readme
bindings/python/Cargo.lock (generated): 6 lines changed
@@ -523,15 +523,15 @@ dependencies = [
 [[package]]
 name = "tokenizers"
-version = "0.1.0"
+version = "0.0.1"
 dependencies = [
  "pyo3 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokenizers-lib 0.1.0",
+ "tokenizers-lib 0.0.1",
 ]

 [[package]]
 name = "tokenizers-lib"
-version = "0.1.0"
+version = "0.0.1"
 dependencies = [
  "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "onig 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",

bindings/python/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "tokenizers"
-version = "0.1.0"
+version = "0.0.1"
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"

bindings/python/README.md (new file): 10 lines added
@@ -0,0 +1,10 @@
+### Python Bindings
+
+```
+python3.7 -m venv .env
+source .env/bin/activate
+pip install maturin
+maturin build
+
+python example.py
+```
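The README added above stops at `maturin build`, which compiles the bindings into a wheel; for the final `python example.py` step to resolve the `tokenizers` import, that wheel generally has to be installed into the active virtualenv first (for example by `pip install`-ing the wheel maturin produces, or with `maturin develop`, which builds and installs in one step). A minimal, hypothetical smoke test for that setup, not part of this commit:

```
# Sketch only (assumes the wheel built by `maturin build` has been
# installed into the .env virtualenv): check that the compiled
# extension module loads, and from where.
import tokenizers

# The module repr includes the file it was loaded from; for a correctly
# installed build this should point inside .env/.../site-packages.
print(tokenizers)
```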
bindings/python/example.py (new file): 3 lines added
@@ -0,0 +1,3 @@
+from tokenizers import WhitespaceTokenizer
+
+print(WhitespaceTokenizer.tokenize("Hey man!"))
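example.py exercises a single static call. The sketch below is a slightly fuller, hypothetical use of the same API, looping over a few inputs; it assumes `WhitespaceTokenizer.tokenize` takes a `str` and returns a printable sequence of tokens, which the one-liner above implies but does not show.

```
# Sketch only; assumes WhitespaceTokenizer.tokenize(str) returns a
# printable sequence of tokens, as implied by example.py above.
from tokenizers import WhitespaceTokenizer

for sentence in ["Hey man!", "How are you doing today?"]:
    tokens = WhitespaceTokenizer.tokenize(sentence)
    print(sentence, "->", tokens)
```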
tokenizers/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "tokenizers-lib"
-version = "0.1.0"
+version = "0.0.1"
 authors = ["Anthony MOI <m.anthony.moi@gmail.com>"]
 edition = "2018"
