Update python bindings

This commit is contained in:
Anthony MOI
2019-11-21 11:55:07 -05:00
parent 6853e6c904
commit c28a83cdc4
2 changed files with 33 additions and 2 deletions

View File

@ -43,7 +43,7 @@ Namespaces are one honking great idea -- let's do more of those!
tok_p = GPT2Tokenizer.from_pretrained('gpt2')
-tok_r = Tokenizer.bpe_from_files(args.vocab, args.merges, pre_tokenizer="ByteLevel")
+tok_r = Tokenizer.bpe_from_files(args.vocab, args.merges, pre_tokenizer="ByteLevel", decoder="ByteLevel")
def tokenize_r():
# return [ tok_r.encode(sentence) for sentence in text]
@ -66,4 +66,7 @@ encoded_p = tokenize_p()
end = time.time()
print(f"Transformer tokenizer took: {end - start} sec")
-assert([ [ token.id for token in sentence] for sentence in encoded_r ] == encoded_p)
+ids_r = [ [ token.id for token in sentence ] for sentence in encoded_r ]
+assert(ids_r == encoded_p)
+print(f"Decoded sentences: {tok_r.decode_batch(ids_r)}")