words -> word_ids & sequences -> sequence_ids
@@ -149,7 +149,7 @@ export class Encoding {
       return this._wordIndexes;
     }
 
-    return (this._wordIndexes = this._rawEncoding.getWords());
+    return (this._wordIndexes = this._rawEncoding.getWordIds());
   }
 
   get sequenceIndexes(): (number | undefined)[] {
@@ -157,7 +157,7 @@ export class Encoding {
       return this._sequenceIndexes;
     }
 
-    return (this._sequenceIndexes = this._rawEncoding.getSequences());
+    return (this._sequenceIndexes = this._rawEncoding.getSequenceIds());
   }
 
   /**
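For context, a minimal sketch of the memoized getter pattern these hunks edit, reconstructed from the context lines above. The if guard, its condition, and the comments are assumptions for illustration; only the return statements and the getter signature are taken from the diff itself.

  // Sketch only: the guard condition is assumed, not copied from the repository.
  get wordIndexes(): (number | undefined)[] {
    if (this._wordIndexes !== undefined) {
      // Return the cached result on repeat accesses.
      return this._wordIndexes;
    }

    // After this commit, the raw binding method is getWordIds() instead of getWords().
    return (this._wordIndexes = this._rawEncoding.getWordIds());
  }

Under this reading, callers keep using encoding.wordIndexes (and sequenceIndexes) unchanged; only the underlying raw-encoding methods are renamed to getWordIds() and getSequenceIds().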