Mirror of https://github.com/mii443/tokenizers.git (synced 2025-08-23 08:45:38 +00:00)
Node - Fix new linting errors
@@ -21,7 +21,7 @@ export class Encoding {
    */
   static merge(encodings: Encoding[], growingOffsets?: boolean): Encoding {
     const mergedRaw = mergeEncodings(
-      encodings.map(e => e.rawEncoding),
+      encodings.map((e) => e.rawEncoding),
       growingOffsets
     );
 
@@ -82,7 +82,7 @@ export class Encoding {
 
     return (this._overflowing = this._rawEncoding
       .getOverflowing()
-      .map(e => new Encoding(e)));
+      .map((e) => new Encoding(e)));
   }
 
   /**
@@ -218,7 +218,7 @@ export class Encoding {
       "_specialTokensMask",
       "_tokens",
       "_typeIds",
-      "_wordIndexes"
+      "_wordIndexes",
     ]) {
       delete this[prop as keyof this];
     }
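
The two kinds of fixes above match common formatting rules: parentheses around single arrow-function parameters and a trailing comma after the last element of a multiline literal (both consistent with Prettier's defaults since 2.0). As a minimal sketch, the hypothetical ESLint configuration below, using only core ESLint rules, would flag exactly the lines this commit changes; the repository's actual lint setup is not shown here and may differ.

// .eslintrc.js -- hypothetical sketch; the repo's real config may differ.
module.exports = {
  rules: {
    // Require parentheses around single arrow-function parameters:
    //   e => e.rawEncoding   becomes   (e) => e.rawEncoding
    "arrow-parens": ["error", "always"],
    // Require a trailing comma after the last item of multiline literals:
    //   "_wordIndexes"   becomes   "_wordIndexes",
    "comma-dangle": ["error", "always-multiline"],
  },
};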