First upload version 0.0.1
node_modules/node-llama-cpp/dist/evaluator/LlamaModel/utils/TokenAttributes.d.ts (generated, vendored, new normal file, 29 lines added)
@@ -0,0 +1,29 @@
import { Token } from "../../../types.js";
export declare const enum TokenAttribute {
    undefined = 0,
    unknown = 1,
    unused = 2,
    normal = 4,
    control = 8,// SPECIAL
    userDefined = 16,
    byte = 32,
    normalized = 64,
    lstrip = 128,
    rstrip = 256,
    singleWord = 512
}
export declare class TokenAttributes {
    readonly token: Token;
    private constructor();
    get undefined(): boolean;
    get unknown(): boolean;
    get unused(): boolean;
    get normal(): boolean;
    get control(): boolean;
    get userDefined(): boolean;
    get byte(): boolean;
    get normalized(): boolean;
    get lstrip(): boolean;
    get rstrip(): boolean;
    get singleWord(): boolean;
}
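The declaration above mirrors llama.cpp's `llama_token_attr`: every member except `undefined` is a distinct power of two, so a token's attributes can be packed into one integer and tested independently with a bitwise AND. A minimal, self-contained TypeScript sketch of that convention (the local `Attr` enum below only mirrors the values shown here; it is not an import from node-llama-cpp):

// Local mirror of the TokenAttribute bit values from the declaration above.
enum Attr {
    undefined = 0,
    unknown = 1,
    unused = 2,
    normal = 4,
    control = 8,
    userDefined = 16,
    byte = 32,
    normalized = 64,
    lstrip = 128,
    rstrip = 256,
    singleWord = 512
}

// A raw attribute word combining two flags: 8 | 16 === 24.
const raw: number = Attr.control | Attr.userDefined;

// A flag is present exactly when masking with it returns the flag unchanged.
const isControl = (raw & Attr.control) === Attr.control; // true
const isByte = (raw & Attr.byte) === Attr.byte;          // false

console.log({isControl, isByte});

This is the same masked comparison that `_hasAttribute` performs in the compiled TokenAttributes.js further down in this commit.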
node_modules/node-llama-cpp/dist/evaluator/LlamaModel/utils/TokenAttributes.js (generated, vendored, new normal file, 65 lines added)
@@ -0,0 +1,65 @@
// updated against `enum llama_token_attr` from `llama.h`
export var TokenAttribute;
(function (TokenAttribute) {
    TokenAttribute[TokenAttribute["undefined"] = 0] = "undefined";
    TokenAttribute[TokenAttribute["unknown"] = 1] = "unknown";
    TokenAttribute[TokenAttribute["unused"] = 2] = "unused";
    TokenAttribute[TokenAttribute["normal"] = 4] = "normal";
    TokenAttribute[TokenAttribute["control"] = 8] = "control";
    TokenAttribute[TokenAttribute["userDefined"] = 16] = "userDefined";
    TokenAttribute[TokenAttribute["byte"] = 32] = "byte";
    TokenAttribute[TokenAttribute["normalized"] = 64] = "normalized";
    TokenAttribute[TokenAttribute["lstrip"] = 128] = "lstrip";
    TokenAttribute[TokenAttribute["rstrip"] = 256] = "rstrip";
    TokenAttribute[TokenAttribute["singleWord"] = 512] = "singleWord";
})(TokenAttribute || (TokenAttribute = {}));
export class TokenAttributes {
    token;
    /** @internal */ _attributes;
    constructor(token, attributes) {
        this.token = token;
        this._attributes = attributes;
    }
    get undefined() {
        return this._attributes === TokenAttribute.undefined;
    }
    get unknown() {
        return this._hasAttribute(TokenAttribute.unknown);
    }
    get unused() {
        return this._hasAttribute(TokenAttribute.unused);
    }
    get normal() {
        return this._hasAttribute(TokenAttribute.normal);
    }
    get control() {
        return this._hasAttribute(TokenAttribute.control);
    }
    get userDefined() {
        return this._hasAttribute(TokenAttribute.userDefined);
    }
    get byte() {
        return this._hasAttribute(TokenAttribute.byte);
    }
    get normalized() {
        return this._hasAttribute(TokenAttribute.normalized);
    }
    get lstrip() {
        return this._hasAttribute(TokenAttribute.lstrip);
    }
    get rstrip() {
        return this._hasAttribute(TokenAttribute.rstrip);
    }
    get singleWord() {
        return this._hasAttribute(TokenAttribute.singleWord);
    }
    /** @internal */
    _hasAttribute(attribute) {
        return (this._attributes & attribute) === attribute;
    }
    /** @internal */
    static _create(token, attributes) {
        return new TokenAttributes(token, attributes);
    }
}
//# sourceMappingURL=TokenAttributes.js.map
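One detail worth noting in the compiled class: the `undefined` getter uses strict equality rather than `_hasAttribute`, because `undefined` is 0 and `(bits & 0) === 0` holds for every value, so a mask test could not distinguish "no attributes set" from anything else. A small standalone sketch of that pattern (the `Flags` class below is illustrative only and not part of node-llama-cpp):

// Illustrative stand-in for the check pattern used by TokenAttributes above.
class Flags {
    constructor(private readonly bits: number) {}

    // A value of 0 cannot be detected with a mask, so it needs strict equality.
    get isUndefined(): boolean {
        return this.bits === 0;
    }

    // Any non-zero flag: set exactly when masking returns the flag itself.
    has(flag: number): boolean {
        return (this.bits & flag) === flag;
    }
}

const f = new Flags(8 | 16);           // control | userDefined
console.log(f.isUndefined);            // false
console.log(f.has(8), f.has(32));      // true false
console.log(new Flags(0).isUndefined); // true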
node_modules/node-llama-cpp/dist/evaluator/LlamaModel/utils/TokenAttributes.js.map (generated, vendored, new normal file, 1 line added)
@@ -0,0 +1 @@
{"version":3,"file":"TokenAttributes.js","sourceRoot":"","sources":["../../../../src/evaluator/LlamaModel/utils/TokenAttributes.ts"],"names":[],"mappings":"AAEA,yDAAyD;AACzD,MAAM,CAAN,IAAkB,cAYjB;AAZD,WAAkB,cAAc;IAC5B,6DAAa,CAAA;IACb,yDAAgB,CAAA;IAChB,uDAAe,CAAA;IACf,uDAAe,CAAA;IACf,yDAAgB,CAAA;IAChB,kEAAoB,CAAA;IACpB,oDAAa,CAAA;IACb,gEAAmB,CAAA;IACnB,yDAAe,CAAA;IACf,yDAAe,CAAA;IACf,iEAAmB,CAAA;AACvB,CAAC,EAZiB,cAAc,KAAd,cAAc,QAY/B;AAED,MAAM,OAAO,eAAe;IACR,KAAK,CAAQ;IAC7B,gBAAgB,CAAkB,WAAW,CAAiB;IAE9D,YAAoB,KAAY,EAAE,UAA0B;QACxD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;IAClC,CAAC;IAED,IAAW,SAAS;QAChB,OAAO,IAAI,CAAC,WAAW,KAAK,cAAc,CAAC,SAAS,CAAC;IACzD,CAAC;IAED,IAAW,OAAO;QACd,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;IACtD,CAAC;IAED,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC;IAED,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC;IAED,IAAW,OAAO;QACd,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;IACtD,CAAC;IAED,IAAW,WAAW;QAClB,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;IAC1D,CAAC;IAED,IAAW,IAAI;QACX,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;IACnD,CAAC;IAED,IAAW,UAAU;QACjB,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;IACzD,CAAC;IAED,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC;IAED,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC;IAED,IAAW,UAAU;QACjB,OAAO,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;IACzD,CAAC;IAED,gBAAgB;IACR,aAAa,CAAC,SAAyB;QAC3C,OAAO,CAAC,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC,KAAK,SAAS,CAAC;IACxD,CAAC;IAED,gBAAgB;IACT,MAAM,CAAC,OAAO,CAAC,KAAY,EAAE,UAA0B;QAC1D,OAAO,IAAI,eAAe,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;IAClD,CAAC;CACJ"}