First upload version 0.0.1

13 node_modules/node-llama-cpp/dist/utils/DisposeGuard.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,13 @@
export declare class DisposeGuard {
    constructor(parentDisposeGuards?: DisposeGuard[]);
    addParentDisposeGuard(parent: DisposeGuard): void;
    removeParentDisposeGuard(parent: DisposeGuard): void;
    acquireDisposeLock(): Promise<void>;
    createPreventDisposalHandle(ignoreAwaitingDispose?: boolean): DisposalPreventionHandle;
}
export declare class DisposalPreventionHandle {
    private constructor();
    dispose(): void;
    [Symbol.dispose](): void;
    get disposed(): boolean;
}

120 node_modules/node-llama-cpp/dist/utils/DisposeGuard.js (generated, vendored, Normal file)
@@ -0,0 +1,120 @@
import { DisposedError } from "lifecycle-utils";
export class DisposeGuard {
    /** @internal */ _preventionHandles = 0;
    /** @internal */ _awaitingDisposeLockCallbacks = [];
    /** @internal */ _disposeActivated = false;
    /** @internal */ _parentDisposeGuardsLocks = new Map();
    constructor(parentDisposeGuards = []) {
        for (const parent of parentDisposeGuards)
            this._parentDisposeGuardsLocks.set(parent, null);
    }
    addParentDisposeGuard(parent) {
        if (this._parentDisposeGuardsLocks.has(parent))
            return;
        this._parentDisposeGuardsLocks.set(parent, null);
        if (this._preventionHandles > 0)
            this._parentDisposeGuardsLocks.set(parent, parent.createPreventDisposalHandle(true));
    }
    removeParentDisposeGuard(parent) {
        const parentLock = this._parentDisposeGuardsLocks.get(parent);
        if (parentLock != null) {
            parentLock.dispose();
            this._parentDisposeGuardsLocks.delete(parent);
        }
    }
    async acquireDisposeLock() {
        return new Promise((accept) => {
            if (this._preventionHandles > 0)
                this._awaitingDisposeLockCallbacks.push(accept);
            else {
                this._disposeActivated = true;
                accept();
            }
        });
    }
    createPreventDisposalHandle(ignoreAwaitingDispose = false) {
        if (this._isDisposeActivated() || (!ignoreAwaitingDispose && this._hasAwaitingDisposeLocks()))
            throw new DisposedError();
        this._preventionHandles++;
        try {
            this._updateParentDisposeGuardLocks();
        }
        catch (err) {
            this._preventionHandles--;
            if (this._preventionHandles === 0)
                this._updateParentDisposeGuardLocks();
            throw err;
        }
        return DisposalPreventionHandle._create(() => {
            this._preventionHandles--;
            this._activateLocksIfNeeded();
            this._updateParentDisposeGuardLocks(true);
        });
    }
    /** @internal */
    _isDisposeActivated() {
        if (this._disposeActivated)
            return true;
        return [...this._parentDisposeGuardsLocks.keys()].some((parent) => parent._isDisposeActivated());
    }
    /** @internal */
    _activateLocksIfNeeded() {
        if (this._preventionHandles > 0)
            return;
        while (this._awaitingDisposeLockCallbacks.length > 0) {
            this._disposeActivated = true;
            this._awaitingDisposeLockCallbacks.shift()();
        }
    }
    /** @internal */
    _updateParentDisposeGuardLocks(onlyAllowRemoval = false) {
        if (this._preventionHandles === 0) {
            for (const parent of this._parentDisposeGuardsLocks.keys()) {
                const parentLock = this._parentDisposeGuardsLocks.get(parent);
                if (parentLock == null)
                    continue;
                parentLock.dispose();
                this._parentDisposeGuardsLocks.set(parent, null);
            }
        }
        else if (!onlyAllowRemoval) {
            for (const parent of this._parentDisposeGuardsLocks.keys()) {
                if (this._parentDisposeGuardsLocks.get(parent) != null)
                    continue;
                this._parentDisposeGuardsLocks.set(parent, parent.createPreventDisposalHandle(true));
            }
        }
    }
    /** @internal */
    _hasAwaitingDisposeLocks() {
        if (this._awaitingDisposeLockCallbacks.length > 0)
            return true;
        return [...this._parentDisposeGuardsLocks.keys()].some((parent) => parent._hasAwaitingDisposeLocks());
    }
}
export class DisposalPreventionHandle {
    /** @internal */
    _dispose;
    constructor(dispose) {
        this._dispose = dispose;
        this.dispose = this.dispose.bind(this);
        this[Symbol.dispose] = this[Symbol.dispose].bind(this);
    }
    dispose() {
        if (this._dispose != null) {
            this._dispose();
            this._dispose = null;
        }
    }
    [Symbol.dispose]() {
        this.dispose();
    }
    get disposed() {
        return this._dispose == null;
    }
    /** @internal */
    static _create(dispose) {
        return new DisposalPreventionHandle(dispose);
    }
}
//# sourceMappingURL=DisposeGuard.js.map
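
A minimal sketch of how this guard cooperates with disposal: prevention handles block disposal (transitively locking parent guards), and acquireDisposeLock() resolves only once all handles are released. The deep import path is an assumption, since this is an internal utility rather than public API.

import { DisposeGuard } from "node-llama-cpp/dist/utils/DisposeGuard.js";

async function demo() {
    const parentGuard = new DisposeGuard();             // e.g. guards a context
    const childGuard = new DisposeGuard([parentGuard]); // e.g. guards a sequence

    // Taking a handle on the child also takes a handle on each parent,
    // so neither object can be torn down while work is in flight.
    const handle = childGuard.createPreventDisposalHandle();

    // Anything that now asks to dispose has to wait for the handle.
    const disposed = childGuard.acquireDisposeLock().then(() => {
        console.log("safe to dispose");
    });

    handle.dispose(); // releases the guard; the pending lock above resolves
    await disposed;
}

await demo();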

1 node_modules/node-llama-cpp/dist/utils/DisposeGuard.js.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"DisposeGuard.js","sourceRoot":"","sources":["../../src/utils/DisposeGuard.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,aAAa,EAAC,MAAM,iBAAiB,CAAC;AAE9C,MAAM,OAAO,YAAY;IACrB,gBAAgB,CAAS,kBAAkB,GAAW,CAAC,CAAC;IACxD,gBAAgB,CAAS,6BAA6B,GAAmB,EAAE,CAAC;IAC5E,gBAAgB,CAAS,iBAAiB,GAAY,KAAK,CAAC;IAC5D,gBAAgB,CAAS,yBAAyB,GAAuD,IAAI,GAAG,EAAE,CAAC;IAEnH,YAAmB,sBAAsC,EAAE;QACvD,KAAK,MAAM,MAAM,IAAI,mBAAmB;YACpC,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;IACzD,CAAC;IAEM,qBAAqB,CAAC,MAAoB;QAC7C,IAAI,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,CAAC;YAC1C,OAAO;QAEX,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAEjD,IAAI,IAAI,CAAC,kBAAkB,GAAG,CAAC;YAC3B,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,2BAA2B,CAAC,IAAI,CAAC,CAAC,CAAC;IAC7F,CAAC;IAEM,wBAAwB,CAAC,MAAoB;QAChD,MAAM,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QAE9D,IAAI,UAAU,IAAI,IAAI,EAAE,CAAC;YACrB,UAAU,CAAC,OAAO,EAAE,CAAC;YACrB,IAAI,CAAC,yBAAyB,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;IACL,CAAC;IAEM,KAAK,CAAC,kBAAkB;QAC3B,OAAO,IAAI,OAAO,CAAO,CAAC,MAAM,EAAE,EAAE;YAChC,IAAI,IAAI,CAAC,kBAAkB,GAAG,CAAC;gBAC3B,IAAI,CAAC,6BAA6B,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;iBAC/C,CAAC;gBACF,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC;gBAC9B,MAAM,EAAE,CAAC;YACb,CAAC;QACL,CAAC,CAAC,CAAC;IACP,CAAC;IAEM,2BAA2B,CAAC,wBAAiC,KAAK;QACrE,IAAI,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,CAAC,qBAAqB,IAAI,IAAI,CAAC,wBAAwB,EAAE,CAAC;YACzF,MAAM,IAAI,aAAa,EAAE,CAAC;QAE9B,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAC1B,IAAI,CAAC;YACD,IAAI,CAAC,8BAA8B,EAAE,CAAC;QAC1C,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACX,IAAI,CAAC,kBAAkB,EAAE,CAAC;YAE1B,IAAI,IAAI,CAAC,kBAAkB,KAAK,CAAC;gBAC7B,IAAI,CAAC,8BAA8B,EAAE,CAAC;YAE1C,MAAM,GAAG,CAAC;QACd,CAAC;QAED,OAAO,wBAAwB,CAAC,OAAO,CAAC,GAAG,EAAE;YACzC,IAAI,CAAC,kBAAkB,EAAE,CAAC;YAE1B,IAAI,CAAC,sBAAsB,EAAE,CAAC;YAC9B,IAAI,CAAC,8BAA8B,CAAC,IAAI,CAAC,CAAC;QAC9C,CAAC,CAAC,CAAC;IACP,CAAC;IAED,gBAAgB;IACR,mBAAmB;QACvB,IAAI,IAAI,CAAC,iBAAiB;YACtB,OAAO,IAAI,CAAC;QAEhB,OAAO,CAAC,GAAG,IAAI,CAAC,yBAAyB,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAC;IACrG,CAAC;IAED,gBAAgB;IACR,sBAAsB;QAC1B,IAAI,IAAI,CAAC,kBAAkB,GAAG,CAAC;YAC3B,OAAO;QAEX,OAAO,IAAI,CAAC,6BAA6B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACnD,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC;YAC9B,IAAI,CAAC,6BAA6B,CAAC,KAAK,EAAG,EAAE,CAAC;QAClD,CAAC;IACL,CAAC;IAED,gBAAgB;IACR,8BAA8B,CAAC,mBAA4B,KAAK;QACpE,IAAI,IAAI,CAAC,kBAAkB,KAAK,CAAC,EAAE,CAAC;YAChC,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,yBAAyB,CAAC,IAAI,EAAE,EAAE,CAAC;gBACzD,MAAM,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBAE9D,IAAI,UAAU,IAAI,IAAI;oBAClB,SAAS;gBAEb,UAAU,CAAC,OAAO,EAAE,CAAC;gBACrB,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;YACrD,CAAC;QACL,CAAC;aAAM,IAAI,CAAC,gBAAgB,EAAE,CAAC;YAC3B,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,yBAAyB,CAAC,IAAI,EAAE,EAAE,CAAC;gBACzD,IAAI,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,IAAI;oBAClD,SAAS;gBAEb,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,2BAA2B,CAAC,IAAI,CAAC,CAAC,CAAC;YACzF,CAAC;QACL,CAAC;IACL,CAAC;IAED,gBAAgB;IACR,wBAAwB;QAC5B,IAAI,IAAI,CAAC,6BAA6B,CAAC,MAAM,GAAG,CAAC;YAC7C,OAAO,IAAI,CAAC;QAEhB,OAAO,CAAC,GAAG,IAAI,CAAC,yBAAyB,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,wBAAwB,EAAE,CAAC,CAAC;IAC1G,CAAC;CACJ;AAED,MAAM,OAAO,wBAAwB;IACjC,gBAAgB;IACR,QAAQ,CAAsB;IAEtC,YAAoB,OAAmB;QACnC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;QAExB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC3D,CAAC;IAEM,OAAO;QACV,IAAI,IAAI,CAAC,QAAQ,IA
AI,IAAI,EAAE,CAAC;YACxB,IAAI,CAAC,QAAQ,EAAE,CAAC;YAChB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;QACzB,CAAC;IACL,CAAC;IAEM,CAAC,MAAM,CAAC,OAAO,CAAC;QACnB,IAAI,CAAC,OAAO,EAAE,CAAC;IACnB,CAAC;IAED,IAAW,QAAQ;QACf,OAAO,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC;IACjC,CAAC;IAED,gBAAgB;IACT,MAAM,CAAC,OAAO,CAAC,OAAmB;QACrC,OAAO,IAAI,wBAAwB,CAAC,OAAO,CAAC,CAAC;IACjD,CAAC;CACJ"}

3 node_modules/node-llama-cpp/dist/utils/InsufficientMemoryError.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,3 @@
export declare class InsufficientMemoryError extends Error {
    constructor(message?: string);
}

6 node_modules/node-llama-cpp/dist/utils/InsufficientMemoryError.js (generated, vendored, Normal file)
@@ -0,0 +1,6 @@
export class InsufficientMemoryError extends Error {
    constructor(message = "Insufficient memory") {
        super(message);
    }
}
//# sourceMappingURL=InsufficientMemoryError.js.map
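
A small usage sketch, assuming the class is re-exported from the package root (otherwise the deep "dist/utils/InsufficientMemoryError.js" path would apply). The hypothetical allocateOrFallback helper just illustrates the catch-and-retry pattern.

import { InsufficientMemoryError } from "node-llama-cpp";

function allocateOrFallback<T>(allocate: () => T, fallback: () => T): T {
    try {
        return allocate();
    } catch (err) {
        // the default message is "Insufficient memory"
        if (err instanceof InsufficientMemoryError)
            return fallback(); // e.g. retry with a smaller context size
        throw err;
    }
}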

1 node_modules/node-llama-cpp/dist/utils/InsufficientMemoryError.js.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"InsufficientMemoryError.js","sourceRoot":"","sources":["../../src/utils/InsufficientMemoryError.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,uBAAwB,SAAQ,KAAK;IAC9C,YAAmB,UAAkB,qBAAqB;QACtD,KAAK,CAAC,OAAO,CAAC,CAAC;IACnB,CAAC;CACJ"}

92 node_modules/node-llama-cpp/dist/utils/LlamaText.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,92 @@
import type { Token, Tokenizer } from "../types.js";
export type LlamaTextValue = string | SpecialTokensText | SpecialToken;
export type LlamaTextInputValue = LlamaTextValue | LlamaText | number | boolean | readonly LlamaTextInputValue[];
export type LlamaTextJSON = string | LlamaTextJSONValue[];
export type LlamaTextJSONValue = string | LlamaTextSpecialTokensTextJSON | LlamaTextSpecialTokenJSON;
export type LlamaTextSpecialTokensTextJSON = {
    type: "specialTokensText";
    value: string;
};
export type LlamaTextSpecialTokenJSON = {
    type: "specialToken";
    value: string;
};
/**
 * @see [Using `LlamaText`](https://node-llama-cpp.withcat.ai/guide/llama-text) tutorial
 */
declare class LlamaText {
    readonly values: readonly LlamaTextValue[];
    /**
     * Can also be called without `new`
     */
    constructor(...values: readonly LlamaTextInputValue[]);
    concat(value: LlamaTextInputValue): LlamaText;
    mapValues(mapper: (this: readonly LlamaTextValue[], value: LlamaTextValue, index: number, values: readonly LlamaTextValue[]) => LlamaTextInputValue): LlamaText;
    /**
     * Joins the values with the given separator.
     *
     * Note that the values are squashed when they are loaded into the `LlamaText`, so the separator is not added between adjacent strings.
     *
     * To add the separator on values before squashing them, use `LlamaText.joinValues` instead.
     */
    joinValues(separator: LlamaText | LlamaTextValue): LlamaText;
    toString(): string;
    toJSON(): LlamaTextJSON;
    tokenize(tokenizer: Tokenizer, options?: "trimLeadingSpace"): Token[];
    compare(other: LlamaText): boolean;
    trimStart(): LlamaText;
    trimEnd(): LlamaText;
    includes(value: LlamaText): boolean;
    static fromJSON(json: LlamaTextJSON): LlamaText;
    static compare(a: LlamaText, b: LlamaText): boolean;
    /**
     * Attempt to convert tokens to a `LlamaText` while preserving special tokens.
     *
     * Non-standard special tokens that don't have a text representation are ignored.
     */
    static fromTokens(tokenizer: Tokenizer, tokens: Token[]): LlamaText;
    /**
     * Join values with the given separator before squashing adjacent strings inside the values
     */
    static joinValues(separator: LlamaText | string, values: readonly LlamaTextInputValue[]): LlamaText;
    static isLlamaText(value: unknown): value is LlamaText;
}
type LlamaTextConstructor = Omit<typeof LlamaText, "prototype"> & {
    new (...values: readonly LlamaTextInputValue[]): LlamaText;
    (...values: readonly LlamaTextInputValue[]): LlamaText;
    readonly prototype: typeof LlamaText.prototype;
};
declare const LlamaTextConstructor: LlamaTextConstructor;
declare const _LlamaText: LlamaTextConstructor;
type _LlamaText = LlamaText;
export { _LlamaText as LlamaText, LlamaText as _LlamaText };
export declare class SpecialTokensText {
    readonly value: string;
    constructor(value: string);
    toString(): string;
    tokenize(tokenizer: Tokenizer, trimLeadingSpace?: boolean): Token[];
    tokenizeSpecialTokensOnly(tokenizer: Tokenizer): (string | Token)[];
    toJSON(): LlamaTextSpecialTokensTextJSON;
    static fromJSON(json: LlamaTextSpecialTokensTextJSON): SpecialTokensText;
    static isSpecialTokensTextJSON(value: LlamaTextJSONValue): value is LlamaTextSpecialTokensTextJSON;
    /**
     * Wraps the value with a `SpecialTokensText` only if `shouldWrap` is true
     */
    static wrapIf(shouldWrap: boolean, value: string): SpecialTokensText | string;
}
export type BuiltinSpecialTokenValue = "BOS" | "EOS" | "NL" | "EOT" | "SEP";
export declare class SpecialToken {
    readonly value: BuiltinSpecialTokenValue;
    constructor(value: BuiltinSpecialTokenValue);
    toString(): BuiltinSpecialTokenValue;
    tokenize(tokenizer: Tokenizer): Token[];
    toJSON(): LlamaTextSpecialTokenJSON;
    static fromJSON(json: LlamaTextSpecialTokenJSON): SpecialToken;
    static isSpecialTokenJSON(value: LlamaTextJSONValue): value is LlamaTextSpecialTokenJSON;
    static getTokenToValueMap(tokenizer: Tokenizer): ReadonlyMap<Token | undefined, BuiltinSpecialTokenValue>;
}
export declare function isLlamaText(value: unknown): value is LlamaText;
/**
 * Tokenize the given input using the given tokenizer, whether it's a `string` or a `LlamaText`
 */
export declare function tokenizeText(text: string | LlamaText, tokenizer: Tokenizer): Token[];
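
A minimal sketch of the squashing behavior the joinValues docs above describe, assuming LlamaText and SpecialToken are exported from the package root as in node-llama-cpp's public API:

import { LlamaText, SpecialToken } from "node-llama-cpp";

// Adjacent strings are squashed into one value on construction:
const text = LlamaText("Hello", ", ", "world"); // values: ["Hello, world"]

// ...so the instance method has nothing to separate,
// while the static method joins before squashing:
const a = text.joinValues("\n");                                // "Hello, world"
const b = LlamaText.joinValues("\n", ["Hello", ", ", "world"]); // "Hello\n, \nworld"

// Special tokens stay distinct values and survive serialization:
const prompt = LlamaText(new SpecialToken("BOS"), "Hi");
const restored = LlamaText.fromJSON(prompt.toJSON());
console.log(LlamaText.compare(prompt, restored)); // true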

527 node_modules/node-llama-cpp/dist/utils/LlamaText.js (generated, vendored, Normal file)
@@ -0,0 +1,527 @@
import { pushAll } from "./pushAll.js";
/**
 * @see [Using `LlamaText`](https://node-llama-cpp.withcat.ai/guide/llama-text) tutorial
 */
class LlamaText {
    values;
    /**
     * Can also be called without `new`
     */
    constructor(...values) {
        // the constructor logic is copied to `LlamaTextConstructor` to make the constructor callable as a normal function
        this.values = createHistoryFromStringsAndValues(values);
    }
    concat(value) {
        return new LlamaTextConstructor([...this.values, value]);
    }
    mapValues(mapper) {
        return new LlamaTextConstructor(this.values.map(mapper));
    }
    /**
     * Joins the values with the given separator.
     *
     * Note that the values are squashed when they are loaded into the `LlamaText`, so the separator is not added between adjacent strings.
     *
     * To add the separator on values before squashing them, use `LlamaText.joinValues` instead.
     */
    joinValues(separator) {
        const newValues = [];
        for (let i = 0; i < this.values.length; i++) {
            newValues.push(this.values[i]);
            if (i !== this.values.length - 1) {
                if (isLlamaText(separator))
                    pushAll(newValues, separator.values);
                else
                    newValues.push(separator);
            }
        }
        return new LlamaTextConstructor(newValues);
    }
    toString() {
        return this.values
            .map((value) => {
                if (value instanceof SpecialToken)
                    return value.toString();
                else if (value instanceof SpecialTokensText)
                    return value.toString();
                else
                    return value;
            })
            .join("");
    }
    toJSON() {
        if (this.values.length === 1 && typeof this.values[0] === "string")
            return this.values[0];
        else if (this.values.length === 0)
            return "";
        return this.values.map((value) => {
            if (value instanceof SpecialToken)
                return value.toJSON();
            else if (value instanceof SpecialTokensText)
                return value.toJSON();
            else
                return value;
        });
    }
    tokenize(tokenizer, options) {
        let textToTokenize = "";
        const res = [];
        const hasContent = () => (res.length > 0 || textToTokenize.length > 0);
        const resolveTokenizerOptions = () => (hasContent() ? "trimLeadingSpace" : options);
        for (const value of this.values) {
            if (value instanceof SpecialToken) {
                pushAll(res, tokenizer(textToTokenize, false, resolveTokenizerOptions()));
                pushAll(res, value.tokenize(tokenizer));
                textToTokenize = "";
            }
            else if (value instanceof SpecialTokensText) {
                pushAll(res, tokenizer(textToTokenize, false, resolveTokenizerOptions()));
                pushAll(res, value.tokenize(tokenizer, hasContent() || options === "trimLeadingSpace"));
                textToTokenize = "";
            }
            else
                textToTokenize += value;
        }
        pushAll(res, tokenizer(textToTokenize, false, resolveTokenizerOptions()));
        return res;
    }
    compare(other) {
        return LlamaTextConstructor.compare(this, other);
    }
    trimStart() {
        const newValues = this.values.slice();
        while (newValues.length > 0) {
            const firstValue = newValues[0];
            if (firstValue instanceof SpecialToken)
                break;
            if (firstValue instanceof SpecialTokensText) {
                const newValue = firstValue.value.trimStart();
                if (newValue === "") {
                    newValues.shift();
                    continue;
                }
                else if (newValue !== firstValue.value) {
                    newValues[0] = new SpecialTokensText(newValue);
                    break;
                }
                break;
            }
            else if (typeof firstValue === "string") {
                const newValue = firstValue.trimStart();
                if (newValue === "") {
                    newValues.shift();
                    continue;
                }
                else if (newValue !== firstValue) {
                    newValues[0] = newValue;
                    break;
                }
                break;
            }
            else
                void firstValue;
        }
        return new LlamaTextConstructor(newValues);
    }
    trimEnd() {
        const newValues = this.values.slice();
        while (newValues.length > 0) {
            const lastValue = newValues[newValues.length - 1];
            if (lastValue instanceof SpecialToken)
                break;
            if (lastValue instanceof SpecialTokensText) {
                const newValue = lastValue.value.trimEnd();
                if (newValue === "") {
                    newValues.pop();
                    continue;
                }
                else if (newValue !== lastValue.value) {
                    newValues[newValues.length - 1] = new SpecialTokensText(newValue);
                    break;
                }
                break;
            }
            else if (typeof lastValue === "string") {
                const newValue = lastValue.trimEnd();
                if (newValue === "") {
                    newValues.pop();
                    continue;
                }
                else if (newValue !== lastValue) {
                    newValues[newValues.length - 1] = newValue;
                    break;
                }
                break;
            }
            else
                void lastValue;
        }
        return new LlamaTextConstructor(newValues);
    }
    includes(value) {
        for (let i = 0; i <= this.values.length - value.values.length; i++) {
            const thisValue = this.values[i];
            let startMatch = compareLlamaTextValues(thisValue, value.values[0]);
            if (!startMatch && thisValue instanceof SpecialTokensText && value.values[0] instanceof SpecialTokensText) {
                startMatch = value.values.length > 1
                    ? thisValue.value.endsWith(value.values[0].value)
                    : thisValue.value.includes(value.values[0].value);
            }
            if (!startMatch && typeof thisValue === "string" && typeof value.values[0] === "string") {
                startMatch = value.values.length > 1
                    ? thisValue.endsWith(value.values[0])
                    : thisValue.includes(value.values[0]);
            }
            if (startMatch) {
                let j = 1;
                for (; j < value.values.length; j++) {
                    const thisValue = this.values[i + j];
                    const valueValue = value.values[j];
                    let endMatch = compareLlamaTextValues(thisValue, valueValue);
                    if (!endMatch && thisValue instanceof SpecialTokensText && valueValue instanceof SpecialTokensText) {
                        endMatch = value.values.length - 1 === j
                            ? thisValue.value.startsWith(valueValue.value)
                            : thisValue.value === valueValue.value;
                    }
                    if (!endMatch && typeof thisValue === "string" && typeof valueValue === "string") {
                        endMatch = value.values.length - 1 === j
                            ? thisValue.startsWith(valueValue)
                            : thisValue === valueValue;
                    }
                    if (!endMatch)
                        break;
                }
                if (j === value.values.length)
                    return true;
            }
        }
        return false;
    }
    /** @internal */
    [Symbol.for("nodejs.util.inspect.custom")](depth, inspectOptions, inspect) {
        const inspectFunction = inspect ?? inspectOptions?.inspect;
        if (inspectFunction == null)
            return JSON.stringify(this.toJSON(), undefined, 4);
        return "LlamaText(" + inspectFunction(this.values, {
            ...(inspectOptions ?? {}),
            depth: depth == null
                ? undefined
                : Math.max(0, depth - 1)
        }) + ")";
    }
    static fromJSON(json) {
        // assigned to `LlamaTextConstructor` manually to expose this static method
        if (typeof json === "string")
            return new LlamaTextConstructor(json);
        return new LlamaTextConstructor(json.map((value) => {
            if (typeof value === "string")
                return value;
            else if (SpecialToken.isSpecialTokenJSON(value))
                return SpecialToken.fromJSON(value);
            else if (SpecialTokensText.isSpecialTokensTextJSON(value))
                return SpecialTokensText.fromJSON(value);
            else {
                void value;
                throw new Error(`Unknown value type: ${value}`);
            }
        }));
    }
    static compare(a, b) {
        // assigned to `LlamaTextConstructor` manually to expose this static method
        if (!isLlamaText(a) || !isLlamaText(b))
            return false;
        if (a.values.length !== b.values.length)
            return false;
        for (let i = 0; i < a.values.length; i++) {
            if (!compareLlamaTextValues(a.values[i], b.values[i]))
                return false;
        }
        return true;
    }
    /**
     * Attempt to convert tokens to a `LlamaText` while preserving special tokens.
     *
     * Non-standard special tokens that don't have a text representation are ignored.
     */
    static fromTokens(tokenizer, tokens) {
        // assigned to `LlamaTextConstructor` manually to expose this static method
        const res = [];
        const pendingTokens = [];
        const addPendingTokens = () => {
            if (pendingTokens.length === 0)
                return;
            res.push(tokenizer.detokenize(pendingTokens, false));
            pendingTokens.length = 0;
        };
        const builtinTokens = SpecialToken.getTokenToValueMap(tokenizer);
        for (const token of tokens) {
            if (token == null)
                continue;
            const builtinTokenValue = builtinTokens.get(token);
            if (builtinTokenValue != null) {
                addPendingTokens();
                res.push(new SpecialToken(builtinTokenValue));
                continue;
            }
            const regularText = tokenizer.detokenize([token], false);
            const retokenizedRegularText = tokenizer(regularText, false, "trimLeadingSpace");
            if (retokenizedRegularText.length === 1 && retokenizedRegularText[0] === token) {
                pendingTokens.push(token);
                continue;
            }
            const specialText = tokenizer.detokenize([token], true);
            const retokenizedSpecialText = tokenizer(specialText, true, "trimLeadingSpace");
            if (retokenizedSpecialText.length === 1 && retokenizedSpecialText[0] === token) {
                addPendingTokens();
                res.push(new SpecialTokensText(specialText));
                continue;
            }
            pendingTokens.push(token);
        }
        addPendingTokens();
        return new LlamaTextConstructor(res);
    }
    /**
     * Join values with the given separator before squashing adjacent strings inside the values
     */
    static joinValues(separator, values) {
        // assigned to `LlamaTextConstructor` manually to expose this static method
        const newValues = [];
        for (let i = 0; i < values.length; i++) {
            const value = values[i];
            if (i !== 0)
                newValues.push(separator);
            newValues.push(value);
        }
        return new LlamaTextConstructor(newValues);
    }
    static isLlamaText(value) {
        // assigned to `LlamaTextConstructor` manually to expose this static method
        if (value instanceof LlamaTextConstructor || value instanceof LlamaText)
            return true;
        try {
            // detect a `LlamaText` created from a different module import
            return value != null && Object.getPrototypeOf(value)?._type === "LlamaText";
        }
        catch (err) {
            return false;
        }
    }
}
Object.defineProperty(LlamaText.prototype, "_type", {
    enumerable: false,
    configurable: false,
    value: "LlamaText"
});
const LlamaTextConstructor = function LlamaText(...values) {
    // this makes the constructor callable also as a normal function
    if (new.target == null)
        return new LlamaTextConstructor(...values);
    this.values = createHistoryFromStringsAndValues(values);
    return this;
};
LlamaTextConstructor.prototype = Object.create(LlamaText.prototype);
LlamaTextConstructor.prototype.constructor = LlamaTextConstructor;
LlamaTextConstructor.fromJSON = LlamaText.fromJSON;
LlamaTextConstructor.compare = LlamaText.compare;
LlamaTextConstructor.fromTokens = LlamaText.fromTokens;
LlamaTextConstructor.joinValues = LlamaText.joinValues;
LlamaTextConstructor.isLlamaText = LlamaText.isLlamaText;
const _LlamaText = LlamaTextConstructor;
export { _LlamaText as LlamaText, LlamaText as _LlamaText };
export class SpecialTokensText {
    value;
    constructor(value) {
        this.value = value;
    }
    toString() {
        return this.value;
    }
    tokenize(tokenizer, trimLeadingSpace = false) {
        return tokenizer(this.value, true, trimLeadingSpace ? "trimLeadingSpace" : undefined);
    }
    tokenizeSpecialTokensOnly(tokenizer) {
        const tokens = this.tokenize(tokenizer, true);
        const res = [];
        const pendingTextTokens = [];
        for (const token of tokens) {
            if (tokenizer.isSpecialToken(token)) {
                if (pendingTextTokens.length !== 0) {
                    res.push(tokenizer.detokenize(pendingTextTokens, false));
                    pendingTextTokens.length = 0;
                }
                res.push(token);
            }
            else
                pendingTextTokens.push(token);
        }
        if (pendingTextTokens.length !== 0)
            res.push(tokenizer.detokenize(pendingTextTokens, false));
        return res;
    }
    toJSON() {
        return {
            type: "specialTokensText",
            value: this.value
        };
    }
    /** @internal */
    [Symbol.for("nodejs.util.inspect.custom")](depth, inspectOptions, inspect) {
        const inspectFunction = inspect ?? inspectOptions?.inspect;
        if (inspectFunction == null)
            return JSON.stringify(this.toJSON(), undefined, 4);
        return "new SpecialTokensText(" + inspectFunction(this.value, {
            ...(inspectOptions ?? {}),
            depth: depth == null
                ? undefined
                : Math.max(0, depth - 1)
        }) + ")";
    }
    static fromJSON(json) {
        if (SpecialTokensText.isSpecialTokensTextJSON(json))
            return new SpecialTokensText(json.value);
        throw new Error(`Invalid JSON for SpecialTokensText: ${JSON.stringify(json)}`);
    }
    static isSpecialTokensTextJSON(value) {
        return value != null && typeof value === "object" && value.type === "specialTokensText";
    }
    /**
     * Wraps the value with a `SpecialTokensText` only if `shouldWrap` is true
     */
    static wrapIf(shouldWrap, value) {
        if (shouldWrap)
            return new SpecialTokensText(value);
        else
            return value;
    }
}
export class SpecialToken {
    value;
    constructor(value) {
        this.value = value;
    }
    toString() {
        return this.value;
    }
    tokenize(tokenizer) {
        return tokenizer(this.value, "builtin");
    }
    toJSON() {
        return {
            type: "specialToken",
            value: this.value
        };
    }
    /** @internal */
    [Symbol.for("nodejs.util.inspect.custom")](depth, inspectOptions, inspect) {
        const inspectFunction = inspect ?? inspectOptions?.inspect;
        if (inspectFunction == null)
            return JSON.stringify(this.toJSON(), undefined, 4);
        return "new SpecialToken(" + inspectFunction(this.value, {
            ...(inspectOptions ?? {}),
            depth: depth == null
                ? undefined
                : Math.max(0, depth - 1)
        }) + ")";
    }
    static fromJSON(json) {
        if (SpecialToken.isSpecialTokenJSON(json))
            return new SpecialToken(json.value);
        throw new Error(`Invalid JSON for SpecialToken: ${JSON.stringify(json)}`);
    }
    static isSpecialTokenJSON(value) {
        return value != null && typeof value === "object" && value.type === "specialToken";
    }
    static getTokenToValueMap(tokenizer) {
        const supportedValues = [
            "BOS", "EOS", "NL", "EOT", "SEP"
        ];
        void 0;
        const res = new Map(supportedValues.map((value) => ([tokenizer(value, "builtin")[0], value])));
        res.delete(undefined);
        return res;
    }
}
export function isLlamaText(value) {
    return LlamaText.isLlamaText(value);
}
/**
 * Tokenize the given input using the given tokenizer, whether it's a `string` or a `LlamaText`
 */
export function tokenizeText(text, tokenizer) {
    if (typeof text === "string")
        return tokenizer(text, false);
    else
        return text.tokenize(tokenizer);
}
function createHistoryFromStringsAndValues(values) {
    function addItemToRes(res, item) {
        if (item === undefined || item === "" || (item instanceof SpecialTokensText && item.value === ""))
            return res;
        else if (typeof item === "string" || item instanceof SpecialTokensText || item instanceof SpecialToken) {
            res.push(item);
            return res;
        }
        else if (isLlamaText(item)) {
            for (const value of item.values)
                res.push(value);
            return res;
        }
        else if (item instanceof Array) {
            for (const value of item) {
                if (isLlamaText(value)) {
                    for (const innerValue of value.values)
                        res.push(innerValue);
                }
                else if (value === "" || (value instanceof SpecialTokensText && value.value === ""))
                    continue;
                else if (value instanceof Array)
                    addItemToRes(res, value);
                else if (typeof value === "number" || typeof value === "boolean")
                    res.push(String(value));
                else
                    res.push(value);
            }
            return res;
        }
        else if (typeof item === "number" || typeof item === "boolean") {
            res.push(String(item));
            return res;
        }
        return item;
    }
    function squashAdjacentItems(res, item) {
        if (res.length === 0) {
            res.push(item);
            return res;
        }
        const lastItem = res[res.length - 1];
        if (lastItem instanceof SpecialToken || item instanceof SpecialToken) {
            res.push(item);
            return res;
        }
        if (typeof lastItem === "string" && typeof item === "string") {
            res[res.length - 1] += item;
            return res;
        }
        else if (lastItem instanceof SpecialTokensText && item instanceof SpecialTokensText) {
            res[res.length - 1] = new SpecialTokensText(lastItem.value + item.value);
            return res;
        }
        res.push(item);
        return res;
    }
    return values
        .reduce(addItemToRes, [])
        .reduce(squashAdjacentItems, []);
}
function compareLlamaTextValues(a, b) {
    if (a instanceof SpecialTokensText && b instanceof SpecialTokensText)
        return a.value === b.value;
    else if (a instanceof SpecialToken && b instanceof SpecialToken)
        return a.value === b.value;
    else if (a !== b)
        return false;
    return true;
}
//# sourceMappingURL=LlamaText.js.map
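
A sketch of the token round-trip this file implements, assuming the Token and Tokenizer types are re-exported from the package root (a tokenizer normally comes from a loaded model):

import { LlamaText, type Token, type Tokenizer } from "node-llama-cpp";

function roundTrip(tokenizer: Tokenizer, tokens: Token[]): Token[] {
    // fromTokens() detokenizes each token and re-tokenizes it to decide whether
    // it is plain text, a recognized builtin special token, or special-token text
    const text = LlamaText.fromTokens(tokenizer, tokens);

    // tokenize() batches the plain strings between special values, so for
    // standard tokens this should reproduce the original sequence
    return text.tokenize(tokenizer);
}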

1 node_modules/node-llama-cpp/dist/utils/LlamaText.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long

12 node_modules/node-llama-cpp/dist/utils/LruCache.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,12 @@
export declare class LruCache<Key, Value> {
    readonly maxSize: number;
    constructor(maxSize: number, { onDelete }?: {
        onDelete?(key: Key, value: Value): void;
    });
    get(key: Key): Value | undefined;
    set(key: Key, value: Value): this;
    get firstKey(): Key | undefined;
    clear(): void;
    keys(): MapIterator<Key>;
    delete(key: Key): void;
}

44 node_modules/node-llama-cpp/dist/utils/LruCache.js (generated, vendored, Normal file)
@@ -0,0 +1,44 @@
export class LruCache {
    maxSize;
    /** @internal */ _cache = new Map();
    /** @internal */ _onDelete;
    constructor(maxSize, { onDelete } = {}) {
        this.maxSize = maxSize;
        this._onDelete = onDelete;
    }
    get(key) {
        if (!this._cache.has(key))
            return undefined;
        // move the key to the end of the cache
        const item = this._cache.get(key);
        this._cache.delete(key);
        this._cache.set(key, item);
        return item;
    }
    set(key, value) {
        if (this._cache.has(key))
            this._cache.delete(key);
        else if (this._cache.size >= this.maxSize) {
            const firstKey = this.firstKey;
            if (this._onDelete != null)
                this._onDelete(firstKey, this._cache.get(firstKey));
            this._cache.delete(firstKey);
        }
        this._cache.set(key, value);
        return this;
    }
    get firstKey() {
        return this._cache.keys()
            .next().value;
    }
    clear() {
        this._cache.clear();
    }
    keys() {
        return this._cache.keys();
    }
    delete(key) {
        this._cache.delete(key);
    }
}
//# sourceMappingURL=LruCache.js.map
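
A minimal usage sketch. The cache relies on Map's insertion order: get() re-inserts a key to mark it most recently used, and set() evicts firstKey (the least recently used entry) once maxSize is reached. LruCache is an internal utility, so the deep import path is an assumption rather than public API.

import { LruCache } from "node-llama-cpp/dist/utils/LruCache.js";

const cache = new LruCache<string, number>(2, {
    onDelete(key, value) {
        console.log(`evicted ${key}=${value}`);
    }
});

cache.set("a", 1).set("b", 2);
cache.get("a");              // moves "a" to the back of the eviction order
cache.set("c", 3);           // at capacity: evicts "b", the least recently used
console.log(cache.firstKey); // "a"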

1 node_modules/node-llama-cpp/dist/utils/LruCache.js.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"LruCache.js","sourceRoot":"","sources":["../../src/utils/LruCache.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,QAAQ;IACD,OAAO,CAAS;IAChC,gBAAgB,CAAkB,MAAM,GAAG,IAAI,GAAG,EAAc,CAAC;IACjE,gBAAgB,CAAkB,SAAS,CAAoC;IAE/E,YAAmB,OAAe,EAAE,EAChC,QAAQ,KAGR,EAAE;QACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC9B,CAAC;IAEM,GAAG,CAAC,GAAQ;QACf,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC;YACrB,OAAO,SAAS,CAAC;QAErB,uCAAuC;QACvC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAE,CAAC;QACnC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACxB,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,GAAG,CAAC,GAAQ,EAAE,KAAY;QAC7B,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC;YACpB,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACvB,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACxC,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAS,CAAC;YAEhC,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;gBACtB,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAE,CAAC,CAAC;YAEzD,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QACjC,CAAC;QAED,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC5B,OAAO,IAAI,CAAC;IAChB,CAAC;IAED,IAAW,QAAQ;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;aACpB,IAAI,EAAE,CAAC,KAAK,CAAC;IACtB,CAAC;IAEM,KAAK;QACR,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;IACxB,CAAC;IAEM,IAAI;QACP,OAAO,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;IAC9B,CAAC;IAEM,MAAM,CAAC,GAAQ;QAClB,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;IAC5B,CAAC;CACJ"}

177 node_modules/node-llama-cpp/dist/utils/OpenAIFormat.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,177 @@
import { ChatHistoryItem, ChatModelFunctions, ChatWrapperSettings } from "../types.js";
import { ChatWrapper } from "../ChatWrapper.js";
import { LlamaChatResponseFunctionCall } from "../evaluator/LlamaChat/LlamaChat.js";
import { TokenBias } from "../evaluator/TokenBias.js";
import { LlamaGrammar } from "../evaluator/LlamaGrammar.js";
import { Llama } from "../bindings/Llama.js";
import { LlamaModel } from "../evaluator/LlamaModel/LlamaModel.js";
import { GbnfJsonSchema } from "./gbnfJson/types.js";
import { LlamaText } from "./LlamaText.js";
export declare class OpenAIFormat {
    readonly chatWrapper: ChatWrapper;
    constructor({ chatWrapper }: {
        chatWrapper: ChatWrapper;
    });
    /**
     * Convert `node-llama-cpp`'s chat history to OpenAI format.
     *
     * Note that this conversion is lossy, as OpenAI's format is more limited than `node-llama-cpp`'s.
     */
    toOpenAiChat<Functions extends ChatModelFunctions>({ chatHistory, functionCalls, functions, useRawValues }: {
        chatHistory: ChatHistoryItem[];
        functionCalls?: LlamaChatResponseFunctionCall<Functions>[];
        functions?: Functions;
        useRawValues?: boolean;
    }): OpenAiChatCreationOptions;
    fromOpenAiChat<Functions extends ChatModelFunctions = ChatModelFunctions>(options: OpenAiChatCreationOptions, { llama, model }?: {
        llama?: Llama;
        model?: LlamaModel;
    }): Promise<{
        chatHistory: ChatHistoryItem[];
        functionCalls?: LlamaChatResponseFunctionCall<ChatModelFunctions>[];
        functions?: Functions;
        tokenBias?: TokenBias;
        maxTokens?: number;
        maxParallelFunctionCalls?: number;
        grammar?: LlamaGrammar;
        seed?: number;
        customStopTriggers?: string[];
        temperature?: number;
        minP?: number;
        topK?: number;
        topP?: number;
    }>;
}
export declare function fromIntermediateToCompleteOpenAiMessages(messages: IntermediateOpenAiMessage[]): (OpenAiChatSystemMessage | OpenAiChatUserMessage | OpenAiChatToolMessage | OpenAiChatAssistantMessage | {
    content: string;
    role: "assistant";
    tool_calls?: Array<{
        id: string;
        type: "function";
        function: {
            name: string;
            arguments: string | any;
        };
    }>;
})[];
export declare function fromChatHistoryToIntermediateOpenAiMessages<Functions extends ChatModelFunctions>({ chatHistory, chatWrapperSettings, functionCalls, functions, useRawValues, combineModelMessageAndToolCalls, stringifyFunctionParams, stringifyFunctionResults, squashModelTextResponses }: {
    chatHistory: readonly ChatHistoryItem[];
    chatWrapperSettings: ChatWrapperSettings;
    functionCalls?: LlamaChatResponseFunctionCall<Functions>[];
    functions?: Functions;
    useRawValues?: boolean;
    combineModelMessageAndToolCalls?: boolean;
    stringifyFunctionParams?: boolean;
    stringifyFunctionResults?: boolean;
    squashModelTextResponses?: boolean;
}): IntermediateOpenAiConversionFromChatHistory;
export type IntermediateOpenAiConversionFromChatHistory = {
    messages: IntermediateOpenAiMessage[];
    tools?: OpenAiChatTool[];
};
export type OpenAiChatCreationOptions = {
    messages: OpenAiChatMessage[];
    tools?: OpenAiChatTool[];
    "tool_choice"?: "none" | "auto";
    "logit_bias"?: Record<string, number> | null;
    "max_completion_tokens"?: number | null;
    /** Overridden by `"max_completion_tokens"` */
    "max_tokens"?: number | null;
    "parallel_tool_calls"?: boolean;
    /**
     * Only used when a Llama instance is provided.
     * A llama instance is provided through a context sequence.
     */
    "response_format"?: {
        type: "text";
    } | {
        type: "json_schema";
        "json_schema": {
            name: string;
            description?: string;
            schema?: GbnfJsonSchema;
            strict?: boolean | null;
        };
    } | {
        type: "json_object";
    };
    seed?: number | null;
    stop?: string | null | string[];
    temperature?: number | null;
    "min_p"?: number | null;
    "top_p"?: number | null;
    "top_k"?: number | null;
};
type OpenAiChatTool = {
    type: "function";
    function: {
        name: string;
        description?: string;
        parameters?: GbnfJsonSchema;
        strict?: boolean | null;
    };
};
export type IntermediateOpenAiMessage = (Omit<OpenAiChatSystemMessage, "content"> & {
    content: LlamaText | string;
} | Omit<OpenAiChatUserMessage, "content"> & {
    content: LlamaText | string;
} | Omit<OpenAiChatToolMessage, "content"> & {
    content: LlamaText | string;
} | Omit<OpenAiChatAssistantMessage, "content" | "tool_calls"> & {
    content?: LlamaText | string;
    "tool_calls"?: Array<{
        id: string;
        type: "function";
        function: {
            name: string;
            arguments: string | any;
        };
    }>;
});
export type OpenAiChatMessage = OpenAiChatSystemMessage | OpenAiChatUserMessage | OpenAiChatAssistantMessage | OpenAiChatToolMessage;
export type OpenAiChatSystemMessage = {
    role: "system";
    content: string | {
        type: "text";
        text: string;
    }[];
};
export type OpenAiChatUserMessage = {
    role: "user";
    content: string | {
        type: "text";
        text: string;
    }[];
};
export type OpenAiChatAssistantMessage = {
    role: "assistant";
    content?: string | {
        type: "text";
        text: string;
    }[] | null;
    "tool_calls"?: Array<{
        id: string;
        type: "function";
        function: {
            name: string;
            arguments: string;
        };
    }>;
};
export type OpenAiChatToolMessage = {
    role: "tool";
    content: string | {
        type: "text";
        text: string;
    }[];
    "tool_call_id": string;
};
export declare function resolveOpenAiText(text: string | {
    type: "text";
    text: string;
}[]): string;
export declare function resolveOpenAiText(text: string | {
    type: "text";
    text: string;
}[] | null | undefined): string | null;
export {};
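
A sketch of the two conversions declared above. The implementation notes that OpenAIFormat is a work in progress and not yet exported from the package root, so the deep import path and the GeneralChatWrapper choice here are assumptions:

import { GeneralChatWrapper } from "node-llama-cpp";
import { OpenAIFormat } from "node-llama-cpp/dist/utils/OpenAIFormat.js";

const format = new OpenAIFormat({ chatWrapper: new GeneralChatWrapper() });

// node-llama-cpp chat history -> OpenAI-style messages (a lossy conversion)
const openAiChat = format.toOpenAiChat({
    chatHistory: [
        { type: "system", text: "You are a helpful assistant." },
        { type: "user", text: "Hi!" },
        { type: "model", response: ["Hello! How can I help?"] }
    ]
});

// OpenAI-style request -> chat history plus decoded generation options
const { chatHistory, maxTokens } = await format.fromOpenAiChat({
    messages: openAiChat.messages,
    "max_completion_tokens": 256
});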

488 node_modules/node-llama-cpp/dist/utils/OpenAIFormat.js (generated, vendored, Normal file)
@@ -0,0 +1,488 @@
import { splitText } from "lifecycle-utils";
import { allSegmentTypes } from "../types.js";
import { jsonDumps } from "../chatWrappers/utils/jsonDumps.js";
import { TokenBias } from "../evaluator/TokenBias.js";
import { getChatWrapperSegmentDefinition } from "./getChatWrapperSegmentDefinition.js";
import { LlamaText } from "./LlamaText.js";
import { removeUndefinedFields } from "./removeNullFields.js";
// Note: this is a work in progress and is not yet complete.
// Will be exported through the main index.js file once this is complete and fully tested
export class OpenAIFormat {
    chatWrapper;
    constructor({ chatWrapper }) {
        this.chatWrapper = chatWrapper;
    }
    /**
     * Convert `node-llama-cpp`'s chat history to OpenAI format.
     *
     * Note that this conversion is lossy, as OpenAI's format is more limited than `node-llama-cpp`'s.
     */
    toOpenAiChat({ chatHistory, functionCalls, functions, useRawValues = true }) {
        const res = fromChatHistoryToIntermediateOpenAiMessages({
            chatHistory,
            chatWrapperSettings: this.chatWrapper.settings,
            functionCalls,
            functions,
            useRawValues
        });
        return {
            ...res,
            messages: fromIntermediateToCompleteOpenAiMessages(res.messages)
        };
    }
    async fromOpenAiChat(options, { llama, model } = {}) {
        const { messages, tools } = options;
        if ((options["response_format"]?.type === "json_schema" || options["response_format"]?.type === "json_object") &&
            tools != null && options["tool_choice"] !== "none")
            throw new Error("Using both JSON response format and tools is not supported yet");
        const { chatHistory, functionCalls: pendingFunctionCalls } = fromOpenAiMessagesToChatHistory({
            messages,
            chatWrapper: this.chatWrapper
        });
        const functions = {};
        for (const tool of tools ?? []) {
            functions[tool.function.name] = {
                description: tool.function.description,
                params: tool.function.parameters
            };
        }
        let tokenBias;
        if (options["logit_bias"] != null && model != null) {
            tokenBias = TokenBias.for(model);
            for (const [token, bias] of Object.entries(options["logit_bias"]))
                tokenBias.set(token, { logit: bias });
        }
        let grammar;
        if (options["response_format"]?.type === "json_schema" && llama != null) {
            const schema = options["response_format"]?.json_schema?.schema;
            if (schema != null)
                grammar = await llama.createGrammarForJsonSchema(schema);
            else
                grammar = await llama.getGrammarFor("json");
        }
        else if (options["response_format"]?.type === "json_object" && llama != null)
            grammar = await llama.getGrammarFor("json");
        return {
            chatHistory,
            functionCalls: pendingFunctionCalls,
            functions: Object.keys(functions).length === 0
                ? undefined
                : functions,
            tokenBias,
            maxTokens: options["max_completion_tokens"] ?? options["max_tokens"] ?? undefined,
            maxParallelFunctionCalls: options["parallel_tool_calls"] === false ? 1 : undefined,
            grammar,
            seed: options.seed ?? undefined,
            customStopTriggers: typeof options.stop === "string"
                ? [options.stop]
                : options.stop instanceof Array
                    ? options.stop.filter((item) => typeof item === "string")
                    : undefined,
            temperature: options.temperature ?? undefined,
            minP: options["min_p"] ?? undefined,
            topK: options["top_k"] ?? undefined,
            topP: options["top_p"] ?? undefined
        };
    }
}
export function fromIntermediateToCompleteOpenAiMessages(messages) {
    return messages.map((message) => {
        if (message.content != null && LlamaText.isLlamaText(message.content))
            return {
                ...message,
                content: message.content.toString()
            };
        return message;
    });
}
export function fromChatHistoryToIntermediateOpenAiMessages({ chatHistory, chatWrapperSettings, functionCalls, functions, useRawValues = true, combineModelMessageAndToolCalls = true, stringifyFunctionParams = true, stringifyFunctionResults = true, squashModelTextResponses = true }) {
    const messages = [];
    for (let i = 0; i < chatHistory.length; i++) {
        const item = chatHistory[i];
        if (item == null)
            continue;
        if (item.type === "system")
            messages.push({
                role: "system",
                content: LlamaText.fromJSON(item.text)
            });
        else if (item.type === "user")
            messages.push({
                role: "user",
                content: item.text
            });
        else if (item.type === "model") {
            let lastModelTextMessage = null;
            const segmentStack = [];
            let canUseLastAssistantMessage = squashModelTextResponses;
            const addResponseText = (text) => {
                const lastResItem = canUseLastAssistantMessage
                    ? messages.at(-1)
                    : undefined;
                if (lastResItem?.role === "assistant" && (lastResItem.tool_calls == null || lastResItem.tool_calls.length === 0)) {
                    if (lastResItem.content == null)
                        lastResItem.content = text;
                    else
                        lastResItem.content = LlamaText([lastResItem.content, text]);
                }
                else {
                    lastModelTextMessage = {
                        role: "assistant",
                        content: text
                    };
                    messages.push(lastModelTextMessage);
                    canUseLastAssistantMessage = true;
                }
            };
            for (let j = 0; j < item.response.length; j++) {
                const response = item.response[j];
                if (response == null)
                    continue;
                if (typeof response === "string")
                    addResponseText(response);
                else if (response.type === "segment") {
                    const segmentDefinition = getChatWrapperSegmentDefinition(chatWrapperSettings, response.segmentType);
                    if (response.raw != null && useRawValues)
                        addResponseText(LlamaText.fromJSON(response.raw));
                    else
                        addResponseText(LlamaText([
                            (segmentStack.length > 0 && segmentStack.at(-1) === response.segmentType)
                                ? ""
                                : segmentDefinition?.prefix ?? "",
                            response.text,
                            response.ended
                                ? (segmentDefinition?.suffix ?? "")
                                : ""
                        ]));
                    if (!response.ended && segmentStack.at(-1) !== response.segmentType)
                        segmentStack.push(response.segmentType);
                    else if (response.ended && segmentStack.at(-1) === response.segmentType) {
                        segmentStack.pop();
                        if (segmentStack.length === 0 && segmentDefinition?.suffix == null &&
                            chatWrapperSettings.segments?.closeAllSegments != null)
                            addResponseText(LlamaText(chatWrapperSettings.segments.closeAllSegments));
                    }
                }
                else if (response.type === "functionCall") {
                    const toolCallId = generateToolCallId(i, j);
                    if (lastModelTextMessage == null ||
                        (!combineModelMessageAndToolCalls && lastModelTextMessage.content != null && lastModelTextMessage.content !== "") ||
                        (response.startsNewChunk &&
                            lastModelTextMessage.tool_calls != null && lastModelTextMessage.tool_calls.length > 0)) {
                        lastModelTextMessage = {
                            role: "assistant"
                        };
                        messages.push(lastModelTextMessage);
                    }
                    lastModelTextMessage["tool_calls"] ||= [];
                    lastModelTextMessage["tool_calls"].push({
                        id: toolCallId,
                        type: "function",
                        function: {
                            name: response.name,
                            arguments: stringifyFunctionParams
                                ? response.params === undefined
                                    ? ""
                                    : jsonDumps(response.params)
                                : response.params
                        }
                    });
                    messages.push({
                        role: "tool",
                        "tool_call_id": toolCallId,
                        content: stringifyFunctionResults
                            ? response.result === undefined
                                ? ""
                                : jsonDumps(response.result)
                            : response.result
                    });
                }
            }
            addResponseText("");
        }
        else
            void item;
    }
    if (functionCalls != null && functionCalls.length > 0) {
        let modelMessage = messages.at(-1);
        const messageIndex = chatHistory.length - 1;
        const functionCallStartIndex = modelMessage?.role === "assistant"
            ? (modelMessage.tool_calls?.length ?? 0)
            : 0;
        if (modelMessage?.role !== "assistant" ||
            (!combineModelMessageAndToolCalls && modelMessage.content != null && modelMessage.content !== "")) {
            modelMessage = {
                role: "assistant"
            };
            messages.push(modelMessage);
        }
        modelMessage["tool_calls"] ||= [];
        for (let i = 0; i < functionCalls.length; i++) {
            const functionCall = functionCalls[i];
            if (functionCall == null)
                continue;
            const toolCallId = generateToolCallId(messageIndex, functionCallStartIndex + i);
            modelMessage["tool_calls"].push({
                id: toolCallId,
                type: "function",
                function: {
                    name: functionCall.functionName,
                    arguments: stringifyFunctionParams
                        ? functionCall.params === undefined
                            ? ""
                            : jsonDumps(functionCall.params)
                        : functionCall.params
                }
            });
        }
    }
    const tools = [];
    for (const [funcName, func] of Object.entries(functions ?? {}))
        tools.push({
            type: "function",
            function: {
                name: funcName,
                ...removeUndefinedFields({
                    description: func.description,
                    parameters: func.params
                })
            }
        });
    return removeUndefinedFields({
        messages,
        tools: tools.length > 0
            ? tools
            : undefined
    });
}
function fromOpenAiMessagesToChatHistory({ messages, chatWrapper }) {
    const chatHistory = [];
    const pendingFunctionCalls = [];
    const findToolCallResult = (startIndex, toolCallId, toolCallIndex) => {
        let foundToolIndex = 0;
        for (let i = startIndex; i < messages.length; i++) {
            const message = messages[i];
            if (message == null)
                continue;
            if (message.role === "user" || message.role === "assistant")
                break;
            if (message.role !== "tool")
                continue;
            if (toolCallId == null) {
                if (toolCallIndex === foundToolIndex)
                    return message;
                // once past the requested index, no later tool message can match
                else if (foundToolIndex > toolCallIndex)
return undefined;
|
||||
}
|
||||
else if (message?.tool_call_id === toolCallId)
|
||||
return message;
|
||||
foundToolIndex++;
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
let lastUserOrAssistantMessageIndex = messages.length - 1;
|
||||
for (let i = messages.length - 1; i >= 0; i--) {
|
||||
const message = messages[i];
|
||||
if (message == null)
|
||||
continue;
|
||||
if (message.role === "user" || message.role === "assistant") {
|
||||
lastUserOrAssistantMessageIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
for (let i = 0; i < messages.length; i++) {
|
||||
const message = messages[i];
|
||||
if (message == null)
|
||||
continue;
|
||||
if (message.role === "system") {
|
||||
if (message.content != null)
|
||||
chatHistory.push({
|
||||
type: "system",
|
||||
text: LlamaText(resolveOpenAiText(message.content)).toJSON()
|
||||
});
|
||||
}
|
||||
else if (message.role === "user")
|
||||
chatHistory.push({
|
||||
type: "user",
|
||||
text: resolveOpenAiText(message.content) ?? ""
|
||||
});
|
||||
else if (message.role === "assistant") {
|
||||
const isLastAssistantMessage = i === lastUserOrAssistantMessageIndex;
|
||||
let chatItem = chatHistory.at(-1);
|
||||
if (chatItem?.type !== "model") {
|
||||
chatItem = {
|
||||
type: "model",
|
||||
response: []
|
||||
};
|
||||
chatHistory.push(chatItem);
|
||||
}
|
||||
const text = resolveOpenAiText(message.content);
|
||||
if (text != null && text !== "") {
|
||||
const segmentDefinitions = new Map();
|
||||
for (const segmentType of allSegmentTypes) {
|
||||
const segmentDefinition = getChatWrapperSegmentDefinition(chatWrapper.settings, segmentType);
|
||||
if (segmentDefinition != null)
|
||||
segmentDefinitions.set(segmentType, {
|
||||
prefix: LlamaText(segmentDefinition.prefix).toString(),
|
||||
suffix: segmentDefinition.suffix != null
|
||||
? LlamaText(segmentDefinition.suffix).toString()
|
||||
: undefined
|
||||
});
|
||||
}
|
||||
const modelResponseSegments = segmentModelResponseText(text, {
|
||||
segmentDefinitions,
|
||||
closeAllSegments: chatWrapper.settings.segments?.closeAllSegments != null
|
||||
? LlamaText(chatWrapper.settings.segments.closeAllSegments).toString()
|
||||
: undefined
|
||||
});
|
||||
for (const segment of modelResponseSegments) {
|
||||
if (segment.type == null) {
|
||||
if (typeof chatItem.response.at(-1) === "string")
|
||||
chatItem.response[chatItem.response.length - 1] += segment.text;
|
||||
else
|
||||
chatItem.response.push(segment.text);
|
||||
}
|
||||
else
|
||||
chatItem.response.push({
|
||||
type: "segment",
|
||||
segmentType: segment.type,
|
||||
text: segment.text,
|
||||
ended: segment.ended
|
||||
});
|
||||
}
|
||||
}
|
||||
let toolCallIndex = 0;
|
||||
for (const toolCall of message.tool_calls ?? []) {
|
||||
const functionName = toolCall.function.name;
|
||||
const callParams = parseToolSerializedValue(toolCall.function.arguments);
|
||||
const toolCallResult = findToolCallResult(i + 1, toolCall.id, toolCallIndex);
|
||||
if (toolCallResult == null) {
|
||||
pendingFunctionCalls.push({
|
||||
functionName,
|
||||
params: callParams,
|
||||
raw: chatWrapper.generateFunctionCall(functionName, callParams).toJSON()
|
||||
});
|
||||
}
|
||||
if (toolCallResult != null || !isLastAssistantMessage)
|
||||
chatItem.response.push({
|
||||
type: "functionCall",
|
||||
name: functionName,
|
||||
params: callParams,
|
||||
result: parseToolSerializedValue(toolCallResult?.content),
|
||||
startsNewChunk: toolCallIndex === 0
|
||||
? true
|
||||
: undefined
|
||||
});
|
||||
toolCallIndex++;
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
chatHistory,
|
||||
functionCalls: pendingFunctionCalls
|
||||
};
|
||||
}
|
||||
function generateToolCallId(messageIndex, callIndex) {
    const length = 9;
    const start = "fc_" + String(messageIndex) + "_";
    return start + String(callIndex).padStart(length - start.length, "0");
}
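// Usage sketch (illustrative values): the id embeds the message index, and the call index
// is zero-padded so the part after "fc_" + messageIndex + "_" fills the id to a fixed width:
//   generateToolCallId(2, 5)   // => "fc_2_0005"
//   generateToolCallId(12, 3)  // => "fc_12_003"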
export function resolveOpenAiText(text) {
    if (typeof text === "string")
        return text;
    if (text instanceof Array)
        return text.map((item) => item?.text ?? "").join("");
    return null;
}
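// Usage sketch (illustrative values): plain strings pass through, content-part arrays
// are flattened by concatenating their text fields, and anything else resolves to null:
//   resolveOpenAiText("hello")                                                        // => "hello"
//   resolveOpenAiText([{ type: "text", text: "hel" }, { type: "text", text: "lo" }])  // => "hello"
//   resolveOpenAiText(undefined)                                                      // => null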
function parseToolSerializedValue(value) {
    const text = resolveOpenAiText(value);
    if (text == null || text === "")
        return undefined;
    try {
        return JSON.parse(text);
    }
    catch (err) {
        return text;
    }
}
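// Usage sketch (illustrative values): JSON text is parsed, non-JSON text is kept verbatim,
// and empty or missing input yields undefined:
//   parseToolSerializedValue('{"city": "Paris"}')  // => { city: "Paris" }
//   parseToolSerializedValue("not json")           // => "not json"
//   parseToolSerializedValue("")                   // => undefined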
function segmentModelResponseText(text, { segmentDefinitions, closeAllSegments }) {
    const separatorActions = new Map();
    for (const [segmentType, { prefix, suffix }] of segmentDefinitions) {
        separatorActions.set(prefix, { type: "prefix", segmentType });
        if (suffix != null)
            separatorActions.set(suffix, { type: "suffix", segmentType });
    }
    if (closeAllSegments != null)
        separatorActions.set(closeAllSegments, { type: "closeAll" });
    const textParts = splitText(text, [...separatorActions.keys()]);
    const segments = [];
    const stack = [];
    const stackSet = new Set();
    const pushTextToLastSegment = (text) => {
        const lastSegment = segments.at(-1);
        if (lastSegment != null && !lastSegment.ended)
            lastSegment.text += text;
        else
            segments.push({
                type: undefined,
                text,
                ended: false
            });
    };
    for (const item of textParts) {
        if (typeof item === "string" || !separatorActions.has(item.separator))
            pushTextToLastSegment(typeof item === "string"
                ? item
                : item.separator);
        else {
            const action = separatorActions.get(item.separator);
            if (action.type === "closeAll") {
                while (stack.length > 0) {
                    const segmentType = stack.pop();
                    stackSet.delete(segmentType);
                    const lastSegment = segments.at(-1);
                    if (lastSegment != null && lastSegment.type != undefined && lastSegment.type === segmentType)
                        lastSegment.ended = true;
                    else
                        segments.push({ type: segmentType, text: "", ended: true });
                }
            }
            else if (action.type === "prefix") {
                if (!stackSet.has(action.segmentType)) {
                    stack.push(action.segmentType);
                    stackSet.add(action.segmentType);
                    segments.push({ type: action.segmentType, text: "", ended: false });
                }
                else
                    pushTextToLastSegment(item.separator);
            }
            else if (action.type === "suffix") {
                const currentType = stack.at(-1);
                if (currentType != null && currentType === action.segmentType) {
                    const lastSegment = segments.at(-1);
                    if (lastSegment != null && lastSegment.type != null && lastSegment.type === action.segmentType) {
                        lastSegment.ended = true;
                        stack.pop();
                        stackSet.delete(action.segmentType);
                    }
                    else
                        segments.push({ type: action.segmentType, text: "", ended: true });
                }
                else {
                    const segmentTypeIndex = stack.lastIndexOf(action.segmentType);
                    if (segmentTypeIndex < 0)
                        pushTextToLastSegment(item.separator);
                    else {
                        for (let i = stack.length - 1; i >= segmentTypeIndex; i--) {
                            const segmentType = stack.pop();
                            stackSet.delete(segmentType);
                            segments.push({ type: segmentType, text: "", ended: true });
                        }
                    }
                }
            }
        }
    }
    return segments;
}
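// Usage sketch (the "thought" segment type and its tags are illustrative):
//   segmentModelResponseText("<think>plan</think>done", {
//       segmentDefinitions: new Map([
//           ["thought", { prefix: "<think>", suffix: "</think>" }]
//       ])
//   })
//   // => [{ type: "thought", text: "plan", ended: true },
//   //     { type: undefined, text: "done", ended: false }]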
//# sourceMappingURL=OpenAIFormat.js.map
1
node_modules/node-llama-cpp/dist/utils/OpenAIFormat.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
node_modules/node-llama-cpp/dist/utils/OverridesObject.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
/**
 * Makes all the properties of an object optional, including nested objects,
 * and strips all keys whose value is not of the specified allowed value types.
 */
export type OverridesObject<T, AllowedValueTypes> = T extends object ? {
    [P in keyof T]?: OverridesObject<T[P], AllowedValueTypes>;
} : T extends Array<infer I> ? AllowedValueTypes extends Array<any> ? Array<OverridesObject<I, AllowedValueTypes>> : never : T extends ReadonlyArray<infer I> ? AllowedValueTypes extends ReadonlyArray<any> ? ReadonlyArray<OverridesObject<I, AllowedValueTypes>> : never : AllowedValueTypes extends T ? T : never;
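// Usage sketch (the shapes are illustrative): given
//   type Config = { gpu: { layers: number, enabled: boolean }, name: string };
// the type OverridesObject<Config, number | boolean> makes every property optional and
// recursively keeps only number/boolean leaves, so "gpu.layers" and "gpu.enabled" stay
// overridable while "name" resolves to never (a string is not an allowed value type).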
2
node_modules/node-llama-cpp/dist/utils/OverridesObject.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=OverridesObject.js.map
1
node_modules/node-llama-cpp/dist/utils/OverridesObject.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"OverridesObject.js","sourceRoot":"","sources":["../../src/utils/OverridesObject.ts"],"names":[],"mappings":""}
9
node_modules/node-llama-cpp/dist/utils/ReplHistory.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
export declare class ReplHistory {
    private readonly _filePath;
    private _fileContent;
    private constructor();
    add(line: string): Promise<void>;
    get history(): readonly string[];
    private _addItemToHistory;
    static load(filePath: string, saveAndLoadHistory?: boolean): Promise<ReplHistory>;
}
72
node_modules/node-llama-cpp/dist/utils/ReplHistory.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
import path from "path";
import fs from "fs-extra";
import { withLock } from "lifecycle-utils";
const emptyHistory = {
    history: []
};
export class ReplHistory {
    _filePath;
    _fileContent;
    constructor(filePath, fileContent) {
        this._filePath = filePath;
        this._fileContent = fileContent;
    }
    async add(line) {
        if (this._filePath == null) {
            this._fileContent = this._addItemToHistory(line, this._fileContent);
            return;
        }
        await withLock([this, "file"], async () => {
            try {
                const json = parseReplJsonfile(await fs.readJSON(this._filePath));
                this._fileContent = this._addItemToHistory(line, json);
                await fs.ensureDir(path.dirname(this._filePath));
                await fs.writeJSON(this._filePath, this._fileContent, {
                    spaces: 4
                });
            }
            catch (err) { }
        });
    }
    get history() {
        return this._fileContent.history;
    }
    _addItemToHistory(item, fileContent) {
        const newHistory = fileContent.history.slice();
        const currentItemIndex = newHistory.indexOf(item);
        if (currentItemIndex !== -1)
            newHistory.splice(currentItemIndex, 1);
        newHistory.unshift(item);
        return {
            ...fileContent,
            history: newHistory
        };
    }
    static async load(filePath, saveAndLoadHistory = true) {
        if (!saveAndLoadHistory)
            return new ReplHistory(null, {
                history: []
            });
        try {
            if (!(await fs.pathExists(filePath))) {
                await fs.ensureDir(path.dirname(filePath));
                await fs.writeJSON(filePath, emptyHistory, {
                    spaces: 4
                });
            }
            const json = parseReplJsonfile(await fs.readJSON(filePath));
            return new ReplHistory(filePath, json);
        }
        catch (err) {
            return new ReplHistory(null, {
                history: []
            });
        }
    }
}
function parseReplJsonfile(file) {
    if (typeof file !== "object" || file == null || !("history" in file) || !(file.history instanceof Array) || file.history.some((item) => typeof item !== "string"))
        throw new Error("Invalid ReplHistory file");
    return file;
}
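// Usage sketch (the file path is illustrative):
//   const history = await ReplHistory.load("/tmp/my-repl-history.json");
//   await history.add("some input line");
//   history.history;  // => ["some input line", ...] (most recent first)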
//# sourceMappingURL=ReplHistory.js.map
1
node_modules/node-llama-cpp/dist/utils/ReplHistory.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ReplHistory.js","sourceRoot":"","sources":["../../src/utils/ReplHistory.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,MAAM,UAAU,CAAC;AAC1B,OAAO,EAAC,QAAQ,EAAC,MAAM,iBAAiB,CAAC;AAMzC,MAAM,YAAY,GAAqB;IACnC,OAAO,EAAE,EAAE;CACd,CAAC;AAEF,MAAM,OAAO,WAAW;IACH,SAAS,CAAgB;IAClC,YAAY,CAAmB;IAEvC,YAAoB,QAAuB,EAAE,WAA6B;QACtE,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,YAAY,GAAG,WAAW,CAAC;IACpC,CAAC;IAEM,KAAK,CAAC,GAAG,CAAC,IAAY;QACzB,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;YACpE,OAAO;QACX,CAAC;QAED,MAAM,QAAQ,CAAC,CAAC,IAAmB,EAAE,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE;YACrD,IAAI,CAAC;gBACD,MAAM,IAAI,GAAG,iBAAiB,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAU,CAAC,CAAC,CAAC;gBACnE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;gBAEvD,MAAM,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAU,CAAC,CAAC,CAAC;gBAClD,MAAM,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,SAAU,EAAE,IAAI,CAAC,YAAY,EAAE;oBACnD,MAAM,EAAE,CAAC;iBACZ,CAAC,CAAC;YACP,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC,CAAA,CAAC;QACpB,CAAC,CAAC,CAAC;IACP,CAAC;IAED,IAAW,OAAO;QACd,OAAO,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC;IACrC,CAAC;IAEO,iBAAiB,CAAC,IAAY,EAAE,WAA6B;QACjE,MAAM,UAAU,GAAG,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;QAC/C,MAAM,gBAAgB,GAAG,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAElD,IAAI,gBAAgB,KAAK,CAAC,CAAC;YACvB,UAAU,CAAC,MAAM,CAAC,gBAAgB,EAAE,CAAC,CAAC,CAAC;QAE3C,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAEzB,OAAO;YACH,GAAG,WAAW;YACd,OAAO,EAAE,UAAU;SACtB,CAAC;IACN,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAgB,EAAE,qBAA8B,IAAI;QACzE,IAAI,CAAC,kBAAkB;YACnB,OAAO,IAAI,WAAW,CAAC,IAAI,EAAE;gBACzB,OAAO,EAAE,EAAE;aACd,CAAC,CAAC;QAEP,IAAI,CAAC;YACD,IAAI,CAAC,CAAC,MAAM,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC;gBACnC,MAAM,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAC3C,MAAM,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,YAAY,EAAE;oBACvC,MAAM,EAAE,CAAC;iBACZ,CAAC,CAAC;YACP,CAAC;YAED,MAAM,IAAI,GAAG,iBAAiB,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;YAC5D,OAAO,IAAI,WAAW,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAC3C,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACX,OAAO,IAAI,WAAW,CAAC,IAAI,EAAE;gBACzB,OAAO,EAAE,EAAE;aACd,CAAC,CAAC;QACP,CAAC;IACL,CAAC;CACJ;AAED,SAAS,iBAAiB,CAAC,IAAa;IACpC,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,CAAC,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,YAAY,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC;QAC7J,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;IAEjD,OAAO,IAAwB,CAAC;AACpC,CAAC"}
47
node_modules/node-llama-cpp/dist/utils/StopGenerationDetector.d.ts
generated
vendored
Normal file
@@ -0,0 +1,47 @@
import { Token, Tokenizer } from "../types.js";
import { LlamaText } from "./LlamaText.js";
import { QueuedTokenRelease, QueuedTokenReleaseLock } from "./TokenStreamRegulator.js";
export type StopGenerationTrigger = (string | Token)[];
export declare class StopGenerationDetector<T extends string = string> {
    recordGeneration({ text, tokens, queuedTokenRelease, startNewChecks, triggerMustStartWithGeneration }: {
        text: string;
        tokens: Token[];
        queuedTokenRelease?: QueuedTokenRelease;
        startNewChecks?: boolean;
        triggerMustStartWithGeneration?: boolean;
    }): void;
    addStopTrigger(stopTrigger: StopGenerationTrigger, completeEvent?: T): this;
    /** Whether there are some stops that have been found and triggered. */
    get hasTriggeredStops(): boolean;
    /** Whether there are some stops that have been found, but not triggered yet. */
    get hasInProgressStops(): boolean;
    /** Gets the stops that have been found and triggered. */
    getTriggeredStops(): TriggeredStop<T>[];
    clearTriggeredStops(): void;
    clearInProgressStops(): void;
    get hasTriggers(): boolean;
    /**
     * For a given generation, get the number of possibilities that would be disregarded if the generation is recorded.
     *
     * Calling this function does not change the state of the detector.
     */
    getDisregardedPossibilitiesCountForAGeneration({ text, tokens, startNewChecks }: {
        text: string;
        tokens: Token[];
        /** Setting this to `true` implies that `triggerMustStartWithGeneration` is also `true` */
        startNewChecks: boolean;
    }): number;
    static resolveStopTriggers(stopTriggers: readonly (string | Readonly<StopGenerationTrigger> | LlamaText)[], tokenizer: Tokenizer): StopGenerationTrigger[];
    static resolveLlamaTextTrigger(llamaText: LlamaText, tokenizer: Tokenizer): StopGenerationTrigger;
    static getFirstRemainingGenerationAfterStop(triggeredStops: TriggeredStop[]): {
        stopTrigger: StopGenerationTrigger | undefined;
        firstRemainingGenerationAfterStop: string | Token[] | undefined;
    };
    static detokenizeRemainingGeneration(remainingGeneration: string | Token[] | undefined, stopTrigger: StopGenerationTrigger | undefined, tokenizer: Tokenizer, specialTokens?: boolean): string;
}
export type TriggeredStop<T extends string = string> = {
    stopTrigger: StopGenerationTrigger;
    events: T[];
    remainingGeneration: (string | Token[])[];
    queuedTokenReleaseLocks: QueuedTokenReleaseLock[];
};
291
node_modules/node-llama-cpp/dist/utils/StopGenerationDetector.js
generated
vendored
Normal file
@@ -0,0 +1,291 @@
import { SpecialToken, isLlamaText, SpecialTokensText } from "./LlamaText.js";
export class StopGenerationDetector {
    /** @internal */ _stopTriggers = new Map();
    /** @internal */ _activeChecks = new Set();
    /** @internal */ _triggeredStops = new Map();
    recordGeneration({ text, tokens, queuedTokenRelease, startNewChecks = true, triggerMustStartWithGeneration = false }) {
        const currentActiveChecks = this._activeChecks;
        this._activeChecks = new Set();
        for (const check of currentActiveChecks) {
            let checkKept = false;
            if (text.length > 0)
                this._checkTriggerPart(check, text);
            else {
                this._activeChecks.add(check);
                checkKept = true;
            }
            if (tokens.length > 0)
                this._checkTriggerPart(check, tokens);
            else {
                this._activeChecks.add(check);
                checkKept = true;
            }
            if (!checkKept)
                check.queuedTokenReleaseLock?.dispose();
        }
        if (!startNewChecks)
            return;
        for (let i = 0; i < text.length && (!triggerMustStartWithGeneration || i === 0); i++) {
            const char = text[i];
            const currentPart = this._stopTriggers.get(char);
            if (currentPart == null)
                continue;
            const textCheck = {
                queuedTokenReleaseLock: queuedTokenRelease?.createTextIndexLock(i),
                currentPart
            };
            this._checkTriggerPart(textCheck, text.slice(i + 1));
            textCheck.queuedTokenReleaseLock?.dispose();
        }
        for (let i = 0; i < tokens.length && (!triggerMustStartWithGeneration || i === 0); i++) {
            const token = tokens[i];
            const currentPart = this._stopTriggers.get(token);
            if (currentPart == null)
                continue;
            const tokenCheck = {
                queuedTokenReleaseLock: queuedTokenRelease?.createTokenIndexLock(i),
                currentPart
            };
            this._checkTriggerPart(tokenCheck, tokens.slice(i + 1));
            tokenCheck.queuedTokenReleaseLock?.dispose();
        }
    }
    addStopTrigger(stopTrigger, completeEvent) {
        const simplifiedTrigger = simplifyStopTrigger(stopTrigger);
        const triggerValues = simplifiedTrigger
            .map((item) => {
                if (typeof item === "string")
                    return item.split("");
                else
                    return [item];
            })
            .flat(1);
        let currentMap = this._stopTriggers;
        for (let i = 0; i < triggerValues.length; i++) {
            const value = triggerValues[i];
            const isLast = i === triggerValues.length - 1;
            if (!currentMap.has(value)) {
                currentMap.set(value, {
                    next: new Map()
                });
            }
            const part = currentMap.get(value);
            if (isLast) {
                part.next = undefined;
                part.completesTrigger = simplifiedTrigger;
                part.completeEvents = part.completeEvents ?? new Set();
                if (completeEvent != null)
                    part.completeEvents.add(completeEvent);
            }
            else if (part.next == null)
                break;
            else
                currentMap = part.next;
        }
        return this;
    }
    /** Whether there are some stops that have been found and triggered. */
    get hasTriggeredStops() {
        return this._triggeredStops.size > 0;
    }
    /** Whether there are some stops that have been found, but not triggered yet. */
    get hasInProgressStops() {
        return this._activeChecks.size > 0;
    }
    /** Gets the stops that have been found and triggered. */
    getTriggeredStops() {
        const res = [];
        for (const [triggerPart, triggeredStop] of this._triggeredStops.entries()) {
            res.push({
                stopTrigger: triggerPart.completesTrigger,
                events: Array.from(triggerPart.completeEvents ?? new Set()),
                remainingGeneration: Array.from(triggeredStop.remainingGenerations),
                queuedTokenReleaseLocks: Array.from(triggeredStop.queuedTokenReleaseLocks)
            });
        }
        return res;
    }
    clearTriggeredStops() {
        for (const triggeredStop of this._triggeredStops.values()) {
            for (const queuedTokenReleaseLock of triggeredStop.queuedTokenReleaseLocks)
                queuedTokenReleaseLock.dispose();
        }
        this._triggeredStops.clear();
    }
    clearInProgressStops() {
        for (const check of this._activeChecks)
            check.queuedTokenReleaseLock?.dispose();
        this._activeChecks.clear();
    }
    get hasTriggers() {
        return this._stopTriggers.size > 0;
    }
    /**
     * For a given generation, get the number of possibilities that would be disregarded if the generation is recorded.
     *
     * Calling this function does not change the state of the detector.
     */
    getDisregardedPossibilitiesCountForAGeneration({ text, tokens, startNewChecks }) {
        let res = 0;
        for (const check of this._activeChecks) {
            const disregardedTextPossibilities = this._getCountOfPossibleTriggersToBeDisregarded(check.currentPart, text);
            const disregardedTokenPossibilities = this._getCountOfPossibleTriggersToBeDisregarded(check.currentPart, tokens);
            res += Math.min(disregardedTextPossibilities, disregardedTokenPossibilities);
        }
        if (startNewChecks) {
            const disregardedTextPossibilities = text.length > 0
                ? this._getCountOfPossibleTriggersToBeDisregarded(this._stopTriggers.get(text[0]), text.slice(1))
                : null;
            const disregardedTokenPossibilities = tokens.length > 0
                ? this._getCountOfPossibleTriggersToBeDisregarded(this._stopTriggers.get(tokens[0]), tokens.slice(1))
                : null;
            if (disregardedTextPossibilities != null && disregardedTokenPossibilities != null)
                res += Math.min(disregardedTextPossibilities, disregardedTokenPossibilities);
            else if (disregardedTextPossibilities != null)
                res += disregardedTextPossibilities;
            else if (disregardedTokenPossibilities != null)
                res += disregardedTokenPossibilities;
        }
        return res;
    }
    /** @internal */
    _addFoundStop(part, remainingGeneration, queuedTokenReleaseLock) {
        if (!this._triggeredStops.has(part))
            this._triggeredStops.set(part, {
                remainingGenerations: new Set(),
                queuedTokenReleaseLocks: new Set()
            });
        const triggeredStop = this._triggeredStops.get(part);
        if (remainingGeneration != null)
            triggeredStop.remainingGenerations.add(remainingGeneration);
        if (queuedTokenReleaseLock != null)
            triggeredStop.queuedTokenReleaseLocks.add(queuedTokenReleaseLock);
    }
    /** @internal */
    _getCountOfPossibleTriggersToBeDisregarded(initialPart, value) {
        if (initialPart == null)
            return 0;
        let part = initialPart;
        let res = 0;
        for (let i = 0; i < value.length && part != null; i++) {
            const item = value[i];
            if (part.next == null)
                return res + 1;
            if (part.next.has(item)) {
                res += part.next.size - 1;
                part = part.next.get(item);
                continue;
            }
            return res + part.next.size;
        }
        if (part == null || part.next == null)
            return res + 1;
        return res;
    }
    /** @internal */
    _checkTriggerPart(check, value) {
        if (check == null)
            return false;
        let part = check.currentPart;
        for (let i = 0; i < value.length && part != null; i++) {
            const item = value[i];
            if (part.next == null) {
                this._addFoundStop(part, value.slice(i), check.queuedTokenReleaseLock?.duplicate?.());
                return true;
            }
            if (part.next.has(item)) {
                part = part.next.get(item);
                continue;
            }
            return false;
        }
        if (part == null)
            return false;
        if (part.next == null) {
            this._addFoundStop(part, undefined, check.queuedTokenReleaseLock?.duplicate?.());
            return true;
        }
        else {
            this._activeChecks.add({
                ...check,
                currentPart: part,
                queuedTokenReleaseLock: check.queuedTokenReleaseLock?.duplicate?.()
            });
            return true;
        }
    }
    static resolveStopTriggers(stopTriggers, tokenizer) {
        return stopTriggers
            .map((stopTrigger) => {
                if (isLlamaText(stopTrigger))
                    return StopGenerationDetector.resolveLlamaTextTrigger(stopTrigger, tokenizer);
                else if (typeof stopTrigger === "string")
                    return simplifyStopTrigger([stopTrigger]);
                else
                    return simplifyStopTrigger(stopTrigger);
            })
            .filter((stopTrigger) => stopTrigger.length > 0);
    }
    static resolveLlamaTextTrigger(llamaText, tokenizer) {
        return simplifyStopTrigger(llamaText.values
            .filter((value) => value !== "")
            .map((value) => {
                if (typeof value === "string")
                    return [value];
                else if (value instanceof SpecialToken)
                    return value.tokenize(tokenizer);
                else if (value instanceof SpecialTokensText)
                    return value.tokenizeSpecialTokensOnly(tokenizer);
                return value;
            })
            .flat(1));
    }
    static getFirstRemainingGenerationAfterStop(triggeredStops) {
        const [stopTrigger] = triggeredStops
            .filter((stopTrigger) => (stopTrigger.remainingGeneration.some((remainingGeneration) => remainingGeneration.length > 0)));
        return {
            stopTrigger: stopTrigger?.stopTrigger ?? triggeredStops?.[0]?.stopTrigger,
            firstRemainingGenerationAfterStop: stopTrigger?.remainingGeneration?.filter((remainingGeneration) => remainingGeneration.length > 0)?.[0]
        };
    }
    static detokenizeRemainingGeneration(remainingGeneration, stopTrigger, tokenizer, specialTokens = false) {
        if (remainingGeneration == null || remainingGeneration.length === 0)
            return "";
        if (typeof remainingGeneration === "string")
            return remainingGeneration;
        return tokenizer.detokenize(remainingGeneration, specialTokens, tokenizeStopTrigger(stopTrigger, tokenizer, specialTokens));
    }
}
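// Usage sketch (the stop trigger is illustrative):
//   const detector = new StopGenerationDetector();
//   detector.addStopTrigger(["\n\n"]);
//   detector.recordGeneration({ text: "Hello\n\nWorld", tokens: [] });
//   detector.hasTriggeredStops;  // => true, with "World" kept as the remaining generation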
function simplifyStopTrigger(stopTrigger) {
    let text = "";
    const res = [];
    for (const item of stopTrigger) {
        if (typeof item === "string") {
            text += item;
            continue;
        }
        if (text !== "") {
            res.push(text);
            text = "";
        }
        res.push(item);
    }
    if (text !== "")
        res.push(text);
    return res;
}
function tokenizeStopTrigger(stopTrigger, tokenizer, specialTokens = false) {
    if (stopTrigger == null)
        return [];
    const res = [];
    for (const item of stopTrigger) {
        if (typeof item === "string") {
            const tokens = tokenizer(item, specialTokens, "trimLeadingSpace");
            res.push(...tokens);
        }
        else
            res.push(item);
    }
    return res;
}
//# sourceMappingURL=StopGenerationDetector.js.map
1
node_modules/node-llama-cpp/dist/utils/StopGenerationDetector.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
32
node_modules/node-llama-cpp/dist/utils/ThreadsSplitter.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
import { DisposableHandle } from "lifecycle-utils";
import type { Promisable } from "./transformPromisable.js";
export declare class ThreadsSplitter {
    private readonly _threadDemands;
    private readonly _threadFreeCallbacks;
    private _activeThreads;
    private _totalWantedThreads;
    maxThreads: number;
    /**
     * Set to `0` to disable the limit
     * @param maxThreads
     */
    constructor(maxThreads: number);
    createConsumer(wantedThreads: number, minThreads?: number): ThreadsSplitterConsumer;
    normalizeThreadsValue(threads: number): number;
    private _callOnActiveThreadsFreeIfCan;
    private _calculateIdealProportion;
}
export declare class ThreadsSplitterConsumer {
    private readonly _threadsSplitter;
    private readonly _wantedThreads;
    private readonly _demandedThreads;
    private readonly _wantedThreadsGcRegistry;
    private readonly _demandedThreadsGcRegistry;
    private _usedThreads;
    private _disposed;
    constructor(threadsSplitter: ThreadsSplitter, wantedThreads: number, minThreads: number);
    [Symbol.dispose](): void;
    dispose(): void;
    getAllocationToConsume(): Promisable<[threadsToUse: number, usageHandle: DisposableHandle]>;
    private _getAsyncAllocationToConsume;
}
177
node_modules/node-llama-cpp/dist/utils/ThreadsSplitter.js
generated
vendored
Normal file
@@ -0,0 +1,177 @@
import { DisposedError, DisposableHandle } from "lifecycle-utils";
export class ThreadsSplitter {
    _threadDemands = new MaxNumberCollection();
    _threadFreeCallbacks = [];
    _activeThreads = 0;
    _totalWantedThreads = 0;
    maxThreads;
    /**
     * Set to `0` to disable the limit
     * @param maxThreads
     */
    constructor(maxThreads) {
        this.maxThreads = Math.floor(Math.max(0, maxThreads));
        this._removeWantedThreads = this._removeWantedThreads.bind(this);
        this._removeThreadDemand = this._removeThreadDemand.bind(this);
    }
    createConsumer(wantedThreads, minThreads = 1) {
        if (wantedThreads !== 0 && minThreads > wantedThreads)
            minThreads = wantedThreads;
        if (this.maxThreads !== 0 && wantedThreads === 0)
            wantedThreads = this.maxThreads;
        return new ThreadsSplitterConsumer(this, wantedThreads, minThreads);
    }
    normalizeThreadsValue(threads) {
        if (this.maxThreads === 0)
            return Math.floor(Math.max(0, threads));
        return Math.floor(Math.max(0, Math.min(this.maxThreads, threads)));
    }
    /** @internal */
    _getUpdatedActiveThreads(inUsed, wanted, demanded) {
        const initialActiveThreads = this._activeThreads;
        if (inUsed > wanted)
            this._activeThreads -= inUsed - wanted;
        const idealThreads = this._calculateIdealProportion(wanted, demanded);
        let allocatedThreads = Math.min(inUsed, wanted); // already allocated
        if (allocatedThreads === idealThreads) {
            this._callOnActiveThreadsFreeIfCan(initialActiveThreads);
            return idealThreads;
        }
        else if (allocatedThreads > idealThreads) {
            this._activeThreads -= allocatedThreads - idealThreads;
            this._callOnActiveThreadsFreeIfCan(initialActiveThreads);
            return idealThreads;
        }
        const neededThreads = idealThreads - allocatedThreads;
        const availableThreads = this.maxThreads - this._activeThreads;
        if (neededThreads <= availableThreads) {
            this._activeThreads += neededThreads;
            this._callOnActiveThreadsFreeIfCan(initialActiveThreads);
            return idealThreads;
        }
        allocatedThreads += availableThreads;
        this._activeThreads += availableThreads;
        this._callOnActiveThreadsFreeIfCan(initialActiveThreads);
        return allocatedThreads;
    }
    _callOnActiveThreadsFreeIfCan(lastActiveThreads) {
        if (this._activeThreads >= lastActiveThreads)
            return;
        while (this._threadFreeCallbacks.length > 0)
            this._threadFreeCallbacks.shift()?.();
    }
    _calculateIdealProportion(wantedThreads, demandedThreads) {
        return Math.min(wantedThreads, Math.max(demandedThreads, Math.ceil((wantedThreads / this._totalWantedThreads) *
            Math.max(1, this.maxThreads - (Math.max(demandedThreads, this._threadDemands.maxNumber) - demandedThreads)))));
    }
    /** @internal */
    _waitForFreeThread() {
        return new Promise((resolve) => this._threadFreeCallbacks.push(resolve));
    }
    /** @internal */
    _addWantedThreads(wantedThreads) {
        this._totalWantedThreads += wantedThreads;
    }
    /** @internal */
    _removeWantedThreads(wantedThreads) {
        this._totalWantedThreads -= wantedThreads;
    }
    /** @internal */
    _addThreadDemand(demandedThreads) {
        this._threadDemands.add(demandedThreads);
    }
    /** @internal */
    _removeThreadDemand(demandedThreads) {
        const isHighestDemand = this._threadDemands.maxNumber === demandedThreads;
        this._threadDemands.remove(demandedThreads);
        if (demandedThreads !== 0 && isHighestDemand && this._threadDemands.maxNumber !== demandedThreads) {
            while (this._threadFreeCallbacks.length > 0)
                this._threadFreeCallbacks.shift()?.();
        }
    }
}
export class ThreadsSplitterConsumer {
    _threadsSplitter;
    _wantedThreads;
    _demandedThreads;
    _wantedThreadsGcRegistry;
    _demandedThreadsGcRegistry;
    _usedThreads = 0;
    _disposed = false;
    constructor(threadsSplitter, wantedThreads, minThreads) {
        this._threadsSplitter = threadsSplitter;
        this._wantedThreads = wantedThreads;
        this._demandedThreads = minThreads;
        this._threadsSplitter._addWantedThreads(this._wantedThreads);
        this._threadsSplitter._addThreadDemand(this._demandedThreads);
        this._wantedThreadsGcRegistry = new FinalizationRegistry(this._threadsSplitter._removeWantedThreads);
        this._wantedThreadsGcRegistry.register(this, this._wantedThreads);
        this._demandedThreadsGcRegistry = new FinalizationRegistry(this._threadsSplitter._removeThreadDemand);
        this._demandedThreadsGcRegistry.register(this, this._demandedThreads);
    }
    [Symbol.dispose]() {
        this.dispose();
    }
    dispose() {
        if (this._disposed)
            return;
        this._disposed = true;
        this._threadsSplitter._removeWantedThreads(this._wantedThreads);
        this._threadsSplitter._removeThreadDemand(this._demandedThreads);
        this._wantedThreadsGcRegistry.unregister(this);
        this._demandedThreadsGcRegistry.unregister(this);
    }
    getAllocationToConsume() {
        if (this._disposed)
            throw new DisposedError();
        if (this._threadsSplitter.maxThreads === 0)
            return [this._wantedThreads, new DisposableHandle(() => { })];
        return this._getAsyncAllocationToConsume();
    }
    async _getAsyncAllocationToConsume() {
        do {
            this._usedThreads = this._threadsSplitter._getUpdatedActiveThreads(this._usedThreads, this._wantedThreads, this._demandedThreads);
            if (this._usedThreads < this._demandedThreads) {
                this._usedThreads = this._threadsSplitter._getUpdatedActiveThreads(this._usedThreads, 0, 0);
                await this._threadsSplitter._waitForFreeThread();
            }
        } while (this._usedThreads < this._demandedThreads);
        return [this._usedThreads, new DisposableHandle(() => {
            this._usedThreads = this._threadsSplitter._getUpdatedActiveThreads(this._usedThreads, 0, 0);
        })];
    }
}
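// Usage sketch (the thread counts are illustrative):
//   const splitter = new ThreadsSplitter(8);
//   const consumer = splitter.createConsumer(6, 2);
//   const [threadsToUse, usageHandle] = await consumer.getAllocationToConsume();
//   try { /* run the work with threadsToUse threads */ }
//   finally { usageHandle.dispose(); consumer.dispose(); }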
class MaxNumberCollection {
    _countMap = new Map();
    _maxNumber = 0;
    add(number) {
        const count = this._countMap.get(number) ?? 0;
        this._countMap.set(number, count + 1);
        if (number > this._maxNumber)
            this._maxNumber = number;
    }
    remove(number) {
        const count = this._countMap.get(number);
        if (count == null)
            return;
        if (count === 1) {
            this._countMap.delete(number);
            if (number === this._maxNumber)
                this._maxNumber = this._findMaxNumber();
        }
        else
            this._countMap.set(number, count - 1);
    }
    get maxNumber() {
        return this._maxNumber;
    }
    _findMaxNumber() {
        let maxNumber = 0;
        for (const number of this._countMap.keys()) {
            if (number > maxNumber)
                maxNumber = number;
        }
        return maxNumber;
    }
}
//# sourceMappingURL=ThreadsSplitter.js.map
1
node_modules/node-llama-cpp/dist/utils/ThreadsSplitter.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
38
node_modules/node-llama-cpp/dist/utils/TokenStreamRegulator.d.ts
generated
vendored
Normal file
@@ -0,0 +1,38 @@
import { Token, Tokenizer } from "../types.js";
export declare class TokenStreamRegulator {
    addChunk({ tokens, text }: {
        tokens: Token[];
        text: string;
    }): QueuedTokenRelease;
    popFreeChunkTokens(): Token[];
    getPartiallyFreeChunk(tokenizer: Tokenizer): {
        tokens: Token[];
        text: string;
    };
    getAllQueuedChunkTokens(): Token[];
    getLastQueuedChunkTokens(maxTokens?: number): Token[];
    clearQueue(): void;
    reset(): void;
    removeChunkIfLast(queuedTokenRelease: QueuedTokenRelease | undefined): boolean;
}
export declare class QueuedTokenRelease {
    private constructor();
    get tokens(): readonly Token[];
    get text(): string;
    get isFree(): boolean;
    get hasTextLocks(): boolean;
    get hasTokenLocks(): boolean;
    get isPartiallyFree(): boolean;
    getFreeTextIndex(): number;
    getFreeTokenIndex(): number;
    createTextIndexLock(startIndex: number): QueuedTokenReleaseLock;
    createTokenIndexLock(startIndex: number): QueuedTokenReleaseLock;
    modifyTokensAndText(tokens: readonly Token[], text: string): void;
}
export declare class QueuedTokenReleaseLock {
    private constructor();
    get index(): number;
    duplicate(): QueuedTokenReleaseLock;
    dispose(): void;
    [Symbol.dispose](): void;
}
200
node_modules/node-llama-cpp/dist/utils/TokenStreamRegulator.js
generated
vendored
Normal file
@@ -0,0 +1,200 @@
import { DisposedError } from "lifecycle-utils";
import { maxRecentDetokenizerTokens } from "../consts.js";
import { pushAll } from "./pushAll.js";
export class TokenStreamRegulator {
    /** @internal */ _queue = [];
    /** @internal */ _LastTokens = [];
    addChunk({ tokens, text }) {
        const queuedRelease = QueuedTokenRelease._create(tokens, text);
        this._queue.push(queuedRelease);
        return queuedRelease;
    }
    popFreeChunkTokens() {
        const res = [];
        while (this._queue.length > 0 && this._queue[0].isFree) {
            const tokens = this._queue.shift().tokens;
            pushAll(res, tokens);
            pushAll(this._LastTokens, tokens);
        }
        if (this._LastTokens.length > maxRecentDetokenizerTokens)
            this._LastTokens.splice(0, this._LastTokens.length - maxRecentDetokenizerTokens);
        return res;
    }
    getPartiallyFreeChunk(tokenizer) {
        if (this._queue.length > 0 && this._queue[0].isPartiallyFree) {
            const queuedRelease = this._queue[0];
            if (queuedRelease.hasTextLocks && !queuedRelease.hasTokenLocks)
                return {
                    tokens: [],
                    text: queuedRelease.text.slice(0, queuedRelease.getFreeTextIndex())
                };
            else if (queuedRelease.hasTokenLocks && !queuedRelease.hasTextLocks) {
                const tokens = queuedRelease.tokens.slice(0, queuedRelease.getFreeTokenIndex());
                return {
                    tokens,
                    text: tokenizer.detokenize(tokens, false, this._LastTokens)
                };
            }
            const freeTokenIndex = queuedRelease.getFreeTokenIndex();
            const tokens = queuedRelease.tokens.slice(0, freeTokenIndex);
            const tokensText = tokenizer.detokenize(tokens, false, this._LastTokens);
            const freeTextIndex = queuedRelease.getFreeTextIndex();
            const text = queuedRelease.text.slice(0, freeTextIndex);
            if (text.length > tokensText.length) {
                return {
                    tokens,
                    text: tokensText
                };
            }
            else if (text.length < tokensText.length) {
                const resTokens = [];
                let resTokensText = "";
                const lastTokens = this._LastTokens.slice();
                for (const token of tokens) {
                    const tokenText = tokenizer.detokenize([token], false, lastTokens);
                    lastTokens.push(token);
                    // ensure partial tokens are detokenized correctly
                    if (resTokensText.length + tokenText.length > text.length)
                        resTokensText = tokenizer.detokenize(resTokens, false, this._LastTokens);
                    if (resTokensText.length + tokenText.length > text.length) {
                        const remainingText = text.slice(resTokensText.length);
                        const remainingTokens = tokenizer(remainingText, false, "trimLeadingSpace");
                        pushAll(resTokens, remainingTokens);
                        break;
                    }
                    resTokens.push(token);
                    resTokensText += tokenText;
                }
                return {
                    tokens: resTokens,
                    text
                };
            }
            return {
                tokens: queuedRelease.tokens.slice(0, freeTokenIndex),
                text: queuedRelease.text.slice(0, freeTextIndex)
            };
        }
        return {
            tokens: [],
            text: ""
        };
    }
    getAllQueuedChunkTokens() {
        return this._queue.flatMap((queuedRelease) => queuedRelease.tokens);
    }
    getLastQueuedChunkTokens(maxTokens = maxRecentDetokenizerTokens) {
        const res = [];
        for (let i = this._queue.length - 1; i >= 0 && res.length < maxTokens; i--) {
            const tokens = this._queue[i].tokens;
            for (let j = tokens.length - 1; j >= 0 && res.length < maxTokens; j--)
                res.unshift(tokens[j]);
        }
        return res;
    }
    clearQueue() {
        this._queue.length = 0;
    }
    reset() {
        this.clearQueue();
        this._LastTokens.length = 0;
    }
    removeChunkIfLast(queuedTokenRelease) {
        if (this._queue.at(-1) === queuedTokenRelease)
            return this._queue.pop() != null;
        return false;
    }
}
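// Usage sketch (token values are illustrative):
//   const regulator = new TokenStreamRegulator();
//   const release = regulator.addChunk({ tokens: [1, 2, 3], text: "abc" });
//   const lock = release.createTextIndexLock(1);  // hold back the text from index 1 onwards
//   regulator.popFreeChunkTokens();  // => [] while the lock is held
//   lock.dispose();
//   regulator.popFreeChunkTokens();  // => [1, 2, 3]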
export class QueuedTokenRelease {
    /** @internal */ _textLocks = new Set();
    /** @internal */ _tokenLocks = new Set();
    /** @internal */ _tokens;
    /** @internal */ _text;
    constructor(tokens, text) {
        this._tokens = tokens;
        this._text = text;
    }
    get tokens() {
        return this._tokens;
    }
    get text() {
        return this._text;
    }
    get isFree() {
        return this._textLocks.size === 0 && this._tokenLocks.size === 0;
    }
    get hasTextLocks() {
        return this._textLocks.size > 0;
    }
    get hasTokenLocks() {
        return this._tokenLocks.size > 0;
    }
    get isPartiallyFree() {
        if (this.isFree)
            return true;
        const freeTextIndex = this.getFreeTextIndex();
        const freeTokenIndex = this.getFreeTokenIndex();
        return freeTextIndex > 0 && freeTokenIndex > 0;
    }
    getFreeTextIndex() {
        if (this._textLocks.size === 0)
            return this.text.length;
        return [...this._textLocks]
            .reduce((res, lock) => Math.min(res, lock.index), this.text.length);
    }
    getFreeTokenIndex() {
        if (this._tokenLocks.size === 0)
            return this.tokens.length;
        return [...this._tokenLocks]
            .reduce((res, lock) => Math.min(res, lock.index), this.tokens.length);
    }
    createTextIndexLock(startIndex) {
        const lock = QueuedTokenReleaseLock._create(startIndex, this._textLocks);
        if (startIndex >= 0 && startIndex < this.text.length)
            this._textLocks.add(lock);
        return lock;
    }
    createTokenIndexLock(startIndex) {
        const lock = QueuedTokenReleaseLock._create(startIndex, this._tokenLocks);
        if (startIndex >= 0 && startIndex < this.tokens.length)
            this._tokenLocks.add(lock);
        return lock;
    }
    modifyTokensAndText(tokens, text) {
        this._tokens = tokens;
        this._text = text;
    }
    /** @internal */
    static _create(tokens, text) {
        return new QueuedTokenRelease(tokens, text);
    }
}
export class QueuedTokenReleaseLock {
    /** @internal */ _index;
    /** @internal */ _locks;
    constructor(index, locks) {
        this._index = index;
        this._locks = locks;
    }
    get index() {
        return this._index;
    }
    duplicate() {
        if (!this._locks.has(this))
            throw new DisposedError();
        const lock = QueuedTokenReleaseLock._create(this._index, this._locks);
        this._locks.add(lock);
        return lock;
    }
    dispose() {
        this._locks.delete(this);
    }
    [Symbol.dispose]() {
        this.dispose();
    }
    /** @internal */
    static _create(length, locks) {
        return new QueuedTokenReleaseLock(length, locks);
    }
}
//# sourceMappingURL=TokenStreamRegulator.js.map
1
node_modules/node-llama-cpp/dist/utils/TokenStreamRegulator.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/node-llama-cpp/dist/utils/UnsupportedError.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
export declare class UnsupportedError extends Error {
}
7
node_modules/node-llama-cpp/dist/utils/UnsupportedError.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
export class UnsupportedError extends Error {
    /** @internal */
    constructor(message = "UnsupportedError") {
        super(message);
    }
}
//# sourceMappingURL=UnsupportedError.js.map
1
node_modules/node-llama-cpp/dist/utils/UnsupportedError.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"UnsupportedError.js","sourceRoot":"","sources":["../../src/utils/UnsupportedError.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,gBAAiB,SAAQ,KAAK;IACvC,gBAAgB;IAChB,YAAmB,UAAkB,kBAAkB;QACnD,KAAK,CAAC,OAAO,CAAC,CAAC;IACnB,CAAC;CACJ"}
6
node_modules/node-llama-cpp/dist/utils/appendUserMessageToChatHistory.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import { ChatHistoryItem } from "../types.js";
/**
 * Appends a user message to the chat history.
 * If the last message in the chat history is also a user message, the new message will be appended to it.
 */
export declare function appendUserMessageToChatHistory(chatHistory: readonly ChatHistoryItem[], message: string): ChatHistoryItem[];
22
node_modules/node-llama-cpp/dist/utils/appendUserMessageToChatHistory.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
/**
 * Appends a user message to the chat history.
 * If the last message in the chat history is also a user message, the new message will be appended to it.
 */
export function appendUserMessageToChatHistory(chatHistory, message) {
    const newChatHistory = chatHistory.slice();
    if (newChatHistory.length > 0 && newChatHistory[newChatHistory.length - 1].type === "user") {
        const lastUserMessage = newChatHistory[newChatHistory.length - 1];
        newChatHistory[newChatHistory.length - 1] = {
            ...lastUserMessage,
            text: [lastUserMessage.text, message].join("\n\n")
        };
    }
    else {
        newChatHistory.push({
            type: "user",
            text: message
        });
    }
    return newChatHistory;
}
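// Usage sketch (messages are illustrative):
//   appendUserMessageToChatHistory([{ type: "user", text: "Hi" }], "How are you?")
//   // => [{ type: "user", text: "Hi\n\nHow are you?" }]
//   appendUserMessageToChatHistory([], "Hi")
//   // => [{ type: "user", text: "Hi" }]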
//# sourceMappingURL=appendUserMessageToChatHistory.js.map
1
node_modules/node-llama-cpp/dist/utils/appendUserMessageToChatHistory.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"appendUserMessageToChatHistory.js","sourceRoot":"","sources":["../../src/utils/appendUserMessageToChatHistory.ts"],"names":[],"mappings":"AAEA;;;GAGG;AACH,MAAM,UAAU,8BAA8B,CAAC,WAAuC,EAAE,OAAe;IACnG,MAAM,cAAc,GAAG,WAAW,CAAC,KAAK,EAAE,CAAC;IAE3C,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,IAAI,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAE,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;QAC1F,MAAM,eAAe,GAAG,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAqB,CAAC;QAEtF,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG;YACxC,GAAG,eAAe;YAClB,IAAI,EAAE,CAAC,eAAe,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;SACrD,CAAC;IACN,CAAC;SAAM,CAAC;QACJ,cAAc,CAAC,IAAI,CAAC;YAChB,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,OAAO;SAChB,CAAC,CAAC;IACP,CAAC;IAED,OAAO,cAAc,CAAC;AAC1B,CAAC"}
1
node_modules/node-llama-cpp/dist/utils/clearTempFolder.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
export declare function clearTempFolder(): Promise<void>;
16
node_modules/node-llama-cpp/dist/utils/clearTempFolder.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
import process from "process";
import fs from "fs-extra";
import { tempDownloadDirectory } from "../config.js";
export async function clearTempFolder() {
    if (process.platform === "win32") {
        try {
            await fs.remove(tempDownloadDirectory);
        }
        catch (err) {
            // do nothing as it fails sometimes on Windows, and since it's a temp folder, it's not a big deal
        }
        return;
    }
    await fs.remove(tempDownloadDirectory);
}
//# sourceMappingURL=clearTempFolder.js.map
1
node_modules/node-llama-cpp/dist/utils/clearTempFolder.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"clearTempFolder.js","sourceRoot":"","sources":["../../src/utils/clearTempFolder.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,EAAE,MAAM,UAAU,CAAC;AAC1B,OAAO,EAAC,qBAAqB,EAAC,MAAM,cAAc,CAAC;AAEnD,MAAM,CAAC,KAAK,UAAU,eAAe;IACjC,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE,CAAC;QAC/B,IAAI,CAAC;YACD,MAAM,EAAE,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;QAC3C,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACX,gGAAgG;QACpG,CAAC;QAED,OAAO;IACX,CAAC;IAED,MAAM,EAAE,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;AAC3C,CAAC"}
10
node_modules/node-llama-cpp/dist/utils/cmake.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
export declare function hasBuiltinCmake(): Promise<boolean>;
export declare function getCmakePath(): Promise<string>;
export declare function downloadCmakeIfNeeded(wrapWithStatusLogs?: boolean): Promise<void>;
export declare function clearLocalCmake(): Promise<void>;
/**
 * There's an issue where after a compilation, the cmake binaries have permissions that don't allow them to be deleted.
 * This function fixes that.
 * It should be run after each compilation.
 */
export declare function fixXpackPermissions(): Promise<void>;
143
node_modules/node-llama-cpp/dist/utils/cmake.js
generated
vendored
Normal file
@@ -0,0 +1,143 @@
import path from "path";
|
||||
import fs from "fs-extra";
|
||||
import which from "which";
|
||||
import chalk from "chalk";
|
||||
import { chmodr } from "chmodrp";
|
||||
import { defaultXpacksCacheDirectory, defaultXpacksStoreDirectory, llamaDirectory, localXpacksCacheDirectory, localXpacksStoreDirectory, xpackDirectory, xpmVersion } from "../config.js";
|
||||
import { logDistroInstallInstruction } from "../bindings/utils/logDistroInstallInstruction.js";
|
||||
import { getPlatform } from "../bindings/utils/getPlatform.js";
import { getWindowsVisualStudioEditionPaths } from "../bindings/utils/detectBuildTools.js";
import { spawnCommand } from "./spawnCommand.js";
import withStatusLogs from "./withStatusLogs.js";
import { withLockfile } from "./withLockfile.js";
export async function hasBuiltinCmake() {
    try {
        const resolvedPath = await which("cmake");
        return resolvedPath !== "";
    }
    catch (err) {
        return false;
    }
}
export async function getCmakePath() {
    try {
        const resolvedPath = await which("cmake", {
            nothrow: true
        });
        if (resolvedPath !== "" && resolvedPath != null)
            return resolvedPath;
    }
    catch (err) { }
    try {
        const existingCmake = await findExistingCmake();
        if (existingCmake != null)
            return existingCmake;
    }
    catch (err) { }
    try {
        let resolvedPath = await which("cmake", {
            path: path.join(llamaDirectory, "xpack", "xpacks", ".bin")
        });
        if (resolvedPath.toLowerCase().endsWith(".cmd"))
            resolvedPath = (await getBinFromWindowCmd(resolvedPath, "cmake.exe")) ?? "";
        else if (resolvedPath.toLowerCase().endsWith(".ps1")) {
            const cmdFilePath = resolvedPath.slice(0, -".ps1".length) + ".cmd";
            if (await fs.pathExists(cmdFilePath))
                resolvedPath = (await getBinFromWindowCmd(cmdFilePath, "cmake.exe")) ?? "";
        }
        if (resolvedPath !== "")
            return resolvedPath;
    }
    catch (err) { }
    throw new Error("cmake not found");
}
export async function downloadCmakeIfNeeded(wrapWithStatusLogs = false) {
    try {
        await getCmakePath();
        return;
    }
    catch (err) { }
    if (!wrapWithStatusLogs)
        await downloadCmake({ progressLogs: wrapWithStatusLogs });
    else {
        try {
            await withStatusLogs({
                loading: chalk.blue("Downloading cmake"),
                success: chalk.blue("Downloaded cmake"),
                fail: chalk.blue("Failed to download cmake")
            }, async () => {
                await downloadCmake({ progressLogs: wrapWithStatusLogs });
            });
        }
        catch (err) {
            await logDistroInstallInstruction('To install "cmake", ', {
                linuxPackages: { apt: ["cmake"], apk: ["cmake"] },
                macOsPackages: { brew: ["cmake"] }
            });
            throw err;
        }
    }
}
export async function clearLocalCmake() {
    await fs.remove(localXpacksStoreDirectory);
    await fs.remove(localXpacksCacheDirectory);
    await fs.remove(path.join(xpackDirectory, "xpacks"));
}
/**
 * There's an issue where after a compilation, the cmake binaries have permissions that don't allow them to be deleted.
 * This function fixes that.
 * It should be run after each compilation.
 */
export async function fixXpackPermissions() {
    try {
        await chmodr(localXpacksStoreDirectory, 0o777);
        await chmodr(localXpacksCacheDirectory, 0o777);
        await chmodr(path.join(xpackDirectory, "xpacks"), 0o777);
    }
    catch (err) { }
}
async function findExistingCmake() {
    const platform = getPlatform();
    if (platform === "win") {
        const { vsEditionPaths } = await getWindowsVisualStudioEditionPaths();
        const potentialCmakePaths = vsEditionPaths.map((editionPath) => (path.join(editionPath, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "CMake", "bin", "cmake.exe")));
        const cmakePaths = (await Promise.all(potentialCmakePaths.map(async (cmakePath) => {
            if (await fs.pathExists(cmakePath))
                return cmakePath;
            return null;
        })))
            .filter((cmakePath) => cmakePath != null);
        return cmakePaths[0];
    }
    return undefined;
}
async function downloadCmake({ progressLogs = true } = {}) {
    await withLockfile({
        resourcePath: path.join(xpackDirectory, "cmakeInstall")
    }, async () => {
        const xpmEnv = {
            ...process.env,
            XPACKS_STORE_FOLDER: defaultXpacksStoreDirectory,
            XPACKS_CACHE_FOLDER: defaultXpacksCacheDirectory
        };
        await spawnCommand("npm", ["exec", "--yes", "--", `xpm@${xpmVersion}`, "install", "@xpack-dev-tools/cmake@latest", "--no-save"], xpackDirectory, xpmEnv, progressLogs);
        await fs.remove(localXpacksCacheDirectory);
        await fixXpackPermissions();
    });
}
async function getBinFromWindowCmd(cmdFilePath, binName) {
    const fileContent = await fs.readFile(cmdFilePath, "utf8");
    const lowercaseFileContent = fileContent.toLowerCase();
    if (!lowercaseFileContent.includes(binName))
        return null;
    const lastIndexOfBinName = lowercaseFileContent.lastIndexOf(binName);
    const characterAfterBinName = fileContent[lastIndexOfBinName + binName.length];
    if (characterAfterBinName !== '"' && characterAfterBinName !== "'")
        return null;
    const startStringCharacter = fileContent.lastIndexOf(characterAfterBinName, lastIndexOfBinName);
    const binPath = fileContent.slice(startStringCharacter + 1, lastIndexOfBinName + binName.length);
    if (!await fs.pathExists(binPath))
        return null;
    return binPath;
}
//# sourceMappingURL=cmake.js.map
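Not part of the vendored file — a minimal sketch of how these cmake helpers compose when preparing a build environment. The import path mirrors the dist layout above; `ensureCmake` is a hypothetical wrapper name introduced here for illustration.

import { hasBuiltinCmake, getCmakePath, downloadCmakeIfNeeded } from "node-llama-cpp/dist/utils/cmake.js";

// Prefer a system-wide cmake; download an xpack-managed one only when
// nothing is found, then resolve the binary path to invoke.
async function ensureCmake() {
    if (!(await hasBuiltinCmake()))
        await downloadCmakeIfNeeded(true); // wraps the download with CLI status logs
    return await getCmakePath(); // throws "cmake not found" if resolution fails
}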
1
node_modules/node-llama-cpp/dist/utils/cmake.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/node-llama-cpp/dist/utils/compareTokens.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
import { Token } from "../types.js";
export declare function compareTokens(token1?: Token, token2?: Token): boolean;
4
node_modules/node-llama-cpp/dist/utils/compareTokens.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
export function compareTokens(token1, token2) {
    return token1 === token2;
}
//# sourceMappingURL=compareTokens.js.map
1
node_modules/node-llama-cpp/dist/utils/compareTokens.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"compareTokens.js","sourceRoot":"","sources":["../../src/utils/compareTokens.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,aAAa,CAAC,MAAc,EAAE,MAAc;IACxD,OAAO,MAAM,KAAK,MAAM,CAAC;AAC7B,CAAC"}
262
node_modules/node-llama-cpp/dist/utils/createModelDownloader.d.ts
generated
vendored
Normal file
@@ -0,0 +1,262 @@
import { ModelFileAccessTokens } from "./modelFileAccessTokens.js";
import { ModelDownloadEndpoints } from "./modelDownloadEndpoints.js";
export type ModelDownloaderOptions = ({
    /**
     * The URI to download the model from.
     *
     * The supported URI schemes are:
     * - **HTTP:** `https://`, `http://`
     * - **Hugging Face:** `hf:<user>/<model>:<quant>` (`:<quant>` is optional, but recommended)
     * - **Hugging Face:** `hf:<user>/<model>/<file-path>#<branch>` (`#<branch>` is optional)
     */
    modelUri: string;
} | {
    /**
     * @hidden
     * @deprecated Use `modelUri` instead.
     */
    modelUrl: string;
}) & {
    /**
     * The directory to save the model file to.
     * Defaults to `node-llama-cpp`'s default global models directory (`~/.node-llama-cpp/models`).
     */
    dirPath?: string;
    fileName?: string;
    headers?: Record<string, string>;
    /**
     * Defaults to `false`.
     */
    showCliProgress?: boolean;
    onProgress?: (status: {
        totalSize: number;
        downloadedSize: number;
    }) => void;
    /**
     * If true, the downloader will skip the download if the file already exists, and its size matches the size of the remote file.
     *
     * Defaults to `true`.
     */
    skipExisting?: boolean;
    /**
     * If true, the temporary file will be deleted when the download is canceled.
     *
     * Defaults to `true`.
     */
    deleteTempFileOnCancel?: boolean;
    /**
     * The number of parallel downloads to use when downloading split files.
     *
     * Defaults to `4`.
     */
    parallelDownloads?: number;
    /**
     * Tokens to use to access the remote model file when downloading.
     */
    tokens?: ModelFileAccessTokens;
    /**
     * Configure the URLs used for resolving model URIs.
     * @see [Model URIs](https://node-llama-cpp.withcat.ai/guide/downloading-models#model-uris)
     */
    endpoints?: ModelDownloadEndpoints;
};
/**
 * Create a model downloader to download a model from a URI.
 * Uses [`ipull`](https://github.com/ido-pluto/ipull) to download a model file as fast as possible with parallel connections
 * and other optimizations.
 *
 * If the uri points to a `.gguf` file that is split into multiple parts (for example, `model-00001-of-00009.gguf`),
 * all the parts will be downloaded to the specified directory.
 *
 * If the uri points to a `.gguf` file that is binary split into multiple parts (for example, `model.gguf.part1of9`),
 * all the parts will be spliced into a single file and be downloaded to the specified directory.
 *
 * If the uri points to a `.gguf` file that is not split or binary spliced (for example, `model.gguf`),
 * the file will be downloaded to the specified directory.
 *
 * The supported URI schemes are:
 * - **HTTP:** `https://`, `http://`
 * - **Hugging Face:** `hf:<user>/<model>:<quant>` (`:<quant>` is optional, but recommended)
 * - **Hugging Face:** `hf:<user>/<model>/<file-path>#<branch>` (`#<branch>` is optional)
 * @example
 * ```typescript
 * import {fileURLToPath} from "url";
 * import path from "path";
 * import {createModelDownloader, getLlama} from "node-llama-cpp";
 *
 * const __dirname = path.dirname(fileURLToPath(import.meta.url));
 *
 * const downloader = await createModelDownloader({
 *     modelUri: "https://example.com/model.gguf",
 *     dirPath: path.join(__dirname, "models")
 * });
 * const modelPath = await downloader.download();
 *
 * const llama = await getLlama();
 * const model = await llama.loadModel({
 *     modelPath
 * });
 * ```
 * @example
 * ```typescript
 * import {fileURLToPath} from "url";
 * import path from "path";
 * import {createModelDownloader, getLlama} from "node-llama-cpp";
 *
 * const __dirname = path.dirname(fileURLToPath(import.meta.url));
 *
 * const downloader = await createModelDownloader({
 *     modelUri: "hf:user/model:quant",
 *     dirPath: path.join(__dirname, "models")
 * });
 * const modelPath = await downloader.download();
 *
 * const llama = await getLlama();
 * const model = await llama.loadModel({
 *     modelPath
 * });
 * ```
 */
export declare function createModelDownloader(options: ModelDownloaderOptions): Promise<ModelDownloader>;
/**
 * Combine multiple model downloaders into a single downloader to download everything using as much parallelism as possible.
 *
 * You can check each individual model downloader for its download progress,
 * but only the `onProgress` passed to the combined downloader will be called during the download.
 *
 * When combining `ModelDownloader` instances, the following options on each individual `ModelDownloader` are ignored:
 * - `showCliProgress`
 * - `onProgress`
 * - `parallelDownloads`
 *
 * To set any of those options for the combined downloader, you have to pass them to the combined downloader instance.
 * @example
 * ```typescript
 * import {fileURLToPath} from "url";
 * import path from "path";
 * import {createModelDownloader, combineModelDownloaders, getLlama} from "node-llama-cpp";
 *
 * const __dirname = path.dirname(fileURLToPath(import.meta.url));
 *
 * const downloaders = [
 *     createModelDownloader({
 *         modelUri: "https://example.com/model1.gguf",
 *         dirPath: path.join(__dirname, "models")
 *     }),
 *     createModelDownloader({
 *         modelUri: "hf:user/model2:quant",
 *         dirPath: path.join(__dirname, "models")
 *     }),
 *     createModelDownloader({
 *         modelUri: "hf:user/model/model3.gguf",
 *         dirPath: path.join(__dirname, "models")
 *     })
 * ];
 * const combinedDownloader = await combineModelDownloaders(downloaders, {
 *     showCliProgress: true // show download progress in the CLI
 * });
 * const [
 *     model1Path,
 *     model2Path,
 *     model3Path
 * ] = await combinedDownloader.download();
 *
 * const llama = await getLlama();
 * const model1 = await llama.loadModel({
 *     modelPath: model1Path!
 * });
 * const model2 = await llama.loadModel({
 *     modelPath: model2Path!
 * });
 * const model3 = await llama.loadModel({
 *     modelPath: model3Path!
 * });
 * ```
 */
export declare function combineModelDownloaders(downloaders: (ModelDownloader | Promise<ModelDownloader>)[], options?: CombinedModelDownloaderOptions): Promise<CombinedModelDownloader>;
export declare class ModelDownloader {
    private constructor();
    /**
     * The filename of the entrypoint file that should be used to load the model.
     */
    get entrypointFilename(): string;
    /**
     * The full path to the entrypoint file that should be used to load the model.
     */
    get entrypointFilePath(): string;
    /**
     * If the model is binary spliced from multiple parts, this will return the number of those binary parts.
     */
    get splitBinaryParts(): number | undefined;
    /**
     * The total number of files that will be saved to the directory.
     * For split files, this will be the number of split parts, as multiple files will be saved.
     * For binary-split files, this will be 1, as the parts will be spliced into a single file.
     */
    get totalFiles(): number;
    get totalSize(): number;
    get downloadedSize(): number;
    /**
     * @returns The path to the entrypoint file that should be used to load the model
     */
    download({ signal }?: {
        signal?: AbortSignal;
    }): Promise<string>;
    cancel({ deleteTempFile }?: {
        /**
         * Delete the temporary file that was created during the download.
         *
         * Defaults to the value of `deleteTempFileOnCancel` in the constructor.
         */
        deleteTempFile?: boolean;
    }): Promise<void>;
}
export type CombinedModelDownloaderOptions = {
    /**
     * Defaults to `false`.
     */
    showCliProgress?: boolean;
    onProgress?: (status: {
        totalSize: number;
        downloadedSize: number;
    }) => void;
    /**
     * The number of parallel downloads to use for files.
     *
     * Defaults to `4`.
     */
    parallelDownloads?: number;
};
export declare class CombinedModelDownloader {
    /**
     * When combining `ModelDownloader` instances, the following options on each individual `ModelDownloader` are ignored:
     * - `showCliProgress`
     * - `onProgress`
     * - `parallelDownloads`
     *
     * To set any of those options for the combined downloader, you have to pass them to the combined downloader instance
     */
    private constructor();
    cancel(): Promise<void>;
    /**
     * @returns The paths to the entrypoint files that should be used to load the models
     */
    download({ signal }?: {
        signal?: AbortSignal;
    }): Promise<string[]>;
    get modelDownloaders(): readonly ModelDownloader[];
    /**
     * The filenames of the entrypoint files that should be used to load the models.
     */
    get entrypointFilenames(): string[];
    /**
     * The full paths to the entrypoint files that should be used to load the models.
     */
    get entrypointFilePaths(): string[];
    /**
     * The accumulation of `totalFiles` of all the model downloaders
     */
    get totalFiles(): number;
    get totalSize(): number;
    get downloadedSize(): number;
}
486
node_modules/node-llama-cpp/dist/utils/createModelDownloader.js
generated
vendored
Normal file
@@ -0,0 +1,486 @@
import process from "process";
import path from "path";
import { downloadFile, downloadSequence } from "ipull";
import fs from "fs-extra";
import chalk from "chalk";
import { createSplitPartFilename, resolveSplitGgufParts } from "../gguf/utils/resolveSplitGgufParts.js";
import { getFilenameForBinarySplitGgufPartUrls, resolveBinarySplitGgufPartUrls } from "../gguf/utils/resolveBinarySplitGgufPartUrls.js";
import { cliModelsDirectory, isCI } from "../config.js";
import { safeEventCallback } from "./safeEventCallback.js";
import { resolveModelFileAccessTokensTryHeaders } from "./modelFileAccessTokens.js";
import { pushAll } from "./pushAll.js";
import { resolveModelDestination } from "./resolveModelDestination.js";
import { getAuthorizationHeader, resolveParsedModelUri } from "./parseModelUri.js";
import withOra from "./withOra.js";
/**
 * Create a model downloader to download a model from a URI.
 * Uses [`ipull`](https://github.com/ido-pluto/ipull) to download a model file as fast as possible with parallel connections
 * and other optimizations.
 *
 * If the uri points to a `.gguf` file that is split into multiple parts (for example, `model-00001-of-00009.gguf`),
 * all the parts will be downloaded to the specified directory.
 *
 * If the uri points to a `.gguf` file that is binary split into multiple parts (for example, `model.gguf.part1of9`),
 * all the parts will be spliced into a single file and be downloaded to the specified directory.
 *
 * If the uri points to a `.gguf` file that is not split or binary spliced (for example, `model.gguf`),
 * the file will be downloaded to the specified directory.
 *
 * The supported URI schemes are:
 * - **HTTP:** `https://`, `http://`
 * - **Hugging Face:** `hf:<user>/<model>:<quant>` (`:<quant>` is optional, but recommended)
 * - **Hugging Face:** `hf:<user>/<model>/<file-path>#<branch>` (`#<branch>` is optional)
 * @example
 * ```typescript
 * import {fileURLToPath} from "url";
 * import path from "path";
 * import {createModelDownloader, getLlama} from "node-llama-cpp";
 *
 * const __dirname = path.dirname(fileURLToPath(import.meta.url));
 *
 * const downloader = await createModelDownloader({
 *     modelUri: "https://example.com/model.gguf",
 *     dirPath: path.join(__dirname, "models")
 * });
 * const modelPath = await downloader.download();
 *
 * const llama = await getLlama();
 * const model = await llama.loadModel({
 *     modelPath
 * });
 * ```
 * @example
 * ```typescript
 * import {fileURLToPath} from "url";
 * import path from "path";
 * import {createModelDownloader, getLlama} from "node-llama-cpp";
 *
 * const __dirname = path.dirname(fileURLToPath(import.meta.url));
 *
 * const downloader = await createModelDownloader({
 *     modelUri: "hf:user/model:quant",
 *     dirPath: path.join(__dirname, "models")
 * });
 * const modelPath = await downloader.download();
 *
 * const llama = await getLlama();
 * const model = await llama.loadModel({
 *     modelPath
 * });
 * ```
 */
export function createModelDownloader(options) {
    return ModelDownloader._create(options);
}
/**
 * Combine multiple model downloaders into a single downloader to download everything using as much parallelism as possible.
 *
 * You can check each individual model downloader for its download progress,
 * but only the `onProgress` passed to the combined downloader will be called during the download.
 *
 * When combining `ModelDownloader` instances, the following options on each individual `ModelDownloader` are ignored:
 * - `showCliProgress`
 * - `onProgress`
 * - `parallelDownloads`
 *
 * To set any of those options for the combined downloader, you have to pass them to the combined downloader instance.
 * @example
 * ```typescript
 * import {fileURLToPath} from "url";
 * import path from "path";
 * import {createModelDownloader, combineModelDownloaders, getLlama} from "node-llama-cpp";
 *
 * const __dirname = path.dirname(fileURLToPath(import.meta.url));
 *
 * const downloaders = [
 *     createModelDownloader({
 *         modelUri: "https://example.com/model1.gguf",
 *         dirPath: path.join(__dirname, "models")
 *     }),
 *     createModelDownloader({
 *         modelUri: "hf:user/model2:quant",
 *         dirPath: path.join(__dirname, "models")
 *     }),
 *     createModelDownloader({
 *         modelUri: "hf:user/model/model3.gguf",
 *         dirPath: path.join(__dirname, "models")
 *     })
 * ];
 * const combinedDownloader = await combineModelDownloaders(downloaders, {
 *     showCliProgress: true // show download progress in the CLI
 * });
 * const [
 *     model1Path,
 *     model2Path,
 *     model3Path
 * ] = await combinedDownloader.download();
 *
 * const llama = await getLlama();
 * const model1 = await llama.loadModel({
 *     modelPath: model1Path!
 * });
 * const model2 = await llama.loadModel({
 *     modelPath: model2Path!
 * });
 * const model3 = await llama.loadModel({
 *     modelPath: model3Path!
 * });
 * ```
 */
export async function combineModelDownloaders(downloaders, options) {
    const downloader = CombinedModelDownloader._create(await Promise.all(downloaders), options);
    await downloader._init();
    return downloader;
}
export class ModelDownloader {
    /** @internal */ _modelUrl;
    /** @internal */ _dirPath;
    /** @internal */ _fileName;
    /** @internal */ _headers;
    /** @internal */ _showCliProgress;
    /** @internal */ _onProgress;
    /** @internal */ _tokens;
    /** @internal */ _endpoints;
    /** @internal */ _deleteTempFileOnCancel;
    /** @internal */ _skipExisting;
    /** @internal */ _parallelDownloads;
    /** @internal */ _specificFileDownloaders = [];
    /** @internal */ _downloader;
    /** @internal */ _entrypointFilename;
    /** @internal */ _splitBinaryParts;
    /** @internal */ _totalFiles;
    /** @internal */ _tryHeaders = [];
    constructor(options, { resolvedModelUrl, resolvedFileName }) {
        const { dirPath = cliModelsDirectory, headers, showCliProgress = false, onProgress, deleteTempFileOnCancel = true, skipExisting = true, parallelDownloads = 4, tokens, endpoints } = options;
        this._modelUrl = resolvedModelUrl;
        this._dirPath = path.resolve(process.cwd(), dirPath);
        this._fileName = resolvedFileName;
        this._headers = headers;
        this._showCliProgress = showCliProgress;
        this._onProgress = safeEventCallback(onProgress);
        this._deleteTempFileOnCancel = deleteTempFileOnCancel;
        this._skipExisting = skipExisting;
        this._parallelDownloads = parallelDownloads;
        this._tokens = tokens;
        this._endpoints = endpoints;
        this._onDownloadProgress = this._onDownloadProgress.bind(this);
    }
    /**
     * The filename of the entrypoint file that should be used to load the model.
     */
    get entrypointFilename() {
        return this._entrypointFilename;
    }
    /**
     * The full path to the entrypoint file that should be used to load the model.
     */
    get entrypointFilePath() {
        return path.join(this._dirPath, this.entrypointFilename);
    }
    /**
     * If the model is binary spliced from multiple parts, this will return the number of those binary parts.
     */
    get splitBinaryParts() {
        return this._splitBinaryParts;
    }
    /**
     * The total number of files that will be saved to the directory.
     * For split files, this will be the number of split parts, as multiple files will be saved.
     * For binary-split files, this will be 1, as the parts will be spliced into a single file.
     */
    get totalFiles() {
        return this._totalFiles;
    }
    get totalSize() {
        return this._specificFileDownloaders
            .map((downloader) => downloader.status.totalBytes)
            .reduce((acc, totalBytes) => acc + totalBytes, 0);
    }
    get downloadedSize() {
        return this._specificFileDownloaders
            .map((downloader) => downloader.status.transferredBytes)
            .reduce((acc, transferredBytes) => acc + transferredBytes, 0);
    }
    /**
     * @returns The path to the entrypoint file that should be used to load the model
     */
    async download({ signal } = {}) {
        if (signal?.aborted)
            throw signal.reason;
        const onAbort = () => {
            signal?.removeEventListener("abort", onAbort);
            this.cancel();
        };
        if (signal != null)
            signal.addEventListener("abort", onAbort);
        try {
            if (this._onProgress)
                this._downloader.on("progress", this._onDownloadProgress);
            await this._downloader.download();
        }
        catch (err) {
            if (signal?.aborted)
                throw signal.reason;
            throw err;
        }
        finally {
            if (this._onProgress)
                this._downloader.off("progress", this._onDownloadProgress);
            if (signal != null)
                signal.removeEventListener("abort", onAbort);
        }
        return this.entrypointFilePath;
    }
    async cancel({ deleteTempFile = this._deleteTempFileOnCancel } = {}) {
        for (const downloader of this._specificFileDownloaders)
            await downloader.close({ deleteTempFile });
        if (this._downloader !== this._specificFileDownloaders[0])
            await this._downloader?.close({ deleteTempFile });
    }
    /** @internal */
    _onDownloadProgress() {
        this._onProgress?.({
            totalSize: this.totalSize,
            downloadedSize: this.downloadedSize
        });
    }
    /** @internal */
    async resolveTryHeaders() {
        if (this._tokens == null)
            return;
        pushAll(this._tryHeaders, await resolveModelFileAccessTokensTryHeaders(this._modelUrl, this._tokens, this._endpoints, this._headers));
    }
    /** @internal */
    async _init() {
        await this.resolveTryHeaders();
        const binarySplitPartUrls = resolveBinarySplitGgufPartUrls(this._modelUrl);
        await fs.ensureDir(this._dirPath);
        if (binarySplitPartUrls instanceof Array) {
            this._downloader = await downloadFile({
                partURLs: binarySplitPartUrls,
                directory: this._dirPath,
                fileName: this._fileName ?? getFilenameForBinarySplitGgufPartUrls(binarySplitPartUrls),
                cliProgress: this._showCliProgress,
                cliStyle: isCI ? "ci" : "fancy",
                headers: this._headers ?? {},
                tryHeaders: this._tryHeaders.slice(),
                skipExisting: this._skipExisting
            });
            this._specificFileDownloaders.push(this._downloader);
            this._entrypointFilename = this._downloader.fileName;
            this._splitBinaryParts = binarySplitPartUrls.length;
            this._totalFiles = 1;
            if (this._downloader.fileName == null || this._downloader.fileName === "")
                throw new Error("Failed to get the file name from the given URL");
            return;
        }
        const splitGgufPartUrls = resolveSplitGgufParts(this._modelUrl);
        if (splitGgufPartUrls.length === 1) {
            this._downloader = await downloadFile({
                url: splitGgufPartUrls[0],
                directory: this._dirPath,
                fileName: this._fileName ?? undefined,
                cliProgress: this._showCliProgress,
                cliStyle: isCI ? "ci" : "fancy",
                headers: this._headers ?? {},
                tryHeaders: this._tryHeaders.slice(),
                skipExisting: this._skipExisting
            });
            this._specificFileDownloaders.push(this._downloader);
            this._entrypointFilename = this._downloader.fileName;
            this._totalFiles = 1;
            if (this._downloader.fileName == null || this._downloader.fileName === "")
                throw new Error("Failed to get the file name from the given URL");
            return;
        }
        const partDownloads = splitGgufPartUrls.map((url, index) => downloadFile({
            url,
            directory: this._dirPath,
            fileName: this._fileName != null
                ? createSplitPartFilename(this._fileName, index + 1, splitGgufPartUrls.length)
                : undefined,
            headers: this._headers ?? {},
            tryHeaders: this._tryHeaders.slice(),
            skipExisting: this._skipExisting
        }));
        this._downloader = await downloadSequence({
            cliProgress: this._showCliProgress,
            cliStyle: isCI ? "ci" : "fancy",
            parallelDownloads: this._parallelDownloads
        }, ...partDownloads);
        const firstDownload = await partDownloads[0];
        this._specificFileDownloaders = await Promise.all(partDownloads);
        this._entrypointFilename = firstDownload.fileName;
        this._totalFiles = partDownloads.length;
        if (this._entrypointFilename == null || this._entrypointFilename === "")
            throw new Error("Failed to get the file name from the given URL");
        return;
    }
    /** @internal */
    static async _create(options) {
        const { modelUri, modelUrl, dirPath = cliModelsDirectory, fileName, _showUriResolvingProgress = false } = options;
        const resolvedModelUri = modelUri || modelUrl;
        if (resolvedModelUri == null || dirPath == null)
            throw new Error("modelUri and dirPath cannot be null");
        async function getModelUrlAndFilename() {
            const resolvedModelDestination = resolveModelDestination(resolvedModelUri, undefined, options.endpoints);
            if (resolvedModelDestination.type == "file")
                return {
                    resolvedModelUrl: path.resolve(dirPath, resolvedModelDestination.path),
                    resolvedFileName: fileName
                };
            else if (resolvedModelDestination.type === "url")
                return {
                    resolvedModelUrl: resolvedModelDestination.url,
                    resolvedFileName: fileName
                };
            else if (resolvedModelDestination.parsedUri.type === "resolved")
                return {
                    resolvedModelUrl: resolvedModelDestination.parsedUri.resolvedUrl,
                    resolvedFileName: fileName || resolvedModelDestination.parsedUri.fullFilename
                };
            const resolvedUri = _showUriResolvingProgress
                ? await withOra({
                    loading: chalk.blue("Resolving model URI"),
                    success: chalk.blue("Resolved model URI"),
                    fail: chalk.blue("Failed to resolve model URI"),
                    noSuccessLiveStatus: true
                }, () => {
                    return resolveParsedModelUri(resolvedModelDestination.parsedUri, {
                        tokens: options.tokens,
                        endpoints: options.endpoints,
                        authorizationHeader: getAuthorizationHeader(options.headers)
                    });
                })
                : await resolveParsedModelUri(resolvedModelDestination.parsedUri, {
                    tokens: options.tokens,
                    endpoints: options.endpoints,
                    authorizationHeader: getAuthorizationHeader(options.headers)
                });
            return {
                resolvedModelUrl: resolvedUri.resolvedUrl,
                resolvedFileName: fileName || resolvedUri.fullFilename
            };
        }
        const modelDownloader = new ModelDownloader(options, await getModelUrlAndFilename());
        await modelDownloader._init();
        return modelDownloader;
    }
}
export class CombinedModelDownloader {
    /** @internal */ _downloaders;
    /** @internal */ _showCliProgress;
    /** @internal */ _onProgress;
    /** @internal */ _parallelDownloads;
    /** @internal */ _lock = {};
    /** @internal */ _downloader;
    /**
     * When combining `ModelDownloader` instances, the following options on each individual `ModelDownloader` are ignored:
     * - `showCliProgress`
     * - `onProgress`
     * - `parallelDownloads`
     *
     * To set any of those options for the combined downloader, you have to pass them to the combined downloader instance
     */
    constructor(downloaders, options) {
        const { showCliProgress = false, onProgress, parallelDownloads = 4 } = options ?? {};
        this._downloaders = Object.freeze(downloaders);
        this._showCliProgress = showCliProgress;
        this._onProgress = onProgress;
        this._parallelDownloads = parallelDownloads;
        this._onDownloadProgress = this._onDownloadProgress.bind(this);
    }
    async cancel() {
        for (const modelDownloader of this._downloaders) {
            if (modelDownloader._specificFileDownloaders.every((downloader) => downloader.status.downloadStatus === "Finished"))
                continue;
            for (const downloader of modelDownloader._specificFileDownloaders)
                await downloader.close({
                    deleteTempFile: modelDownloader._deleteTempFileOnCancel
                });
        }
    }
    /**
     * @returns The paths to the entrypoint files that should be used to load the models
     */
    async download({ signal } = {}) {
        if (signal?.aborted)
            throw signal.reason;
        const onAbort = () => {
            signal?.removeEventListener("abort", onAbort);
            this.cancel();
        };
        if (signal != null)
            signal.addEventListener("abort", onAbort);
        try {
            if (this._onProgress)
                this._downloader.on("progress", this._onDownloadProgress);
            await this._downloader.download();
        }
        catch (err) {
            if (signal?.aborted)
                throw signal.reason;
            throw err;
        }
        finally {
            if (this._onProgress)
                this._downloader.off("progress", this._onDownloadProgress);
            if (signal != null)
                signal.removeEventListener("abort", onAbort);
        }
        return this.entrypointFilePaths;
    }
    get modelDownloaders() {
        return this._downloaders;
    }
    /**
     * The filenames of the entrypoint files that should be used to load the models.
     */
    get entrypointFilenames() {
        return this._downloaders.map((downloader) => downloader.entrypointFilename);
    }
    /**
     * The full paths to the entrypoint files that should be used to load the models.
     */
    get entrypointFilePaths() {
        return this._downloaders.map((downloader) => downloader.entrypointFilePath);
    }
    /**
     * The accumulation of `totalFiles` of all the model downloaders
     */
    get totalFiles() {
        return this._downloaders
            .map((downloader) => downloader.totalFiles)
            .reduce((acc, totalFiles) => acc + totalFiles, 0);
    }
    get totalSize() {
        return this._downloaders
            .map((downloader) => downloader.totalSize)
            .reduce((acc, totalBytes) => acc + totalBytes, 0);
    }
    get downloadedSize() {
        return this._downloaders
            .map((downloader) => downloader.downloadedSize)
            .reduce((acc, transferredBytes) => acc + transferredBytes, 0);
    }
    /** @internal */
    _onDownloadProgress() {
        this._onProgress?.({
            totalSize: this.totalSize,
            downloadedSize: this.downloadedSize
        });
    }
    /** @internal */
    async _init() {
        this._downloader = await downloadSequence({
            cliProgress: this._showCliProgress,
            cliStyle: isCI ? "ci" : "fancy",
            parallelDownloads: this._parallelDownloads
        }, ...this._downloaders.flatMap((downloader) => downloader._specificFileDownloaders));
    }
    /** @internal */
    static _create(downloaders, options) {
        return new CombinedModelDownloader(downloaders, options);
    }
}
//# sourceMappingURL=createModelDownloader.js.map
1
node_modules/node-llama-cpp/dist/utils/createModelDownloader.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
node_modules/node-llama-cpp/dist/utils/findBestOption.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
export declare function findBestOption<const O>({ generator, score }: {
    generator: () => Generator<O>;
    score: (option: O) => number | null;
}): O | null;
15
node_modules/node-llama-cpp/dist/utils/findBestOption.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
export function findBestOption({ generator, score }) {
    let bestOption = null;
    let bestScore = null;
    for (const option of generator()) {
        const currentScore = score(option);
        if (currentScore === Infinity)
            return option;
        if (currentScore != null && (bestScore == null || currentScore > bestScore)) {
            bestOption = option;
            bestScore = currentScore;
        }
    }
    return bestOption;
}
//# sourceMappingURL=findBestOption.js.map
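Not part of the vendored file — a sketch of how `findBestOption` is meant to be driven: the generator lazily yields candidates, `score` returns `null` to reject one, and `Infinity` short-circuits the search. The candidate values below are made up for illustration.

import { findBestOption } from "node-llama-cpp/dist/utils/findBestOption.js";

// Pick the largest batch size that fits a (hypothetical) budget of 1024.
const best = findBestOption({
    generator: function* () {
        for (const batchSize of [2048, 1024, 512, 256])
            yield { batchSize };
    },
    // Return null to discard an option; otherwise, the highest score wins.
    score: (option) => option.batchSize <= 1024 ? option.batchSize : null
});
// best => { batchSize: 1024 }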
1
node_modules/node-llama-cpp/dist/utils/findBestOption.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"findBestOption.js","sourceRoot":"","sources":["../../src/utils/findBestOption.ts"],"names":[],"mappings":"AAAA,MAAM,UAAU,cAAc,CAAU,EAAC,SAAS,EAAE,KAAK,EAGxD;IACG,IAAI,UAAU,GAAa,IAAI,CAAC;IAChC,IAAI,SAAS,GAAkB,IAAI,CAAC;IAEpC,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE,EAAE,CAAC;QAC/B,MAAM,YAAY,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;QAEnC,IAAI,YAAY,KAAK,QAAQ;YACzB,OAAO,MAAM,CAAC;QAElB,IAAI,YAAY,IAAI,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,IAAI,YAAY,GAAG,SAAS,CAAC,EAAE,CAAC;YAC1E,UAAU,GAAG,MAAM,CAAC;YACpB,SAAS,GAAG,YAAY,CAAC;QAC7B,CAAC;IACL,CAAC;IAED,OAAO,UAAU,CAAC;AACtB,CAAC"}
20
node_modules/node-llama-cpp/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
import { ChatHistoryItem, Tokenizer } from "../types.js";
import { ChatWrapper } from "../ChatWrapper.js";
export declare function findCharacterRemovalCountToFitChatHistoryInContext({ compressChatHistory, chatHistory, tokensCountToFit, tokenizer, chatWrapper, initialCharactersRemovalCount, estimatedCharactersPerToken, maxDecompressionAttempts, failedCompressionErrorMessage }: {
    compressChatHistory(options: {
        chatHistory: readonly ChatHistoryItem[];
        charactersToRemove: number;
        estimatedCharactersPerToken: number;
    }): ChatHistoryItem[] | Promise<ChatHistoryItem[]>;
    chatHistory: ChatHistoryItem[];
    tokensCountToFit: number;
    tokenizer: Tokenizer;
    chatWrapper: ChatWrapper;
    initialCharactersRemovalCount?: number;
    estimatedCharactersPerToken?: number;
    maxDecompressionAttempts?: number;
    failedCompressionErrorMessage?: string;
}): Promise<{
    removedCharactersCount: number;
    compressedChatHistory: ChatHistoryItem[];
}>;
85
node_modules/node-llama-cpp/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.js
generated
vendored
Normal file
@@ -0,0 +1,85 @@
const maxSequentialUnhelpfulIterations = 100;
export async function findCharacterRemovalCountToFitChatHistoryInContext({ compressChatHistory, chatHistory, tokensCountToFit, tokenizer, chatWrapper, initialCharactersRemovalCount = 0, estimatedCharactersPerToken = 5, maxDecompressionAttempts = 2, failedCompressionErrorMessage = "Failed to compress chat history. Consider increasing the context size." }) {
    let currentEstimatedCharactersPerToken = estimatedCharactersPerToken;
    function getTokensCountForChatHistory(chatHistory) {
        const { contextText } = chatWrapper.generateContextState({ chatHistory });
        return contextText.tokenize(tokenizer, "trimLeadingSpace").length;
    }
    async function getResultForCharacterRemovalCount(characterRemovalCount) {
        if (characterRemovalCount === 0)
            return {
                compressedHistory: chatHistory,
                tokensCount: getTokensCountForChatHistory(chatHistory),
                characterRemovalCount
            };
        const compressedHistory = await compressChatHistory({
            chatHistory,
            charactersToRemove: characterRemovalCount,
            estimatedCharactersPerToken: currentEstimatedCharactersPerToken
        });
        return {
            compressedHistory,
            tokensCount: getTokensCountForChatHistory(compressedHistory),
            characterRemovalCount
        };
    }
    let latestCompressionAttempt = await getResultForCharacterRemovalCount(initialCharactersRemovalCount);
    const firstCompressionAttempt = latestCompressionAttempt;
    let latestCompressionAttemptTokensCount = latestCompressionAttempt.tokensCount;
    let sameTokensCountRepetitions = 0;
    if (latestCompressionAttempt.tokensCount === tokensCountToFit ||
        (latestCompressionAttempt.tokensCount < tokensCountToFit && latestCompressionAttempt.characterRemovalCount === 0))
        return {
            removedCharactersCount: initialCharactersRemovalCount,
            compressedChatHistory: latestCompressionAttempt.compressedHistory
        };
    let bestCompressionAttempt = latestCompressionAttempt;
    for (let compressionAttempts = 0, decompressionAttempts = 0; bestCompressionAttempt.tokensCount !== tokensCountToFit;) {
        if (compressionAttempts > 0) {
            if (latestCompressionAttempt.tokensCount != firstCompressionAttempt.tokensCount &&
                latestCompressionAttempt.characterRemovalCount != firstCompressionAttempt.characterRemovalCount)
                currentEstimatedCharactersPerToken =
                    Math.abs(latestCompressionAttempt.characterRemovalCount - firstCompressionAttempt.characterRemovalCount) /
                        Math.abs(latestCompressionAttempt.tokensCount - firstCompressionAttempt.tokensCount);
            if (!Number.isFinite(currentEstimatedCharactersPerToken) || currentEstimatedCharactersPerToken === 0)
                currentEstimatedCharactersPerToken = estimatedCharactersPerToken;
        }
        const tokensLeftToRemove = latestCompressionAttempt.tokensCount - tokensCountToFit;
        let additionalCharactersToRemove = Math.round(tokensLeftToRemove * currentEstimatedCharactersPerToken);
        if (additionalCharactersToRemove === 0) {
            if (tokensLeftToRemove > 0)
                additionalCharactersToRemove = 1;
            else if (tokensLeftToRemove < 0)
                additionalCharactersToRemove = -1;
        }
        if (tokensLeftToRemove > 0)
            compressionAttempts++;
        else if (tokensLeftToRemove < 0)
            decompressionAttempts++;
        if (decompressionAttempts >= maxDecompressionAttempts)
            break;
        latestCompressionAttempt = await getResultForCharacterRemovalCount(latestCompressionAttempt.characterRemovalCount + additionalCharactersToRemove);
        if ((bestCompressionAttempt.tokensCount > tokensCountToFit &&
            latestCompressionAttempt.tokensCount <= bestCompressionAttempt.tokensCount) || (bestCompressionAttempt.tokensCount < tokensCountToFit &&
            latestCompressionAttempt.tokensCount < tokensCountToFit &&
            latestCompressionAttempt.tokensCount > bestCompressionAttempt.tokensCount) || (bestCompressionAttempt.tokensCount <= tokensCountToFit &&
            latestCompressionAttempt.tokensCount <= tokensCountToFit &&
            latestCompressionAttempt.characterRemovalCount < bestCompressionAttempt.characterRemovalCount))
            bestCompressionAttempt = latestCompressionAttempt;
        if (latestCompressionAttempt.tokensCount === latestCompressionAttemptTokensCount)
            sameTokensCountRepetitions++;
        else {
            latestCompressionAttemptTokensCount = latestCompressionAttempt.tokensCount;
            sameTokensCountRepetitions = 0;
        }
        if (decompressionAttempts === 0 &&
            compressionAttempts >= maxSequentialUnhelpfulIterations &&
            sameTokensCountRepetitions >= maxSequentialUnhelpfulIterations)
            throw new Error(failedCompressionErrorMessage);
    }
    return {
        removedCharactersCount: bestCompressionAttempt.characterRemovalCount,
        compressedChatHistory: bestCompressionAttempt.compressedHistory
    };
}
//# sourceMappingURL=findCharacterRemovalCountToFitChatHistoryInContext.js.map
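Not part of the vendored file — a sketch of this function's contract, assuming a `chatHistory`, `tokenizer`, and `chatWrapper` already in scope and the common `{ type: "user", text }` chat-item shape. The function iteratively re-estimates characters-per-token and re-tokenizes each candidate history until it fits `tokensCountToFit`; the `compressChatHistory` callback below is an illustrative strategy only (a real caller would summarize or drop old turns).

import { findCharacterRemovalCountToFitChatHistoryInContext } from "node-llama-cpp/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.js";

// Must be awaited inside an async context.
const { compressedChatHistory, removedCharactersCount } = await findCharacterRemovalCountToFitChatHistoryInContext({
    chatHistory,
    tokenizer,
    chatWrapper,
    tokensCountToFit: 2048,
    compressChatHistory({ chatHistory, charactersToRemove }) {
        // Illustrative only: trim text off user messages, oldest first.
        let leftToRemove = charactersToRemove;
        return chatHistory.map((item) => {
            if (item.type !== "user" || leftToRemove === 0)
                return item;
            const removed = Math.min(leftToRemove, item.text.length);
            leftToRemove -= removed;
            return { ...item, text: item.text.slice(removed) };
        });
    }
});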
1
node_modules/node-llama-cpp/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"findCharacterRemovalCountToFitChatHistoryInContext.js","sourceRoot":"","sources":["../../src/utils/findCharacterRemovalCountToFitChatHistoryInContext.ts"],"names":[],"mappings":"AAGA,MAAM,gCAAgC,GAAG,GAAG,CAAC;AAE7C,MAAM,CAAC,KAAK,UAAU,kDAAkD,CAAC,EACrE,mBAAmB,EACnB,WAAW,EACX,gBAAgB,EAChB,SAAS,EACT,WAAW,EACX,6BAA6B,GAAG,CAAC,EACjC,2BAA2B,GAAG,CAAC,EAC/B,wBAAwB,GAAG,CAAC,EAC5B,6BAA6B,GAAG,wEAAwE,EAa3G;IAIG,IAAI,kCAAkC,GAAG,2BAA2B,CAAC;IAErE,SAAS,4BAA4B,CAAC,WAAuC;QACzE,MAAM,EAAC,WAAW,EAAC,GAAG,WAAW,CAAC,oBAAoB,CAAC,EAAC,WAAW,EAAC,CAAC,CAAC;QACtE,OAAO,WAAW,CAAC,QAAQ,CAAC,SAAS,EAAE,kBAAkB,CAAC,CAAC,MAAM,CAAC;IACtE,CAAC;IAED,KAAK,UAAU,iCAAiC,CAAC,qBAA6B;QAC1E,IAAI,qBAAqB,KAAK,CAAC;YAC3B,OAAO;gBACH,iBAAiB,EAAE,WAAW;gBAC9B,WAAW,EAAE,4BAA4B,CAAC,WAAW,CAAC;gBACtD,qBAAqB;aACxB,CAAC;QAEN,MAAM,iBAAiB,GAAG,MAAM,mBAAmB,CAAC;YAChD,WAAW;YACX,kBAAkB,EAAE,qBAAqB;YACzC,2BAA2B,EAAE,kCAAkC;SAClE,CAAC,CAAC;QAEH,OAAO;YACH,iBAAiB;YACjB,WAAW,EAAE,4BAA4B,CAAC,iBAAiB,CAAC;YAC5D,qBAAqB;SACxB,CAAC;IACN,CAAC;IAED,IAAI,wBAAwB,GAAG,MAAM,iCAAiC,CAAC,6BAA6B,CAAC,CAAC;IACtG,MAAM,uBAAuB,GAAG,wBAAwB,CAAC;IACzD,IAAI,mCAAmC,GAAG,wBAAwB,CAAC,WAAW,CAAC;IAC/E,IAAI,0BAA0B,GAAG,CAAC,CAAC;IAEnC,IAAI,wBAAwB,CAAC,WAAW,KAAK,gBAAgB;QACzD,CAAC,wBAAwB,CAAC,WAAW,GAAG,gBAAgB,IAAI,wBAAwB,CAAC,qBAAqB,KAAK,CAAC,CAAC;QAEjH,OAAO;YACH,sBAAsB,EAAE,6BAA6B;YACrD,qBAAqB,EAAE,wBAAwB,CAAC,iBAAiB;SACpE,CAAC;IAEN,IAAI,sBAAsB,GAAG,wBAAwB,CAAC;IACtD,KACI,IAAI,mBAAmB,GAAG,CAAC,EAAE,qBAAqB,GAAG,CAAC,EACtD,sBAAsB,CAAC,WAAW,KAAK,gBAAgB,GACzD,CAAC;QACC,IAAI,mBAAmB,GAAG,CAAC,EAAE,CAAC;YAC1B,IAAI,wBAAwB,CAAC,WAAW,IAAI,uBAAuB,CAAC,WAAW;gBAC3E,wBAAwB,CAAC,qBAAqB,IAAI,uBAAuB,CAAC,qBAAqB;gBAE/F,kCAAkC;oBAC9B,IAAI,CAAC,GAAG,CAAC,wBAAwB,CAAC,qBAAqB,GAAG,uBAAuB,CAAC,qBAAqB,CAAC;wBACxG,IAAI,CAAC,GAAG,CAAC,wBAAwB,CAAC,WAAW,GAAG,uBAAuB,CAAC,WAAW,CAAC,CAAC;YAE7F,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,kCAAkC,CAAC,IAAI,kCAAkC,KAAK,CAAC;gBAChG,kCAAkC,GAAG,2BAA2B,CAAC;QACzE,CAAC;QAED,MAAM,kBAAkB,GAAG,wBAAwB,CAAC,WAAW,GAAG,gBAAgB,CAAC;QACnF,IAAI,4BAA4B,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,kCAAkC,CAAC,CAAC;QAEvG,IAAI,4BAA4B,KAAK,CAAC,EAAE,CAAC;YACrC,IAAI,kBAAkB,GAAG,CAAC;gBACtB,4BAA4B,GAAG,CAAC,CAAC;iBAChC,IAAI,kBAAkB,GAAG,CAAC;gBAC3B,4BAA4B,GAAG,CAAC,CAAC,CAAC;QAC1C,CAAC;QAED,IAAI,kBAAkB,GAAG,CAAC;YACtB,mBAAmB,EAAE,CAAC;aACrB,IAAI,kBAAkB,GAAG,CAAC;YAC3B,qBAAqB,EAAE,CAAC;QAE5B,IAAI,qBAAqB,IAAI,wBAAwB;YACjD,MAAM;QAEV,wBAAwB,GAAG,MAAM,iCAAiC,CAC9D,wBAAwB,CAAC,qBAAqB,GAAG,4BAA4B,CAChF,CAAC;QAEF,IAAI,CACA,sBAAsB,CAAC,WAAW,GAAG,gBAAgB;YACrD,wBAAwB,CAAC,WAAW,IAAI,sBAAsB,CAAC,WAAW,CAC7E,IAAI,CACD,sBAAsB,CAAC,WAAW,GAAG,gBAAgB;YACrD,wBAAwB,CAAC,WAAW,GAAG,gBAAgB;YACvD,wBAAwB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,CAC5E,IAAI,CACD,sBAAsB,CAAC,WAAW,IAAI,gBAAgB;YACtD,wBAAwB,CAAC,WAAW,IAAI,gBAAgB;YACxD,wBAAwB,CAAC,qBAAqB,GAAG,sBAAsB,CAAC,qBAAqB,CAChG;YACG,sBAAsB,GAAG,wBAAwB,CAAC;QAEtD,IAAI,wBAAwB,CAAC,WAAW,KAAK,mCAAmC;YAC5E,0BAA0B,EAAE,CAAC;aAC5B,CAAC;YACF,mCAAmC,GAAG,wBAAwB,CAAC,WAAW,CAAC;YAC3E,0BAA0B,GAAG,CAAC,CAAC;QACnC,CAAC;QAED,IAAI,qBAAqB,KAAK,CAAC;YAC3B,mBAAmB,IAAI,gCAAgC;YACvD,0BAA0B,IAAI,gCAAgC;YAE9D,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;IACvD,CAAC;IAED,OAAO;QACH,sBAAsB,EAAE,sBAAsB,CAAC,qBAAqB;QACpE,qBAAqB,EAAE,sBAAsB,CAAC,iBAAiB;KAClE,CAAC;AACN,CAAC"}
19
node_modules/node-llama-cpp/dist/utils/gbnfJson/GbnfGrammarGenerator.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
import { MultiKeyMap } from "lifecycle-utils";
import { GbnfJsonSchema } from "./types.js";
export declare class GbnfGrammarGenerator {
    rules: Map<string, string>;
    ruleContentToRuleName: Map<string, string>;
    literalValueRuleNames: Map<string | number, string>;
    defRuleNames: MultiKeyMap<[string, GbnfJsonSchema], string | null>;
    defScopeDefs: MultiKeyMap<[string, GbnfJsonSchema], Record<string, GbnfJsonSchema>>;
    usedRootRuleName: boolean;
    private ruleId;
    private valueRuleId;
    private defRuleId;
    generateRuleName(): string;
    generateRuleNameForLiteralValue(value: string | number): string;
    generateRuleNameForDef(defName: string, def: GbnfJsonSchema): string;
    registerDefs(scopeDefs: Record<string, GbnfJsonSchema>): void;
    generateGbnfFile(rootGrammar: string): string;
    getProposedLiteralValueRuleNameLength(): number;
}
60
node_modules/node-llama-cpp/dist/utils/gbnfJson/GbnfGrammarGenerator.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import { MultiKeyMap } from "lifecycle-utils";
export class GbnfGrammarGenerator {
    rules = new Map();
    ruleContentToRuleName = new Map();
    literalValueRuleNames = new Map();
    defRuleNames = new MultiKeyMap();
    defScopeDefs = new MultiKeyMap();
    usedRootRuleName = false;
    ruleId = 0;
    valueRuleId = 0;
    defRuleId = 0;
    generateRuleName() {
        const ruleId = this.ruleId;
        this.ruleId++;
        return `rule${ruleId}`;
    }
    generateRuleNameForLiteralValue(value) {
        const existingRuleName = this.literalValueRuleNames.get(value);
        if (existingRuleName != null)
            return existingRuleName;
        const ruleName = `val${this.valueRuleId}`;
        this.valueRuleId++;
        this.literalValueRuleNames.set(value, ruleName);
        return ruleName;
    }
    generateRuleNameForDef(defName, def) {
        const existingRuleName = this.defRuleNames.get([defName, def]);
        if (existingRuleName != null)
            return existingRuleName;
        const ruleName = `def${this.defRuleId}`;
        this.defRuleId++;
        this.defRuleNames.set([defName, def], ruleName);
        return ruleName;
    }
    registerDefs(scopeDefs) {
        for (const [defName, def] of Object.entries(scopeDefs))
            this.defScopeDefs.set([defName, def], scopeDefs);
    }
    generateGbnfFile(rootGrammar) {
        const rules = [{
                name: "root",
                grammar: rootGrammar
            }];
        for (const [ruleName, grammar] of this.rules.entries()) {
            if (grammar == null)
                continue;
            rules.push({
                name: ruleName,
                grammar
            });
        }
        const ruleStrings = rules.map((rule) => rule.name + " ::= " + rule.grammar);
        const gbnf = ruleStrings.join("\n");
        return gbnf;
    }
    getProposedLiteralValueRuleNameLength() {
        return `val${this.valueRuleId}`.length;
    }
}
//# sourceMappingURL=GbnfGrammarGenerator.js.map
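Not part of the vendored file — a small sketch of the generator's bookkeeping, using only methods defined above. The rule content here is an arbitrary GBNF fragment chosen for illustration.

import { GbnfGrammarGenerator } from "node-llama-cpp/dist/utils/gbnfJson/GbnfGrammarGenerator.js";

const generator = new GbnfGrammarGenerator();

// Allocate a stable name for a reusable rule and register its grammar text.
const ruleName = generator.generateRuleName(); // "rule0"
generator.rules.set(ruleName, '"true" | "false"');

// generateGbnfFile emits "root ::= <rootGrammar>" followed by every
// registered rule on its own line.
console.log(generator.generateGbnfFile(ruleName + ' "\\n"'));
// root ::= rule0 "\n"
// rule0 ::= "true" | "false"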
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/GbnfGrammarGenerator.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfGrammarGenerator.js","sourceRoot":"","sources":["../../../src/utils/gbnfJson/GbnfGrammarGenerator.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,WAAW,EAAC,MAAM,iBAAiB,CAAC;AAG5C,MAAM,OAAO,oBAAoB;IACtB,KAAK,GAAG,IAAI,GAAG,EAAkB,CAAC;IAClC,qBAAqB,GAAG,IAAI,GAAG,EAAkB,CAAC;IAClD,qBAAqB,GAAG,IAAI,GAAG,EAA2B,CAAC;IAC3D,YAAY,GAAG,IAAI,WAAW,EAA2C,CAAC;IAC1E,YAAY,GAAG,IAAI,WAAW,EAA4D,CAAC;IAC3F,gBAAgB,GAAY,KAAK,CAAC;IACjC,MAAM,GAAW,CAAC,CAAC;IACnB,WAAW,GAAW,CAAC,CAAC;IACxB,SAAS,GAAW,CAAC,CAAC;IAEvB,gBAAgB;QACnB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QAC3B,IAAI,CAAC,MAAM,EAAE,CAAC;QAEd,OAAO,OAAO,MAAM,EAAE,CAAC;IAC3B,CAAC;IAEM,+BAA+B,CAAC,KAAsB;QACzD,MAAM,gBAAgB,GAAG,IAAI,CAAC,qBAAqB,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAC/D,IAAI,gBAAgB,IAAI,IAAI;YACxB,OAAO,gBAAgB,CAAC;QAE5B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,EAAE,CAAC;QAC1C,IAAI,CAAC,WAAW,EAAE,CAAC;QAEnB,IAAI,CAAC,qBAAqB,CAAC,GAAG,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;QAEhD,OAAO,QAAQ,CAAC;IACpB,CAAC;IAEM,sBAAsB,CAAC,OAAe,EAAE,GAAmB;QAC9D,MAAM,gBAAgB,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC,CAAC;QAC/D,IAAI,gBAAgB,IAAI,IAAI;YACxB,OAAO,gBAAgB,CAAC;QAE5B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE,CAAC;QAEjB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEhD,OAAO,QAAQ,CAAC;IACpB,CAAC;IAEM,YAAY,CAAC,SAAyC;QACzD,KAAK,MAAM,CAAC,OAAO,EAAE,GAAG,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC;YAClD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IACzD,CAAC;IAEM,gBAAgB,CAAC,WAAmB;QACvC,MAAM,KAAK,GAAsC,CAAC;gBAC9C,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,WAAW;aACvB,CAAC,CAAC;QAEH,KAAK,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,EAAE,CAAC;YACrD,IAAI,OAAO,IAAI,IAAI;gBACf,SAAS;YAEb,KAAK,CAAC,IAAI,CAAC;gBACP,IAAI,EAAE,QAAQ;gBACd,OAAO;aACV,CAAC,CAAC;QACP,CAAC;QAED,MAAM,WAAW,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,GAAG,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5E,MAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEpC,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,qCAAqC;QACxC,OAAO,MAAM,IAAI,CAAC,WAAW,EAAE,CAAC,MAAM,CAAC;IAC3C,CAAC;CACJ"}
11
node_modules/node-llama-cpp/dist/utils/gbnfJson/GbnfTerminal.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
import { GbnfGrammarGenerator } from "./GbnfGrammarGenerator.js";
export declare abstract class GbnfTerminal {
    private _ruleName;
    /** To be used only by `getRuleName` */
    protected generateRuleName(grammarGenerator: GbnfGrammarGenerator): string;
    protected getRuleName(grammarGenerator: GbnfGrammarGenerator): string;
    abstract getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
    protected getGrammarFromResolve(grammarGenerator: GbnfGrammarGenerator): string;
    private _getRootRuleName;
    resolve(grammarGenerator: GbnfGrammarGenerator, resolveAsRootGrammar?: boolean): string;
}
54
node_modules/node-llama-cpp/dist/utils/gbnfJson/GbnfTerminal.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
export class GbnfTerminal {
    _ruleName = null;
    /** To be used only by `getRuleName` */
    generateRuleName(grammarGenerator) {
        return grammarGenerator.generateRuleName();
    }
    getRuleName(grammarGenerator) {
        if (this._ruleName != null)
            return this._ruleName;
        const ruleName = this.generateRuleName(grammarGenerator);
        this._ruleName = ruleName;
        return ruleName;
    }
    getGrammarFromResolve(grammarGenerator) {
        return this.getGrammar(grammarGenerator);
    }
    _getRootRuleName(grammarGenerator) {
        if (this._ruleName != null)
            return this._ruleName;
        const ruleName = grammarGenerator.usedRootRuleName
            ? this.getRuleName(grammarGenerator)
            : "root";
        this._ruleName = ruleName;
        if (ruleName === "root")
            grammarGenerator.usedRootRuleName = true;
        return ruleName;
    }
    resolve(grammarGenerator, resolveAsRootGrammar = false) {
        if (this._ruleName != null)
            return this._ruleName;
        const grammar = this.getGrammarFromResolve(grammarGenerator);
        const existingRuleName = grammarGenerator.ruleContentToRuleName.get(grammar);
        if (existingRuleName != null) {
            this._ruleName = existingRuleName;
            return existingRuleName;
        }
        const ruleName = resolveAsRootGrammar
            ? this._getRootRuleName(grammarGenerator)
            : this.getRuleName(grammarGenerator);
        if (resolveAsRootGrammar)
            return grammar;
        if (grammar === ruleName) {
            this._ruleName = ruleName;
            return ruleName;
        }
        if (!grammarGenerator.rules.has(ruleName)) {
            grammarGenerator.rules.set(ruleName, grammar);
            grammarGenerator.ruleContentToRuleName.set(grammar, ruleName);
        }
        this._ruleName = ruleName;
        return ruleName;
    }
}
//# sourceMappingURL=GbnfTerminal.js.map
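Not part of the vendored file — a hypothetical terminal subclass showing the caching contract of `resolve`: when two terminals produce identical grammar text, the second one reuses the rule name the first one registered.

import { GbnfTerminal } from "node-llama-cpp/dist/utils/gbnfJson/GbnfTerminal.js";
import { GbnfGrammarGenerator } from "node-llama-cpp/dist/utils/gbnfJson/GbnfGrammarGenerator.js";

// Hypothetical terminal that matches a literal word.
class GbnfWord extends GbnfTerminal {
    constructor(word) {
        super();
        this.word = word;
    }
    getGrammar() {
        return `"${this.word}"`;
    }
}

const generator = new GbnfGrammarGenerator();
const a = new GbnfWord("yes").resolve(generator); // registers rule0 ::= "yes"
const b = new GbnfWord("yes").resolve(generator); // reuses "rule0"
// a === b: identical grammar text maps to one shared rule name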
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/GbnfTerminal.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfTerminal.js","sourceRoot":"","sources":["../../../src/utils/gbnfJson/GbnfTerminal.ts"],"names":[],"mappings":"AAGA,MAAM,OAAgB,YAAY;IACtB,SAAS,GAAkB,IAAI,CAAC;IAExC,uCAAuC;IAC7B,gBAAgB,CAAC,gBAAsC;QAC7D,OAAO,gBAAgB,CAAC,gBAAgB,EAAE,CAAC;IAC/C,CAAC;IAES,WAAW,CAAC,gBAAsC;QACxD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,OAAO,IAAI,CAAC,SAAS,CAAC;QAE1B,MAAM,QAAQ,GAAG,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAC;QACzD,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAE1B,OAAO,QAAQ,CAAC;IACpB,CAAC;IAIS,qBAAqB,CAAC,gBAAsC;QAClE,OAAO,IAAI,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;IAC7C,CAAC;IAEO,gBAAgB,CAAC,gBAAsC;QAC3D,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,OAAO,IAAI,CAAC,SAAS,CAAC;QAE1B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,gBAAgB;YAC9C,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC;YACpC,CAAC,CAAC,MAAM,CAAC;QACb,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAE1B,IAAI,QAAQ,KAAK,MAAM;YACnB,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC;QAE7C,OAAO,QAAQ,CAAC;IACpB,CAAC;IAEM,OAAO,CAAC,gBAAsC,EAAE,uBAAgC,KAAK;QACxF,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,OAAO,IAAI,CAAC,SAAS,CAAC;QAE1B,MAAM,OAAO,GAAG,IAAI,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;QAE7D,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,qBAAqB,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QAC7E,IAAI,gBAAgB,IAAI,IAAI,EAAE,CAAC;YAC3B,IAAI,CAAC,SAAS,GAAG,gBAAgB,CAAC;YAClC,OAAO,gBAAgB,CAAC;QAC5B,CAAC;QAED,MAAM,QAAQ,GAAG,oBAAoB;YACjC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;YACzC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAAC;QAEzC,IAAI,oBAAoB;YACpB,OAAO,OAAO,CAAC;QAEnB,IAAI,OAAO,KAAK,QAAQ,EAAE,CAAC;YACvB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;YAC1B,OAAO,QAAQ,CAAC;QACpB,CAAC;QAED,IAAI,CAAC,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC;YACxC,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAC9C,gBAAgB,CAAC,qBAAqB,CAAC,GAAG,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;QAClE,CAAC;QAED,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,OAAO,QAAQ,CAAC;IACpB,CAAC;CACJ"}
5
node_modules/node-llama-cpp/dist/utils/gbnfJson/getGbnfGrammarForGbnfJsonSchema.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
import { GbnfJsonSchema } from "./types.js";
export declare function getGbnfGrammarForGbnfJsonSchema(schema: Readonly<GbnfJsonSchema>, { allowNewLines, scopePadSpaces }?: {
    allowNewLines?: boolean;
    scopePadSpaces?: number;
}): string;
11
node_modules/node-llama-cpp/dist/utils/gbnfJson/getGbnfGrammarForGbnfJsonSchema.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
import { getGbnfJsonTerminalForGbnfJsonSchema } from "./utils/getGbnfJsonTerminalForGbnfJsonSchema.js";
import { GbnfGrammarGenerator } from "./GbnfGrammarGenerator.js";
import { GbnfJsonScopeState } from "./utils/GbnfJsonScopeState.js";
export function getGbnfGrammarForGbnfJsonSchema(schema, { allowNewLines = true, scopePadSpaces = 4 } = {}) {
    const grammarGenerator = new GbnfGrammarGenerator();
    const scopeState = new GbnfJsonScopeState({ allowNewLines, scopePadSpaces });
    const rootTerminal = getGbnfJsonTerminalForGbnfJsonSchema(schema, grammarGenerator, scopeState);
    const rootGrammar = rootTerminal.resolve(grammarGenerator, true);
    return grammarGenerator.generateGbnfFile(rootGrammar + ` "${"\\n".repeat(4)}"` + " [\\n]*");
}
//# sourceMappingURL=getGbnfGrammarForGbnfJsonSchema.js.map
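This function is the entry point that turns a JSON schema into GBNF text. A minimal usage sketch; the deep dist import path is an assumption (the package may not expose it as a public subpath), and the schema shown is just an example:

```ts
// A usage sketch, assuming the dist path above is reachable from consuming code.
import { getGbnfGrammarForGbnfJsonSchema } from "node-llama-cpp/dist/utils/gbnfJson/getGbnfGrammarForGbnfJsonSchema.js";

const grammar = getGbnfGrammarForGbnfJsonSchema({
    type: "object",
    properties: {
        name: { type: "string" },
        age: { type: "number" }
    }
}, { allowNewLines: false, scopePadSpaces: 2 });

// GBNF text whose root rule only matches JSON conforming to the schema
console.log(grammar);
```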
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/getGbnfGrammarForGbnfJsonSchema.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"getGbnfGrammarForGbnfJsonSchema.js","sourceRoot":"","sources":["../../../src/utils/gbnfJson/getGbnfGrammarForGbnfJsonSchema.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,oCAAoC,EAAC,MAAM,iDAAiD,CAAC;AACrG,OAAO,EAAC,oBAAoB,EAAC,MAAM,2BAA2B,CAAC;AAC/D,OAAO,EAAC,kBAAkB,EAAC,MAAM,+BAA+B,CAAC;AAGjE,MAAM,UAAU,+BAA+B,CAAC,MAAgC,EAAE,EAC9E,aAAa,GAAG,IAAI,EACpB,cAAc,GAAG,CAAC,KAIlB,EAAE;IACF,MAAM,gBAAgB,GAAG,IAAI,oBAAoB,EAAE,CAAC;IACpD,MAAM,UAAU,GAAG,IAAI,kBAAkB,CAAC,EAAC,aAAa,EAAE,cAAc,EAAC,CAAC,CAAC;IAC3E,MAAM,YAAY,GAAG,oCAAoC,CAAC,MAAM,EAAE,gBAAgB,EAAE,UAAU,CAAC,CAAC;IAChG,MAAM,WAAW,GAAG,YAAY,CAAC,OAAO,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAC;IAEjE,OAAO,gBAAgB,CAAC,gBAAgB,CAAC,WAAW,GAAG,KAAK,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,CAAC,CAAC;AAChG,CAAC"}
9
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfAnyJson.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
export declare class GbnfAnyJson extends GbnfTerminal {
    readonly scopeState: GbnfJsonScopeState;
    constructor(scopeState?: GbnfJsonScopeState);
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
    protected getRuleName(): string;
}
53
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfAnyJson.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
import { GbnfString } from "./GbnfString.js";
import { GbnfOr } from "./GbnfOr.js";
import { GbnfNumber } from "./GbnfNumber.js";
import { GbnfBoolean } from "./GbnfBoolean.js";
import { GbnfNull } from "./GbnfNull.js";
import { GbnfArray } from "./GbnfArray.js";
import { reservedRuleNames } from "./gbnfConsts.js";
import { GbnfObjectMap } from "./GbnfObjectMap.js";
export class GbnfAnyJson extends GbnfTerminal {
    scopeState;
    constructor(scopeState = new GbnfJsonScopeState()) {
        super();
        this.scopeState = scopeState;
    }
    getGrammar(grammarGenerator) {
        const subAnyJsonScopeItem = this.scopeState.settings.allowNewLines
            ? new GbnfAnyJson(new GbnfJsonScopeState({
                allowNewLines: false,
                scopePadSpaces: this.scopeState.settings.scopePadSpaces
            }, this.scopeState.currentNestingScope))
            : new GbnfSubAnyJson(this.scopeState);
        return new GbnfOr([
            new GbnfString(),
            new GbnfNumber({ allowFractional: true }),
            new GbnfBoolean(),
            new GbnfNull(),
            new GbnfArray({
                items: subAnyJsonScopeItem,
                scopeState: this.scopeState
            }),
            new GbnfObjectMap({
                fields: [],
                additionalProperties: subAnyJsonScopeItem,
                scopeState: this.scopeState
            })
        ]).getGrammar(grammarGenerator);
    }
    getRuleName() {
        return reservedRuleNames.anyJson({
            allowNewLines: this.scopeState.settings.allowNewLines,
            scopeSpaces: this.scopeState.settings.scopePadSpaces,
            nestingScope: this.scopeState.currentNestingScope
        });
    }
}
class GbnfSubAnyJson extends GbnfAnyJson {
    getGrammar() {
        return this.getRuleName();
    }
}
//# sourceMappingURL=GbnfAnyJson.js.map
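The `GbnfSubAnyJson` subclass above exists to break recursion: an "any JSON" value contains arrays and objects whose items are again "any JSON", so the nested slot is filled with the rule's name rather than its expansion. A hypothetical reduction of that idea (`AnyJson` here is illustrative, not the real class, and the grammar string is a simplification of the real output):

```ts
// Hypothetical reduction of the GbnfAnyJson/GbnfSubAnyJson split: the "sub" variant
// stops infinite expansion by emitting the rule's *name* rather than its body.
class AnyJson {
    ruleName = "any-json";
    getGrammar(): string {
        // The real code unions string/number/boolean/null/array/object terminals;
        // the nested value slot refers back to the rule by name to stay finite.
        const nested = this.getRuleName(); // <- what GbnfSubAnyJson.getGrammar() returns
        return `string | number | boolean | null | "[" ${nested}* "]" | "{" ... "}"`;
    }
    getRuleName(): string {
        return this.ruleName;
    }
}
console.log(new AnyJson().getGrammar());
```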
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfAnyJson.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfAnyJson.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfAnyJson.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAEhD,OAAO,EAAC,kBAAkB,EAAC,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAC,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAC3C,OAAO,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACnC,OAAO,EAAC,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAC3C,OAAO,EAAC,WAAW,EAAC,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAC,QAAQ,EAAC,MAAM,eAAe,CAAC;AACvC,OAAO,EAAC,SAAS,EAAC,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAClD,OAAO,EAAC,aAAa,EAAC,MAAM,oBAAoB,CAAC;AAGjD,MAAM,OAAO,WAAY,SAAQ,YAAY;IACzB,UAAU,CAAqB;IAE/C,YAAmB,aAAiC,IAAI,kBAAkB,EAAE;QACxE,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IACjC,CAAC;IAEM,UAAU,CAAC,gBAAsC;QACpD,MAAM,mBAAmB,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,aAAa;YAC9D,CAAC,CAAC,IAAI,WAAW,CACb,IAAI,kBAAkB,CAAC;gBACnB,aAAa,EAAE,KAAK;gBACpB,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,cAAc;aAC1D,EAAE,IAAI,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C;YACD,CAAC,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAE1C,OAAO,IAAI,MAAM,CAAC;YACd,IAAI,UAAU,EAAE;YAChB,IAAI,UAAU,CAAC,EAAC,eAAe,EAAE,IAAI,EAAC,CAAC;YACvC,IAAI,WAAW,EAAE;YACjB,IAAI,QAAQ,EAAE;YACd,IAAI,SAAS,CAAC;gBACV,KAAK,EAAE,mBAAmB;gBAC1B,UAAU,EAAE,IAAI,CAAC,UAAU;aAC9B,CAAC;YACF,IAAI,aAAa,CAAC;gBACd,MAAM,EAAE,EAAE;gBACV,oBAAoB,EAAE,mBAAmB;gBACzC,UAAU,EAAE,IAAI,CAAC,UAAU;aAC9B,CAAC;SACL,CAAC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;IACpC,CAAC;IAEkB,WAAW;QAC1B,OAAO,iBAAiB,CAAC,OAAO,CAAC;YAC7B,aAAa,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,aAAa;YACrD,WAAW,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,cAAc;YACpD,YAAY,EAAE,IAAI,CAAC,UAAU,CAAC,mBAAmB;SACpD,CAAC,CAAC;IACP,CAAC;CACJ;AAED,MAAM,cAAe,SAAQ,WAAW;IACpB,UAAU;QACtB,OAAO,IAAI,CAAC,WAAW,EAAE,CAAC;IAC9B,CAAC;CACJ"}
18
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfArray.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
export declare class GbnfArray extends GbnfTerminal {
    readonly items?: GbnfTerminal;
    readonly prefixItems?: GbnfTerminal[];
    readonly minItems: number;
    readonly maxItems?: number;
    readonly scopeState: GbnfJsonScopeState;
    constructor({ items, prefixItems, minItems, maxItems, scopeState }: {
        items?: GbnfTerminal;
        prefixItems?: GbnfTerminal[];
        minItems?: number;
        maxItems?: number;
        scopeState: GbnfJsonScopeState;
    });
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
}
83
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfArray.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
import { GbnfWhitespace } from "./GbnfWhitespace.js";
import { GbnfGrammar } from "./GbnfGrammar.js";
import { GbnfRepetition } from "./GbnfRepetition.js";
import { GbnfCommaWhitespace } from "./GbnfCommaWhitespace.js";
import { GbnfAnyJson } from "./GbnfAnyJson.js";
export class GbnfArray extends GbnfTerminal {
    items;
    prefixItems;
    minItems;
    maxItems;
    scopeState;
    constructor({ items, prefixItems, minItems = 0, maxItems, scopeState = new GbnfJsonScopeState() }) {
        super();
        this.items = items;
        this.prefixItems = prefixItems;
        this.minItems = Math.floor(minItems);
        this.maxItems = maxItems == null ? undefined : Math.floor(maxItems);
        this.scopeState = scopeState;
        if (this.prefixItems != null && this.minItems < this.prefixItems.length)
            this.minItems = this.prefixItems.length;
        else if (this.minItems < 0)
            this.minItems = 0;
        if (this.maxItems != null && this.maxItems < this.minItems)
            this.maxItems = this.minItems;
        else if (this.maxItems != null && this.maxItems < 0)
            this.maxItems = 0;
    }
    getGrammar(grammarGenerator) {
        const getWhitespaceRule = (newScope, newLine) => (newScope
            ? new GbnfWhitespace(this.scopeState.getForNewScope(), { newLine })
            : new GbnfWhitespace(this.scopeState, { newLine }));
        const getWhitespaceRuleName = (newScope, newLine) => (getWhitespaceRule(newScope, newLine).resolve(grammarGenerator));
        const getCommaWhitespaceRule = (newScope, newLine) => (newScope
            ? new GbnfCommaWhitespace(this.scopeState.getForNewScope(), { newLine })
            : new GbnfCommaWhitespace(this.scopeState, { newLine }));
        const getCommaWhitespaceRuleName = (newScope, newLine) => (getCommaWhitespaceRule(newScope, newLine).resolve(grammarGenerator));
        const arrayItemsGrammar = [];
        if (this.prefixItems != null && this.prefixItems.length > 0) {
            for (const item of this.prefixItems) {
                if (arrayItemsGrammar.length > 0)
                    arrayItemsGrammar.push(getCommaWhitespaceRuleName(true, "before"));
                arrayItemsGrammar.push(item.resolve(grammarGenerator));
            }
            if (this.minItems > this.prefixItems.length || this.maxItems == null || this.maxItems > this.prefixItems.length) {
                const restMinRepetitions = this.minItems - this.prefixItems.length;
                const restMaxRepetitions = this.maxItems == null
                    ? undefined
                    : this.maxItems - this.prefixItems.length;
                if (arrayItemsGrammar.length > 0)
                    arrayItemsGrammar.push(new GbnfRepetition({
                        value: new GbnfGrammar([
                            getCommaWhitespaceRuleName(true, "before"),
                            (this.items ?? new GbnfAnyJson()).resolve(grammarGenerator)
                        ], true),
                        minRepetitions: restMinRepetitions,
                        maxRepetitions: restMaxRepetitions
                    }).getGrammar(grammarGenerator));
                else
                    arrayItemsGrammar.push(new GbnfRepetition({
                        value: this.items ?? new GbnfAnyJson(),
                        separator: getCommaWhitespaceRule(true, "before"),
                        minRepetitions: restMinRepetitions,
                        maxRepetitions: restMaxRepetitions
                    }).getGrammar(grammarGenerator));
            }
        }
        else
            arrayItemsGrammar.push(new GbnfRepetition({
                value: this.items ?? new GbnfAnyJson(),
                separator: getCommaWhitespaceRule(true, "before"),
                minRepetitions: this.minItems,
                maxRepetitions: this.maxItems
            }).getGrammar(grammarGenerator));
        return new GbnfGrammar([
            '"["', getWhitespaceRuleName(true, "before"),
            new GbnfGrammar(arrayItemsGrammar).getGrammar(),
            getWhitespaceRuleName(false, "before"), '"]"'
        ]).getGrammar();
    }
}
//# sourceMappingURL=GbnfArray.js.map
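The constructor above normalizes conflicting constraints: `prefixItems` always count toward `minItems`, and `maxItems` can never undercut `minItems`. A small standalone mirror of that clamping logic (`normalizeArrayBounds` is a hypothetical helper, not part of the library):

```ts
// Mirrors the clamping in GbnfArray's constructor, with prefixCount standing in
// for this.prefixItems.length.
function normalizeArrayBounds(minItems: number, maxItems: number | undefined, prefixCount: number) {
    let min = Math.floor(minItems);
    let max = maxItems == null ? undefined : Math.floor(maxItems);
    if (min < prefixCount)
        min = prefixCount;   // prefix items are always required
    else if (min < 0)
        min = 0;
    if (max != null && max < min)
        max = min;           // maxItems can never undercut minItems
    return { min, max };
}

console.log(normalizeArrayBounds(1, 2, 3)); // { min: 3, max: 3 }
```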
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfArray.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfArray.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfArray.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAEhD,OAAO,EAAC,kBAAkB,EAAC,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAC,cAAc,EAAC,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAC,WAAW,EAAC,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAC,cAAc,EAAC,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAC,mBAAmB,EAAC,MAAM,0BAA0B,CAAC;AAC7D,OAAO,EAAC,WAAW,EAAC,MAAM,kBAAkB,CAAC;AAG7C,MAAM,OAAO,SAAU,SAAQ,YAAY;IACvB,KAAK,CAAgB;IACrB,WAAW,CAAkB;IAC7B,QAAQ,CAAS;IACjB,QAAQ,CAAU;IAClB,UAAU,CAAqB;IAE/C,YAAmB,EACf,KAAK,EAAE,WAAW,EAAE,QAAQ,GAAG,CAAC,EAAE,QAAQ,EAC1C,UAAU,GAAG,IAAI,kBAAkB,EAAE,EAIxC;QACG,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QACrC,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QACpE,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAE7B,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,IAAI,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM;YACnE,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;aACvC,IAAI,IAAI,CAAC,QAAQ,GAAG,CAAC;YACtB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC;QAEtB,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,IAAI,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ;YACtD,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;aAC7B,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,IAAI,IAAI,CAAC,QAAQ,GAAG,CAAC;YAC/C,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC;IAC1B,CAAC;IAEM,UAAU,CAAC,gBAAsC;QACpD,MAAM,iBAAiB,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CAClF,QAAQ;YACJ,CAAC,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,EAAE,EAAC,OAAO,EAAC,CAAC;YACjE,CAAC,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC,UAAU,EAAE,EAAC,OAAO,EAAC,CAAC,CACvD,CAAC;QACF,MAAM,qBAAqB,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CACtF,iBAAiB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CACjE,CAAC;QAEF,MAAM,sBAAsB,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CACvF,QAAQ;YACJ,CAAC,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,EAAE,EAAC,OAAO,EAAC,CAAC;YACtE,CAAC,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,UAAU,EAAE,EAAC,OAAO,EAAC,CAAC,CAC5D,CAAC;QACF,MAAM,0BAA0B,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CAC3F,sBAAsB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CACtE,CAAC;QAEF,MAAM,iBAAiB,GAAa,EAAE,CAAC;QACvC,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,IAAI,IAAI,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC1D,KAAK,MAAM,IAAI,IAAI,IAAI,CAAC,WAAW,EAAE,CAAC;gBAClC,IAAI,iBAAiB,CAAC,MAAM,GAAG,CAAC;oBAC5B,iBAAiB,CAAC,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;gBAEvE,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC;YAC3D,CAAC;YAED,IAAI,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,IAAI,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBAC9G,MAAM,kBAAkB,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;gBACnE,MAAM,kBAAkB,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI;oBAC5C,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;gBAE9C,IAAI,iBAAiB,CAAC,MAAM,GAAG,CAAC;oBAC5B,iBAAiB,CAAC,IAAI,CAClB,IAAI,cAAc,CAAC;wBACf,KAAK,EAAE,IAAI,WAAW,CAAC;4BACnB,0BAA0B,CAAC,IAAI,EAAE,QAAQ,CAAC;4BAC1C,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,WAAW,EAAE,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC;yBAC9D,EAAE,IAAI,CAAC;wBACR,cAAc,EAAE,kBAAkB;wBAClC,cAAc,EAAE,kBAAkB;qBACrC,CAAC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAClC,CAAC;;oBAEF,iBAAiB,CAAC,IAAI,CAClB,IAAI,cAAc,CAAC;wBACf,KAAK,EAAE,IAAI,CAAC,KAAK,IAAI,IAAI,WAAW,EAAE;wBACtC,SAAS,EAAE,sBAAsB,CAAC,IAAI,EAAE,QAAQ,CAAC;wBACjD,cAAc,EAAE,kBAAkB;wBAClC,cAAc,EAAE,kBAAkB;qBACrC,CAAC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAClC,CAAC;YACV,CAAC;QACL,CAAC;;YACG,iBAAiB,CAAC,IAAI,CAClB,IAAI,cAAc,CAAC
;gBACf,KAAK,EAAE,IAAI,CAAC,KAAK,IAAI,IAAI,WAAW,EAAE;gBACtC,SAAS,EAAE,sBAAsB,CAAC,IAAI,EAAE,QAAQ,CAAC;gBACjD,cAAc,EAAE,IAAI,CAAC,QAAQ;gBAC7B,cAAc,EAAE,IAAI,CAAC,QAAQ;aAChC,CAAC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAClC,CAAC;QAEN,OAAO,IAAI,WAAW,CAAC;YACnB,KAAK,EAAE,qBAAqB,CAAC,IAAI,EAAE,QAAQ,CAAC;YAC5C,IAAI,WAAW,CAAC,iBAAiB,CAAC,CAAC,UAAU,EAAE;YAC/C,qBAAqB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,KAAK;SAChD,CAAC,CAAC,UAAU,EAAE,CAAC;IACpB,CAAC;CACJ"}
7
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfBoolean.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export declare class GbnfBoolean extends GbnfTerminal {
    getGrammar(): string;
    protected getGrammarFromResolve(): string;
    private _getGrammar;
    protected getRuleName(): string;
}
22
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfBoolean.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { reservedRuleNames } from "./gbnfConsts.js";
export class GbnfBoolean extends GbnfTerminal {
    getGrammar() {
        return this._getGrammar();
    }
    getGrammarFromResolve() {
        return this._getGrammar(false);
    }
    _getGrammar(wrap = true) {
        const values = ['"true"', '"false"'];
        if (wrap)
            return [
                "(", values.join(" | "), ")"
            ].join(" ");
        return values.join(" | ");
    }
    getRuleName() {
        return reservedRuleNames.boolean;
    }
}
//# sourceMappingURL=GbnfBoolean.js.map
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfBoolean.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfBoolean.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfBoolean.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAChD,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAGlD,MAAM,OAAO,WAAY,SAAQ,YAAY;IAClC,UAAU;QACb,OAAO,IAAI,CAAC,WAAW,EAAE,CAAC;IAC9B,CAAC;IAEkB,qBAAqB;QACpC,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAEO,WAAW,CAAC,OAAgB,IAAI;QACpC,MAAM,MAAM,GAAa,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;QAE/C,IAAI,IAAI;YACJ,OAAO;gBACH,GAAG,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,GAAG;aAC/B,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAEhB,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAC9B,CAAC;IAEkB,WAAW;QAC1B,OAAO,iBAAiB,CAAC,OAAO,CAAC;IACrC,CAAC;CACJ"}
7
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfBooleanValue.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export declare class GbnfBooleanValue extends GbnfTerminal {
    readonly value: boolean;
    constructor(value: boolean);
    getGrammar(): string;
    resolve(): string;
}
17
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfBooleanValue.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export class GbnfBooleanValue extends GbnfTerminal {
    value;
    constructor(value) {
        super();
        this.value = value;
    }
    getGrammar() {
        if (this.value)
            return '"true"';
        return '"false"';
    }
    resolve() {
        return this.getGrammar();
    }
}
//# sourceMappingURL=GbnfBooleanValue.js.map
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfBooleanValue.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfBooleanValue.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfBooleanValue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAGhD,MAAM,OAAO,gBAAiB,SAAQ,YAAY;IAC9B,KAAK,CAAU;IAE/B,YAAmB,KAAc;QAC7B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACvB,CAAC;IAEM,UAAU;QACb,IAAI,IAAI,CAAC,KAAK;YACV,OAAO,QAAQ,CAAC;QAEpB,OAAO,SAAS,CAAC;IACrB,CAAC;IAEe,OAAO;QACnB,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;IAC7B,CAAC;CACJ"}
11
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfCommaWhitespace.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
export declare class GbnfCommaWhitespace extends GbnfTerminal {
    readonly scopeState: GbnfJsonScopeState;
    readonly newLine: "before" | "after" | false;
    constructor(scopeState: GbnfJsonScopeState, { newLine }?: {
        newLine?: "before" | "after" | false;
    });
    getGrammar(): string;
    protected getRuleName(): string;
}
28
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfCommaWhitespace.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammar } from "./GbnfGrammar.js";
import { GbnfWhitespace } from "./GbnfWhitespace.js";
import { reservedRuleNames } from "./gbnfConsts.js";
export class GbnfCommaWhitespace extends GbnfTerminal {
    scopeState;
    newLine;
    constructor(scopeState, { newLine = "before" } = {}) {
        super();
        this.scopeState = scopeState;
        this.newLine = newLine;
    }
    getGrammar() {
        return new GbnfGrammar([
            '","', new GbnfWhitespace(this.scopeState, { newLine: this.newLine }).getGrammar()
        ]).getGrammar();
    }
    getRuleName() {
        return reservedRuleNames.commaWhitespace({
            newLine: this.scopeState.settings.allowNewLines
                ? this.newLine
                : false,
            scopeSpaces: this.scopeState.settings.scopePadSpaces,
            nestingScope: this.scopeState.currentNestingScope
        });
    }
}
//# sourceMappingURL=GbnfCommaWhitespace.js.map
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfCommaWhitespace.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfCommaWhitespace.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfCommaWhitespace.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAEhD,OAAO,EAAC,WAAW,EAAC,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAC,cAAc,EAAC,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAGlD,MAAM,OAAO,mBAAoB,SAAQ,YAAY;IACjC,UAAU,CAAqB;IAC/B,OAAO,CAA6B;IAEpD,YAAmB,UAA8B,EAAE,EAC/C,OAAO,GAAG,QAAQ,KAGlB,EAAE;QACF,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IAC3B,CAAC;IAEM,UAAU;QACb,OAAO,IAAI,WAAW,CAAC;YACnB,KAAK,EAAE,IAAI,cAAc,CAAC,IAAI,CAAC,UAAU,EAAE,EAAC,OAAO,EAAE,IAAI,CAAC,OAAO,EAAC,CAAC,CAAC,UAAU,EAAE;SACnF,CAAC,CAAC,UAAU,EAAE,CAAC;IACpB,CAAC;IAEkB,WAAW;QAC1B,OAAO,iBAAiB,CAAC,eAAe,CAAC;YACrC,OAAO,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,aAAa;gBAC3C,CAAC,CAAC,IAAI,CAAC,OAAO;gBACd,CAAC,CAAC,KAAK;YACX,WAAW,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,cAAc;YACpD,YAAY,EAAE,IAAI,CAAC,UAAU,CAAC,mBAAmB;SACpD,CAAC,CAAC;IACP,CAAC;CACJ"}
11
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfFormatString.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
import { GbnfJsonFormatStringSchema } from "../types.js";
export declare class GbnfFormatString extends GbnfTerminal {
    readonly format: GbnfJsonFormatStringSchema["format"];
    constructor(format: GbnfJsonFormatStringSchema["format"]);
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
    protected getRuleName(): string;
    private _getDateGrammar;
    private _getTimeGrammar;
}
90
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfFormatString.js
generated
vendored
Normal file
@@ -0,0 +1,90 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { reservedRuleNames } from "./gbnfConsts.js";
import { GbnfGrammar } from "./GbnfGrammar.js";
import { GbnfString } from "./GbnfString.js";
export class GbnfFormatString extends GbnfTerminal {
    format;
    constructor(format) {
        super();
        this.format = format;
    }
    getGrammar(grammarGenerator) {
        const quote = '"\\""';
        if (this.format === "date")
            return new GbnfGrammar([
                quote,
                this._getDateGrammar(),
                quote
            ]).getGrammar();
        else if (this.format === "time") {
            return new GbnfGrammar([
                quote,
                this._getTimeGrammar(),
                quote
            ]).getGrammar();
        }
        else if (this.format === "date-time")
            return new GbnfGrammar([
                quote,
                this._getDateGrammar(),
                '"T"',
                this._getTimeGrammar(),
                quote
            ]).getGrammar();
        return new GbnfString({
            minLength: 0,
            maxLength: 0
        }).resolve(grammarGenerator);
    }
    getRuleName() {
        return reservedRuleNames.formatString(this.format);
    }
    _getDateGrammar() {
        return new GbnfGrammar([
            "[0-9]{4}",
            '"-"',
            or([
                '"0" [1-9]',
                '"1" [012]'
            ]),
            '"-"',
            or([
                '"0" [1-9]',
                "[12] [0-9]",
                '"3" [01]'
            ])
        ]).getGrammar();
    }
    _getTimeGrammar() {
        return new GbnfGrammar([
            or([
                "[01] [0-9]",
                '"2" [0-3]'
            ]),
            '":"',
            "[0-5] [0-9]",
            '":"',
            "[0-5] [0-9]",
            '( "." [0-9]{3} )?',
            or([
                '"Z"',
                new GbnfGrammar([
                    or([
                        '"+"',
                        '"-"'
                    ]),
                    or([
                        "[01] [0-9]",
                        '"2" [0-3]'
                    ]),
                    '":"',
                    "[0-5] [0-9]"
                ]).getGrammar()
            ])
        ]).getGrammar();
    }
}
function or(values) {
    return "(" + values.join(" | ") + ")";
}
//# sourceMappingURL=GbnfFormatString.js.map
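For the "date" format, the pieces above join into a single GBNF fragment. Reproducing `_getDateGrammar()`'s output by hand, using the same `or()` helper and the space-joining behavior of `GbnfGrammar`:

```ts
// Rebuilds the date fragment from the same pieces, joined with spaces.
const or = (values: string[]) => "(" + values.join(" | ") + ")";
const dateGrammar = [
    "[0-9]{4}",                                  // year
    '"-"',
    or(['"0" [1-9]', '"1" [012]']),              // month 01-12
    '"-"',
    or(['"0" [1-9]', "[12] [0-9]", '"3" [01]'])  // day 01-31
].join(" ");
console.log(dateGrammar);
// [0-9]{4} "-" ("0" [1-9] | "1" [012]) "-" ("0" [1-9] | [12] [0-9] | "3" [01])
```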
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfFormatString.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfFormatString.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfFormatString.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAGhD,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAClD,OAAO,EAAC,WAAW,EAAC,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAC,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAG3C,MAAM,OAAO,gBAAiB,SAAQ,YAAY;IAC9B,MAAM,CAAuC;IAE7D,YAAmB,MAA4C;QAC3D,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACzB,CAAC;IAEM,UAAU,CAAC,gBAAsC;QACpD,MAAM,KAAK,GAAG,OAAO,CAAC;QACtB,IAAI,IAAI,CAAC,MAAM,KAAK,MAAM;YACtB,OAAO,IAAI,WAAW,CAAC;gBACnB,KAAK;gBACL,IAAI,CAAC,eAAe,EAAE;gBACtB,KAAK;aACR,CAAC,CAAC,UAAU,EAAE,CAAC;aACf,IAAI,IAAI,CAAC,MAAM,KAAK,MAAM,EAAE,CAAC;YAC9B,OAAO,IAAI,WAAW,CAAC;gBACnB,KAAK;gBACL,IAAI,CAAC,eAAe,EAAE;gBACtB,KAAK;aACR,CAAC,CAAC,UAAU,EAAE,CAAC;QACpB,CAAC;aAAM,IAAI,IAAI,CAAC,MAAM,KAAK,WAAW;YAClC,OAAO,IAAI,WAAW,CAAC;gBACnB,KAAK;gBACL,IAAI,CAAC,eAAe,EAAE;gBACtB,KAAK;gBACL,IAAI,CAAC,eAAe,EAAE;gBACtB,KAAK;aACR,CAAC,CAAC,UAAU,EAAE,CAAC;QAEpB,OAAO,IAAI,UAAU,CAAC;YAClB,SAAS,EAAE,CAAC;YACZ,SAAS,EAAE,CAAC;SACf,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;IACjC,CAAC;IAEkB,WAAW;QAC1B,OAAO,iBAAiB,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACvD,CAAC;IAEO,eAAe;QACnB,OAAO,IAAI,WAAW,CAAC;YACnB,UAAU;YACV,KAAK;YACL,EAAE,CAAC;gBACC,WAAW;gBACX,WAAW;aACd,CAAC;YACF,KAAK;YACL,EAAE,CAAC;gBACC,WAAW;gBACX,YAAY;gBACZ,UAAU;aACb,CAAC;SACL,CAAC,CAAC,UAAU,EAAE,CAAC;IACpB,CAAC;IAEO,eAAe;QACnB,OAAO,IAAI,WAAW,CAAC;YACnB,EAAE,CAAC;gBACC,YAAY;gBACZ,WAAW;aACd,CAAC;YACF,KAAK;YACL,aAAa;YACb,KAAK;YACL,aAAa;YACb,mBAAmB;YACnB,EAAE,CAAC;gBACC,KAAK;gBACL,IAAI,WAAW,CAAC;oBACZ,EAAE,CAAC;wBACC,KAAK;wBACL,KAAK;qBACR,CAAC;oBACF,EAAE,CAAC;wBACC,YAAY;wBACZ,WAAW;qBACd,CAAC;oBACF,KAAK;oBACL,aAAa;iBAChB,CAAC,CAAC,UAAU,EAAE;aAClB,CAAC;SACL,CAAC,CAAC,UAAU,EAAE,CAAC;IACpB,CAAC;CACJ;AAED,SAAS,EAAE,CAAC,MAAgB;IACxB,OAAO,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,CAAC;AAC1C,CAAC"}
9
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfGrammar.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
export declare class GbnfGrammar extends GbnfTerminal {
    readonly grammar: string | string[];
    readonly resolveToRawGrammar: boolean;
    constructor(grammar: string | string[], resolveToRawGrammar?: boolean);
    getGrammar(): string;
    resolve(grammarGenerator: GbnfGrammarGenerator, resolveAsRootGrammar?: boolean): string;
}
23
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfGrammar.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export class GbnfGrammar extends GbnfTerminal {
    grammar;
    resolveToRawGrammar;
    constructor(grammar, resolveToRawGrammar = false) {
        super();
        this.grammar = grammar;
        this.resolveToRawGrammar = resolveToRawGrammar;
    }
    getGrammar() {
        if (this.grammar instanceof Array)
            return this.grammar
                .filter((item) => item !== "")
                .join(" ");
        return this.grammar;
    }
    resolve(grammarGenerator, resolveAsRootGrammar = false) {
        if (this.resolveToRawGrammar)
            return this.getGrammar();
        return super.resolve(grammarGenerator, resolveAsRootGrammar);
    }
}
//# sourceMappingURL=GbnfGrammar.js.map
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfGrammar.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfGrammar.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfGrammar.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAIhD,MAAM,OAAO,WAAY,SAAQ,YAAY;IACzB,OAAO,CAAoB;IAC3B,mBAAmB,CAAU;IAE7C,YAAmB,OAA0B,EAAE,sBAA+B,KAAK;QAC/E,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACnD,CAAC;IAEM,UAAU;QACb,IAAI,IAAI,CAAC,OAAO,YAAY,KAAK;YAC7B,OAAO,IAAI,CAAC,OAAO;iBACd,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC;iBAC7B,IAAI,CAAC,GAAG,CAAC,CAAC;QAEnB,OAAO,IAAI,CAAC,OAAO,CAAC;IACxB,CAAC;IAEe,OAAO,CAAC,gBAAsC,EAAE,uBAAgC,KAAK;QACjG,IAAI,IAAI,CAAC,mBAAmB;YACxB,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;QAE7B,OAAO,KAAK,CAAC,OAAO,CAAC,gBAAgB,EAAE,oBAAoB,CAAC,CAAC;IACjE,CAAC;CACJ"}
5
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfInsideStringChar.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export declare class GbnfInsideStringChar extends GbnfTerminal {
    getGrammar(): string;
    protected getRuleName(): string;
}
24
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfInsideStringChar.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { reservedRuleNames } from "./gbnfConsts.js";
export class GbnfInsideStringChar extends GbnfTerminal {
    getGrammar() {
        return [
            negatedCharacterSet([
                '"',
                "\\\\",
                "\\x7F",
                "\\x00-\\x1F"
            ]),
            // escape sequences
            '"\\\\" ["\\\\/bfnrt]',
            '"\\\\u" [0-9a-fA-F]{4}'
        ].join(" | ");
    }
    getRuleName() {
        return reservedRuleNames.stringChar;
    }
}
function negatedCharacterSet(characterDefinitions) {
    return "[^" + characterDefinitions.join("") + "]";
}
//# sourceMappingURL=GbnfInsideStringChar.js.map
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfInsideStringChar.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfInsideStringChar.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfInsideStringChar.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAChD,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAElD,MAAM,OAAO,oBAAqB,SAAQ,YAAY;IAC3C,UAAU;QACb,OAAO;YACH,mBAAmB,CAAC;gBAChB,GAAG;gBACH,MAAM;gBACN,OAAO;gBACP,aAAa;aAChB,CAAC;YAEF,mBAAmB;YACnB,sBAAsB;YACtB,wBAAwB;SAC3B,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAClB,CAAC;IAEkB,WAAW;QAC1B,OAAO,iBAAiB,CAAC,UAAU,CAAC;IACxC,CAAC;CACJ;AAED,SAAS,mBAAmB,CAAC,oBAA8B;IACvD,OAAO,IAAI,GAAG,oBAAoB,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC;AACtD,CAAC"}
5
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNull.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export declare class GbnfNull extends GbnfTerminal {
    getGrammar(): string;
    protected getRuleName(): string;
}
11
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNull.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { reservedRuleNames } from "./gbnfConsts.js";
export class GbnfNull extends GbnfTerminal {
    getGrammar() {
        return '"null"';
    }
    getRuleName() {
        return reservedRuleNames.null;
    }
}
//# sourceMappingURL=GbnfNull.js.map
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNull.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfNull.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfNull.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAChD,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAGlD,MAAM,OAAO,QAAS,SAAQ,YAAY;IAC/B,UAAU;QACb,OAAO,QAAQ,CAAC;IACpB,CAAC;IAEkB,WAAW;QAC1B,OAAO,iBAAiB,CAAC,IAAI,CAAC;IAClC,CAAC;CACJ"}
9
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNumber.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export declare class GbnfNumber extends GbnfTerminal {
    readonly allowFractional: boolean;
    constructor({ allowFractional }: {
        allowFractional: boolean;
    });
    getGrammar(): string;
    protected getRuleName(): string;
}
22
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNumber.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { reservedRuleNames } from "./gbnfConsts.js";
export class GbnfNumber extends GbnfTerminal {
    allowFractional;
    constructor({ allowFractional = true }) {
        super();
        this.allowFractional = allowFractional;
    }
    getGrammar() {
        const num = '"-"? ("0" | [1-9] [0-9]{0,15})';
        const exponent = ' ([eE] [-+]? ("0" | [1-9] [0-9]{0,15}))?';
        if (this.allowFractional)
            return num + ' ("." [0-9]{1,16})?' + exponent;
        return num + exponent;
    }
    getRuleName() {
        if (this.allowFractional)
            return reservedRuleNames.number.fractional;
        return reservedRuleNames.number.integer;
    }
}
//# sourceMappingURL=GbnfNumber.js.map
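The two number grammars this terminal can emit follow directly from `getGrammar()` above; both cap the integer part at 16 digits and allow an optional exponent:

```ts
// The grammar strings are copied verbatim from getGrammar() above.
const num = '"-"? ("0" | [1-9] [0-9]{0,15})';
const exponent = ' ([eE] [-+]? ("0" | [1-9] [0-9]{0,15}))?';
const fractional = num + ' ("." [0-9]{1,16})?' + exponent;
const integer = num + exponent;
console.log(fractional); // optional sign, up to 16 integer digits, optional fraction and exponent
console.log(integer);    // the same, minus the fractional part
```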
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNumber.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfNumber.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfNumber.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAChD,OAAO,EAAC,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAGlD,MAAM,OAAO,UAAW,SAAQ,YAAY;IACxB,eAAe,CAAU;IAEzC,YAAmB,EAAC,eAAe,GAAG,IAAI,EAA6B;QACnE,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;IAC3C,CAAC;IAEM,UAAU;QACb,MAAM,GAAG,GAAG,gCAAgC,CAAC;QAC7C,MAAM,QAAQ,GAAG,0CAA0C,CAAC;QAE5D,IAAI,IAAI,CAAC,eAAe;YACpB,OAAO,GAAG,GAAG,qBAAqB,GAAG,QAAQ,CAAC;QAElD,OAAO,GAAG,GAAG,QAAQ,CAAC;IAC1B,CAAC;IAEkB,WAAW;QAC1B,IAAI,IAAI,CAAC,eAAe;YACpB,OAAO,iBAAiB,CAAC,MAAM,CAAC,UAAU,CAAC;QAE/C,OAAO,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;IAC5C,CAAC;CACJ"}
9
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNumberValue.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
export declare class GbnfNumberValue extends GbnfTerminal {
    readonly value: number;
    constructor(value: number);
    getGrammar(): string;
    resolve(grammarGenerator: GbnfGrammarGenerator, resolveAsRootGrammar?: boolean): string;
    protected generateRuleName(grammarGenerator: GbnfGrammarGenerator): string;
}
21
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNumberValue.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
export class GbnfNumberValue extends GbnfTerminal {
    value;
    constructor(value) {
        super();
        this.value = value;
    }
    getGrammar() {
        return '"' + JSON.stringify(this.value) + '"';
    }
    resolve(grammarGenerator, resolveAsRootGrammar = false) {
        const grammar = this.getGrammar();
        if (grammar.length <= grammarGenerator.getProposedLiteralValueRuleNameLength())
            return grammar;
        return super.resolve(grammarGenerator, resolveAsRootGrammar);
    }
    generateRuleName(grammarGenerator) {
        return grammarGenerator.generateRuleNameForLiteralValue(this.value);
    }
}
//# sourceMappingURL=GbnfNumberValue.js.map
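`resolve()` above inlines a literal when its grammar text is no longer than a generated rule name would be; otherwise it falls back to a named rule so repeated uses are shared. A sketch of that decision (the threshold value and the returned rule name here are hypothetical):

```ts
// Sketch of the inline-vs-rule tradeoff: short literals are cheaper to inline at
// every use site; longer ones get a named rule so repeats are shared.
function resolveLiteral(grammar: string, proposedRuleNameLength: number): string {
    if (grammar.length <= proposedRuleNameLength)
        return grammar; // inlining is no longer than a rule reference would be
    return "val0";      // pretend generateRuleNameForLiteralValue() returned this
}
console.log(resolveLiteral('"42"', 8));               // stays inline
console.log(resolveLiteral('"3.14159265358979"', 8)); // referenced via a rule instead
```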
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfNumberValue.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfNumberValue.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfNumberValue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAIhD,MAAM,OAAO,eAAgB,SAAQ,YAAY;IAC7B,KAAK,CAAS;IAE9B,YAAmB,KAAa;QAC5B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACvB,CAAC;IAEe,UAAU;QACtB,OAAO,GAAG,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,CAAC;IAClD,CAAC;IAEe,OAAO,CAAC,gBAAsC,EAAE,uBAAgC,KAAK;QACjG,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,MAAM,IAAI,gBAAgB,CAAC,qCAAqC,EAAE;YAC1E,OAAO,OAAO,CAAC;QAEnB,OAAO,KAAK,CAAC,OAAO,CAAC,gBAAgB,EAAE,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAEkB,gBAAgB,CAAC,gBAAsC;QACtE,OAAO,gBAAgB,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;CACJ"}
28
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfObjectMap.d.ts
generated
vendored
Normal file
@@ -0,0 +1,28 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
import { GbnfString } from "./GbnfString.js";
import { GbnfStringValue } from "./GbnfStringValue.js";
export declare class GbnfObjectMap extends GbnfTerminal {
    readonly fields: Array<Readonly<{
        key: GbnfString | GbnfStringValue;
        value: GbnfTerminal;
        required: true;
    }>>;
    readonly additionalProperties?: GbnfTerminal;
    readonly minProperties: number;
    readonly maxProperties?: number;
    readonly scopeState: GbnfJsonScopeState;
    constructor({ fields, additionalProperties, minProperties, maxProperties, scopeState }: {
        fields: Array<Readonly<{
            key: GbnfString | GbnfStringValue;
            value: GbnfTerminal;
            required: true;
        }>>;
        additionalProperties?: GbnfTerminal;
        minProperties?: number;
        maxProperties?: number;
        scopeState?: GbnfJsonScopeState;
    });
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
}
88
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfObjectMap.js
generated
vendored
Normal file
@@ -0,0 +1,88 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js";
import { GbnfString } from "./GbnfString.js";
import { GbnfWhitespace } from "./GbnfWhitespace.js";
import { GbnfGrammar } from "./GbnfGrammar.js";
import { GbnfRepetition } from "./GbnfRepetition.js";
import { GbnfCommaWhitespace } from "./GbnfCommaWhitespace.js";
export class GbnfObjectMap extends GbnfTerminal {
    fields;
    additionalProperties;
    minProperties;
    maxProperties;
    scopeState;
    constructor({ fields, additionalProperties, minProperties = 0, maxProperties, scopeState = new GbnfJsonScopeState() }) {
        super();
        this.fields = fields;
        this.additionalProperties = additionalProperties;
        this.minProperties = Math.floor(minProperties);
        this.maxProperties = maxProperties == null ? undefined : Math.floor(maxProperties);
        this.scopeState = scopeState;
        if (this.minProperties < this.fields.length)
            this.minProperties = this.fields.length;
        if (this.maxProperties != null && this.maxProperties < this.minProperties)
            this.maxProperties = this.minProperties;
        else if (this.maxProperties != null && this.maxProperties < 0)
            this.maxProperties = 0;
    }
    getGrammar(grammarGenerator) {
        const getWhitespaceRuleName = (newScope, newLine) => (newScope
            ? new GbnfWhitespace(this.scopeState.getForNewScope(), { newLine }).resolve(grammarGenerator)
            : new GbnfWhitespace(this.scopeState, { newLine }).resolve(grammarGenerator));
        const getCommaWhitespaceRule = (newScope, newLine) => (newScope
            ? new GbnfCommaWhitespace(this.scopeState.getForNewScope(), { newLine })
            : new GbnfCommaWhitespace(this.scopeState, { newLine }));
        const getCommaWhitespaceRuleName = (newScope, newLine) => (getCommaWhitespaceRule(newScope, newLine).resolve(grammarGenerator));
        const objectItemsGrammar = [];
        for (const { key, value } of this.fields) {
            if (objectItemsGrammar.length > 0)
                objectItemsGrammar.push(getCommaWhitespaceRuleName(true, "before"));
            objectItemsGrammar.push(new GbnfGrammar([
                key.getGrammar(grammarGenerator), '":"', "[ ]?", value.resolve(grammarGenerator)
            ]).getGrammar());
        }
        if (this.additionalProperties != null) {
            const additionalPropertiesGrammar = new GbnfGrammar([
                new GbnfString().resolve(grammarGenerator), '":"', "[ ]?", this.additionalProperties.resolve(grammarGenerator)
            ]);
            if (this.minProperties > this.fields.length) {
                if (objectItemsGrammar.length > 0)
                    objectItemsGrammar.push(getCommaWhitespaceRuleName(true, "before"));
                objectItemsGrammar.push(new GbnfRepetition({
                    value: additionalPropertiesGrammar,
                    separator: getCommaWhitespaceRule(true, "before"),
                    minRepetitions: this.minProperties - this.fields.length,
                    maxRepetitions: this.maxProperties == null
                        ? undefined
                        : this.maxProperties - this.fields.length
                }).getGrammar(grammarGenerator));
            }
            else if (this.maxProperties == null || this.maxProperties > this.fields.length) {
                if (objectItemsGrammar.length === 0)
                    objectItemsGrammar.push(new GbnfRepetition({
                        value: additionalPropertiesGrammar,
                        separator: getCommaWhitespaceRule(true, "before"),
                        maxRepetitions: this.maxProperties == null
                            ? undefined
                            : this.maxProperties - this.fields.length
                    }).getGrammar(grammarGenerator));
                else
                    objectItemsGrammar.push(new GbnfRepetition({
                        value: new GbnfGrammar([
                            getCommaWhitespaceRuleName(true, "before"),
                            additionalPropertiesGrammar.resolve(grammarGenerator)
                        ], true),
                        maxRepetitions: this.maxProperties == null
                            ? undefined
                            : this.maxProperties - this.fields.length
                    }).getGrammar(grammarGenerator));
            }
        }
        return new GbnfGrammar([
            '"{"', getWhitespaceRuleName(true, "before"),
            new GbnfGrammar(objectItemsGrammar).getGrammar(),
            getWhitespaceRuleName(false, "before"), '"}"'
        ]).getGrammar();
    }
}
//# sourceMappingURL=GbnfObjectMap.js.map
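For a concrete feel of the output shape, here is a hand-written approximation of what a two-field object map emits, with the generated whitespace rule names abbreviated to `ws` / `comma-ws` (the real output uses generator-assigned rule names, and key tokens come from the key terminals' grammars):

```ts
// Approximation only: each field becomes `key ":" [ ]? value`, fields are joined
// by a comma-whitespace rule, and the whole thing is wrapped in braces.
const objectGrammar = [
    '"{"', "ws",
    '"\\"name\\"" ":" [ ]? string',
    "comma-ws",
    '"\\"age\\"" ":" [ ]? number',
    "ws", '"}"'
].join(" ");
console.log(objectGrammar);
```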
1
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfObjectMap.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GbnfObjectMap.js","sourceRoot":"","sources":["../../../../src/utils/gbnfJson/terminals/GbnfObjectMap.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,oBAAoB,CAAC;AAEhD,OAAO,EAAC,kBAAkB,EAAC,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAC,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAE3C,OAAO,EAAC,cAAc,EAAC,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAC,WAAW,EAAC,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAC,cAAc,EAAC,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAC,mBAAmB,EAAC,MAAM,0BAA0B,CAAC;AAG7D,MAAM,OAAO,aAAc,SAAQ,YAAY;IAC3B,MAAM,CAA4F;IAClG,oBAAoB,CAAgB;IACpC,aAAa,CAAS;IACtB,aAAa,CAAU;IACvB,UAAU,CAAqB;IAE/C,YAAmB,EACf,MAAM,EAAE,oBAAoB,EAAE,aAAa,GAAG,CAAC,EAAE,aAAa,EAC9D,UAAU,GAAG,IAAI,kBAAkB,EAAE,EAMxC;QACG,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;QAC/C,IAAI,CAAC,aAAa,GAAG,aAAa,IAAI,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;QACnF,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAE7B,IAAI,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM;YACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC;QAE5C,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,IAAI,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa;YACrE,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;aACvC,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,IAAI,IAAI,CAAC,aAAa,GAAG,CAAC;YACzD,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IAC/B,CAAC;IAEM,UAAU,CAAC,gBAAsC;QACpD,MAAM,qBAAqB,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CACtF,QAAQ;YACJ,CAAC,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,EAAE,EAAC,OAAO,EAAC,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC;YAC3F,CAAC,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC,UAAU,EAAE,EAAC,OAAO,EAAC,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CACjF,CAAC;QAEF,MAAM,sBAAsB,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CACvF,QAAQ;YACJ,CAAC,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,EAAE,EAAC,OAAO,EAAC,CAAC;YACtE,CAAC,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,UAAU,EAAE,EAAC,OAAO,EAAC,CAAC,CAC5D,CAAC;QACF,MAAM,0BAA0B,GAAG,CAAC,QAAiB,EAAE,OAAmC,EAAE,EAAE,CAAC,CAC3F,sBAAsB,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CACtE,CAAC;QAEF,MAAM,kBAAkB,GAAa,EAAE,CAAC;QACxC,KAAK,MAAM,EAAC,GAAG,EAAE,KAAK,EAAC,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACrC,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC;gBAC7B,kBAAkB,CAAC,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;YAExE,kBAAkB,CAAC,IAAI,CACnB,IAAI,WAAW,CAAC;gBACZ,GAAG,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC;aACnF,CAAC,CAAC,UAAU,EAAE,CAClB,CAAC;QACN,CAAC;QAED,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,EAAE,CAAC;YACpC,MAAM,2BAA2B,GAAG,IAAI,WAAW,CAAC;gBAChD,IAAI,UAAU,EAAE,CAAC,OAAO,CAAC,gBAAgB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,gBAAgB,CAAC;aACjH,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;gBAC1C,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC;oBAC7B,kBAAkB,CAAC,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;gBAExE,kBAAkB,CAAC,IAAI,CACnB,IAAI,cAAc,CAAC;oBACf,KAAK,EAAE,2BAA2B;oBAClC,SAAS,EAAE,sBAAsB,CAAC,IAAI,EAAE,QAAQ,CAAC;oBACjD,cAAc,EAAE,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM;oBACvD,cAAc,EAAE,IAAI,CAAC,aAAa,IAAI,IAAI;wBACtC,CAAC,CAAC,SAAS;wBACX,CAAC,CAAC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM;iBAChD,CAAC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAClC,CAAC;YACN,CAAC;iBAAM,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,IAAI,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;gBAC/E,IAAI,kBAAkB,CAAC,MAAM,KAAK,CAAC;oBAC/B,kBAAkB,CAAC,IAAI,CACnB,IAAI,cAAc,CAAC;wBACf,KAAK,EAAE,2BAA2B;wBAClC,SAAS,EAAE,sBAAsB,CAAC,IAAI,EAAE,QAAQ,CAAC;wBACjD,cAAc,EAAE,IAAI,CAAC,aAAa,IAAI,IAAI;4BACtC,CAAC,CAAC,SAAS;4BACX,CAAC,CAAC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM;qBAChD,CA
AC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAClC,CAAC;;oBAEF,kBAAkB,CAAC,IAAI,CACnB,IAAI,cAAc,CAAC;wBACf,KAAK,EAAE,IAAI,WAAW,CAAC;4BACnB,0BAA0B,CAAC,IAAI,EAAE,QAAQ,CAAC;4BAC1C,2BAA2B,CAAC,OAAO,CAAC,gBAAgB,CAAC;yBACxD,EAAE,IAAI,CAAC;wBACR,cAAc,EAAE,IAAI,CAAC,aAAa,IAAI,IAAI;4BACtC,CAAC,CAAC,SAAS;4BACX,CAAC,CAAC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM;qBAChD,CAAC,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAClC,CAAC;YACV,CAAC;QACL,CAAC;QAED,OAAO,IAAI,WAAW,CAAC;YACnB,KAAK,EAAE,qBAAqB,CAAC,IAAI,EAAE,QAAQ,CAAC;YAC5C,IAAI,WAAW,CAAC,kBAAkB,CAAC,CAAC,UAAU,EAAE;YAChD,qBAAqB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,KAAK;SAChD,CAAC,CAAC,UAAU,EAAE,CAAC;IACpB,CAAC;CACJ"}
9
node_modules/node-llama-cpp/dist/utils/gbnfJson/terminals/GbnfOr.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
export declare class GbnfOr extends GbnfTerminal {
    readonly values: readonly GbnfTerminal[];
    readonly useRawGrammar: boolean;
    constructor(values: readonly GbnfTerminal[], useRawGrammar?: boolean);
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
    resolve(grammarGenerator: GbnfGrammarGenerator, resolveAsRootGrammar?: boolean): string;
}
Some files were not shown because too many files have changed in this diff.