First upload version 0.0.1
This commit is contained in:
76
node_modules/node-llama-cpp/dist/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.d.ts
generated
vendored
Normal file
76
node_modules/node-llama-cpp/dist/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.d.ts
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
import { ChatModelFunctions } from "../../types.js";
|
||||
/**
 * Generate documentation about the functions that are available for a model to call.
 * Useful for generating a system message with information about the available functions as part of a chat wrapper.
 */
export declare class ChatModelFunctionsDocumentationGenerator {
    /** The function definitions to document; `undefined` when none were provided */
    readonly chatModelFunctions?: ChatModelFunctions;
    /** `true` when at least one function definition was provided */
    readonly hasAnyFunctions: boolean;
    constructor(chatModelFunctions: ChatModelFunctions | undefined);
    /**
     * Example:
     * ```ts
     * // Retrieve the current date
     * function getDate();
     *
     * // Retrieve the current time
     * function getTime(params: {hours: "24" | "12", seconds: boolean});
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getTypeScriptFunctionSignatures({ documentParams }?: {
        documentParams?: boolean;
    }): string;
    /**
     * Example:
     * ```ts
     * // Retrieve the current date
     * type getDate = () => any;
     *
     * // Retrieve the current time
     * type getTime = (_: {hours: "24" | "12", seconds: boolean}) => any;
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     * @param [options.reservedFunctionNames] - Function names that are reserved and cannot be used
     */
    getTypeScriptFunctionTypes({ documentParams, reservedFunctionNames }?: {
        documentParams?: boolean;
        reservedFunctionNames?: string[];
    }): string;
    /**
     * Example:
     * ```
     * Use the function 'getDate' to: Retrieve the current date
     * {"name": "getDate", "description": "Retrieve the current date"}
     *
     * Use the function 'getTime' to: Retrieve the current time
     * {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getLlama3_1FunctionSignatures({ documentParams }?: {
        documentParams?: boolean;
    }): string;
    /**
     * Example:
     * ```
     * {"name": "getDate", "description": "Retrieve the current date"}
     *
     * {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getLlama3_2LightweightFunctionSignatures({ documentParams }?: {
        documentParams?: boolean;
    }): string;
    /**
     * Generate function signatures for Qwen-style templates:
     * one JSON tool definition per line.
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getQwenFunctionSignatures({ documentParams }?: {
        documentParams?: boolean;
    }): string;
    /**
     * Generate function signatures for Seed-style templates:
     * a single JSON array of tool definitions.
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getSeedFunctionSignatures({ documentParams }?: {
        documentParams?: boolean;
    }): string;
}
|
||||
177
node_modules/node-llama-cpp/dist/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.js
generated
vendored
Normal file
177
node_modules/node-llama-cpp/dist/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.js
generated
vendored
Normal file
@@ -0,0 +1,177 @@
|
||||
import { getTypeScriptTypeStringForGbnfJsonSchema } from "../../utils/getTypeScriptTypeStringForGbnfJsonSchema.js";
|
||||
import { jsonDumps } from "./jsonDumps.js";
|
||||
/**
 * Generate documentation about the functions that are available for a model to call.
 * Useful for generating a system message with information about the available functions as part of a chat wrapper.
 */
export class ChatModelFunctionsDocumentationGenerator {
    // The function definitions to document; may be undefined when none were provided
    chatModelFunctions;
    // true when at least one function definition exists
    hasAnyFunctions;
    constructor(chatModelFunctions) {
        this.chatModelFunctions = chatModelFunctions;
        this.hasAnyFunctions = Object.keys(this.chatModelFunctions ?? {}).length > 0;
    }
    /**
     * Generate TypeScript-like function signatures, one per function, separated by blank lines.
     * Returns an empty string when there are no functions.
     *
     * Example:
     * ```ts
     * // Retrieve the current date
     * function getDate();
     *
     * // Retrieve the current time
     * function getTime(params: {hours: "24" | "12", seconds: boolean});
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getTypeScriptFunctionSignatures({ documentParams = true } = {}) {
        const chatModelFunctions = this.chatModelFunctions;
        if (!this.hasAnyFunctions || chatModelFunctions == null)
            return "";
        const functionNames = Object.keys(chatModelFunctions);
        return functionNames
            .map((functionName) => {
            const functionDefinition = chatModelFunctions[functionName];
            let res = "";
            // A multi-line description becomes consecutive "// " comment lines
            if (functionDefinition?.description != null && functionDefinition.description.trim() !== "")
                res += "// " + functionDefinition.description.split("\n").join("\n// ") + "\n";
            res += "function " + functionName + "(";
            if (documentParams && functionDefinition?.params != null)
                res += "params: " + getTypeScriptTypeStringForGbnfJsonSchema(functionDefinition.params);
            else if (!documentParams && functionDefinition?.params != null)
                res += "params"; // still signal that the function takes params, just without their type
            res += ");";
            return res;
        })
            .join("\n\n");
    }
    /**
     * Generate TypeScript type aliases for the functions, one per function, separated by blank lines.
     * Returns an empty string when there are no functions.
     *
     * Example:
     * ```ts
     * // Retrieve the current date
     * type getDate = () => any;
     *
     * // Retrieve the current time
     * type getTime = (_: {hours: "24" | "12", seconds: boolean}) => any;
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     * @param [options.reservedFunctionNames] - Function names that are reserved and cannot be used
     */
    getTypeScriptFunctionTypes({ documentParams = true, reservedFunctionNames = [] } = {}) {
        const chatModelFunctions = this.chatModelFunctions;
        if (!this.hasAnyFunctions || chatModelFunctions == null)
            return "";
        const functionNames = Object.keys(chatModelFunctions);
        const reservedFunctionNamesSet = new Set(reservedFunctionNames);
        return functionNames
            .map((functionName) => {
            // Unlike the other generators, this one rejects reserved names outright
            if (reservedFunctionNamesSet.has(functionName))
                throw new Error(`Function name "${functionName}" is reserved and cannot be used`);
            const functionDefinition = chatModelFunctions[functionName];
            let res = "";
            if (functionDefinition?.description != null && functionDefinition.description.trim() !== "")
                res += "// " + functionDefinition.description.split("\n").join("\n// ") + "\n";
            res += "type " + functionName + " = (";
            if (documentParams && functionDefinition?.params != null)
                res += "_: " + getTypeScriptTypeStringForGbnfJsonSchema(functionDefinition.params);
            res += ") => any;";
            return res;
        })
            .join("\n\n");
    }
    /* eslint-disable @stylistic/max-len */
    /**
     * Generate function signatures in the Llama 3.1 prompt format: a "Use the function …" line
     * followed by the function's JSON definition, one pair per function, separated by blank lines.
     * Returns an empty string when there are no functions.
     *
     * Example:
     * ```
     * Use the function 'getDate' to: Retrieve the current date
     * {"name": "getDate", "description": "Retrieve the current date"}
     *
     * Use the function 'getTime' to: Retrieve the current time
     * {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getLlama3_1FunctionSignatures({ documentParams = true } = {}) {
        const chatModelFunctions = this.chatModelFunctions;
        if (!this.hasAnyFunctions || chatModelFunctions == null)
            return "";
        const functionNames = Object.keys(chatModelFunctions);
        return functionNames
            .map((functionName) => {
            const functionDefinition = chatModelFunctions[functionName];
            let res = `Use the function '${functionName}'`;
            const addDescription = functionDefinition?.description != null && functionDefinition.description.trim() !== "";
            // NOTE(review): multi-line descriptions are joined with "\n// " here even though this line
            // is not a comment context — confirm this is intended for the Llama 3.1 format
            if (addDescription)
                res += " to: " + functionDefinition.description.split("\n").join("\n// ") + "\n";
            else
                res += ".\n";
            res += jsonDumps({
                name: functionName,
                ...(addDescription ? { description: functionDefinition.description } : {}),
                ...(documentParams && functionDefinition?.params != null ? { parameters: functionDefinition.params } : {})
            });
            return res;
        })
            .join("\n\n");
    }
    /* eslint-enable @stylistic/max-len */
    /* eslint-disable @stylistic/max-len */
    /**
     * Generate function signatures in the Llama 3.2 lightweight format: one JSON definition
     * per function, separated by blank lines. Returns an empty string when there are no functions.
     *
     * Example:
     * ```
     * {"name": "getDate", "description": "Retrieve the current date"}
     *
     * {"name": "getTime", "description": "Retrieve the current time", "parameters": {"type": "object", "properties": {"hours": {"enum": ["24", "12"]}, "seconds": {"type": "boolean"}}}}
     * ```
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getLlama3_2LightweightFunctionSignatures({ documentParams = true } = {}) {
        const chatModelFunctions = this.chatModelFunctions;
        if (!this.hasAnyFunctions || chatModelFunctions == null)
            return "";
        const functionNames = Object.keys(chatModelFunctions);
        const functionsLines = functionNames
            .map((functionName) => {
            const functionDefinition = chatModelFunctions[functionName];
            const addDescription = functionDefinition?.description != null && functionDefinition.description.trim() !== "";
            return jsonDumps({
                name: functionName,
                ...(addDescription ? { description: functionDefinition.description } : {}),
                ...(documentParams && functionDefinition?.params != null ? { parameters: functionDefinition.params } : {})
            });
        })
            .join("\n\n");
        return functionsLines;
    }
    /* eslint-enable @stylistic/max-len */
    /**
     * Generate function signatures for Qwen-style templates: one JSON tool definition per line.
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getQwenFunctionSignatures({ documentParams = true } = {}) {
        return this._convertToJinjaTools({ documentParams })
            .map((tool) => jsonDumps(tool))
            .join("\n");
    }
    /**
     * Generate function signatures for Seed-style templates: a single JSON array of tool definitions.
     * @param options
     * @param [options.documentParams] - Whether to document the parameters of the functions
     */
    getSeedFunctionSignatures({ documentParams = true } = {}) {
        return jsonDumps(this._convertToJinjaTools({ documentParams }));
    }
    /**
     * Convert the function definitions to `{type: "function", function: {...}}` tool objects
     * for use in Jinja-based templates. Returns an empty array when there are no functions.
     * @internal
     */
    _convertToJinjaTools({ documentParams = true } = {}) {
        const chatModelFunctions = this.chatModelFunctions;
        if (!this.hasAnyFunctions || chatModelFunctions == null)
            return [];
        return [...Object.entries(chatModelFunctions)]
            .map(([functionName, functionDefinition]) => {
            return {
                type: "function",
                function: {
                    name: functionName,
                    description: functionDefinition.description,
                    parameters: documentParams
                        ? functionDefinition.params
                        : undefined
                }
            };
        });
    }
}
|
||||
//# sourceMappingURL=ChatModelFunctionsDocumentationGenerator.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/ChatModelFunctionsDocumentationGenerator.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
10
node_modules/node-llama-cpp/dist/chatWrappers/utils/chunkChatItems.d.ts
generated
vendored
Normal file
10
node_modules/node-llama-cpp/dist/chatWrappers/utils/chunkChatItems.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { ChatHistoryItem, ChatModelResponse } from "../../types.js";
|
||||
import { LlamaText } from "../../utils/LlamaText.js";
|
||||
/**
 * Group a chat history into consecutive {system, user, model} chunks.
 * @param chatHistory - The chat history items to chunk
 * @param options
 * @param options.generateModelResponseText - Renders a model response into a `LlamaText`
 * @param [options.joinAdjacentMessagesOfTheSameType] - When `true`, adjacent messages of the
 * same type are merged into one chunk instead of starting a new one
 */
export declare function chunkChatItems(chatHistory: readonly ChatHistoryItem[], { generateModelResponseText, joinAdjacentMessagesOfTheSameType }: {
    generateModelResponseText: (modelResponse: ChatModelResponse["response"]) => LlamaText;
    joinAdjacentMessagesOfTheSameType?: boolean;
}): {
    system: LlamaText;
    user: LlamaText;
    model: LlamaText;
}[];
|
||||
44
node_modules/node-llama-cpp/dist/chatWrappers/utils/chunkChatItems.js
generated
vendored
Normal file
44
node_modules/node-llama-cpp/dist/chatWrappers/utils/chunkChatItems.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
import { LlamaText } from "../../utils/LlamaText.js";
|
||||
/**
 * Group a chat history into consecutive {system, user, model} chunks.
 *
 * Texts of the same kind accumulated for the current chunk are joined with "\n\n".
 * A new chunk starts whenever the item type "goes backwards" (e.g. a system or user
 * message after a model message), or on every item when
 * `joinAdjacentMessagesOfTheSameType` is `false`.
 */
export function chunkChatItems(chatHistory, { generateModelResponseText, joinAdjacentMessagesOfTheSameType = true }) {
    const chunks = [];
    let pendingSystem = [];
    let pendingUser = [];
    let pendingModel = [];
    let currentFocus = null;
    // Emit the accumulated texts as one chunk (if any), then reset the accumulators.
    function commitChunk() {
        if (pendingSystem.length > 0 || pendingUser.length > 0 || pendingModel.length > 0) {
            chunks.push({
                system: LlamaText.joinValues("\n\n", pendingSystem),
                user: LlamaText.joinValues("\n\n", pendingUser),
                model: LlamaText.joinValues("\n\n", pendingModel)
            });
        }
        pendingSystem = [];
        pendingUser = [];
        pendingModel = [];
    }
    for (const item of chatHistory) {
        switch (item.type) {
            case "system":
                // A system message can only extend a chunk that is still in its system part
                if (!joinAdjacentMessagesOfTheSameType || currentFocus !== "system")
                    commitChunk();
                currentFocus = "system";
                pendingSystem.push(LlamaText.fromJSON(item.text));
                break;
            case "user":
                // A user message may follow the system or user part of the current chunk
                if (!joinAdjacentMessagesOfTheSameType || (currentFocus !== "system" && currentFocus !== "user"))
                    commitChunk();
                currentFocus = "user";
                pendingUser.push(LlamaText(item.text));
                break;
            case "model":
                // A model response always belongs to the current chunk when joining is enabled
                if (!joinAdjacentMessagesOfTheSameType)
                    commitChunk();
                currentFocus = "model";
                pendingModel.push(generateModelResponseText(item.response));
                break;
            default:
                void item; // unknown item types are ignored
        }
    }
    commitChunk();
    return chunks;
}
|
||||
//# sourceMappingURL=chunkChatItems.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/chunkChatItems.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/chunkChatItems.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"chunkChatItems.js","sourceRoot":"","sources":["../../../src/chatWrappers/utils/chunkChatItems.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,SAAS,EAAC,MAAM,0BAA0B,CAAC;AAEnD,MAAM,UAAU,cAAc,CAAC,WAAuC,EAAE,EACpE,yBAAyB,EACzB,iCAAiC,GAAG,IAAI,EAI3C;IACG,MAAM,WAAW,GAIZ,EAAE,CAAC;IAER,IAAI,WAAW,GAAgB,EAAE,CAAC;IAClC,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAgB,EAAE,CAAC;IACjC,IAAI,qBAAqB,GAAuC,IAAI,CAAC;IAErE,SAAS,KAAK;QACV,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC;YACvE,WAAW,CAAC,IAAI,CAAC;gBACb,MAAM,EAAE,SAAS,CAAC,UAAU,CAAC,MAAM,EAAE,WAAW,CAAC;gBACjD,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC,MAAM,EAAE,SAAS,CAAC;gBAC7C,KAAK,EAAE,SAAS,CAAC,UAAU,CAAC,MAAM,EAAE,UAAU,CAAC;aAClD,CAAC,CAAC;QAEP,WAAW,GAAG,EAAE,CAAC;QACjB,SAAS,GAAG,EAAE,CAAC;QACf,UAAU,GAAG,EAAE,CAAC;IACpB,CAAC;IAED,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE,CAAC;QAC7B,IAAI,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;YACzB,IAAI,CAAC,iCAAiC,IAAI,qBAAqB,KAAK,QAAQ;gBACxE,KAAK,EAAE,CAAC;YAEZ,qBAAqB,GAAG,QAAQ,CAAC;YACjC,WAAW,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QACpD,CAAC;aAAM,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC9B,IAAI,CAAC,iCAAiC,IAAI,CAAC,qBAAqB,KAAK,QAAQ,IAAI,qBAAqB,KAAK,MAAM,CAAC;gBAC9G,KAAK,EAAE,CAAC;YAEZ,qBAAqB,GAAG,MAAM,CAAC;YAC/B,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QACzC,CAAC;aAAM,IAAI,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;YAC/B,IAAI,CAAC,iCAAiC;gBAClC,KAAK,EAAE,CAAC;YAEZ,qBAAqB,GAAG,OAAO,CAAC;YAChC,UAAU,CAAC,IAAI,CAAC,yBAAyB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;QAC9D,CAAC;;YACG,KAAM,IAAqB,CAAC;IACpC,CAAC;IAED,KAAK,EAAE,CAAC;IAER,OAAO,WAAW,CAAC;AACvB,CAAC"}
|
||||
2
node_modules/node-llama-cpp/dist/chatWrappers/utils/getModelLinageNames.d.ts
generated
vendored
Normal file
2
node_modules/node-llama-cpp/dist/chatWrappers/utils/getModelLinageNames.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import { GgufMetadata } from "../../gguf/types/GgufMetadataTypes.js";
|
||||
/**
 * Extract candidate model-lineage name groups from GGUF metadata:
 * first the model's own `general.name`/`general.basename` (when present),
 * then one single-name group per `general.base_model.<i>` entry that has a name.
 */
export declare function getModelLinageNames(ggufMetadata?: GgufMetadata): string[][];
|
||||
18
node_modules/node-llama-cpp/dist/chatWrappers/utils/getModelLinageNames.js
generated
vendored
Normal file
18
node_modules/node-llama-cpp/dist/chatWrappers/utils/getModelLinageNames.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
 * Extract candidate model-lineage name groups from GGUF metadata.
 *
 * The first group (when present) holds the model's own `general.name` and
 * `general.basename`; each base model listed under `general.base_model.<i>`
 * that has a name contributes a single-name group after it.
 * Returns an empty array when no metadata is given.
 */
export function getModelLinageNames(ggufMetadata) {
    const names = [];
    if (ggufMetadata == null)
        return names;
    const general = ggufMetadata?.general;
    const ownNames = [general?.name, general?.basename]
        .filter((value) => value != null);
    if (ownNames.length > 0)
        names.push(ownNames);
    const baseModels = general?.base_model;
    if (typeof baseModels?.count === "number") {
        // base_model entries are keyed by their stringified index: "0", "1", …
        for (let index = 0; index < baseModels.count; index++) {
            const entry = baseModels[String(index)];
            if (entry?.name != null)
                names.push([entry.name]);
        }
    }
    return names;
}
|
||||
//# sourceMappingURL=getModelLinageNames.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/getModelLinageNames.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/getModelLinageNames.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"getModelLinageNames.js","sourceRoot":"","sources":["../../../src/chatWrappers/utils/getModelLinageNames.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,mBAAmB,CAAC,YAA2B;IAC3D,MAAM,GAAG,GAAe,EAAE,CAAC;IAE3B,IAAI,YAAY,IAAI,IAAI;QACpB,OAAO,GAAG,CAAC;IAEf,MAAM,gBAAgB,GAAG,CAAC,YAAY,EAAE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,QAAQ,CAAC;SAClF,MAAM,CAAC,CAAC,CAAC,EAAe,EAAE,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC;IAC3C,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC;QAC3B,GAAG,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;IAE/B,IAAI,OAAO,YAAY,EAAE,OAAO,EAAE,UAAU,EAAE,KAAK,KAAK,QAAQ,EAAE,CAAC;QAC/D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,EAAE,CAAC;YAC7D,MAAM,SAAS,GAAG,YAAY,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAgB,CAAC,CAAC;YAC5E,IAAI,SAAS,EAAE,IAAI,IAAI,IAAI;gBACvB,GAAG,CAAC,IAAI,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;QACnC,CAAC;IACL,CAAC;IAED,OAAO,GAAG,CAAC;AACf,CAAC"}
|
||||
4
node_modules/node-llama-cpp/dist/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.d.ts
generated
vendored
Normal file
4
node_modules/node-llama-cpp/dist/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
import { ChatWrapper } from "../../ChatWrapper.js";
|
||||
import { Tokenizer } from "../../types.js";
|
||||
import { JinjaTemplateChatWrapperOptions } from "../generic/JinjaTemplateChatWrapper.js";
|
||||
/**
 * Check whether rendering chats with the given Jinja template options produces output
 * equivalent to the given specialized chat wrapper, by comparing both wrappers' outputs
 * on a fixed set of test chat histories.
 * @param jinjaTemplateWrapperOptions - Options used to construct candidate `JinjaTemplateChatWrapper`s
 * @param specializedChatWrapper - The specialized wrapper to compare against
 * @param [tokenizer] - When given, texts that differ as raw values are also compared token-by-token
 */
export declare function isJinjaTemplateEquivalentToSpecializedChatWrapper(jinjaTemplateWrapperOptions: JinjaTemplateChatWrapperOptions, specializedChatWrapper: ChatWrapper, tokenizer?: Tokenizer): boolean;
|
||||
394
node_modules/node-llama-cpp/dist/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.js
generated
vendored
Normal file
394
node_modules/node-llama-cpp/dist/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.js
generated
vendored
Normal file
@@ -0,0 +1,394 @@
|
||||
import { splitText } from "lifecycle-utils";
|
||||
import { JinjaTemplateChatWrapper } from "../generic/JinjaTemplateChatWrapper.js";
|
||||
import { SpecialToken, LlamaText, SpecialTokensText } from "../../utils/LlamaText.js";
|
||||
import { compareTokens } from "../../utils/compareTokens.js";
|
||||
import { StopGenerationDetector } from "../../utils/StopGenerationDetector.js";
|
||||
import { jsonDumps } from "./jsonDumps.js";
|
||||
/**
 * Check whether rendering chats with the given Jinja template options produces output equivalent
 * to the given specialized chat wrapper, by comparing both wrappers' outputs on test chat histories.
 *
 * Up to four configurations are tried in order, returning `true` on the first that matches:
 * 1. no system-message conversion, no leading-whitespace trimming in responses;
 * 2. no system-message conversion, with leading-whitespace trimming;
 * 3-4. the same two again but with system messages always converted to user messages —
 *      only attempted when `convertUnsupportedSystemMessagesToUserMessages` is unset or "auto".
 * Errors thrown while constructing a wrapper or comparing are swallowed and treated as "no match".
 */
export function isJinjaTemplateEquivalentToSpecializedChatWrapper(jinjaTemplateWrapperOptions, specializedChatWrapper, tokenizer) {
    // Function-call test histories are only included when the template can render function calls
    const getCheckChatHistories = (jinjaChatWrapper) => [
        ...testChatHistories,
        ...((jinjaChatWrapper.usingJinjaFunctionCallTemplate || jinjaTemplateWrapperOptions.functionCallMessageTemplate === "auto")
            ? testChatHistoriesWithFunctionCalls
            : [])
    ];
    const canTestMultipleConvertSystemMessagesToUserMessages = jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages == null ||
        jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages === "auto";
    // Attempt 1: no system-message conversion, responses kept as-is
    try {
        const jinjaChatWrapper = new JinjaTemplateChatWrapper({
            ...jinjaTemplateWrapperOptions,
            convertUnsupportedSystemMessagesToUserMessages: canTestMultipleConvertSystemMessagesToUserMessages
                ? false
                : jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages,
            trimLeadingWhitespaceInResponses: false
        });
        const checkChatHistories = getCheckChatHistories(jinjaChatWrapper);
        if (checkEquivalence(jinjaChatWrapper, specializedChatWrapper, checkChatHistories, tokenizer))
            return true;
    }
    catch (err) {
        // Do nothing
    }
    // Attempt 2: same, but with leading whitespace trimmed in responses
    try {
        const jinjaChatWrapperWithLeadingWhitespaceTrimming = new JinjaTemplateChatWrapper({
            ...jinjaTemplateWrapperOptions,
            convertUnsupportedSystemMessagesToUserMessages: canTestMultipleConvertSystemMessagesToUserMessages
                ? false
                : jinjaTemplateWrapperOptions.convertUnsupportedSystemMessagesToUserMessages,
            trimLeadingWhitespaceInResponses: true
        });
        const checkChatHistories = getCheckChatHistories(jinjaChatWrapperWithLeadingWhitespaceTrimming);
        if (checkEquivalence(jinjaChatWrapperWithLeadingWhitespaceTrimming, specializedChatWrapper, checkChatHistories, tokenizer))
            return true;
    }
    catch (err) {
        // Do nothing
    }
    // When the conversion setting is fixed by the caller, there is nothing more to try
    if (!canTestMultipleConvertSystemMessagesToUserMessages)
        return false;
    const convertSystemMessagesToUserMessagesTemplate = "### System message\n\n{{message}}\n\n----";
    // Attempt 3: always convert system messages to user messages, no whitespace trimming.
    // The test histories are transformed the same way so both sides see identical input.
    try {
        const jinjaChatWrapper = new JinjaTemplateChatWrapper({
            ...jinjaTemplateWrapperOptions,
            convertUnsupportedSystemMessagesToUserMessages: {
                use: "always",
                format: convertSystemMessagesToUserMessagesTemplate
            },
            trimLeadingWhitespaceInResponses: false
        });
        const checkChatHistories = getCheckChatHistories(jinjaChatWrapper);
        const transformedCheckChatHistories = convertTestChatHistoriesSystemMessagesToUserMessages(checkChatHistories, convertSystemMessagesToUserMessagesTemplate);
        if (checkEquivalence(jinjaChatWrapper, specializedChatWrapper, transformedCheckChatHistories, tokenizer))
            return true;
    }
    catch (err) {
        // Do nothing
    }
    // Attempt 4: converted system messages with leading-whitespace trimming
    try {
        const jinjaChatWrapperWithLeadingWhitespaceTrimming = new JinjaTemplateChatWrapper({
            ...jinjaTemplateWrapperOptions,
            convertUnsupportedSystemMessagesToUserMessages: {
                use: "always",
                format: convertSystemMessagesToUserMessagesTemplate
            },
            trimLeadingWhitespaceInResponses: true
        });
        const checkChatHistories = getCheckChatHistories(jinjaChatWrapperWithLeadingWhitespaceTrimming);
        const transformedCheckChatHistories = convertTestChatHistoriesSystemMessagesToUserMessages(checkChatHistories, convertSystemMessagesToUserMessagesTemplate);
        if (checkEquivalence(jinjaChatWrapperWithLeadingWhitespaceTrimming, specializedChatWrapper, transformedCheckChatHistories, tokenizer))
            return true;
    }
    catch (err) {
        // Do nothing
    }
    return false;
}
|
||||
/**
 * Compare both wrappers on every test chat history.
 *
 * Returns `true` only when, for each history, the generated context texts match
 * (see `compareContextTexts`) AND every stop-generation trigger of the Jinja wrapper
 * has a matching (or token-wise same-or-shorter) trigger in the specialized wrapper.
 */
function checkEquivalence(jinjaChatWrapper, specializedChatWrapper, testChatHistories, tokenizer) {
    for (const testChatHistory of testChatHistories) {
        const jinjaRes = jinjaChatWrapper.generateContextState({ chatHistory: testChatHistory });
        // Normalize the Jinja output so function names/params rendered as special-tokens text
        // compare equal to the specialized wrapper's regular-text rendering
        jinjaRes.contextText = convertFunctionNameAndParamsToRegularText(jinjaRes.contextText, testChatHistory);
        const convertedSettings = convertChatWrapperSettingsToUseSpecialTokensText(specializedChatWrapper.settings);
        const originalSpecializedSettings = specializedChatWrapper.settings;
        // Temporarily swap in the converted settings; restored in the finally block below
        if (convertedSettings != null)
            specializedChatWrapper.settings = convertedSettings;
        let specializedWrapperRes;
        try {
            specializedWrapperRes = specializedChatWrapper.generateContextState({ chatHistory: testChatHistory });
        }
        finally {
            if (convertedSettings != null)
                specializedChatWrapper.settings = originalSpecializedSettings;
        }
        if (!compareContextTexts(jinjaRes.contextText, specializedWrapperRes.contextText, tokenizer))
            return false;
        // Rerender triggers count as stop-generation triggers for the comparison
        const specializedStopGenerationTriggers = [
            ...specializedWrapperRes.stopGenerationTriggers,
            ...(specializedWrapperRes.rerender?.triggers == null
                ? []
                : specializedWrapperRes.rerender.triggers)
        ];
        const jinjaHasAllSpecializedStopGenerationTriggers = jinjaRes.stopGenerationTriggers
            .every((trigger) => {
            // Accept a match under any whitespace normalization of the Jinja trigger
            return [trigger, trigger.trimEnd(), trigger.trimStart(), trigger.trimStart().trimEnd()].some((normalizedJinjaTrigger) => {
                if (normalizedJinjaTrigger.values.length === 0)
                    return true;
                const foundSimilarTriggers = specializedStopGenerationTriggers.some((specializedTrigger) => (normalizedJinjaTrigger.includes(specializedTrigger)));
                if (foundSimilarTriggers)
                    return true;
                if (tokenizer != null) {
                    // Fall back to a token-level comparison: a specialized trigger matches when it is a
                    // same-or-shorter token-wise prefix of the resolved Jinja trigger
                    const resolvedStopGenerationTrigger = StopGenerationDetector.resolveLlamaTextTrigger(normalizedJinjaTrigger, tokenizer);
                    const foundSimilarOrShorterTokenizedTriggers = specializedStopGenerationTriggers
                        .some((specializedTrigger) => {
                        const resolvedSpecializedTrigger = StopGenerationDetector.resolveLlamaTextTrigger(specializedTrigger, tokenizer);
                        return resolvedSpecializedTrigger.every((item, index) => {
                            const resolveTriggerItem = resolvedStopGenerationTrigger[index];
                            if (typeof item === "string" && typeof resolveTriggerItem === "string")
                                return item === resolveTriggerItem;
                            else if (typeof item === "string" || typeof resolveTriggerItem === "string" ||
                                resolveTriggerItem == null)
                                return false;
                            return compareTokens(item, resolveTriggerItem);
                        });
                    });
                    if (foundSimilarOrShorterTokenizedTriggers)
                        return true;
                }
                return false;
            });
        });
        if (!jinjaHasAllSpecializedStopGenerationTriggers)
            return false;
    }
    return true;
}
|
||||
/**
 * Compare two context texts for equivalence.
 *
 * Both texts are trimmed at the end and compared with a leading BOS token removed;
 * when a tokenizer is given, texts that differ as values are also compared token-by-token,
 * and the untrimmed-BOS variants are compared as well.
 */
function compareContextTexts(text1, text2, tokenizer) {
    function compare(text1, text2) {
        if (LlamaText.compare(text1, text2))
            return true;
        // Value-level mismatch may still tokenize identically
        if (tokenizer != null) {
            const tokenizedText1 = text1.tokenize(tokenizer);
            const tokenizedText2 = text2.tokenize(tokenizer);
            if (tokenizedText1.length === tokenizedText2.length)
                return tokenizedText1.every((token, index) => compareTokens(token, tokenizedText2[index]));
        }
        return false;
    }
    const trimmedText1 = text1.trimEnd();
    const trimmedText2 = text2.trimEnd();
    const normalizedText1 = removeLeadingBos(trimmedText1);
    const normalizedText2 = removeLeadingBos(trimmedText2);
    // When a leading BOS was actually removed and a tokenizer exists,
    // compare both the with-BOS and without-BOS variants
    const texts1 = (normalizedText1.values.length !== trimmedText1.values.length && tokenizer != null)
        ? [trimmedText1, normalizedText1]
        : [normalizedText1];
    const texts2 = (normalizedText2.values.length !== trimmedText2.values.length && tokenizer != null)
        ? [trimmedText2, normalizedText2]
        : [normalizedText2];
    return texts1.some((text1) => (texts2.some((text2) => (compare(text1, text2)))));
}
|
||||
/**
 * Rewrite every system message in the test chat histories as a user message using
 * the given template (its "{{message}}" placeholder is replaced by the system text).
 *
 * A leading system message followed by a user message is merged into that user message
 * (the system item is dropped); any other system message becomes its own user message.
 */
function convertTestChatHistoriesSystemMessagesToUserMessages(chatHistories, template) {
    return chatHistories
        .map((history) => (history
        .slice()
        .map((item, index, array) => {
        if (item.type === "system") {
            if (index === 0 && array.length > 1 && array[1].type === "user") {
                // Merge into the following user message; note this mutates the copied
                // array in place, then drops the system item via the null filter below
                array[1] = {
                    type: "user",
                    text: LlamaText([
                        LlamaText.joinValues(LlamaText.fromJSON(item.text), template.split("{{message}}")),
                        "\n\n",
                        array[1].text
                    ]).toString()
                };
                return null;
            }
            return {
                type: "user",
                text: LlamaText.joinValues(LlamaText.fromJSON(item.text), template.split("{{message}}")).toString()
            };
        }
        return item;
    })
        .filter((item) => item != null)));
}
|
||||
/**
 * Produce a copy of the chat wrapper settings where the plain-string parts of the
 * function-calling syntax are wrapped in `SpecialTokensText`, so they compare equal
 * to a Jinja template that renders them as special-tokens text.
 *
 * The "{{functionName}}"/"{{functionParams}}" placeholders in the function-result
 * prefix/suffix are kept as regular text. Returns `null` when the settings define
 * no function-calling syntax.
 */
function convertChatWrapperSettingsToUseSpecialTokensText(settings) {
    if (settings?.functions == null)
        return null;
    // Wrap string parts of the value in SpecialTokensText, keeping `keepTexts` substrings as regular text
    function convertToSpecialTokensText(value, keepTexts) {
        if (value == null)
            return value;
        return LlamaText(LlamaText(value).values
            .map((item) => {
            if (typeof item !== "string")
                return item;
            if (keepTexts == null || keepTexts.length === 0)
                return new SpecialTokensText(item);
            // splitText yields plain strings between separators; separators stay regular text
            return splitText(item, keepTexts).map((textPart) => {
                if (typeof textPart === "string")
                    return new SpecialTokensText(textPart);
                return textPart.separator;
            });
        }));
    }
    return {
        ...settings,
        functions: {
            ...settings.functions,
            call: {
                ...settings.functions.call,
                prefix: convertToSpecialTokensText(settings.functions.call.prefix),
                suffix: convertToSpecialTokensText(settings.functions.call.suffix),
                paramsPrefix: convertToSpecialTokensText(settings.functions.call.paramsPrefix)
            },
            result: {
                ...settings.functions.result,
                prefix: convertToSpecialTokensText(settings.functions.result.prefix, ["{{functionName}}", "{{functionParams}}"]),
                suffix: convertToSpecialTokensText(settings.functions.result.suffix, ["{{functionName}}", "{{functionParams}}"])
            },
            parallelism: settings.functions.parallelism == null
                ? settings.functions.parallelism
                : {
                    ...settings.functions.parallelism,
                    call: {
                        ...settings.functions.parallelism.call,
                        sectionPrefix: convertToSpecialTokensText(settings.functions.parallelism.call.sectionPrefix),
                        betweenCalls: convertToSpecialTokensText(settings.functions.parallelism.call.betweenCalls),
                        sectionSuffix: convertToSpecialTokensText(settings.functions.parallelism.call.sectionSuffix)
                    },
                    result: settings.functions.parallelism.result == null
                        ? settings.functions.parallelism.result
                        : {
                            ...settings.functions.parallelism.result,
                            sectionPrefix: convertToSpecialTokensText(settings.functions.parallelism.result.sectionPrefix),
                            betweenResults: convertToSpecialTokensText(settings.functions.parallelism.result.betweenResults),
                            sectionSuffix: convertToSpecialTokensText(settings.functions.parallelism.result.sectionSuffix)
                        }
                }
        }
    };
}
|
||||
/**
 * Rewrite the context text so that function names and serialized params from the chat
 * history appear as regular text even where the Jinja template rendered them inside
 * `SpecialTokensText` spans, making the text comparable to a specialized wrapper's output.
 */
function convertFunctionNameAndParamsToRegularText(contextText, chatHistory) {
    // Collect every function name and serialized params string used in model responses
    const ensureRegularTextItems = new Set();
    for (const item of chatHistory) {
        if (item.type !== "model")
            continue;
        for (const response of item.response) {
            if (typeof response === "string" || response.type !== "functionCall")
                continue;
            ensureRegularTextItems.add(response.name);
            if (response.params !== undefined && response.params !== "")
                ensureRegularTextItems.add(jsonDumps(response.params));
        }
    }
    const ensureRegularTextItemsArray = [...ensureRegularTextItems];
    return LlamaText(contextText.values.map((item) => {
        if (!(item instanceof SpecialTokensText))
            return item;
        // Within special-tokens spans, the matched names/params (splitText separators)
        // become plain strings; the surrounding text stays special-tokens text
        return splitText(item.value, ensureRegularTextItemsArray)
            .map((textPart) => {
            if (typeof textPart === "string")
                return new SpecialTokensText(textPart);
            return textPart.separator;
        });
    }));
}
|
||||
const testChatHistories = [
|
||||
[{
|
||||
type: "system",
|
||||
text: "System message ~!@#$%^&*()\n*"
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: [""]
|
||||
}],
|
||||
[{
|
||||
type: "system",
|
||||
text: "System message ~!@#$%^&*()\n*"
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"]
|
||||
}],
|
||||
[{
|
||||
type: "system",
|
||||
text: "System message ~!@#$%^&*()\n*"
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"]
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: [""]
|
||||
}],
|
||||
[{
|
||||
type: "system",
|
||||
text: "System message ~!@#$%^&*()\n*"
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"]
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: ["Result2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"]
|
||||
}]
|
||||
];
|
||||
const testChatHistoriesWithFunctionCalls = [
|
||||
[{
|
||||
type: "system",
|
||||
text: "System message ~!@#$%^&*()\n*"
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"]
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: [
|
||||
"Result2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~",
|
||||
{
|
||||
type: "functionCall",
|
||||
name: "func1name",
|
||||
params: { param1: "value1" },
|
||||
result: "func1result"
|
||||
},
|
||||
"Result3 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
]
|
||||
}],
|
||||
[{
|
||||
type: "system",
|
||||
text: "System message ~!@#$%^&*()\n*"
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: ["Result 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"]
|
||||
}, {
|
||||
type: "user",
|
||||
text: "Message2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
}, {
|
||||
type: "model",
|
||||
response: [
|
||||
"Result2 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~",
|
||||
{
|
||||
type: "functionCall",
|
||||
name: "func1name",
|
||||
params: { param1: "value1" },
|
||||
result: "func1result"
|
||||
},
|
||||
{
|
||||
type: "functionCall",
|
||||
name: "func2name",
|
||||
params: { param1: "value2" },
|
||||
result: "func2result"
|
||||
},
|
||||
"Result3 1234567890!@#$%^&*()_+-=[]{}|\\:;\"',./<>?`~"
|
||||
]
|
||||
}]
|
||||
];
|
||||
function removeLeadingBos(llamaText) {
|
||||
if (llamaText.values.length === 0)
|
||||
return llamaText;
|
||||
const firstValue = llamaText.values[0];
|
||||
if (firstValue instanceof SpecialToken && firstValue.value === "BOS")
|
||||
return LlamaText(llamaText.values.slice(1));
|
||||
return llamaText;
|
||||
}
|
||||
//# sourceMappingURL=isJinjaTemplateEquivalentToSpecializedChatWrapper.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/isJinjaTemplateEquivalentToSpecializedChatWrapper.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/node-llama-cpp/dist/chatWrappers/utils/isLlama3_2LightweightModel.d.ts
generated
vendored
Normal file
2
node_modules/node-llama-cpp/dist/chatWrappers/utils/isLlama3_2LightweightModel.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import { ChatWrapperCheckModelCompatibilityParams } from "../../types.js";
|
||||
export declare function isLlama3_2LightweightModel(options: ChatWrapperCheckModelCompatibilityParams): boolean;
|
||||
9
node_modules/node-llama-cpp/dist/chatWrappers/utils/isLlama3_2LightweightModel.js
generated
vendored
Normal file
9
node_modules/node-llama-cpp/dist/chatWrappers/utils/isLlama3_2LightweightModel.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import { includesText } from "../../utils/includesText.js";
|
||||
import { getModelLinageNames } from "./getModelLinageNames.js";
|
||||
export function isLlama3_2LightweightModel(options) {
|
||||
const isLlama3_2 = getModelLinageNames(options.fileInfo?.metadata)
|
||||
.some((modelNames) => includesText(modelNames, ["llama 3.2", "llama-3.2", "llama3.2"]));
|
||||
const isSmallModel = ["1B", "3B"].includes(options.fileInfo?.metadata?.general?.size_label ?? "");
|
||||
return isLlama3_2 && isSmallModel;
|
||||
}
|
||||
//# sourceMappingURL=isLlama3_2LightweightModel.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/isLlama3_2LightweightModel.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/isLlama3_2LightweightModel.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"isLlama3_2LightweightModel.js","sourceRoot":"","sources":["../../../src/chatWrappers/utils/isLlama3_2LightweightModel.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,YAAY,EAAC,MAAM,6BAA6B,CAAC;AACzD,OAAO,EAAC,mBAAmB,EAAC,MAAM,0BAA0B,CAAC;AAE7D,MAAM,UAAU,0BAA0B,CAAC,OAAiD;IACxF,MAAM,UAAU,GAAG,mBAAmB,CAAC,OAAO,CAAC,QAAQ,EAAE,QAAQ,CAAC;SAC7D,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,CAAC,WAAW,EAAE,WAAW,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;IAC5F,MAAM,YAAY,GAAI,CAAC,IAAI,EAAE,IAAI,CAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,UAAU,IAAI,EAAE,CAAC,CAAC;IAEhH,OAAO,UAAU,IAAI,YAAY,CAAC;AACtC,CAAC"}
|
||||
7
node_modules/node-llama-cpp/dist/chatWrappers/utils/jsonDumps.d.ts
generated
vendored
Normal file
7
node_modules/node-llama-cpp/dist/chatWrappers/utils/jsonDumps.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
|
||||
* Like `JSON.stringify` but results in a value formatted in the format that Python produces when using `json.dumps(value)`.
|
||||
*
|
||||
* We need to format results this way since this is what many models use in their training data,
|
||||
* so this is what many models expect to have in their context state.
|
||||
*/
|
||||
export declare function jsonDumps(value: any): string;
|
||||
18
node_modules/node-llama-cpp/dist/chatWrappers/utils/jsonDumps.js
generated
vendored
Normal file
18
node_modules/node-llama-cpp/dist/chatWrappers/utils/jsonDumps.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
|
||||
* Like `JSON.stringify` but results in a value formatted in the format that Python produces when using `json.dumps(value)`.
|
||||
*
|
||||
* We need to format results this way since this is what many models use in their training data,
|
||||
* so this is what many models expect to have in their context state.
|
||||
*/
|
||||
export function jsonDumps(value) {
|
||||
return JSON.stringify(value, null, 1)
|
||||
.split("\n")
|
||||
.map((line) => {
|
||||
line = line.trim();
|
||||
if (line.endsWith(","))
|
||||
line += " ";
|
||||
return line;
|
||||
})
|
||||
.join("");
|
||||
}
|
||||
//# sourceMappingURL=jsonDumps.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/jsonDumps.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/jsonDumps.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"jsonDumps.js","sourceRoot":"","sources":["../../../src/chatWrappers/utils/jsonDumps.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,KAAU;IAChC,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;SAChC,KAAK,CAAC,IAAI,CAAC;SACX,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACV,IAAI,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;QAEnB,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC;YAClB,IAAI,IAAI,GAAG,CAAC;QAEhB,OAAO,IAAI,CAAC;IAChB,CAAC,CAAC;SACD,IAAI,CAAC,EAAE,CAAC,CAAC;AAClB,CAAC"}
|
||||
148
node_modules/node-llama-cpp/dist/chatWrappers/utils/resolveChatWrapper.d.ts
generated
vendored
Normal file
148
node_modules/node-llama-cpp/dist/chatWrappers/utils/resolveChatWrapper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
import { Llama3ChatWrapper } from "../Llama3ChatWrapper.js";
|
||||
import { Llama2ChatWrapper } from "../Llama2ChatWrapper.js";
|
||||
import { ChatMLChatWrapper } from "../ChatMLChatWrapper.js";
|
||||
import { GeneralChatWrapper } from "../GeneralChatWrapper.js";
|
||||
import { FalconChatWrapper } from "../FalconChatWrapper.js";
|
||||
import { FunctionaryChatWrapper } from "../FunctionaryChatWrapper.js";
|
||||
import { AlpacaChatWrapper } from "../AlpacaChatWrapper.js";
|
||||
import { GemmaChatWrapper } from "../GemmaChatWrapper.js";
|
||||
import { JinjaTemplateChatWrapper } from "../generic/JinjaTemplateChatWrapper.js";
|
||||
import { TemplateChatWrapper } from "../generic/TemplateChatWrapper.js";
|
||||
import { Llama3_1ChatWrapper } from "../Llama3_1ChatWrapper.js";
|
||||
import { Llama3_2LightweightChatWrapper } from "../Llama3_2LightweightChatWrapper.js";
|
||||
import { DeepSeekChatWrapper } from "../DeepSeekChatWrapper.js";
|
||||
import { MistralChatWrapper } from "../MistralChatWrapper.js";
|
||||
import { Tokenizer } from "../../types.js";
|
||||
import { LlamaModel } from "../../evaluator/LlamaModel/LlamaModel.js";
|
||||
import { QwenChatWrapper } from "../QwenChatWrapper.js";
|
||||
import { HarmonyChatWrapper } from "../HarmonyChatWrapper.js";
|
||||
import { SeedChatWrapper } from "../SeedChatWrapper.js";
|
||||
import type { GgufFileInfo } from "../../gguf/types/GgufFileInfoTypes.js";
|
||||
export declare const specializedChatWrapperTypeNames: readonly ["general", "deepSeek", "qwen", "llama3.2-lightweight", "llama3.1", "llama3", "llama2Chat", "mistral", "alpacaChat", "functionary", "chatML", "falconChat", "gemma", "harmony", "seed"];
|
||||
export type SpecializedChatWrapperTypeName = (typeof specializedChatWrapperTypeNames)[number];
|
||||
export declare const templateChatWrapperTypeNames: readonly ["template", "jinjaTemplate"];
|
||||
export type TemplateChatWrapperTypeName = (typeof templateChatWrapperTypeNames)[number];
|
||||
export declare const resolvableChatWrapperTypeNames: readonly ["auto", "general", "deepSeek", "qwen", "llama3.2-lightweight", "llama3.1", "llama3", "llama2Chat", "mistral", "alpacaChat", "functionary", "chatML", "falconChat", "gemma", "harmony", "seed", "template", "jinjaTemplate"];
|
||||
export type ResolvableChatWrapperTypeName = (typeof resolvableChatWrapperTypeNames)[number];
|
||||
export declare const chatWrappers: Readonly<{
|
||||
readonly general: typeof GeneralChatWrapper;
|
||||
readonly deepSeek: typeof DeepSeekChatWrapper;
|
||||
readonly qwen: typeof QwenChatWrapper;
|
||||
readonly "llama3.1": typeof Llama3_1ChatWrapper;
|
||||
readonly "llama3.2-lightweight": typeof Llama3_2LightweightChatWrapper;
|
||||
readonly llama3: typeof Llama3ChatWrapper;
|
||||
readonly llama2Chat: typeof Llama2ChatWrapper;
|
||||
readonly mistral: typeof MistralChatWrapper;
|
||||
readonly alpacaChat: typeof AlpacaChatWrapper;
|
||||
readonly functionary: typeof FunctionaryChatWrapper;
|
||||
readonly chatML: typeof ChatMLChatWrapper;
|
||||
readonly falconChat: typeof FalconChatWrapper;
|
||||
readonly gemma: typeof GemmaChatWrapper;
|
||||
readonly harmony: typeof HarmonyChatWrapper;
|
||||
readonly seed: typeof SeedChatWrapper;
|
||||
readonly template: typeof TemplateChatWrapper;
|
||||
readonly jinjaTemplate: typeof JinjaTemplateChatWrapper;
|
||||
}>;
|
||||
export type BuiltInChatWrapperType = InstanceType<typeof chatWrappers[keyof typeof chatWrappers]>;
|
||||
export type ResolveChatWrapperOptions = {
|
||||
/**
|
||||
* Resolve to a specific chat wrapper type.
|
||||
* You better not set this option unless you need to force a specific chat wrapper type.
|
||||
*
|
||||
* Defaults to `"auto"`.
|
||||
*/
|
||||
type?: "auto" | SpecializedChatWrapperTypeName | TemplateChatWrapperTypeName;
|
||||
bosString?: string | null;
|
||||
filename?: string;
|
||||
fileInfo?: GgufFileInfo;
|
||||
tokenizer?: Tokenizer;
|
||||
customWrapperSettings?: {
|
||||
[wrapper in keyof typeof chatWrappers]?: ConstructorParameters<(typeof chatWrappers)[wrapper]>[0];
|
||||
};
|
||||
/**
|
||||
* Defaults to `true`.
|
||||
*/
|
||||
warningLogs?: boolean;
|
||||
/**
|
||||
* Defaults to `true`.
|
||||
*/
|
||||
fallbackToOtherWrappersOnJinjaError?: boolean;
|
||||
/**
|
||||
* Don't resolve to a Jinja chat wrapper unless `type` is set to a Jinja chat wrapper type.
|
||||
*
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
noJinja?: boolean;
|
||||
};
|
||||
export type ResolveChatWrapperWithModelOptions = {
|
||||
/**
|
||||
* Resolve to a specific chat wrapper type.
|
||||
* You better not set this option unless you need to force a specific chat wrapper type.
|
||||
*
|
||||
* Defaults to `"auto"`.
|
||||
*/
|
||||
type?: "auto" | SpecializedChatWrapperTypeName | TemplateChatWrapperTypeName;
|
||||
customWrapperSettings?: {
|
||||
[wrapper in keyof typeof chatWrappers]?: typeof JinjaTemplateChatWrapper extends (typeof chatWrappers)[wrapper] ? Partial<ConstructorParameters<(typeof chatWrappers)[wrapper]>[0]> : ConstructorParameters<(typeof chatWrappers)[wrapper]>[0];
|
||||
};
|
||||
/**
|
||||
* Defaults to `true`.
|
||||
*/
|
||||
warningLogs?: boolean;
|
||||
/**
|
||||
* Defaults to `true`.
|
||||
*/
|
||||
fallbackToOtherWrappersOnJinjaError?: boolean;
|
||||
/**
|
||||
* Don't resolve to a Jinja chat wrapper unless `type` is set to a Jinja chat wrapper type.
|
||||
*
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
noJinja?: boolean;
|
||||
};
|
||||
/**
|
||||
* Resolve to a chat wrapper instance based on the provided information.
|
||||
* The more information provided, the better the resolution will be (except for `type`).
|
||||
*
|
||||
* It's recommended to not set `type` to a specific chat wrapper in order for the resolution to be more flexible, but it is useful for when
|
||||
* you need to provide the ability to force a specific chat wrapper type.
|
||||
* Note that when setting `type` to a generic chat wrapper type (such as `"template"` or `"jinjaTemplate"`), the `customWrapperSettings`
|
||||
* must contain the necessary settings for that chat wrapper to be created.
|
||||
*
|
||||
* When loading a Jinja chat template from either `fileInfo` or `customWrapperSettings.jinjaTemplate.template`,
|
||||
* if the chat template format is invalid, it fallbacks to resolve other chat wrappers,
|
||||
* unless `fallbackToOtherWrappersOnJinjaError` is set to `false` (in which case, it will throw an error).
|
||||
* @example
|
||||
* ```typescript
|
||||
* import {getLlama, resolveChatWrapper, GeneralChatWrapper} from "node-llama-cpp";
|
||||
*
|
||||
* const llama = await getLlama();
|
||||
* const model = await llama.loadModel({modelPath: "path/to/model.gguf"});
|
||||
*
|
||||
* const chatWrapper = resolveChatWrapper(model, {
|
||||
* customWrapperSettings: {
|
||||
* "llama3.1": {
|
||||
* cuttingKnowledgeDate: new Date("2025-01-01T00:00:00Z")
|
||||
* }
|
||||
* }
|
||||
* }) ?? new GeneralChatWrapper()
|
||||
* ```
|
||||
* @example
|
||||
*```typescript
|
||||
* import {getLlama, resolveChatWrapper, GeneralChatWrapper} from "node-llama-cpp";
|
||||
*
|
||||
* const llama = await getLlama();
|
||||
* const model = await llama.loadModel({modelPath: "path/to/model.gguf"});
|
||||
*
|
||||
* const chatWrapper = resolveChatWrapper({
|
||||
* bosString: model.tokens.bosString,
|
||||
* filename: model.filename,
|
||||
* fileInfo: model.fileInfo,
|
||||
* tokenizer: model.tokenizer
|
||||
* }) ?? new GeneralChatWrapper()
|
||||
* ```
|
||||
*/
|
||||
export declare function resolveChatWrapper(model: LlamaModel, options?: ResolveChatWrapperWithModelOptions): BuiltInChatWrapperType;
|
||||
export declare function resolveChatWrapper(options: ResolveChatWrapperOptions): BuiltInChatWrapperType | null;
|
||||
export declare function isSpecializedChatWrapperType(type: string): type is SpecializedChatWrapperTypeName;
|
||||
export declare function isTemplateChatWrapperType(type: string): type is TemplateChatWrapperTypeName;
|
||||
325
node_modules/node-llama-cpp/dist/chatWrappers/utils/resolveChatWrapper.js
generated
vendored
Normal file
325
node_modules/node-llama-cpp/dist/chatWrappers/utils/resolveChatWrapper.js
generated
vendored
Normal file
@@ -0,0 +1,325 @@
|
||||
import { parseModelFileName } from "../../utils/parseModelFileName.js";
|
||||
import { Llama3ChatWrapper } from "../Llama3ChatWrapper.js";
|
||||
import { Llama2ChatWrapper } from "../Llama2ChatWrapper.js";
|
||||
import { ChatMLChatWrapper } from "../ChatMLChatWrapper.js";
|
||||
import { GeneralChatWrapper } from "../GeneralChatWrapper.js";
|
||||
import { FalconChatWrapper } from "../FalconChatWrapper.js";
|
||||
import { FunctionaryChatWrapper } from "../FunctionaryChatWrapper.js";
|
||||
import { AlpacaChatWrapper } from "../AlpacaChatWrapper.js";
|
||||
import { GemmaChatWrapper } from "../GemmaChatWrapper.js";
|
||||
import { JinjaTemplateChatWrapper } from "../generic/JinjaTemplateChatWrapper.js";
|
||||
import { TemplateChatWrapper } from "../generic/TemplateChatWrapper.js";
|
||||
import { getConsoleLogPrefix } from "../../utils/getConsoleLogPrefix.js";
|
||||
import { Llama3_1ChatWrapper } from "../Llama3_1ChatWrapper.js";
|
||||
import { Llama3_2LightweightChatWrapper } from "../Llama3_2LightweightChatWrapper.js";
|
||||
import { DeepSeekChatWrapper } from "../DeepSeekChatWrapper.js";
|
||||
import { MistralChatWrapper } from "../MistralChatWrapper.js";
|
||||
import { includesText } from "../../utils/includesText.js";
|
||||
import { LlamaModel } from "../../evaluator/LlamaModel/LlamaModel.js";
|
||||
import { QwenChatWrapper } from "../QwenChatWrapper.js";
|
||||
import { HarmonyChatWrapper } from "../HarmonyChatWrapper.js";
|
||||
import { SeedChatWrapper } from "../SeedChatWrapper.js";
|
||||
import { isJinjaTemplateEquivalentToSpecializedChatWrapper } from "./isJinjaTemplateEquivalentToSpecializedChatWrapper.js";
|
||||
import { getModelLinageNames } from "./getModelLinageNames.js";
|
||||
export const specializedChatWrapperTypeNames = Object.freeze([
|
||||
"general", "deepSeek", "qwen", "llama3.2-lightweight", "llama3.1", "llama3", "llama2Chat", "mistral", "alpacaChat", "functionary",
|
||||
"chatML", "falconChat", "gemma", "harmony", "seed"
|
||||
]);
|
||||
export const templateChatWrapperTypeNames = Object.freeze([
|
||||
"template", "jinjaTemplate"
|
||||
]);
|
||||
export const resolvableChatWrapperTypeNames = Object.freeze([
|
||||
"auto",
|
||||
...specializedChatWrapperTypeNames,
|
||||
...templateChatWrapperTypeNames
|
||||
]);
|
||||
export const chatWrappers = Object.freeze({
|
||||
"general": GeneralChatWrapper,
|
||||
"deepSeek": DeepSeekChatWrapper,
|
||||
"qwen": QwenChatWrapper,
|
||||
"llama3.1": Llama3_1ChatWrapper,
|
||||
"llama3.2-lightweight": Llama3_2LightweightChatWrapper,
|
||||
"llama3": Llama3ChatWrapper,
|
||||
"llama2Chat": Llama2ChatWrapper,
|
||||
"mistral": MistralChatWrapper,
|
||||
"alpacaChat": AlpacaChatWrapper,
|
||||
"functionary": FunctionaryChatWrapper,
|
||||
"chatML": ChatMLChatWrapper,
|
||||
"falconChat": FalconChatWrapper,
|
||||
"gemma": GemmaChatWrapper,
|
||||
"harmony": HarmonyChatWrapper,
|
||||
"seed": SeedChatWrapper,
|
||||
"template": TemplateChatWrapper,
|
||||
"jinjaTemplate": JinjaTemplateChatWrapper
|
||||
});
|
||||
const chatWrapperToConfigType = new Map(Object.entries(chatWrappers)
|
||||
.map(([configType, Wrapper]) => ([Wrapper, configType])));
|
||||
const specializedChatWrapperRelatedTexts = {
|
||||
"harmony": ["gpt", "gpt-oss"]
|
||||
};
|
||||
export function resolveChatWrapper(options, modelOptions) {
|
||||
if (options instanceof LlamaModel)
|
||||
return resolveChatWrapper({
|
||||
...(modelOptions ?? {}),
|
||||
customWrapperSettings: modelOptions?.customWrapperSettings,
|
||||
bosString: options.tokens.bosString,
|
||||
filename: options.filename,
|
||||
fileInfo: options.fileInfo,
|
||||
tokenizer: options.tokenizer
|
||||
}) ?? new GeneralChatWrapper();
|
||||
const { type = "auto", bosString, filename, fileInfo, tokenizer, customWrapperSettings, warningLogs = true, fallbackToOtherWrappersOnJinjaError = true, noJinja = false } = options;
|
||||
function createSpecializedChatWrapper(specializedChatWrapper, defaultSettings = {}) {
|
||||
const chatWrapperConfigType = chatWrapperToConfigType.get(specializedChatWrapper);
|
||||
const chatWrapperSettings = customWrapperSettings?.[chatWrapperConfigType];
|
||||
return new specializedChatWrapper({
|
||||
...(defaultSettings ?? {}),
|
||||
...(chatWrapperSettings ?? {})
|
||||
});
|
||||
}
|
||||
if (type !== "auto" && type != null) {
|
||||
if (isTemplateChatWrapperType(type)) {
|
||||
const Wrapper = chatWrappers[type];
|
||||
if (isClassReference(Wrapper, TemplateChatWrapper)) {
|
||||
const wrapperSettings = customWrapperSettings?.template;
|
||||
if (wrapperSettings == null || wrapperSettings?.template == null || wrapperSettings?.historyTemplate == null ||
|
||||
wrapperSettings.historyTemplate.system == null || wrapperSettings.historyTemplate.user == null ||
|
||||
wrapperSettings.historyTemplate.model == null) {
|
||||
if (warningLogs)
|
||||
console.warn(getConsoleLogPrefix() + "Template chat wrapper settings must have a template, historyTemplate, historyTemplate.system, historyTemplate.user, and historyTemplate.model. Falling back to resolve other chat wrapper types.");
|
||||
}
|
||||
else
|
||||
return new TemplateChatWrapper(wrapperSettings);
|
||||
}
|
||||
else if (isClassReference(Wrapper, JinjaTemplateChatWrapper)) {
|
||||
const jinjaTemplate = customWrapperSettings?.jinjaTemplate?.template ?? fileInfo?.metadata?.tokenizer?.chat_template;
|
||||
if (jinjaTemplate == null) {
|
||||
if (warningLogs)
|
||||
console.warn(getConsoleLogPrefix() + "Jinja template chat wrapper received no template. Falling back to resolve other chat wrapper types.");
|
||||
}
|
||||
else {
|
||||
try {
|
||||
return new JinjaTemplateChatWrapper({
|
||||
tokenizer,
|
||||
...(customWrapperSettings?.jinjaTemplate ?? {}),
|
||||
template: jinjaTemplate
|
||||
});
|
||||
}
|
||||
catch (err) {
|
||||
if (!fallbackToOtherWrappersOnJinjaError)
|
||||
throw err;
|
||||
else if (warningLogs)
|
||||
console.error(getConsoleLogPrefix() + "Error creating Jinja template chat wrapper. Falling back to resolve other chat wrappers. Error:", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
void Wrapper;
|
||||
}
|
||||
else if (Object.hasOwn(chatWrappers, type)) {
|
||||
const Wrapper = chatWrappers[type];
|
||||
const wrapperSettings = customWrapperSettings?.[type];
|
||||
return new Wrapper(wrapperSettings);
|
||||
}
|
||||
}
|
||||
const modelJinjaTemplate = customWrapperSettings?.jinjaTemplate?.template ?? fileInfo?.metadata?.tokenizer?.chat_template;
|
||||
if (modelJinjaTemplate != null && modelJinjaTemplate.trim() !== "") {
|
||||
const jinjaTemplateChatWrapperOptions = {
|
||||
tokenizer,
|
||||
...(customWrapperSettings?.jinjaTemplate ?? {}),
|
||||
template: modelJinjaTemplate
|
||||
};
|
||||
const chatWrapperNamesToCheck = orderChatWrapperNamesByAssumedCompatibilityWithModel(specializedChatWrapperTypeNames, { filename, fileInfo });
|
||||
for (const specializedChatWrapperTypeName of chatWrapperNamesToCheck) {
|
||||
const Wrapper = chatWrappers[specializedChatWrapperTypeName];
|
||||
const wrapperSettings = customWrapperSettings?.[specializedChatWrapperTypeName];
|
||||
const isCompatible = Wrapper._checkModelCompatibility({
|
||||
tokenizer,
|
||||
fileInfo
|
||||
});
|
||||
if (!isCompatible)
|
||||
continue;
|
||||
const testOptionConfigurations = Wrapper._getOptionConfigurationsToTestIfCanSupersedeJinjaTemplate?.() ?? [];
|
||||
if (testOptionConfigurations.length === 0)
|
||||
testOptionConfigurations.push({});
|
||||
for (const testConfigurationOrPair of testOptionConfigurations) {
|
||||
const testConfig = testConfigurationOrPair instanceof Array
|
||||
? (testConfigurationOrPair[0] ?? {})
|
||||
: testConfigurationOrPair;
|
||||
const applyConfig = testConfigurationOrPair instanceof Array
|
||||
? (testConfigurationOrPair[1] ?? {})
|
||||
: testConfigurationOrPair;
|
||||
const additionalJinjaOptions = testConfigurationOrPair instanceof Array
|
||||
? testConfigurationOrPair[2]
|
||||
: undefined;
|
||||
const testChatWrapperSettings = {
|
||||
...(wrapperSettings ?? {}),
|
||||
...(testConfig ?? {})
|
||||
};
|
||||
const applyChatWrapperSettings = {
|
||||
...(wrapperSettings ?? {}),
|
||||
...(applyConfig ?? {})
|
||||
};
|
||||
const chatWrapper = new Wrapper(testChatWrapperSettings);
|
||||
const jinjaTemplateChatWrapperOptionsWithAdditionalParameters = {
|
||||
...(additionalJinjaOptions ?? {}),
|
||||
...jinjaTemplateChatWrapperOptions,
|
||||
additionalRenderParameters: additionalJinjaOptions?.additionalRenderParameters == null
|
||||
? jinjaTemplateChatWrapperOptions.additionalRenderParameters
|
||||
: {
|
||||
...(jinjaTemplateChatWrapperOptions.additionalRenderParameters ?? {}),
|
||||
...additionalJinjaOptions.additionalRenderParameters
|
||||
}
|
||||
};
|
||||
if (isJinjaTemplateEquivalentToSpecializedChatWrapper(jinjaTemplateChatWrapperOptionsWithAdditionalParameters, chatWrapper, tokenizer))
|
||||
return new Wrapper(applyChatWrapperSettings);
|
||||
}
|
||||
}
|
||||
if (!noJinja) {
|
||||
if (!fallbackToOtherWrappersOnJinjaError)
|
||||
return new JinjaTemplateChatWrapper(jinjaTemplateChatWrapperOptions);
|
||||
try {
|
||||
return new JinjaTemplateChatWrapper(jinjaTemplateChatWrapperOptions);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(getConsoleLogPrefix() + "Error creating Jinja template chat wrapper. Falling back to resolve other chat wrappers. Error:", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const modelNames of getModelLinageNames(fileInfo?.metadata)) {
|
||||
if (includesText(modelNames, ["llama 3.2", "llama-3.2", "llama3.2"]) && Llama3_2LightweightChatWrapper._checkModelCompatibility({ tokenizer, fileInfo }))
|
||||
return createSpecializedChatWrapper(Llama3_2LightweightChatWrapper);
|
||||
else if (includesText(modelNames, ["llama 3.1", "llama-3.1", "llama3.1"]) && Llama3_1ChatWrapper._checkModelCompatibility({ tokenizer, fileInfo }))
|
||||
return createSpecializedChatWrapper(Llama3_1ChatWrapper);
|
||||
else if (includesText(modelNames, ["llama 3", "llama-3", "llama3"]))
|
||||
return createSpecializedChatWrapper(Llama3ChatWrapper);
|
||||
else if (includesText(modelNames, ["Mistral", "Mistral Large", "Mistral Large Instruct", "Mistral-Large", "Codestral"]))
|
||||
return createSpecializedChatWrapper(MistralChatWrapper);
|
||||
else if (includesText(modelNames, ["Gemma", "Gemma 2"]))
|
||||
return createSpecializedChatWrapper(GemmaChatWrapper);
|
||||
else if (includesText(modelNames, ["gpt-oss", "Gpt Oss", "Gpt-Oss", "openai_gpt-oss", "Openai_Gpt Oss", "openai.gpt-oss", "Openai.Gpt Oss"]))
|
||||
return createSpecializedChatWrapper(HarmonyChatWrapper);
|
||||
else if (includesText(modelNames, ["seed-oss", "Seed Oss", "Seed OSS", "Seed-Oss", "Seed-OSS", "ByteDance-Seed_Seed-OSS", "ByteDance-Seed.Seed-OSS"]))
|
||||
return createSpecializedChatWrapper(SeedChatWrapper);
|
||||
}
|
||||
// try to find a pattern in the Jinja template to resolve to a specialized chat wrapper,
|
||||
// with a logic similar to `llama.cpp`'s `llama_chat_apply_template_internal` function
|
||||
if (modelJinjaTemplate != null && modelJinjaTemplate.trim() !== "") {
|
||||
if (modelJinjaTemplate.includes("<seed:think>") || (modelJinjaTemplate.includes("<seed:bos>") && modelJinjaTemplate.includes("<seed:eos>")))
|
||||
return createSpecializedChatWrapper(SeedChatWrapper);
|
||||
else if (modelJinjaTemplate.includes("<|start|>") && modelJinjaTemplate.includes("<|channel|>"))
|
||||
return createSpecializedChatWrapper(HarmonyChatWrapper);
|
||||
else if (modelJinjaTemplate.includes("<|im_start|>"))
|
||||
return createSpecializedChatWrapper(ChatMLChatWrapper);
|
||||
else if (modelJinjaTemplate.includes("[INST]"))
|
||||
return createSpecializedChatWrapper(Llama2ChatWrapper, {
|
||||
addSpaceBeforeEos: modelJinjaTemplate.includes("' ' + eos_token")
|
||||
});
|
||||
else if (modelJinjaTemplate.includes("<|start_header_id|>") && modelJinjaTemplate.includes("<|end_header_id|>")) {
|
||||
if (Llama3_1ChatWrapper._checkModelCompatibility({ tokenizer, fileInfo }))
|
||||
return createSpecializedChatWrapper(Llama3_1ChatWrapper);
|
||||
else
|
||||
return createSpecializedChatWrapper(Llama3ChatWrapper);
|
||||
}
|
||||
else if (modelJinjaTemplate.includes("<start_of_turn>"))
|
||||
return createSpecializedChatWrapper(GemmaChatWrapper);
|
||||
}
|
||||
if (filename != null) {
|
||||
const { name, subType, fileType, otherInfo } = parseModelFileName(filename);
|
||||
if (fileType?.toLowerCase() === "gguf") {
|
||||
const lowercaseName = name?.toLowerCase();
|
||||
const lowercaseSubType = subType?.toLowerCase();
|
||||
const splitLowercaseSubType = (lowercaseSubType?.split("-") ?? []).concat(otherInfo.map((info) => info.toLowerCase()));
|
||||
const firstSplitLowercaseSubType = splitLowercaseSubType[0];
|
||||
if (lowercaseName === "llama") {
|
||||
if (splitLowercaseSubType.includes("chat"))
|
||||
return createSpecializedChatWrapper(Llama2ChatWrapper);
|
||||
return createSpecializedChatWrapper(GeneralChatWrapper);
|
||||
}
|
||||
else if (lowercaseName === "codellama")
|
||||
return createSpecializedChatWrapper(GeneralChatWrapper);
|
||||
else if (lowercaseName === "yarn" && firstSplitLowercaseSubType === "llama")
|
||||
return createSpecializedChatWrapper(Llama2ChatWrapper);
|
||||
else if (lowercaseName === "orca")
|
||||
return createSpecializedChatWrapper(ChatMLChatWrapper);
|
||||
else if (lowercaseName === "phind" && lowercaseSubType === "codellama")
|
||||
return createSpecializedChatWrapper(Llama2ChatWrapper);
|
||||
else if (lowercaseName === "mistral")
|
||||
return createSpecializedChatWrapper(GeneralChatWrapper);
|
||||
else if (firstSplitLowercaseSubType === "llama")
|
||||
return createSpecializedChatWrapper(Llama2ChatWrapper);
|
||||
else if (lowercaseSubType === "alpaca")
|
||||
return createSpecializedChatWrapper(AlpacaChatWrapper);
|
||||
else if (lowercaseName === "functionary")
|
||||
return createSpecializedChatWrapper(FunctionaryChatWrapper);
|
||||
else if (lowercaseName === "dolphin" && splitLowercaseSubType.includes("mistral"))
|
||||
return createSpecializedChatWrapper(ChatMLChatWrapper);
|
||||
else if (lowercaseName === "gemma")
|
||||
return createSpecializedChatWrapper(GemmaChatWrapper);
|
||||
else if (splitLowercaseSubType.includes("chatml"))
|
||||
return createSpecializedChatWrapper(ChatMLChatWrapper);
|
||||
}
|
||||
}
|
||||
if (bosString !== "" && bosString != null) {
|
||||
if ("<s>[INST] <<SYS>>\n".startsWith(bosString)) {
|
||||
return createSpecializedChatWrapper(Llama2ChatWrapper);
|
||||
}
|
||||
else if ("<|im_start|>system\n".startsWith(bosString)) {
|
||||
return createSpecializedChatWrapper(ChatMLChatWrapper);
|
||||
}
|
||||
}
|
||||
if (fileInfo != null) {
|
||||
const arch = fileInfo.metadata.general?.architecture;
|
||||
if (arch === "llama")
|
||||
return createSpecializedChatWrapper(GeneralChatWrapper);
|
||||
else if (arch === "falcon")
|
||||
return createSpecializedChatWrapper(FalconChatWrapper);
|
||||
else if (arch === "gemma" || arch === "gemma2")
|
||||
return createSpecializedChatWrapper(GemmaChatWrapper);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
 * Check whether the given chat wrapper type name is one of the built-in
 * specialized chat wrapper types.
 * @param {string} type - chat wrapper type name to test
 * @returns {boolean} `true` when `type` appears in the module-level list of
 * specialized chat wrapper type names
 */
export function isSpecializedChatWrapperType(type) {
    const knownTypeNames = specializedChatWrapperTypeNames;
    return knownTypeNames.includes(type);
}
|
||||
/**
 * Check whether the given chat wrapper type name is one of the
 * template-based chat wrapper types.
 * @param {string} type - chat wrapper type name to test
 * @returns {boolean} `true` when `type` appears in the module-level list of
 * template chat wrapper type names
 */
export function isTemplateChatWrapperType(type) {
    const knownTypeNames = templateChatWrapperTypeNames;
    return knownTypeNames.includes(type);
}
|
||||
// Identity check used as a type guard in the original TypeScript source;
// needed because TypeScript guards don't work automatically with class references.
function isClassReference(value, classReference) {
    // Strict identity: true only when both arguments are the exact same reference.
    const isSameReference = value === classReference;
    return isSameReference;
}
|
||||
/**
 * Order chat wrapper names so that the ones assumed most compatible with the
 * model come first, judged by case-insensitive substring matches of each
 * wrapper name (and its related texts from `specializedChatWrapperRelatedTexts`)
 * against the model's metadata name and the model file name.
 *
 * The input array is not mutated; a sorted copy is returned.
 * @param {string[]} chatWrapperNames - wrapper type names to order
 * @param {{filename?: string, fileInfo?: object}} context - model file name and
 * parsed GGUF file info (its `metadata.general.name` is used when present)
 * @returns {string[]} a new array sorted by descending compatibility score
 */
function orderChatWrapperNamesByAssumedCompatibilityWithModel(chatWrapperNames, { filename, fileInfo }) {
    // Weights: a hit in the model's metadata name outranks a hit in the file
    // name, and each hit also earns position points that grow the later the
    // match occurs within the text.
    const weights = {
        modelName: 3,
        modelNamePosition: 4,
        fileName: 2,
        fileNamePosition: 3
    };
    // Score a single pattern against a text; 0 when the text is absent or the
    // pattern does not occur in it.
    const scoreTextMatch = (pattern, haystack, existsPoints, positionPoints) => {
        if (haystack == null)
            return 0;
        const matchIndex = haystack.toLowerCase().indexOf(pattern.toLowerCase());
        return matchIndex < 0
            ? 0
            : existsPoints + (((matchIndex + 1) / haystack.length) * positionPoints);
    };
    // Best score among the wrapper name itself and any of its related texts.
    const scoreWrapperName = (wrapperName, haystack, existsPoints, positionPoints) => {
        const relatedTexts = specializedChatWrapperRelatedTexts[wrapperName] ?? [];
        let bestScore = 0;
        for (const pattern of [wrapperName, ...relatedTexts])
            bestScore = Math.max(bestScore, scoreTextMatch(pattern, haystack, existsPoints, positionPoints));
        return bestScore;
    };
    const modelName = fileInfo?.metadata?.general?.name;
    // Combined score of a wrapper name against both the model name and the file name.
    const totalScore = (wrapperName) => (
        scoreWrapperName(wrapperName, modelName, weights.modelName, weights.modelNamePosition) +
        scoreWrapperName(wrapperName, filename, weights.fileName, weights.fileNamePosition)
    );
    return [...chatWrapperNames].sort((a, b) => totalScore(b) - totalScore(a));
}
|
||||
//# sourceMappingURL=resolveChatWrapper.js.map
|
||||
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/resolveChatWrapper.js.map
generated
vendored
Normal file
1
node_modules/node-llama-cpp/dist/chatWrappers/utils/resolveChatWrapper.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user