Files
airllm-fork-nodejs/node_modules/node-llama-cpp/dist/bindings/utils/getLlamaWithoutBackend.js
2026-02-05 15:27:49 +08:00

40 lines
1.4 KiB
JavaScript

import { withLock } from "lifecycle-utils";
import { getLlamaForOptions } from "../getLlama.js";
import { LlamaLogLevel } from "../types.js";
// Module-level cache for the backend-less Llama instance.
// Populated lazily (and at most once) by getLlamaWithoutBackend() below.
let sharedLlamaWithoutBackend = null;
/**
 * Returns a lazily-created, shared Llama instance that is loaded with
 * `skipLlamaInit: true`, so addon-side methods can be accessed without
 * actually initializing a backend.
 *
 * Uses double-checked locking: a fast path outside the lock, then a
 * re-check inside `withLock` so concurrent callers share one creation.
 * Creation is first attempted with `gpu: false`; if that throws, it
 * retries once without forcing the GPU option off.
 *
 * @returns {Promise<*>} the shared backend-less Llama instance
 */
export async function getLlamaWithoutBackend() {
    // Fast path: instance already created.
    if (sharedLlamaWithoutBackend != null)
        return sharedLlamaWithoutBackend;
    return await withLock([getLlamaWithoutBackend, "loadAddon"], async () => {
        // Re-check under the lock: another caller may have created it
        // while we were waiting.
        if (sharedLlamaWithoutBackend != null)
            return sharedLlamaWithoutBackend;
        // Options shared by both creation attempts.
        const baseOptions = {
            progressLogs: false,
            logLevel: LlamaLogLevel.error,
            build: "never",
            usePrebuiltBinaries: true,
            vramPadding: 0
        };
        const initOptions = {
            skipLlamaInit: true
        };
        try {
            sharedLlamaWithoutBackend = await getLlamaForOptions({
                gpu: false,
                ...baseOptions
            }, initOptions);
        }
        catch {
            // Fallback: retry without forcing `gpu: false`; if this also
            // throws, the error propagates to the caller.
            sharedLlamaWithoutBackend = await getLlamaForOptions(baseOptions, initOptions);
        }
        return sharedLlamaWithoutBackend;
    });
}
//# sourceMappingURL=getLlamaWithoutBackend.js.map