First upload version 0.0.1
node_modules/node-llama-cpp/dist/bindings/utils/getLlamaWithoutBackend.js (generated, vendored, normal file; 40 lines added)
@@ -0,0 +1,40 @@
import { withLock } from "lifecycle-utils";
import { getLlamaForOptions } from "../getLlama.js";
import { LlamaLogLevel } from "../types.js";
// Cached Llama instance shared by all callers; populated lazily on first use.
let sharedLlamaWithoutBackend = null;
/**
 * This is used to access various methods in the addon side without actually using a backend
 */
export async function getLlamaWithoutBackend() {
    if (sharedLlamaWithoutBackend != null)
        return sharedLlamaWithoutBackend;
    return await withLock([getLlamaWithoutBackend, "loadAddon"], async () => {
        // Re-check after acquiring the lock in case another caller finished loading first.
        if (sharedLlamaWithoutBackend != null)
            return sharedLlamaWithoutBackend;
        try {
            // Prefer a prebuilt binary with GPU support disabled, skipping full initialization.
            sharedLlamaWithoutBackend = await getLlamaForOptions({
                gpu: false,
                progressLogs: false,
                logLevel: LlamaLogLevel.error,
                build: "never",
                usePrebuiltBinaries: true,
                vramPadding: 0
            }, {
                skipLlamaInit: true
            });
        }
        catch (err) {
            // Fall back to the default GPU selection if loading with GPU disabled fails.
            sharedLlamaWithoutBackend = await getLlamaForOptions({
                progressLogs: false,
                logLevel: LlamaLogLevel.error,
                build: "never",
                usePrebuiltBinaries: true,
                vramPadding: 0
            }, {
                skipLlamaInit: true
            });
        }
        return sharedLlamaWithoutBackend;
    });
}
//# sourceMappingURL=getLlamaWithoutBackend.js.map
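For context (not part of the committed file): a minimal usage sketch of the helper above, assuming it is called from another internal module of node-llama-cpp; the import path shown is hypothetical. Repeated calls return the same cached instance, so addon-side methods can be reached without initializing a compute backend per caller.

// Hypothetical consumer module; the relative import path is an assumption for illustration.
import { getLlamaWithoutBackend } from "./bindings/utils/getLlamaWithoutBackend.js";

const llama = await getLlamaWithoutBackend();
const again = await getLlamaWithoutBackend();
console.log(llama === again); // true: the shared instance is cached after the first load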