First upload, version 0.0.1

This commit is contained in:
Neyra
2026-02-05 15:27:49 +08:00
commit 8e9b7201ed
4182 changed files with 593136 additions and 0 deletions

74
node_modules/node-llama-cpp/dist/config.d.ts generated vendored Normal file
View File

@@ -0,0 +1,74 @@
// Generated type declarations for node-llama-cpp's config module.
// NOTE(review): this is a vendored, auto-generated .d.ts — the actual values
// live in the corresponding compiled config.js; edit the package source, not
// this file.
import { LlamaLogLevel } from "./bindings/types.js";
// --- Package/filesystem layout paths (resolved at runtime in config.js) ---
export declare const llamaDirectory: string;
export declare const llamaToolchainsDirectory: string;
export declare const llamaPrebuiltBinsDirectory: string;
export declare const llamaLocalBuildBinsDirectory: string;
export declare const llamaBinsGrammarsDirectory: string;
export declare const projectTemplatesDirectory: string;
export declare const packedProjectTemplatesDirectory: string;
export declare const llamaCppDirectory: string;
export declare const llamaCppGrammarsDirectory: string;
export declare const tempDownloadDirectory: string;
// --- CLI state locations (home-directory storage, history, models) ---
export declare const cliHomedirDirectory: string;
export declare const chatCommandHistoryFilePath: string;
export declare const cliModelsDirectory: string;
export declare const lastBuildInfoJsonPath: string;
export declare const binariesGithubReleasePath: string;
export declare const llamaCppDirectoryInfoFilePath: string;
export declare const currentReleaseGitBundlePath: string;
// --- xpack/xpm tooling locations and pinned version ---
export declare const xpackDirectory: string;
export declare const localXpacksStoreDirectory: string;
export declare const localXpacksCacheDirectory: string;
export declare const buildMetadataFileName = "_nlcBuildMetadata.json";
export declare const xpmVersion = "^0.16.3";
// --- llama.cpp source repo/release selection ---
export declare const builtinLlamaCppGitHubRepo = "ggml-org/llama.cpp";
export declare const builtinLlamaCppRelease: string;
// --- Environment detection flags (CI, Google Colab) ---
export declare const isCI: boolean;
export declare const isRunningInsideGoogleColab: boolean;
export declare const useCiLogs: boolean;
// --- Defaults for building/running llama.cpp bindings ---
// defaultLlamaCppGpuSupport: "auto" lets the library pick a backend;
// false disables GPU support entirely.
export declare const defaultLlamaCppGitHubRepo: string;
export declare const defaultLlamaCppRelease: string;
export declare const defaultLlamaCppGpuSupport: false | "metal" | "cuda" | "vulkan" | "auto";
export declare const defaultLlamaCppLogLevel: LlamaLogLevel;
export declare const defaultLlamaCppDebugMode: boolean;
export declare const defaultSkipDownload: boolean;
export declare const defaultBindingTestLogLevel: LlamaLogLevel;
export declare const defaultXpacksStoreDirectory: string;
export declare const defaultXpacksCacheDirectory: string;
// Env vars prefixed with this string are forwarded as CMake options when
// building llama.cpp from source.
export declare const customCmakeOptionsEnvVarPrefix = "NODE_LLAMA_CPP_CMAKE_OPTION_";
// --- CLI presentation defaults ---
export declare const defaultChatSystemPrompt: string;
export declare const cliBinName = "node-llama-cpp";
export declare const npxRunPrefix = "npx --no ";
export declare const enableRecursiveClone = false;
/**
 * URLs of the project's documentation pages, grouped to mirror the CLI
 * command hierarchy (e.g. `inspect gpu`, `source download`).
 *
 * NOTE(review): the scraped copy of this declaration had its indentation
 * destroyed, making `troubleshooting` appear to sit outside the object;
 * brace matching shows it is a top-level property of `documentationPageUrls`,
 * restored here with conventional nesting. No property names or types were
 * changed.
 */
export declare const documentationPageUrls: {
    readonly CUDA: string;
    readonly Vulkan: string;
    readonly CLI: {
        readonly index: string;
        readonly Pull: string;
        readonly Chat: string;
        readonly Init: string;
        readonly Complete: string;
        readonly Infill: string;
        readonly Inspect: {
            readonly index: string;
            readonly GPU: string;
            readonly GGUF: string;
            readonly Measure: string;
            readonly Estimate: string;
        };
        readonly Source: {
            readonly index: string;
            readonly Download: string;
            readonly Build: string;
            readonly Clear: string;
        };
    };
    readonly troubleshooting: {
        readonly RosettaIllegalHardwareInstruction: string;
    };
};
// Issue tracker URL for the node-llama-cpp project (used when directing
// users to report bugs).
export declare const newGithubIssueUrl = "https://github.com/withcatai/node-llama-cpp/issues";
// Docker base image recommended by the package; presumably surfaced in docs
// or CLI hints — confirm against the package source.
export declare const recommendedBaseDockerImage = "node:20";
// Lower bound applied during context-size calculations (units presumably
// tokens — TODO confirm in config.js / the calling code).
export declare const minAllowedContextSizeInCalculations = 24;
// Padding added on top of computed context sizes.
export declare const contextSizePad = 256;