First upload version 0.0.1

commit 8e9b7201ed by Neyra, 2026-02-05 15:27:49 +08:00
4182 changed files with 593136 additions and 0 deletions

node_modules/@huggingface/jinja/dist/lexer.d.ts (generated, vendored; 53 lines)

@@ -0,0 +1,53 @@
/**
 * Represents the token types that our language understands during parsing.
*/
export declare const TOKEN_TYPES: Readonly<{
Text: "Text";
NumericLiteral: "NumericLiteral";
StringLiteral: "StringLiteral";
Identifier: "Identifier";
Equals: "Equals";
OpenParen: "OpenParen";
CloseParen: "CloseParen";
OpenStatement: "OpenStatement";
CloseStatement: "CloseStatement";
OpenExpression: "OpenExpression";
CloseExpression: "CloseExpression";
OpenSquareBracket: "OpenSquareBracket";
CloseSquareBracket: "CloseSquareBracket";
OpenCurlyBracket: "OpenCurlyBracket";
CloseCurlyBracket: "CloseCurlyBracket";
Comma: "Comma";
Dot: "Dot";
Colon: "Colon";
Pipe: "Pipe";
CallOperator: "CallOperator";
AdditiveBinaryOperator: "AdditiveBinaryOperator";
MultiplicativeBinaryOperator: "MultiplicativeBinaryOperator";
ComparisonBinaryOperator: "ComparisonBinaryOperator";
UnaryOperator: "UnaryOperator";
Comment: "Comment";
}>;
export type TokenType = keyof typeof TOKEN_TYPES;
/**
* Represents a single token in the template.
*/
export declare class Token {
value: string;
type: TokenType;
/**
* Constructs a new Token.
* @param {string} value The raw value as seen inside the source code.
* @param {TokenType} type The type of token.
*/
constructor(value: string, type: TokenType);
}
export interface PreprocessOptions {
trim_blocks?: boolean;
lstrip_blocks?: boolean;
}
/**
* Generate a list of tokens from a source string.
*/
export declare function tokenize(source: string, options?: PreprocessOptions): Token[];
//# sourceMappingURL=lexer.d.ts.map
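
For reference, a minimal sketch of how the lexer API declared above could be driven: tokenize a template string and inspect the resulting Token stream. The deep import path is an assumption (the file ships under dist/; whether tokenize is also re-exported from the package root depends on the package's exports map), and the sample template and option values are illustrative only.

// Hedged usage sketch for the declared lexer API. The import path is an
// assumption; adjust it if @huggingface/jinja exposes tokenize from its
// root entry point instead of dist/lexer.
import { tokenize, TOKEN_TYPES, Token } from "@huggingface/jinja/dist/lexer";

// Tokenize a small Jinja-style template; trim_blocks and lstrip_blocks map
// onto the PreprocessOptions interface declared above.
const source = "Hello {{ name }}!{% if admin %} (admin){% endif %}";
const tokens: Token[] = tokenize(source, { trim_blocks: true, lstrip_blocks: true });

// Each Token carries the raw source text (value) and a TokenType drawn from
// TOKEN_TYPES, so the stream can be filtered or inspected directly.
for (const token of tokens) {
    if (token.type === TOKEN_TYPES.Identifier) {
        console.log(`identifier: ${token.value}`);
    }
}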