forked from github/quartz
feat(bases): migrate from vault to upstream
Signed-off-by: Aaron Pham <contact@aarnphm.xyz>
This commit is contained in:
76
quartz/util/base/compiler/ast.ts
Normal file
76
quartz/util/base/compiler/ast.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
export type Position = { offset: number; line: number; column: number }
|
||||
|
||||
export type Span = { start: Position; end: Position; file?: string }
|
||||
|
||||
export type Program = { type: "Program"; body: Expr | null; span: Span }
|
||||
|
||||
export type Expr =
|
||||
| Literal
|
||||
| Identifier
|
||||
| UnaryExpr
|
||||
| BinaryExpr
|
||||
| LogicalExpr
|
||||
| CallExpr
|
||||
| MemberExpr
|
||||
| IndexExpr
|
||||
| ListExpr
|
||||
| ErrorExpr
|
||||
|
||||
export type LiteralKind = "number" | "string" | "boolean" | "null" | "date" | "duration" | "regex"
|
||||
|
||||
export type NumberLiteral = { type: "Literal"; kind: "number"; value: number; span: Span }
|
||||
export type StringLiteral = { type: "Literal"; kind: "string"; value: string; span: Span }
|
||||
export type BooleanLiteral = { type: "Literal"; kind: "boolean"; value: boolean; span: Span }
|
||||
export type NullLiteral = { type: "Literal"; kind: "null"; value: null; span: Span }
|
||||
export type DateLiteral = { type: "Literal"; kind: "date"; value: string; span: Span }
|
||||
export type DurationLiteral = { type: "Literal"; kind: "duration"; value: string; span: Span }
|
||||
export type RegexLiteral = {
|
||||
type: "Literal"
|
||||
kind: "regex"
|
||||
value: string
|
||||
flags: string
|
||||
span: Span
|
||||
}
|
||||
|
||||
export type Literal =
|
||||
| NumberLiteral
|
||||
| StringLiteral
|
||||
| BooleanLiteral
|
||||
| NullLiteral
|
||||
| DateLiteral
|
||||
| DurationLiteral
|
||||
| RegexLiteral
|
||||
|
||||
export type Identifier = { type: "Identifier"; name: string; span: Span }
|
||||
|
||||
export type UnaryExpr = { type: "UnaryExpr"; operator: "!" | "-"; argument: Expr; span: Span }
|
||||
|
||||
export type BinaryExpr = {
|
||||
type: "BinaryExpr"
|
||||
operator: "+" | "-" | "*" | "/" | "%" | "==" | "!=" | ">" | ">=" | "<" | "<="
|
||||
left: Expr
|
||||
right: Expr
|
||||
span: Span
|
||||
}
|
||||
|
||||
export type LogicalExpr = {
|
||||
type: "LogicalExpr"
|
||||
operator: "&&" | "||"
|
||||
left: Expr
|
||||
right: Expr
|
||||
span: Span
|
||||
}
|
||||
|
||||
export type CallExpr = { type: "CallExpr"; callee: Expr; args: Expr[]; span: Span }
|
||||
|
||||
export type MemberExpr = { type: "MemberExpr"; object: Expr; property: string; span: Span }
|
||||
|
||||
export type IndexExpr = { type: "IndexExpr"; object: Expr; index: Expr; span: Span }
|
||||
|
||||
export type ListExpr = { type: "ListExpr"; elements: Expr[]; span: Span }
|
||||
|
||||
export type ErrorExpr = { type: "ErrorExpr"; message: string; span: Span }
|
||||
|
||||
export function spanFrom(start: Span, end: Span): Span {
|
||||
return { start: start.start, end: end.end, file: start.file || end.file }
|
||||
}
|
||||
9
quartz/util/base/compiler/diagnostics.ts
Normal file
9
quartz/util/base/compiler/diagnostics.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { Span } from "./ast"
|
||||
|
||||
export type BaseExpressionDiagnostic = {
|
||||
kind: "lex" | "parse" | "runtime"
|
||||
message: string
|
||||
span: Span
|
||||
context: string
|
||||
source: string
|
||||
}
|
||||
3
quartz/util/base/compiler/errors.ts
Normal file
3
quartz/util/base/compiler/errors.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
import { Span } from "./ast"
|
||||
|
||||
export type Diagnostic = { kind: "lex" | "parse"; message: string; span: Span }
|
||||
10
quartz/util/base/compiler/expressions.ts
Normal file
10
quartz/util/base/compiler/expressions.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { ProgramIR } from "./ir"
|
||||
|
||||
export type BasesExpressions = {
|
||||
filters?: ProgramIR
|
||||
viewFilters: Record<string, ProgramIR>
|
||||
formulas: Record<string, ProgramIR>
|
||||
summaries: Record<string, ProgramIR>
|
||||
viewSummaries: Record<string, Record<string, ProgramIR>>
|
||||
propertyExpressions: Record<string, ProgramIR>
|
||||
}
|
||||
44
quartz/util/base/compiler/index.ts
Normal file
44
quartz/util/base/compiler/index.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
export { lex } from "./lexer"
|
||||
export { parseExpressionSource } from "./parser"
|
||||
export type { ParseResult } from "./parser"
|
||||
export type { Diagnostic } from "./errors"
|
||||
export type { Program, Expr, Span, Position } from "./ast"
|
||||
export type { BaseExpressionDiagnostic } from "./diagnostics"
|
||||
export type { BasesExpressions } from "./expressions"
|
||||
export type { Instruction, ProgramIR } from "./ir"
|
||||
export { compileExpression } from "./ir"
|
||||
export { buildPropertyExpressionSource } from "./properties"
|
||||
export type {
|
||||
SummaryDefinition,
|
||||
ViewSummaryConfig,
|
||||
PropertyConfig,
|
||||
BuiltinSummaryType,
|
||||
} from "./schema"
|
||||
export { BUILTIN_SUMMARY_TYPES } from "./schema"
|
||||
export {
|
||||
evaluateExpression,
|
||||
evaluateFilterExpression,
|
||||
evaluateSummaryExpression,
|
||||
valueToUnknown,
|
||||
} from "./interpreter"
|
||||
export type {
|
||||
EvalContext,
|
||||
Value,
|
||||
NullValue,
|
||||
BooleanValue,
|
||||
NumberValue,
|
||||
StringValue,
|
||||
DateValue,
|
||||
DurationValue,
|
||||
ListValue,
|
||||
ObjectValue,
|
||||
FileValue,
|
||||
LinkValue,
|
||||
RegexValue,
|
||||
HtmlValue,
|
||||
IconValue,
|
||||
ImageValue,
|
||||
ValueKind,
|
||||
ValueOf,
|
||||
} from "./interpreter"
|
||||
export { isValueKind } from "./interpreter"
|
||||
73
quartz/util/base/compiler/interpreter.test.ts
Normal file
73
quartz/util/base/compiler/interpreter.test.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import assert from "node:assert"
|
||||
import test from "node:test"
|
||||
import { FilePath, FullSlug, SimpleSlug } from "../../path"
|
||||
|
||||
type ContentLayout = "default" | "article" | "page"
|
||||
import { evaluateExpression, valueToUnknown, EvalContext } from "./interpreter"
|
||||
import { compileExpression } from "./ir"
|
||||
import { parseExpressionSource } from "./parser"
|
||||
|
||||
const parseExpr = (source: string) => {
|
||||
const result = parseExpressionSource(source, "test")
|
||||
if (!result.program.body) {
|
||||
throw new Error(`expected expression for ${source}`)
|
||||
}
|
||||
return compileExpression(result.program.body)
|
||||
}
|
||||
|
||||
const makeCtx = (): EvalContext => {
|
||||
const fileA = {
|
||||
slug: "a" as FullSlug,
|
||||
filePath: "a.md" as FilePath,
|
||||
frontmatter: { title: "A", pageLayout: "default" as ContentLayout },
|
||||
links: [] as SimpleSlug[],
|
||||
}
|
||||
const fileB = {
|
||||
slug: "b" as FullSlug,
|
||||
filePath: "b.md" as FilePath,
|
||||
frontmatter: { title: "B", pageLayout: "default" as ContentLayout },
|
||||
links: ["a"] as SimpleSlug[],
|
||||
}
|
||||
return { file: fileA, allFiles: [fileA, fileB] }
|
||||
}
|
||||
|
||||
test("link equality resolves to file targets", () => {
|
||||
const expr = parseExpr('link("a") == file("a")')
|
||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
||||
assert.strictEqual(value, true)
|
||||
})
|
||||
|
||||
test("link equality matches raw string targets", () => {
|
||||
const expr = parseExpr('link("a") == "a"')
|
||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
||||
assert.strictEqual(value, true)
|
||||
})
|
||||
|
||||
test("date arithmetic handles month additions", () => {
|
||||
const expr = parseExpr('date("2025-01-01") + "1M"')
|
||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
||||
assert.ok(value instanceof Date)
|
||||
assert.strictEqual(value.toISOString().split("T")[0], "2025-02-01")
|
||||
})
|
||||
|
||||
test("date subtraction returns duration in ms", () => {
|
||||
const expr = parseExpr('date("2025-01-02") - date("2025-01-01")')
|
||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
||||
assert.strictEqual(value, 86400000)
|
||||
})
|
||||
|
||||
test("list summary helpers compute statistics", () => {
|
||||
const meanExpr = parseExpr("([1, 2, 3]).mean()")
|
||||
const medianExpr = parseExpr("([1, 2, 3]).median()")
|
||||
const stddevExpr = parseExpr("([1, 2, 3]).stddev()")
|
||||
const sumExpr = parseExpr("([1, 2, 3]).sum()")
|
||||
const ctx = makeCtx()
|
||||
assert.strictEqual(valueToUnknown(evaluateExpression(meanExpr, ctx)), 2)
|
||||
assert.strictEqual(valueToUnknown(evaluateExpression(medianExpr, ctx)), 2)
|
||||
assert.strictEqual(valueToUnknown(evaluateExpression(sumExpr, ctx)), 6)
|
||||
const stddev = valueToUnknown(evaluateExpression(stddevExpr, ctx))
|
||||
assert.strictEqual(typeof stddev, "number")
|
||||
if (typeof stddev === "number") {
|
||||
assert.ok(Math.abs(stddev - Math.sqrt(2 / 3)) < 1e-6)
|
||||
}
|
||||
})
|
||||
1718
quartz/util/base/compiler/interpreter.ts
Normal file
1718
quartz/util/base/compiler/interpreter.ts
Normal file
File diff suppressed because it is too large
Load Diff
164
quartz/util/base/compiler/ir.ts
Normal file
164
quartz/util/base/compiler/ir.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { BinaryExpr, Expr, Literal, Span, UnaryExpr } from "./ast"
|
||||
|
||||
export type JumpInstruction = {
|
||||
op: "jump" | "jump_if_false" | "jump_if_true"
|
||||
target: number
|
||||
span: Span
|
||||
}
|
||||
|
||||
export type Instruction =
|
||||
| { op: "const"; literal: Literal; span: Span }
|
||||
| { op: "ident"; name: string; span: Span }
|
||||
| { op: "load_formula"; name: string; span: Span }
|
||||
| { op: "load_formula_index"; span: Span }
|
||||
| { op: "member"; property: string; span: Span }
|
||||
| { op: "index"; span: Span }
|
||||
| { op: "list"; count: number; span: Span }
|
||||
| { op: "unary"; operator: UnaryExpr["operator"]; span: Span }
|
||||
| { op: "binary"; operator: BinaryExpr["operator"]; span: Span }
|
||||
| { op: "to_bool"; span: Span }
|
||||
| { op: "call_global"; name: string; argc: number; span: Span }
|
||||
| { op: "call_method"; name: string; argc: number; span: Span }
|
||||
| { op: "call_dynamic"; span: Span }
|
||||
| { op: "filter"; program: ProgramIR | null; span: Span }
|
||||
| { op: "map"; program: ProgramIR | null; span: Span }
|
||||
| { op: "reduce"; program: ProgramIR | null; initial: ProgramIR | null; span: Span }
|
||||
| JumpInstruction
|
||||
|
||||
export type ProgramIR = { instructions: Instruction[]; span: Span }
|
||||
|
||||
const compileExpr = (expr: Expr, out: Instruction[]) => {
|
||||
switch (expr.type) {
|
||||
case "Literal":
|
||||
out.push({ op: "const", literal: expr, span: expr.span })
|
||||
return
|
||||
case "Identifier":
|
||||
out.push({ op: "ident", name: expr.name, span: expr.span })
|
||||
return
|
||||
case "UnaryExpr":
|
||||
compileExpr(expr.argument, out)
|
||||
out.push({ op: "unary", operator: expr.operator, span: expr.span })
|
||||
return
|
||||
case "BinaryExpr":
|
||||
compileExpr(expr.left, out)
|
||||
compileExpr(expr.right, out)
|
||||
out.push({ op: "binary", operator: expr.operator, span: expr.span })
|
||||
return
|
||||
case "LogicalExpr": {
|
||||
if (expr.operator === "&&") {
|
||||
compileExpr(expr.left, out)
|
||||
const jumpFalse: JumpInstruction = { op: "jump_if_false", target: -1, span: expr.span }
|
||||
out.push(jumpFalse)
|
||||
compileExpr(expr.right, out)
|
||||
out.push({ op: "to_bool", span: expr.span })
|
||||
const jumpEnd: JumpInstruction = { op: "jump", target: -1, span: expr.span }
|
||||
out.push(jumpEnd)
|
||||
const falseTarget = out.length
|
||||
jumpFalse.target = falseTarget
|
||||
out.push({
|
||||
op: "const",
|
||||
literal: { type: "Literal", kind: "boolean", value: false, span: expr.span },
|
||||
span: expr.span,
|
||||
})
|
||||
jumpEnd.target = out.length
|
||||
return
|
||||
}
|
||||
compileExpr(expr.left, out)
|
||||
const jumpTrue: JumpInstruction = { op: "jump_if_true", target: -1, span: expr.span }
|
||||
out.push(jumpTrue)
|
||||
compileExpr(expr.right, out)
|
||||
out.push({ op: "to_bool", span: expr.span })
|
||||
const jumpEnd: JumpInstruction = { op: "jump", target: -1, span: expr.span }
|
||||
out.push(jumpEnd)
|
||||
const trueTarget = out.length
|
||||
jumpTrue.target = trueTarget
|
||||
out.push({
|
||||
op: "const",
|
||||
literal: { type: "Literal", kind: "boolean", value: true, span: expr.span },
|
||||
span: expr.span,
|
||||
})
|
||||
jumpEnd.target = out.length
|
||||
return
|
||||
}
|
||||
case "MemberExpr":
|
||||
if (expr.object.type === "Identifier" && expr.object.name === "formula") {
|
||||
out.push({ op: "load_formula", name: expr.property, span: expr.span })
|
||||
return
|
||||
}
|
||||
compileExpr(expr.object, out)
|
||||
out.push({ op: "member", property: expr.property, span: expr.span })
|
||||
return
|
||||
case "IndexExpr":
|
||||
if (expr.object.type === "Identifier" && expr.object.name === "formula") {
|
||||
compileExpr(expr.index, out)
|
||||
out.push({ op: "load_formula_index", span: expr.span })
|
||||
return
|
||||
}
|
||||
compileExpr(expr.object, out)
|
||||
compileExpr(expr.index, out)
|
||||
out.push({ op: "index", span: expr.span })
|
||||
return
|
||||
case "ListExpr":
|
||||
for (const element of expr.elements) {
|
||||
compileExpr(element, out)
|
||||
}
|
||||
out.push({ op: "list", count: expr.elements.length, span: expr.span })
|
||||
return
|
||||
case "CallExpr": {
|
||||
if (expr.callee.type === "Identifier") {
|
||||
for (const arg of expr.args) {
|
||||
compileExpr(arg, out)
|
||||
}
|
||||
out.push({
|
||||
op: "call_global",
|
||||
name: expr.callee.name,
|
||||
argc: expr.args.length,
|
||||
span: expr.span,
|
||||
})
|
||||
return
|
||||
}
|
||||
if (expr.callee.type === "MemberExpr") {
|
||||
const method = expr.callee.property
|
||||
if (method === "filter" || method === "map" || method === "reduce") {
|
||||
compileExpr(expr.callee.object, out)
|
||||
const exprArg = expr.args[0]
|
||||
const program = exprArg ? compileExpression(exprArg) : null
|
||||
if (method === "filter") {
|
||||
out.push({ op: "filter", program, span: expr.span })
|
||||
return
|
||||
}
|
||||
if (method === "map") {
|
||||
out.push({ op: "map", program, span: expr.span })
|
||||
return
|
||||
}
|
||||
const initialArg = expr.args[1]
|
||||
const initial = initialArg ? compileExpression(initialArg) : null
|
||||
out.push({ op: "reduce", program, initial, span: expr.span })
|
||||
return
|
||||
}
|
||||
compileExpr(expr.callee.object, out)
|
||||
for (const arg of expr.args) {
|
||||
compileExpr(arg, out)
|
||||
}
|
||||
out.push({ op: "call_method", name: method, argc: expr.args.length, span: expr.span })
|
||||
return
|
||||
}
|
||||
compileExpr(expr.callee, out)
|
||||
out.push({ op: "call_dynamic", span: expr.span })
|
||||
return
|
||||
}
|
||||
case "ErrorExpr":
|
||||
out.push({
|
||||
op: "const",
|
||||
literal: { type: "Literal", kind: "null", value: null, span: expr.span },
|
||||
span: expr.span,
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
export const compileExpression = (expr: Expr): ProgramIR => {
|
||||
const instructions: Instruction[] = []
|
||||
compileExpr(expr, instructions)
|
||||
return { instructions, span: expr.span }
|
||||
}
|
||||
53
quartz/util/base/compiler/lexer.test.ts
Normal file
53
quartz/util/base/compiler/lexer.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import assert from "node:assert"
|
||||
import test from "node:test"
|
||||
import { lex } from "./lexer"
|
||||
|
||||
test("lexes bracket access with hyphenated keys", () => {
|
||||
const result = lex('note["my-field"]')
|
||||
const types = result.tokens.map((token) => token.type)
|
||||
assert.deepStrictEqual(types, ["identifier", "punctuation", "string", "punctuation", "eof"])
|
||||
const value = result.tokens[2]
|
||||
if (value.type !== "string") {
|
||||
throw new Error("expected string token")
|
||||
}
|
||||
assert.strictEqual(value.value, "my-field")
|
||||
})
|
||||
|
||||
test("lexes bracket access with escaped quotes", () => {
|
||||
const result = lex('note["my\\\"field"]')
|
||||
const value = result.tokens.find((token) => token.type === "string")
|
||||
if (!value || value.type !== "string") {
|
||||
throw new Error("expected string token")
|
||||
}
|
||||
assert.strictEqual(value.value, 'my"field')
|
||||
})
|
||||
|
||||
test("lexes regex literals with flags", () => {
|
||||
const result = lex('name.replace(/:/g, "-")')
|
||||
const regexToken = result.tokens.find((token) => token.type === "regex")
|
||||
if (!regexToken || regexToken.type !== "regex") {
|
||||
throw new Error("expected regex token")
|
||||
}
|
||||
assert.strictEqual(regexToken.pattern, ":")
|
||||
assert.strictEqual(regexToken.flags, "g")
|
||||
})
|
||||
|
||||
test("lexes regex literals with escaped slashes", () => {
|
||||
const result = lex("path.matches(/\\//)")
|
||||
const regexToken = result.tokens.find((token) => token.type === "regex")
|
||||
if (!regexToken || regexToken.type !== "regex") {
|
||||
throw new Error("expected regex token")
|
||||
}
|
||||
assert.strictEqual(regexToken.pattern, "\\/")
|
||||
assert.strictEqual(regexToken.flags, "")
|
||||
})
|
||||
|
||||
test("lexes division as operator, not regex", () => {
|
||||
const result = lex("a / b")
|
||||
const operatorToken = result.tokens.find(
|
||||
(token) => token.type === "operator" && token.value === "/",
|
||||
)
|
||||
assert.ok(operatorToken)
|
||||
const regexToken = result.tokens.find((token) => token.type === "regex")
|
||||
assert.strictEqual(regexToken, undefined)
|
||||
})
|
||||
300
quartz/util/base/compiler/lexer.ts
Normal file
300
quartz/util/base/compiler/lexer.ts
Normal file
@@ -0,0 +1,300 @@
|
||||
import { Position, Span } from "./ast"
|
||||
import { Diagnostic } from "./errors"
|
||||
import {
|
||||
Operator,
|
||||
Punctuation,
|
||||
Token,
|
||||
StringToken,
|
||||
RegexToken,
|
||||
NumberToken,
|
||||
BooleanToken,
|
||||
NullToken,
|
||||
ThisToken,
|
||||
IdentifierToken,
|
||||
OperatorToken,
|
||||
PunctuationToken,
|
||||
EofToken,
|
||||
} from "./tokens"
|
||||
|
||||
type LexResult = { tokens: Token[]; diagnostics: Diagnostic[] }
|
||||
|
||||
const operatorTokens: Operator[] = [
|
||||
"==",
|
||||
"!=",
|
||||
">=",
|
||||
"<=",
|
||||
"&&",
|
||||
"||",
|
||||
"+",
|
||||
"-",
|
||||
"*",
|
||||
"/",
|
||||
"%",
|
||||
"!",
|
||||
">",
|
||||
"<",
|
||||
]
|
||||
|
||||
const punctuationTokens: Punctuation[] = [".", ",", "(", ")", "[", "]"]
|
||||
|
||||
const isOperator = (value: string): value is Operator =>
|
||||
operatorTokens.some((token) => token === value)
|
||||
|
||||
const isPunctuation = (value: string): value is Punctuation =>
|
||||
punctuationTokens.some((token) => token === value)
|
||||
|
||||
export function lex(input: string, file?: string): LexResult {
|
||||
const tokens: Token[] = []
|
||||
const diagnostics: Diagnostic[] = []
|
||||
let index = 0
|
||||
let line = 1
|
||||
let column = 1
|
||||
let canStartRegex = true
|
||||
|
||||
const makePosition = (offset: number, lineValue: number, columnValue: number): Position => ({
|
||||
offset,
|
||||
line: lineValue,
|
||||
column: columnValue,
|
||||
})
|
||||
|
||||
const currentPosition = (): Position => makePosition(index, line, column)
|
||||
|
||||
const makeSpan = (start: Position, end: Position): Span => ({ start, end, file })
|
||||
|
||||
const advance = (): string => {
|
||||
const ch = input[index]
|
||||
index += 1
|
||||
if (ch === "\n") {
|
||||
line += 1
|
||||
column = 1
|
||||
} else {
|
||||
column += 1
|
||||
}
|
||||
return ch
|
||||
}
|
||||
|
||||
const peek = (offset = 0): string => input[index + offset] ?? ""
|
||||
|
||||
const addDiagnostic = (message: string, span: Span) => {
|
||||
diagnostics.push({ kind: "lex", message, span })
|
||||
}
|
||||
|
||||
const updateRegexState = (token: Token | null) => {
|
||||
if (!token) {
|
||||
canStartRegex = true
|
||||
return
|
||||
}
|
||||
if (token.type === "operator") {
|
||||
canStartRegex = true
|
||||
return
|
||||
}
|
||||
if (token.type === "punctuation") {
|
||||
canStartRegex = token.value === "(" || token.value === "[" || token.value === ","
|
||||
return
|
||||
}
|
||||
canStartRegex = false
|
||||
}
|
||||
|
||||
const isWhitespace = (ch: string) => ch === " " || ch === "\t" || ch === "\n" || ch === "\r"
|
||||
const isDigit = (ch: string) => ch >= "0" && ch <= "9"
|
||||
const isIdentStart = (ch: string) =>
|
||||
(ch >= "a" && ch <= "z") || (ch >= "A" && ch <= "Z") || ch === "_"
|
||||
const isIdentContinue = (ch: string) => isIdentStart(ch) || isDigit(ch)
|
||||
|
||||
while (index < input.length) {
|
||||
const ch = peek()
|
||||
|
||||
if (isWhitespace(ch)) {
|
||||
advance()
|
||||
continue
|
||||
}
|
||||
|
||||
const start = currentPosition()
|
||||
|
||||
if (ch === "=" && peek(1) !== "=") {
|
||||
let offset = 1
|
||||
while (isWhitespace(peek(offset))) {
|
||||
offset += 1
|
||||
}
|
||||
if (peek(offset) === ">") {
|
||||
advance()
|
||||
for (let step = 1; step < offset; step += 1) {
|
||||
advance()
|
||||
}
|
||||
if (peek() === ">") {
|
||||
advance()
|
||||
}
|
||||
const end = currentPosition()
|
||||
addDiagnostic(
|
||||
"arrow functions are not supported, use list.filter(expression)",
|
||||
makeSpan(start, end),
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (ch === '"' || ch === "'") {
|
||||
const quote = advance()
|
||||
let value = ""
|
||||
let closed = false
|
||||
|
||||
while (index < input.length) {
|
||||
const curr = advance()
|
||||
if (curr === quote) {
|
||||
closed = true
|
||||
break
|
||||
}
|
||||
if (curr === "\\") {
|
||||
const next = advance()
|
||||
if (next === "n") value += "\n"
|
||||
else if (next === "t") value += "\t"
|
||||
else if (next === "r") value += "\r"
|
||||
else if (next === "\\" || next === "'" || next === '"') value += next
|
||||
else value += next
|
||||
} else {
|
||||
value += curr
|
||||
}
|
||||
}
|
||||
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
if (!closed) addDiagnostic("unterminated string literal", span)
|
||||
const token: StringToken = { type: "string", value, span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
|
||||
if (ch === "/" && canStartRegex) {
|
||||
const next = peek(1)
|
||||
if (next !== "/" && next !== "") {
|
||||
advance()
|
||||
let pattern = ""
|
||||
let closed = false
|
||||
let inClass = false
|
||||
while (index < input.length) {
|
||||
const curr = advance()
|
||||
if (curr === "\\" && index < input.length) {
|
||||
const escaped = advance()
|
||||
pattern += `\\${escaped}`
|
||||
continue
|
||||
}
|
||||
if (curr === "[" && !inClass) inClass = true
|
||||
if (curr === "]" && inClass) inClass = false
|
||||
if (curr === "/" && !inClass) {
|
||||
closed = true
|
||||
break
|
||||
}
|
||||
pattern += curr
|
||||
}
|
||||
let flags = ""
|
||||
while (index < input.length) {
|
||||
const flag = peek()
|
||||
if (!/^[gimsuy]$/.test(flag)) break
|
||||
flags += advance()
|
||||
}
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
if (!closed) addDiagnostic("unterminated regex literal", span)
|
||||
const token: RegexToken = { type: "regex", pattern, flags, span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (isDigit(ch)) {
|
||||
let num = ""
|
||||
while (index < input.length && isDigit(peek())) {
|
||||
num += advance()
|
||||
}
|
||||
if (peek() === "." && isDigit(peek(1))) {
|
||||
num += advance()
|
||||
while (index < input.length && isDigit(peek())) {
|
||||
num += advance()
|
||||
}
|
||||
}
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
const token: NumberToken = { type: "number", value: Number(num), span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
|
||||
if (isIdentStart(ch)) {
|
||||
let ident = ""
|
||||
while (index < input.length && isIdentContinue(peek())) {
|
||||
ident += advance()
|
||||
}
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
if (ident === "true" || ident === "false") {
|
||||
const token: BooleanToken = { type: "boolean", value: ident === "true", span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
if (ident === "null") {
|
||||
const token: NullToken = { type: "null", span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
if (ident === "this") {
|
||||
const token: ThisToken = { type: "this", span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
const token: IdentifierToken = { type: "identifier", value: ident, span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
|
||||
const twoChar = ch + peek(1)
|
||||
if (isOperator(twoChar)) {
|
||||
advance()
|
||||
advance()
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
const token: OperatorToken = { type: "operator", value: twoChar, span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
|
||||
if (isOperator(ch)) {
|
||||
advance()
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
const token: OperatorToken = { type: "operator", value: ch, span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
|
||||
if (isPunctuation(ch)) {
|
||||
advance()
|
||||
const end = currentPosition()
|
||||
const span = makeSpan(start, end)
|
||||
const token: PunctuationToken = { type: "punctuation", value: ch, span }
|
||||
tokens.push(token)
|
||||
updateRegexState(token)
|
||||
continue
|
||||
}
|
||||
|
||||
advance()
|
||||
const end = currentPosition()
|
||||
addDiagnostic(`unexpected character: ${ch}`, makeSpan(start, end))
|
||||
}
|
||||
|
||||
const eofPos = currentPosition()
|
||||
const eofSpan = makeSpan(eofPos, eofPos)
|
||||
const eofToken: EofToken = { type: "eof", span: eofSpan }
|
||||
tokens.push(eofToken)
|
||||
updateRegexState(eofToken)
|
||||
|
||||
return { tokens, diagnostics }
|
||||
}
|
||||
261
quartz/util/base/compiler/parser.test.ts
Normal file
261
quartz/util/base/compiler/parser.test.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
import assert from "node:assert"
|
||||
import test from "node:test"
|
||||
import { parseExpressionSource } from "./parser"
|
||||
|
||||
const isRecord = (value: unknown): value is Record<string, unknown> =>
|
||||
typeof value === "object" && value !== null
|
||||
|
||||
const strip = (node: unknown): unknown => {
|
||||
if (!isRecord(node)) return node
|
||||
const type = node.type
|
||||
if (type === "Identifier") {
|
||||
return { type, name: node.name }
|
||||
}
|
||||
if (type === "Literal") {
|
||||
const kind = node.kind
|
||||
const value = node.value
|
||||
const flags = node.flags
|
||||
return flags !== undefined ? { type, kind, value, flags } : { type, kind, value }
|
||||
}
|
||||
if (type === "UnaryExpr") {
|
||||
return { type, operator: node.operator, argument: strip(node.argument) }
|
||||
}
|
||||
if (type === "BinaryExpr" || type === "LogicalExpr") {
|
||||
return { type, operator: node.operator, left: strip(node.left), right: strip(node.right) }
|
||||
}
|
||||
if (type === "CallExpr") {
|
||||
const args = Array.isArray(node.args) ? node.args.map(strip) : []
|
||||
return { type, callee: strip(node.callee), args }
|
||||
}
|
||||
if (type === "MemberExpr") {
|
||||
return { type, object: strip(node.object), property: node.property }
|
||||
}
|
||||
if (type === "IndexExpr") {
|
||||
return { type, object: strip(node.object), index: strip(node.index) }
|
||||
}
|
||||
if (type === "ListExpr") {
|
||||
const elements = Array.isArray(node.elements) ? node.elements.map(strip) : []
|
||||
return { type, elements }
|
||||
}
|
||||
if (type === "ErrorExpr") {
|
||||
return { type, message: node.message }
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
test("ebnf to ast mapping snapshots", () => {
|
||||
const cases: Array<{ source: string; expected: unknown }> = [
|
||||
{
|
||||
source: 'status == "done"',
|
||||
expected: {
|
||||
type: "BinaryExpr",
|
||||
operator: "==",
|
||||
left: { type: "Identifier", name: "status" },
|
||||
right: { type: "Literal", kind: "string", value: "done" },
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "!done",
|
||||
expected: {
|
||||
type: "UnaryExpr",
|
||||
operator: "!",
|
||||
argument: { type: "Identifier", name: "done" },
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "file.ctime",
|
||||
expected: {
|
||||
type: "MemberExpr",
|
||||
object: { type: "Identifier", name: "file" },
|
||||
property: "ctime",
|
||||
},
|
||||
},
|
||||
{
|
||||
source: 'note["my-field"]',
|
||||
expected: {
|
||||
type: "IndexExpr",
|
||||
object: { type: "Identifier", name: "note" },
|
||||
index: { type: "Literal", kind: "string", value: "my-field" },
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "date(due) < today()",
|
||||
expected: {
|
||||
type: "BinaryExpr",
|
||||
operator: "<",
|
||||
left: {
|
||||
type: "CallExpr",
|
||||
callee: { type: "Identifier", name: "date" },
|
||||
args: [{ type: "Identifier", name: "due" }],
|
||||
},
|
||||
right: { type: "CallExpr", callee: { type: "Identifier", name: "today" }, args: [] },
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "now() - file.ctime",
|
||||
expected: {
|
||||
type: "BinaryExpr",
|
||||
operator: "-",
|
||||
left: { type: "CallExpr", callee: { type: "Identifier", name: "now" }, args: [] },
|
||||
right: {
|
||||
type: "MemberExpr",
|
||||
object: { type: "Identifier", name: "file" },
|
||||
property: "ctime",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "(pages * 2).round(0)",
|
||||
expected: {
|
||||
type: "CallExpr",
|
||||
callee: {
|
||||
type: "MemberExpr",
|
||||
object: {
|
||||
type: "BinaryExpr",
|
||||
operator: "*",
|
||||
left: { type: "Identifier", name: "pages" },
|
||||
right: { type: "Literal", kind: "number", value: 2 },
|
||||
},
|
||||
property: "round",
|
||||
},
|
||||
args: [{ type: "Literal", kind: "number", value: 0 }],
|
||||
},
|
||||
},
|
||||
{
|
||||
source: 'tags.containsAny("a","b")',
|
||||
expected: {
|
||||
type: "CallExpr",
|
||||
callee: {
|
||||
type: "MemberExpr",
|
||||
object: { type: "Identifier", name: "tags" },
|
||||
property: "containsAny",
|
||||
},
|
||||
args: [
|
||||
{ type: "Literal", kind: "string", value: "a" },
|
||||
{ type: "Literal", kind: "string", value: "b" },
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "list(links).filter(value.isTruthy())",
|
||||
expected: {
|
||||
type: "CallExpr",
|
||||
callee: {
|
||||
type: "MemberExpr",
|
||||
object: {
|
||||
type: "CallExpr",
|
||||
callee: { type: "Identifier", name: "list" },
|
||||
args: [{ type: "Identifier", name: "links" }],
|
||||
},
|
||||
property: "filter",
|
||||
},
|
||||
args: [
|
||||
{
|
||||
type: "CallExpr",
|
||||
callee: {
|
||||
type: "MemberExpr",
|
||||
object: { type: "Identifier", name: "value" },
|
||||
property: "isTruthy",
|
||||
},
|
||||
args: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
source: '["a", "b", "c"].length',
|
||||
expected: {
|
||||
type: "MemberExpr",
|
||||
object: {
|
||||
type: "ListExpr",
|
||||
elements: [
|
||||
{ type: "Literal", kind: "string", value: "a" },
|
||||
{ type: "Literal", kind: "string", value: "b" },
|
||||
{ type: "Literal", kind: "string", value: "c" },
|
||||
],
|
||||
},
|
||||
property: "length",
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "this.file.name",
|
||||
expected: {
|
||||
type: "MemberExpr",
|
||||
object: {
|
||||
type: "MemberExpr",
|
||||
object: { type: "Identifier", name: "this" },
|
||||
property: "file",
|
||||
},
|
||||
property: "name",
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "a || b && c",
|
||||
expected: {
|
||||
type: "LogicalExpr",
|
||||
operator: "||",
|
||||
left: { type: "Identifier", name: "a" },
|
||||
right: {
|
||||
type: "LogicalExpr",
|
||||
operator: "&&",
|
||||
left: { type: "Identifier", name: "b" },
|
||||
right: { type: "Identifier", name: "c" },
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
source: "values[0]",
|
||||
expected: {
|
||||
type: "IndexExpr",
|
||||
object: { type: "Identifier", name: "values" },
|
||||
index: { type: "Literal", kind: "number", value: 0 },
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
for (const entry of cases) {
|
||||
const result = parseExpressionSource(entry.source)
|
||||
assert.strictEqual(result.diagnostics.length, 0)
|
||||
assert.deepStrictEqual(strip(result.program.body), entry.expected)
|
||||
}
|
||||
})
|
||||
|
||||
test("syntax doc samples parse", () => {
|
||||
const samples = [
|
||||
'note["price"]',
|
||||
"file.size > 10",
|
||||
"file.hasLink(this.file)",
|
||||
'date("2024-12-01") + "1M" + "4h" + "3m"',
|
||||
"now() - file.ctime",
|
||||
"property[0]",
|
||||
'link("filename", icon("plus"))',
|
||||
'file.mtime > now() - "1 week"',
|
||||
'/abc/.matches("abcde")',
|
||||
'name.replace(/:/g, "-")',
|
||||
'values.filter(value.isType("number")).reduce(if(acc == null || value > acc, value, acc), null)',
|
||||
]
|
||||
|
||||
for (const source of samples) {
|
||||
const result = parseExpressionSource(source)
|
||||
assert.strictEqual(result.diagnostics.length, 0)
|
||||
assert.ok(result.program.body)
|
||||
}
|
||||
})
|
||||
|
||||
test("string escapes are decoded", () => {
|
||||
const result = parseExpressionSource('"a\\n\\"b"')
|
||||
assert.strictEqual(result.diagnostics.length, 0)
|
||||
const literal = strip(result.program.body)
|
||||
if (!isRecord(literal)) {
|
||||
throw new Error("expected literal record")
|
||||
}
|
||||
assert.strictEqual(literal.type, "Literal")
|
||||
assert.strictEqual(literal.kind, "string")
|
||||
assert.strictEqual(literal.value, 'a\n"b')
|
||||
})
|
||||
|
||||
test("parser reports errors and recovers", () => {
|
||||
const result = parseExpressionSource("status ==")
|
||||
assert.ok(result.diagnostics.length > 0)
|
||||
assert.ok(result.program.body)
|
||||
})
|
||||
370
quartz/util/base/compiler/parser.ts
Normal file
370
quartz/util/base/compiler/parser.ts
Normal file
@@ -0,0 +1,370 @@
|
||||
import {
|
||||
BinaryExpr,
|
||||
CallExpr,
|
||||
ErrorExpr,
|
||||
Expr,
|
||||
Identifier,
|
||||
IndexExpr,
|
||||
ListExpr,
|
||||
Literal,
|
||||
LogicalExpr,
|
||||
MemberExpr,
|
||||
Program,
|
||||
UnaryExpr,
|
||||
spanFrom,
|
||||
} from "./ast"
|
||||
import { Diagnostic } from "./errors"
|
||||
import { lex } from "./lexer"
|
||||
import { Operator, Token } from "./tokens"
|
||||
|
||||
// Everything a caller needs after parsing: the Program AST, the raw token
// stream, and the diagnostics accumulated by both the lexer and the parser.
export type ParseResult = { program: Program; tokens: Token[]; diagnostics: Diagnostic[] }

// Pratt-parser binding powers for one infix operator. `lbp` (left binding
// power) decides whether the operator captures the expression parsed so far;
// `rbp` is the minimum power used when parsing its right operand. Since
// rbp = lbp + 1 throughout the table below, every operator is left-associative.
type InfixInfo = { lbp: number; rbp: number; kind: "binary" | "logical" }

// Precedence table, weakest-binding first:
//   || < && < (== !=) < (> >= < <=) < (+ -) < (* / %)
const infixBindingPowers: Record<string, InfixInfo> = {
  "||": { lbp: 1, rbp: 2, kind: "logical" },
  "&&": { lbp: 3, rbp: 4, kind: "logical" },
  "==": { lbp: 5, rbp: 6, kind: "binary" },
  "!=": { lbp: 5, rbp: 6, kind: "binary" },
  ">": { lbp: 7, rbp: 8, kind: "binary" },
  ">=": { lbp: 7, rbp: 8, kind: "binary" },
  "<": { lbp: 7, rbp: 8, kind: "binary" },
  "<=": { lbp: 7, rbp: 8, kind: "binary" },
  "+": { lbp: 9, rbp: 10, kind: "binary" },
  "-": { lbp: 9, rbp: 10, kind: "binary" },
  "*": { lbp: 11, rbp: 12, kind: "binary" },
  "/": { lbp: 11, rbp: 12, kind: "binary" },
  "%": { lbp: 11, rbp: 12, kind: "binary" },
}
|
||||
|
||||
const isLogicalOperator = (value: Operator): value is LogicalExpr["operator"] =>
|
||||
value === "&&" || value === "||"
|
||||
|
||||
const isBinaryOperator = (value: Operator): value is BinaryExpr["operator"] =>
|
||||
value === "+" ||
|
||||
value === "-" ||
|
||||
value === "*" ||
|
||||
value === "/" ||
|
||||
value === "%" ||
|
||||
value === "==" ||
|
||||
value === "!=" ||
|
||||
value === ">" ||
|
||||
value === ">=" ||
|
||||
value === "<" ||
|
||||
value === "<="
|
||||
|
||||
export function parseExpressionSource(source: string, file?: string): ParseResult {
|
||||
const { tokens, diagnostics } = lex(source, file)
|
||||
const parser = new Parser(tokens, diagnostics)
|
||||
const program = parser.parseProgram()
|
||||
return { program, tokens, diagnostics }
|
||||
}
|
||||
|
||||
/**
 * Pratt / recursive-descent parser over the token stream produced by `lex`.
 * Parse errors are appended to the diagnostics list shared with the lexer,
 * and the parser recovers where it can so a (possibly partial) AST is always
 * produced instead of throwing.
 */
class Parser {
  private tokens: Token[] // full token stream; never consumed past the final token
  private index: number // cursor into `tokens`
  private diagnostics: Diagnostic[] // shared with the lexer; parse errors appended here

  constructor(tokens: Token[], diagnostics: Diagnostic[]) {
    this.tokens = tokens
    this.index = 0
    this.diagnostics = diagnostics
  }

  // Parse the entire input as one optional expression (empty input -> null body).
  // NOTE(review): assumes `tokens` is non-empty — presumably the lexer always
  // emits at least an eof token; `tokens[tokens.length - 1].span` would throw
  // otherwise. Confirm against lex().
  parseProgram(): Program {
    const start = this.tokens[0]?.span ?? this.tokens[this.tokens.length - 1].span
    const body = this.peek().type === "eof" ? null : this.parseExpression(0)
    const end = this.tokens[this.tokens.length - 1]?.span ?? start
    return { type: "Program", body, span: spanFrom(start, end) }
  }

  // Core Pratt loop: parse one operand (prefix + postfix), then keep folding
  // in infix operators whose left binding power is at least `minBp`.
  private parseExpression(minBp: number): Expr {
    let left = this.parsePrefix()
    left = this.parsePostfix(left)

    while (true) {
      const token = this.peek()
      if (token.type !== "operator") break
      const info = infixBindingPowers[token.value]
      // Unknown operator or one that binds too weakly for this context.
      if (!info || info.lbp < minBp) break
      this.advance()
      // rbp > lbp in the table, which makes operators left-associative.
      const right = this.parseExpression(info.rbp)
      const span = spanFrom(left.span, right.span)
      if (info.kind === "logical" && isLogicalOperator(token.value)) {
        left = { type: "LogicalExpr", operator: token.value, left, right, span }
      } else if (info.kind === "binary" && isBinaryOperator(token.value)) {
        left = { type: "BinaryExpr", operator: token.value, left, right, span }
      } else {
        // Table/kind mismatch: report but keep `left` unchanged (right side is
        // consumed and dropped).
        this.error("unexpected operator", token.span)
      }
    }

    return left
  }

  // Prefix position: only "!" and unary "-" are allowed; everything else
  // falls through to a primary expression.
  private parsePrefix(): Expr {
    const token = this.peek()
    if (token.type === "operator" && (token.value === "!" || token.value === "-")) {
      this.advance()
      // 13 is above every infix lbp in the table, so unary binds tighter than
      // any infix operator.
      const argument = this.parseExpression(13)
      const span = spanFrom(token.span, argument.span)
      const node: UnaryExpr = { type: "UnaryExpr", operator: token.value, argument, span }
      return node
    }
    return this.parsePrimary()
  }

  // Postfix chain: member access (.prop), indexing ([expr]) and calls
  // ((args...)) can repeat in any order, left to right.
  private parsePostfix(expr: Expr): Expr {
    let current = expr
    while (true) {
      const token = this.peek()
      if (token.type === "punctuation" && token.value === ".") {
        this.advance()
        const propToken = this.peek()
        if (propToken.type !== "identifier") {
          // Recovery: leave the bad token for the caller and stop chaining.
          this.error("expected identifier after '.'", propToken.span)
          return current
        }
        this.advance()
        const span = spanFrom(current.span, propToken.span)
        const node: MemberExpr = {
          type: "MemberExpr",
          object: current,
          property: propToken.value,
          span,
        }
        current = node
        continue
      }

      if (token.type === "punctuation" && token.value === "[") {
        this.advance()
        const indexExpr = this.parseExpression(0)
        const endToken = this.peek()
        if (!(endToken.type === "punctuation" && endToken.value === "]")) {
          this.error("expected ']'", endToken.span)
          this.syncTo("]")
          // NOTE(review): unlike the call/paren recovery paths, the ']' that
          // syncTo stops at is NOT consumed here, and the span below uses the
          // token that failed the check rather than the recovered ']'. Looks
          // inconsistent — confirm whether leaving ']' for the caller is
          // intentional.
        } else {
          this.advance()
        }
        const span = spanFrom(current.span, endToken.span)
        const node: IndexExpr = { type: "IndexExpr", object: current, index: indexExpr, span }
        current = node
        continue
      }

      if (token.type === "punctuation" && token.value === "(") {
        this.advance()
        const args: Expr[] = []
        // Argument list: expressions separated by ',', a trailing comma before
        // ')' is tolerated.
        while (this.peek().type !== "eof") {
          const next = this.peek()
          if (next.type === "punctuation" && next.value === ")") {
            this.advance()
            break
          }
          const arg = this.parseExpression(0)
          args.push(arg)
          const sep = this.peek()
          if (sep.type === "punctuation" && sep.value === ",") {
            this.advance()
            const maybeClose = this.peek()
            if (maybeClose.type === "punctuation" && maybeClose.value === ")") {
              this.advance()
              break
            }
            continue
          }
          if (sep.type === "punctuation" && sep.value === ")") {
            this.advance()
            break
          }
          // Recovery: report, skip to the next ')' and consume it if found.
          this.error("expected ',' or ')'", sep.span)
          this.syncTo(")")
          const maybeClose = this.peek()
          if (maybeClose.type === "punctuation" && maybeClose.value === ")") {
            this.advance()
          }
          break
        }
        // previous() is the token consumed last — normally the closing ')'.
        const endToken = this.previous()
        const span = spanFrom(current.span, endToken.span)
        const node: CallExpr = { type: "CallExpr", callee: current, args, span }
        current = node
        continue
      }

      break
    }
    return current
  }

  // Primary expressions: literals, identifiers, `this`, parenthesized
  // expressions and list literals. Anything else becomes an ErrorExpr.
  private parsePrimary(): Expr {
    const token = this.peek()

    if (token.type === "number") {
      this.advance()
      const node: Literal = {
        type: "Literal",
        kind: "number",
        value: token.value,
        span: token.span,
      }
      return node
    }

    if (token.type === "string") {
      this.advance()
      const node: Literal = {
        type: "Literal",
        kind: "string",
        value: token.value,
        span: token.span,
      }
      return node
    }

    if (token.type === "boolean") {
      this.advance()
      const node: Literal = {
        type: "Literal",
        kind: "boolean",
        value: token.value,
        span: token.span,
      }
      return node
    }

    if (token.type === "null") {
      this.advance()
      const node: Literal = { type: "Literal", kind: "null", value: null, span: token.span }
      return node
    }

    if (token.type === "regex") {
      this.advance()
      const node: Literal = {
        type: "Literal",
        kind: "regex",
        value: token.pattern,
        flags: token.flags,
        span: token.span,
      }
      return node
    }

    if (token.type === "identifier") {
      this.advance()
      const node: Identifier = { type: "Identifier", name: token.value, span: token.span }
      return node
    }

    // `this` is lexed as its own token but represented as a plain Identifier
    // named "this" in the AST.
    if (token.type === "this") {
      this.advance()
      const node: Identifier = { type: "Identifier", name: "this", span: token.span }
      return node
    }

    if (token.type === "punctuation" && token.value === "(") {
      this.advance()
      const expr = this.parseExpression(0)
      const closeToken = this.peek()
      if (closeToken.type === "punctuation" && closeToken.value === ")") {
        this.advance()
      } else {
        // Recovery: report, then skip to ')' and consume it if present.
        this.error("expected ')'", closeToken.span)
        this.syncTo(")")
        const maybeClose = this.peek()
        if (maybeClose.type === "punctuation" && maybeClose.value === ")") {
          this.advance()
        }
      }
      // Grouping produces no AST node of its own; the inner expression (with
      // its own span) is returned as-is.
      return expr
    }

    if (token.type === "punctuation" && token.value === "[") {
      return this.parseList()
    }

    // Unknown token: report, consume it so we make progress, and return a
    // placeholder node.
    this.error("unexpected token", token.span)
    this.advance()
    const node: ErrorExpr = { type: "ErrorExpr", message: "unexpected token", span: token.span }
    return node
  }

  // List literal: '[' elements separated by ',' and closed by ']'. A trailing
  // comma before ']' is tolerated, mirroring the call-argument grammar.
  private parseList(): Expr {
    const startToken = this.peek()
    this.advance()
    const elements: Expr[] = []
    while (this.peek().type !== "eof") {
      const next = this.peek()
      if (next.type === "punctuation" && next.value === "]") {
        this.advance()
        const span = spanFrom(startToken.span, next.span)
        const node: ListExpr = { type: "ListExpr", elements, span }
        return node
      }
      const element = this.parseExpression(0)
      elements.push(element)
      const sep = this.peek()
      if (sep.type === "punctuation" && sep.value === ",") {
        this.advance()
        const maybeClose = this.peek()
        if (maybeClose.type === "punctuation" && maybeClose.value === "]") {
          this.advance()
          const span = spanFrom(startToken.span, maybeClose.span)
          const node: ListExpr = { type: "ListExpr", elements, span }
          return node
        }
        continue
      }
      if (sep.type === "punctuation" && sep.value === "]") {
        this.advance()
        const span = spanFrom(startToken.span, sep.span)
        const node: ListExpr = { type: "ListExpr", elements, span }
        return node
      }
      // Recovery: report, skip to the next ']' and consume it if found.
      this.error("expected ',' or ']'", sep.span)
      this.syncTo("]")
      const maybeClose = this.peek()
      if (maybeClose.type === "punctuation" && maybeClose.value === "]") {
        const endToken = maybeClose
        this.advance()
        const span = spanFrom(startToken.span, endToken.span)
        const node: ListExpr = { type: "ListExpr", elements, span }
        return node
      }
      break
    }
    // Reached eof (or failed recovery) without a closing ']': span ends at the
    // last consumed token.
    const endToken = this.previous()
    const span = spanFrom(startToken.span, endToken.span)
    return { type: "ListExpr", elements, span }
  }

  // Record a parse diagnostic; never throws.
  private error(message: string, span: Token["span"]) {
    this.diagnostics.push({ kind: "parse", message, span })
  }

  // Skip tokens until the requested closer (or eof) is the current token.
  // The closer itself is NOT consumed.
  private syncTo(value: ")" | "]") {
    while (this.peek().type !== "eof") {
      const token = this.peek()
      if (token.type === "punctuation" && token.value === value) {
        return
      }
      this.advance()
    }
  }

  // Current token without consuming it.
  private peek(): Token {
    return this.tokens[this.index]
  }

  // Most recently consumed token (or the first token if nothing was consumed).
  private previous(): Token {
    return this.tokens[Math.max(0, this.index - 1)]
  }

  // Consume and return the current token. The cursor never moves past the
  // final token, so peek() after exhaustion keeps returning it.
  private advance(): Token {
    const token = this.tokens[this.index]
    if (this.index < this.tokens.length - 1) this.index += 1
    return token
  }
}
|
||||
27
quartz/util/base/compiler/properties.test.ts
Normal file
27
quartz/util/base/compiler/properties.test.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import assert from "node:assert"
|
||||
import test from "node:test"
|
||||
import { parseExpressionSource } from "./parser"
|
||||
import { buildPropertyExpressionSource } from "./properties"
|
||||
|
||||
test("builds property expression sources", () => {
|
||||
const cases: Array<{ input: string; expected: string }> = [
|
||||
{ input: "status", expected: "note.status" },
|
||||
{ input: "note.status", expected: "note.status" },
|
||||
{ input: "file.name", expected: "file.name" },
|
||||
{ input: "file.my-field", expected: 'file["my-field"]' },
|
||||
{ input: "my-field", expected: 'note["my-field"]' },
|
||||
{ input: 'note["my field"]', expected: 'note["my field"]' },
|
||||
{ input: "formula.total", expected: "formula.total" },
|
||||
{ input: "this.file.name", expected: "this.file.name" },
|
||||
{ input: "a.b-c.d", expected: 'note.a["b-c"].d' },
|
||||
{ input: "date(file.ctime)", expected: "date(file.ctime)" },
|
||||
]
|
||||
|
||||
for (const entry of cases) {
|
||||
const result = buildPropertyExpressionSource(entry.input)
|
||||
assert.strictEqual(result, entry.expected)
|
||||
const parsed = parseExpressionSource(entry.expected)
|
||||
assert.strictEqual(parsed.diagnostics.length, 0)
|
||||
assert.ok(parsed.program.body)
|
||||
}
|
||||
})
|
||||
27
quartz/util/base/compiler/properties.ts
Normal file
27
quartz/util/base/compiler/properties.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
const simpleIdentifierPattern = /^[A-Za-z_][A-Za-z0-9_]*$/
|
||||
|
||||
export function buildPropertyExpressionSource(property: string): string | null {
|
||||
const trimmed = property.trim()
|
||||
if (!trimmed) return null
|
||||
if (trimmed.includes("(") || trimmed.includes("[") || trimmed.includes("]")) {
|
||||
return trimmed
|
||||
}
|
||||
const parts = trimmed.split(".")
|
||||
const root = parts[0]
|
||||
const rest = parts.slice(1)
|
||||
const buildAccess = (base: string, segments: string[]) => {
|
||||
let source = base
|
||||
for (const segment of segments) {
|
||||
if (simpleIdentifierPattern.test(segment)) {
|
||||
source = `${source}.${segment}`
|
||||
} else {
|
||||
source = `${source}[${JSON.stringify(segment)}]`
|
||||
}
|
||||
}
|
||||
return source
|
||||
}
|
||||
if (root === "file" || root === "note" || root === "formula" || root === "this") {
|
||||
return buildAccess(root, rest)
|
||||
}
|
||||
return buildAccess("note", parts)
|
||||
}
|
||||
36
quartz/util/base/compiler/schema.ts
Normal file
36
quartz/util/base/compiler/schema.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
// Names of the built-in summary computations a view column may reference.
// Kept as a readonly tuple so the union type below is derived from the values.
export const BUILTIN_SUMMARY_TYPES = [
  "count",
  "sum",
  "average",
  "avg",
  "min",
  "max",
  "range",
  "unique",
  "filled",
  "missing",
  "median",
  "stddev",
  "checked",
  "unchecked",
  "empty",
  "earliest",
  "latest",
] as const

// Union of the literal names above (e.g. "count" | "sum" | …).
export type BuiltinSummaryType = (typeof BUILTIN_SUMMARY_TYPES)[number]

/**
 * How a column's summary is computed: a built-in aggregation or a formula.
 *
 * NOTE(review): the optional fields are not constrained by the `type` tag —
 * presumably `builtinType` accompanies "builtin" and `formulaRef`/`expression`
 * accompany "formula"; confirm against consumers. A discriminated union would
 * encode that invariant, but is left unchanged here to avoid touching callers.
 */
export interface SummaryDefinition {
  type: "builtin" | "formula"
  builtinType?: BuiltinSummaryType
  formulaRef?: string
  expression?: string
}

// Per-view summary configuration, keyed by column name.
export interface ViewSummaryConfig {
  columns: Record<string, SummaryDefinition>
}

// Display configuration for a single property.
export interface PropertyConfig {
  displayName?: string
}
|
||||
42
quartz/util/base/compiler/tokens.ts
Normal file
42
quartz/util/base/compiler/tokens.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { Span } from "./ast"
|
||||
|
||||
// Every operator lexeme the lexer can emit ("!" appears only in prefix
// position; the parser decides infix vs prefix usage).
export type Operator =
  | "=="
  | "!="
  | ">="
  | "<="
  | ">"
  | "<"
  | "&&"
  | "||"
  | "+"
  | "-"
  | "*"
  | "/"
  | "%"
  | "!"

// Structural punctuation lexemes.
export type Punctuation = "." | "," | "(" | ")" | "[" | "]"

// One token variant per lexeme class; every token carries its source span.
export type NumberToken = { type: "number"; value: number; span: Span }
export type StringToken = { type: "string"; value: string; span: Span }
export type BooleanToken = { type: "boolean"; value: boolean; span: Span }
export type NullToken = { type: "null"; span: Span }
export type IdentifierToken = { type: "identifier"; value: string; span: Span }
// `this` gets a dedicated token type (not an identifier) at the lexer level.
export type ThisToken = { type: "this"; span: Span }
export type OperatorToken = { type: "operator"; value: Operator; span: Span }
export type PunctuationToken = { type: "punctuation"; value: Punctuation; span: Span }
// Regex literals keep pattern and flags separate (e.g. /abc/g).
export type RegexToken = { type: "regex"; pattern: string; flags: string; span: Span }
// End-of-input marker; the parser relies on it to terminate safely.
export type EofToken = { type: "eof"; span: Span }

// Discriminated union (on `type`) over all token variants.
export type Token =
  | NumberToken
  | StringToken
  | BooleanToken
  | NullToken
  | IdentifierToken
  | ThisToken
  | OperatorToken
  | PunctuationToken
  | RegexToken
  | EofToken
|
||||
Reference in New Issue
Block a user