Compare commits
3 Commits
feat/bases
...
feat/seman
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
eb8a4cce18 | ||
|
|
68682a8fe3 | ||
|
|
f533902c75 |
8
.github/workflows/build-preview.yaml
vendored
8
.github/workflows/build-preview.yaml
vendored
@@ -11,17 +11,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
name: Build Preview
|
name: Build Preview
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@v6
|
uses: actions/setup-node@v5
|
||||||
with:
|
with:
|
||||||
node-version: 22
|
node-version: 22
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v5
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: ~/.npm
|
path: ~/.npm
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||||
@@ -37,7 +37,7 @@ jobs:
|
|||||||
run: npx quartz build -d docs -v
|
run: npx quartz build -d docs -v
|
||||||
|
|
||||||
- name: Upload build artifact
|
- name: Upload build artifact
|
||||||
uses: actions/upload-artifact@v6
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: preview-build
|
name: preview-build
|
||||||
path: public
|
path: public
|
||||||
|
|||||||
10
.github/workflows/ci.yaml
vendored
10
.github/workflows/ci.yaml
vendored
@@ -19,17 +19,17 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@v6
|
uses: actions/setup-node@v5
|
||||||
with:
|
with:
|
||||||
node-version: 22
|
node-version: 22
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v5
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: ~/.npm
|
path: ~/.npm
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||||
@@ -53,11 +53,11 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@v6
|
uses: actions/setup-node@v5
|
||||||
with:
|
with:
|
||||||
node-version: 22
|
node-version: 22
|
||||||
- name: Get package version
|
- name: Get package version
|
||||||
|
|||||||
2
.github/workflows/deploy-preview.yaml
vendored
2
.github/workflows/deploy-preview.yaml
vendored
@@ -18,7 +18,7 @@ jobs:
|
|||||||
name: Deploy Preview to Cloudflare Pages
|
name: Deploy Preview to Cloudflare Pages
|
||||||
steps:
|
steps:
|
||||||
- name: Download build artifact
|
- name: Download build artifact
|
||||||
uses: actions/download-artifact@v7
|
uses: actions/download-artifact@v5
|
||||||
id: preview-build-artifact
|
id: preview-build-artifact
|
||||||
with:
|
with:
|
||||||
name: preview-build
|
name: preview-build
|
||||||
|
|||||||
6
.github/workflows/docker-build-push.yaml
vendored
6
.github/workflows/docker-build-push.yaml
vendored
@@ -21,11 +21,11 @@ jobs:
|
|||||||
echo "OWNER_LOWERCASE=${OWNER,,}" >> ${GITHUB_ENV}
|
echo "OWNER_LOWERCASE=${OWNER,,}" >> ${GITHUB_ENV}
|
||||||
env:
|
env:
|
||||||
OWNER: "${{ github.repository_owner }}"
|
OWNER: "${{ github.repository_owner }}"
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
- name: Inject slug/short variables
|
- name: Inject slug/short variables
|
||||||
uses: rlespinasse/github-slug-action@v5.4.0
|
uses: rlespinasse/github-slug-action@v5.2.0
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v3
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
@@ -37,7 +37,7 @@ jobs:
|
|||||||
network=host
|
network=host
|
||||||
- name: Install cosign
|
- name: Install cosign
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
uses: sigstore/cosign-installer@v4.0.0
|
uses: sigstore/cosign-installer@v3.10.0
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
> “[One] who works with the door open gets all kinds of interruptions, but [they] also occasionally gets clues as to what the world is and what might be important.” — Richard Hamming
|
> “[One] who works with the door open gets all kinds of interruptions, but [they] also occasionally gets clues as to what the world is and what might be important.” — Richard Hamming
|
||||||
|
|
||||||
Quartz is a set of tools that helps you publish your [digital garden](https://jzhao.xyz/posts/networked-thought) and notes as a website for free.
|
Quartz is a set of tools that helps you publish your [digital garden](https://jzhao.xyz/posts/networked-thought) and notes as a website for free.
|
||||||
|
Quartz v4 features a from-the-ground rewrite focusing on end-user extensibility and ease-of-use.
|
||||||
|
|
||||||
🔗 Read the documentation and get started: https://quartz.jzhao.xyz/
|
🔗 Read the documentation and get started: https://quartz.jzhao.xyz/
|
||||||
|
|
||||||
|
|||||||
@@ -36,7 +36,6 @@ This part of the configuration concerns anything that can affect the whole site.
|
|||||||
- `{provider: 'clarity', projectId: '<your-clarity-id-code' }`: use [Microsoft clarity](https://clarity.microsoft.com/). The project id can be found on top of the overview page.
|
- `{provider: 'clarity', projectId: '<your-clarity-id-code' }`: use [Microsoft clarity](https://clarity.microsoft.com/). The project id can be found on top of the overview page.
|
||||||
- `{ provider: 'matomo', siteId: '<your-matomo-id-code', host: 'matomo.example.com' }`: use [Matomo](https://matomo.org/), without protocol.
|
- `{ provider: 'matomo', siteId: '<your-matomo-id-code', host: 'matomo.example.com' }`: use [Matomo](https://matomo.org/), without protocol.
|
||||||
- `{ provider: 'vercel' }`: use [Vercel Web Analytics](https://vercel.com/docs/concepts/analytics).
|
- `{ provider: 'vercel' }`: use [Vercel Web Analytics](https://vercel.com/docs/concepts/analytics).
|
||||||
- `{ provider: 'rybbit', siteId: 'my-rybbit-id' }` (managed) or `{ provider: 'rybbit', siteId: 'my-rybbit-id', host: 'my-rybbit-domain.com' }` (self-hosted) use [Rybbit](https://rybbit.com);
|
|
||||||
- `locale`: used for [[i18n]] and date formatting
|
- `locale`: used for [[i18n]] and date formatting
|
||||||
- `baseUrl`: this is used for sitemaps and RSS feeds that require an absolute URL to know where the canonical 'home' of your site lives. This is normally the deployed URL of your site (e.g. `quartz.jzhao.xyz` for this site). Do not include the protocol (i.e. `https://`) or any leading or trailing slashes.
|
- `baseUrl`: this is used for sitemaps and RSS feeds that require an absolute URL to know where the canonical 'home' of your site lives. This is normally the deployed URL of your site (e.g. `quartz.jzhao.xyz` for this site). Do not include the protocol (i.e. `https://`) or any leading or trailing slashes.
|
||||||
- This should also include the subpath if you are [[hosting]] on GitHub pages without a custom domain. For example, if my repository is `jackyzha0/quartz`, GitHub pages would deploy to `https://jackyzha0.github.io/quartz` and the `baseUrl` would be `jackyzha0.github.io/quartz`.
|
- This should also include the subpath if you are [[hosting]] on GitHub pages without a custom domain. For example, if my repository is `jackyzha0/quartz`, GitHub pages would deploy to `https://jackyzha0.github.io/quartz` and the `baseUrl` would be `jackyzha0.github.io/quartz`.
|
||||||
|
|||||||
BIN
docs/embeddings/hnsw.bin
Normal file
BIN
docs/embeddings/hnsw.bin
Normal file
Binary file not shown.
1
docs/embeddings/manifest.json
Normal file
1
docs/embeddings/manifest.json
Normal file
File diff suppressed because one or more lines are too long
BIN
docs/embeddings/vectors-000.bin
Normal file
BIN
docs/embeddings/vectors-000.bin
Normal file
Binary file not shown.
@@ -5,7 +5,3 @@ You can run the below one-liner to run Quartz in Docker.
|
|||||||
```sh
|
```sh
|
||||||
docker run --rm -itp 8080:8080 -p 3001:3001 -v ./content:/usr/src/app/content $(docker build -q .)
|
docker run --rm -itp 8080:8080 -p 3001:3001 -v ./content:/usr/src/app/content $(docker build -q .)
|
||||||
```
|
```
|
||||||
|
|
||||||
> [!warning] Not to be used for production
|
|
||||||
> Serve mode is intended for local previews only.
|
|
||||||
> For production workloads, see the page on [[hosting]].
|
|
||||||
|
|||||||
@@ -10,10 +10,8 @@ By default, Quartz ships with the [[ObsidianFlavoredMarkdown]] plugin, which is
|
|||||||
|
|
||||||
It also ships with support for [frontmatter parsing](https://help.obsidian.md/Editing+and+formatting/Properties) with the same fields that Obsidian uses through the [[Frontmatter]] transformer plugin.
|
It also ships with support for [frontmatter parsing](https://help.obsidian.md/Editing+and+formatting/Properties) with the same fields that Obsidian uses through the [[Frontmatter]] transformer plugin.
|
||||||
|
|
||||||
Quartz also provides [[CrawlLinks]] plugin, which allows you to customize Quartz's link resolution behaviour to match Obsidian.
|
Finally, Quartz also provides [[CrawlLinks]] plugin, which allows you to customize Quartz's link resolution behaviour to match Obsidian.
|
||||||
|
|
||||||
For dynamic database-like views, Quartz supports [[bases|Obsidian Bases]] through the [[ObsidianBases]] transformer and [[BasePage]] emitter plugins.
|
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
This functionality is provided by the [[ObsidianFlavoredMarkdown]], [[ObsidianBases]], [[Frontmatter]] and [[CrawlLinks]] plugins. See the plugin pages for customization options.
|
This functionality is provided by the [[ObsidianFlavoredMarkdown]], [[Frontmatter]] and [[CrawlLinks]] plugins. See the plugin pages for customization options.
|
||||||
|
|||||||
@@ -1,42 +0,0 @@
|
|||||||
---
|
|
||||||
title: Bases
|
|
||||||
tags:
|
|
||||||
- feature/transformer
|
|
||||||
- feature/emitter
|
|
||||||
---
|
|
||||||
|
|
||||||
Quartz supports [Obsidian Bases](https://help.obsidian.md/bases), which allow you to create dynamic, database-like views of your notes. See the [official Obsidian documentation](https://help.obsidian.md/bases/syntax) for the full syntax reference.
|
|
||||||
|
|
||||||
## Quick Example
|
|
||||||
|
|
||||||
Create a `.base` file in your content folder:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
filters:
|
|
||||||
and:
|
|
||||||
- file.hasTag("task")
|
|
||||||
|
|
||||||
views:
|
|
||||||
- type: table
|
|
||||||
name: "Task List"
|
|
||||||
order:
|
|
||||||
- file.name
|
|
||||||
- status
|
|
||||||
- due_date
|
|
||||||
```
|
|
||||||
|
|
||||||
Each view gets its own page at `<base-name>/<view-name>`.
|
|
||||||
|
|
||||||
## Wikilinks
|
|
||||||
|
|
||||||
Link to base views using the standard [[navigation.base#Plugins|wikilink]] syntax:
|
|
||||||
|
|
||||||
```markdown
|
|
||||||
[[my-base.base#Task List]]
|
|
||||||
```
|
|
||||||
|
|
||||||
This resolves to `my-base/Task-List`.
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
This functionality is provided by the [[ObsidianBases]] transformer plugin (which parses `.base` files) and the [[BasePage]] emitter plugin (which generates the pages).
|
|
||||||
@@ -162,7 +162,7 @@ You can access the tags of a file by `node.data.tags`.
|
|||||||
Component.Explorer({
|
Component.Explorer({
|
||||||
filterFn: (node) => {
|
filterFn: (node) => {
|
||||||
// exclude files with the tag "explorerexclude"
|
// exclude files with the tag "explorerexclude"
|
||||||
return node.data?.tags?.includes("explorerexclude") !== true
|
return node.data.tags?.includes("explorerexclude") !== true
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ By default, Quartz only fetches previews for pages inside your vault due to [COR
|
|||||||
|
|
||||||
When [[creating components|creating your own components]], you can include this `popover-hint` class to also include it in the popover.
|
When [[creating components|creating your own components]], you can include this `popover-hint` class to also include it in the popover.
|
||||||
|
|
||||||
Similar to Obsidian, [[quartz-layout-desktop.png|images referenced using wikilinks]] can also be viewed as popups.
|
Similar to Obsidian, [[quartz layout.png|images referenced using wikilinks]] can also be viewed as popups.
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
|
|||||||
@@ -1,93 +0,0 @@
|
|||||||
filters:
|
|
||||||
and:
|
|
||||||
- file.ext == "md"
|
|
||||||
formulas:
|
|
||||||
doc_type: |
|
|
||||||
if(file.hasTag("plugin/transformer"), "transformer",
|
|
||||||
if(file.hasTag("plugin/emitter"), "emitter",
|
|
||||||
if(file.hasTag("plugin/filter"), "filter",
|
|
||||||
if(file.hasTag("component"), "component",
|
|
||||||
if(file.inFolder("features"), "feature",
|
|
||||||
if(file.inFolder("advanced"), "advanced",
|
|
||||||
if(file.inFolder("plugins"), "plugin", "guide")))))))
|
|
||||||
last_modified: file.mtime.relative()
|
|
||||||
section: |
|
|
||||||
if(file.inFolder("plugins"), "plugins",
|
|
||||||
if(file.inFolder("features"), "features",
|
|
||||||
if(file.inFolder("advanced"), "advanced",
|
|
||||||
if(file.inFolder("tags"), "tags", "core"))))
|
|
||||||
properties:
|
|
||||||
title:
|
|
||||||
displayName: Title
|
|
||||||
formula.doc_type:
|
|
||||||
displayName: Type
|
|
||||||
formula.last_modified:
|
|
||||||
displayName: Updated
|
|
||||||
formula.section:
|
|
||||||
displayName: Section
|
|
||||||
views:
|
|
||||||
- type: table
|
|
||||||
name: All Documentation
|
|
||||||
groupBy:
|
|
||||||
property: formula.section
|
|
||||||
direction: ASC
|
|
||||||
order:
|
|
||||||
- file.name
|
|
||||||
- title
|
|
||||||
- formula.doc_type
|
|
||||||
- formula.section
|
|
||||||
- formula.last_modified
|
|
||||||
sort:
|
|
||||||
- property: formula.doc_type
|
|
||||||
direction: ASC
|
|
||||||
- property: file.name
|
|
||||||
direction: ASC
|
|
||||||
columnSize:
|
|
||||||
file.name: 185
|
|
||||||
note.title: 268
|
|
||||||
formula.doc_type: 146
|
|
||||||
formula.section: 276
|
|
||||||
- type: table
|
|
||||||
name: Plugins
|
|
||||||
filters:
|
|
||||||
or:
|
|
||||||
- file.hasTag("plugin/transformer")
|
|
||||||
- file.hasTag("plugin/emitter")
|
|
||||||
- file.hasTag("plugin/filter")
|
|
||||||
groupBy:
|
|
||||||
property: formula.doc_type
|
|
||||||
direction: ASC
|
|
||||||
order:
|
|
||||||
- file.name
|
|
||||||
- title
|
|
||||||
- formula.doc_type
|
|
||||||
- formula.last_modified
|
|
||||||
- type: table
|
|
||||||
name: Components & Features
|
|
||||||
filters:
|
|
||||||
or:
|
|
||||||
- file.hasTag("component")
|
|
||||||
- file.inFolder("features")
|
|
||||||
order:
|
|
||||||
- file.name
|
|
||||||
- title
|
|
||||||
- formula.doc_type
|
|
||||||
- formula.last_modified
|
|
||||||
- type: list
|
|
||||||
name: Recently Updated
|
|
||||||
order:
|
|
||||||
- file.name
|
|
||||||
- formula.last_modified
|
|
||||||
limit: 15
|
|
||||||
- type: table
|
|
||||||
name: Core Guides
|
|
||||||
filters:
|
|
||||||
not:
|
|
||||||
- file.inFolder("plugins")
|
|
||||||
- file.inFolder("features")
|
|
||||||
- file.inFolder("advanced")
|
|
||||||
- file.inFolder("tags")
|
|
||||||
order:
|
|
||||||
- file.name
|
|
||||||
- title
|
|
||||||
- formula.last_modified
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
---
|
|
||||||
title: BasePage
|
|
||||||
tags:
|
|
||||||
- plugin/emitter
|
|
||||||
---
|
|
||||||
|
|
||||||
This plugin emits pages for each view defined in `.base` files. See [[bases]] for usage.
|
|
||||||
|
|
||||||
> [!note]
|
|
||||||
> For information on how to add, remove or configure plugins, see the [[configuration#Plugins|Configuration]] page.
|
|
||||||
|
|
||||||
Pages use `defaultListPageLayout` from `quartz.layout.ts` with `BaseContent` as the page body. To customize the layout, edit `quartz/components/pages/BaseContent.tsx`.
|
|
||||||
|
|
||||||
## API
|
|
||||||
|
|
||||||
- Category: Emitter
|
|
||||||
- Function name: `Plugin.BasePage()`.
|
|
||||||
- Source: [`quartz/plugins/emitters/basePage.tsx`](https://github.com/jackyzha0/quartz/blob/v4/quartz/plugins/emitters/basePage.tsx).
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
title: ObsidianBases
|
|
||||||
tags:
|
|
||||||
- plugin/transformer
|
|
||||||
---
|
|
||||||
|
|
||||||
This plugin parses `.base` files and compiles them for rendering. See [[bases]] for usage.
|
|
||||||
|
|
||||||
> [!note]
|
|
||||||
> For information on how to add, remove or configure plugins, see the [[configuration#Plugins|Configuration]] page.
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
- `emitWarnings`: If `true` (default), emits parse errors and type mismatches as warnings during build.
|
|
||||||
|
|
||||||
## API
|
|
||||||
|
|
||||||
- Category: Transformer
|
|
||||||
- Function name: `Plugin.ObsidianBases()`.
|
|
||||||
- Source: [`quartz/plugins/transformers/bases.ts`](https://github.com/jackyzha0/quartz/blob/v4/quartz/plugins/transformers/bases.ts).
|
|
||||||
1
index.d.ts
vendored
1
index.d.ts
vendored
@@ -13,3 +13,4 @@ interface CustomEventMap {
|
|||||||
|
|
||||||
type ContentIndex = Record<FullSlug, ContentDetails>
|
type ContentIndex = Record<FullSlug, ContentDetails>
|
||||||
declare const fetchData: Promise<ContentIndex>
|
declare const fetchData: Promise<ContentIndex>
|
||||||
|
declare const semanticCfg: import("./quartz/cfg").GlobalConfiguration["semanticSearch"]
|
||||||
|
|||||||
2091
package-lock.json
generated
2091
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
44
package.json
44
package.json
@@ -37,33 +37,35 @@
|
|||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@clack/prompts": "^0.11.0",
|
"@clack/prompts": "^0.11.0",
|
||||||
"@floating-ui/dom": "^1.7.4",
|
"@floating-ui/dom": "^1.7.4",
|
||||||
|
"@huggingface/transformers": "^3.7.5",
|
||||||
"@myriaddreamin/rehype-typst": "^0.6.0",
|
"@myriaddreamin/rehype-typst": "^0.6.0",
|
||||||
"@napi-rs/simple-git": "0.1.22",
|
"@napi-rs/simple-git": "0.1.22",
|
||||||
"@tweenjs/tween.js": "^25.0.0",
|
"@tweenjs/tween.js": "^25.0.0",
|
||||||
"ansi-truncate": "^1.4.0",
|
"ansi-truncate": "^1.4.0",
|
||||||
"async-mutex": "^0.5.0",
|
"async-mutex": "^0.5.0",
|
||||||
"chokidar": "^5.0.0",
|
"chokidar": "^4.0.3",
|
||||||
"cli-spinner": "^0.2.10",
|
"cli-spinner": "^0.2.10",
|
||||||
"d3": "^7.9.0",
|
"d3": "^7.9.0",
|
||||||
"esbuild-sass-plugin": "^3.6.0",
|
"esbuild-sass-plugin": "^3.3.1",
|
||||||
"flexsearch": "^0.8.205",
|
"flexsearch": "^0.8.205",
|
||||||
"github-slugger": "^2.0.0",
|
"github-slugger": "^2.0.0",
|
||||||
"globby": "^16.1.0",
|
"globby": "^15.0.0",
|
||||||
"gray-matter": "^4.0.3",
|
"gray-matter": "^4.0.3",
|
||||||
"hast-util-to-html": "^9.0.5",
|
"hast-util-to-html": "^9.0.5",
|
||||||
"hast-util-to-jsx-runtime": "^2.3.6",
|
"hast-util-to-jsx-runtime": "^2.3.6",
|
||||||
"hast-util-to-string": "^3.0.1",
|
"hast-util-to-string": "^3.0.1",
|
||||||
"is-absolute-url": "^5.0.0",
|
"is-absolute-url": "^5.0.0",
|
||||||
"js-yaml": "^4.1.1",
|
"js-yaml": "^4.1.0",
|
||||||
"lightningcss": "^1.31.1",
|
"lightningcss": "^1.30.2",
|
||||||
"mdast-util-find-and-replace": "^3.0.2",
|
"mdast-util-find-and-replace": "^3.0.2",
|
||||||
"mdast-util-to-hast": "^13.2.1",
|
"mdast-util-to-hast": "^13.2.0",
|
||||||
"mdast-util-to-string": "^4.0.0",
|
"mdast-util-to-string": "^4.0.0",
|
||||||
"micromorph": "^0.4.5",
|
"micromorph": "^0.4.5",
|
||||||
"minimatch": "^10.1.1",
|
"minimatch": "^10.0.3",
|
||||||
"pixi.js": "^8.15.0",
|
"onnxruntime-web": "^1.23.0",
|
||||||
"preact": "^10.28.2",
|
"pixi.js": "^8.13.2",
|
||||||
"preact-render-to-string": "^6.6.5",
|
"preact": "^10.27.2",
|
||||||
|
"preact-render-to-string": "^6.6.1",
|
||||||
"pretty-bytes": "^7.1.0",
|
"pretty-bytes": "^7.1.0",
|
||||||
"pretty-time": "^1.1.0",
|
"pretty-time": "^1.1.0",
|
||||||
"reading-time": "^1.5.0",
|
"reading-time": "^1.5.0",
|
||||||
@@ -83,32 +85,32 @@
|
|||||||
"remark-rehype": "^11.1.2",
|
"remark-rehype": "^11.1.2",
|
||||||
"remark-smartypants": "^3.0.2",
|
"remark-smartypants": "^3.0.2",
|
||||||
"rfdc": "^1.4.1",
|
"rfdc": "^1.4.1",
|
||||||
"satori": "^0.19.1",
|
"satori": "^0.18.3",
|
||||||
"serve-handler": "^6.1.6",
|
"serve-handler": "^6.1.6",
|
||||||
"sharp": "^0.34.5",
|
"sharp": "^0.34.4",
|
||||||
"shiki": "^1.26.2",
|
"shiki": "^1.26.2",
|
||||||
"source-map-support": "^0.5.21",
|
"source-map-support": "^0.5.21",
|
||||||
"to-vfile": "^8.0.0",
|
"to-vfile": "^8.0.0",
|
||||||
"toml": "^3.0.0",
|
"toml": "^3.0.0",
|
||||||
"unified": "^11.0.5",
|
"unified": "^11.0.5",
|
||||||
"unist-util-visit": "^5.1.0",
|
"unist-util-visit": "^5.0.0",
|
||||||
"vfile": "^6.0.3",
|
"vfile": "^6.0.3",
|
||||||
"workerpool": "^10.0.1",
|
"workerpool": "^9.3.4",
|
||||||
"ws": "^8.19.0",
|
"ws": "^8.18.3",
|
||||||
"yargs": "^18.0.0"
|
"yargs": "^18.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/d3": "^7.4.3",
|
"@types/d3": "^7.4.3",
|
||||||
"@types/hast": "^3.0.4",
|
"@types/hast": "^3.0.4",
|
||||||
"@types/js-yaml": "^4.0.9",
|
"@types/js-yaml": "^4.0.9",
|
||||||
"@types/node": "^25.0.10",
|
"@types/node": "^24.6.0",
|
||||||
"@types/pretty-time": "^1.1.5",
|
"@types/pretty-time": "^1.1.5",
|
||||||
"@types/source-map-support": "^0.5.10",
|
"@types/source-map-support": "^0.5.10",
|
||||||
"@types/ws": "^8.18.1",
|
"@types/ws": "^8.18.1",
|
||||||
"@types/yargs": "^17.0.35",
|
"@types/yargs": "^17.0.33",
|
||||||
"esbuild": "^0.27.2",
|
"esbuild": "^0.25.10",
|
||||||
"prettier": "^3.8.1",
|
"prettier": "^3.6.2",
|
||||||
"tsx": "^4.21.0",
|
"tsx": "^4.20.6",
|
||||||
"typescript": "^5.9.3"
|
"typescript": "^5.9.2"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,18 @@
|
|||||||
import { QuartzConfig } from "./quartz/cfg"
|
import { GlobalConfiguration, QuartzConfig } from "./quartz/cfg"
|
||||||
import * as Plugin from "./quartz/plugins"
|
import * as Plugin from "./quartz/plugins"
|
||||||
|
|
||||||
|
const semanticSearch: GlobalConfiguration["semanticSearch"] = {
|
||||||
|
enable: true,
|
||||||
|
model: "onnx-community/embeddinggemma-300m-ONNX",
|
||||||
|
aot: true,
|
||||||
|
dims: 768,
|
||||||
|
dtype: "fp32",
|
||||||
|
shardSizeRows: 1024,
|
||||||
|
hnsw: { M: 16, efConstruction: 200 },
|
||||||
|
chunking: { chunkSize: 256, chunkOverlap: 64 },
|
||||||
|
vllm: { enable: true, concurrency: 16, batchSize: 128 },
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Quartz 4 Configuration
|
* Quartz 4 Configuration
|
||||||
*
|
*
|
||||||
@@ -52,6 +64,7 @@ const config: QuartzConfig = {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
semanticSearch,
|
||||||
},
|
},
|
||||||
plugins: {
|
plugins: {
|
||||||
transformers: [
|
transformers: [
|
||||||
@@ -72,7 +85,6 @@ const config: QuartzConfig = {
|
|||||||
Plugin.CrawlLinks({ markdownLinkResolution: "shortest" }),
|
Plugin.CrawlLinks({ markdownLinkResolution: "shortest" }),
|
||||||
Plugin.Description(),
|
Plugin.Description(),
|
||||||
Plugin.Latex({ renderEngine: "katex" }),
|
Plugin.Latex({ renderEngine: "katex" }),
|
||||||
Plugin.ObsidianBases(),
|
|
||||||
],
|
],
|
||||||
filters: [Plugin.RemoveDrafts()],
|
filters: [Plugin.RemoveDrafts()],
|
||||||
emitters: [
|
emitters: [
|
||||||
@@ -85,13 +97,13 @@ const config: QuartzConfig = {
|
|||||||
enableSiteMap: true,
|
enableSiteMap: true,
|
||||||
enableRSS: true,
|
enableRSS: true,
|
||||||
}),
|
}),
|
||||||
|
Plugin.SemanticIndex(semanticSearch),
|
||||||
Plugin.Assets(),
|
Plugin.Assets(),
|
||||||
Plugin.Static(),
|
Plugin.Static(),
|
||||||
Plugin.Favicon(),
|
Plugin.Favicon(),
|
||||||
Plugin.NotFoundPage(),
|
Plugin.NotFoundPage(),
|
||||||
// Comment out CustomOgImages to speed up build time
|
// Comment out CustomOgImages to speed up build time
|
||||||
Plugin.CustomOgImages(),
|
Plugin.CustomOgImages(),
|
||||||
Plugin.BasePage(),
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -72,7 +72,7 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
|
|||||||
|
|
||||||
perf.addEvent("glob")
|
perf.addEvent("glob")
|
||||||
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
|
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
|
||||||
const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md") || fp.endsWith(".base")).sort()
|
const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
|
||||||
console.log(
|
console.log(
|
||||||
`Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
|
`Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
|
||||||
)
|
)
|
||||||
@@ -143,7 +143,6 @@ async function startWatching(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const watcher = chokidar.watch(".", {
|
const watcher = chokidar.watch(".", {
|
||||||
awaitWriteFinish: { stabilityThreshold: 250 },
|
|
||||||
persistent: true,
|
persistent: true,
|
||||||
cwd: argv.directory,
|
cwd: argv.directory,
|
||||||
ignoreInitial: true,
|
ignoreInitial: true,
|
||||||
|
|||||||
@@ -50,11 +50,6 @@ export type Analytics =
|
|||||||
| {
|
| {
|
||||||
provider: "vercel"
|
provider: "vercel"
|
||||||
}
|
}
|
||||||
| {
|
|
||||||
provider: "rybbit"
|
|
||||||
siteId: string
|
|
||||||
host?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface GlobalConfiguration {
|
export interface GlobalConfiguration {
|
||||||
pageTitle: string
|
pageTitle: string
|
||||||
@@ -83,6 +78,34 @@ export interface GlobalConfiguration {
|
|||||||
* Region Codes: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
|
* Region Codes: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
|
||||||
*/
|
*/
|
||||||
locale: ValidLocale
|
locale: ValidLocale
|
||||||
|
/** Semantic search configuration */
|
||||||
|
semanticSearch?: {
|
||||||
|
enable: boolean
|
||||||
|
model: string
|
||||||
|
aot: boolean
|
||||||
|
dtype: "fp32" | "fp16"
|
||||||
|
dims: number
|
||||||
|
shardSizeRows: number
|
||||||
|
manifestUrl?: string
|
||||||
|
manifestBaseUrl?: string
|
||||||
|
disableCache?: boolean
|
||||||
|
hnsw: {
|
||||||
|
M: number
|
||||||
|
efConstruction: number
|
||||||
|
efSearch?: number
|
||||||
|
}
|
||||||
|
chunking: {
|
||||||
|
chunkSize: number
|
||||||
|
chunkOverlap: number
|
||||||
|
noChunking?: boolean
|
||||||
|
}
|
||||||
|
vllm?: {
|
||||||
|
enable: boolean
|
||||||
|
vllmUrl?: string
|
||||||
|
concurrency: number
|
||||||
|
batchSize: number
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface QuartzConfig {
|
export interface QuartzConfig {
|
||||||
|
|||||||
@@ -7,8 +7,8 @@ import fs from "fs"
|
|||||||
export function escapePath(fp) {
|
export function escapePath(fp) {
|
||||||
return fp
|
return fp
|
||||||
.replace(/\\ /g, " ") // unescape spaces
|
.replace(/\\ /g, " ") // unescape spaces
|
||||||
.replace(/^"(.*)"$/, "$1")
|
.replace(/^".*"$/, "$1")
|
||||||
.replace(/^'(.*)'$/, "$1")
|
.replace(/^'.*"$/, "$1")
|
||||||
.trim()
|
.trim()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,218 +0,0 @@
|
|||||||
import { JSX } from "preact"
|
|
||||||
import { QuartzComponent, QuartzComponentConstructor, QuartzComponentProps } from "./types"
|
|
||||||
import { classNames } from "../util/lang"
|
|
||||||
import { resolveRelative } from "../util/path"
|
|
||||||
// @ts-ignore
|
|
||||||
import script from "./scripts/base-view-selector.inline"
|
|
||||||
import baseViewSelectorStyle from "./styles/baseViewSelector.scss"
|
|
||||||
|
|
||||||
const icons: Record<string, JSX.Element> = {
|
|
||||||
table: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<rect width="18" height="18" x="3" y="3" rx="2" />
|
|
||||||
<path d="M3 9h18" />
|
|
||||||
<path d="M3 15h18" />
|
|
||||||
<path d="M9 3v18" />
|
|
||||||
<path d="M15 3v18" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
list: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<line x1="8" x2="21" y1="6" y2="6" />
|
|
||||||
<line x1="8" x2="21" y1="12" y2="12" />
|
|
||||||
<line x1="8" x2="21" y1="18" y2="18" />
|
|
||||||
<line x1="3" x2="3.01" y1="6" y2="6" />
|
|
||||||
<line x1="3" x2="3.01" y1="12" y2="12" />
|
|
||||||
<line x1="3" x2="3.01" y1="18" y2="18" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
chevronsUpDown: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<path d="m7 15 5 5 5-5" />
|
|
||||||
<path d="m7 9 5-5 5 5" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
chevronRight: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<path d="m9 18 6-6-6-6" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
x: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<path d="M18 6 6 18" />
|
|
||||||
<path d="m6 6 12 12" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
map: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<path d="M15 6v12a3 3 0 1 0 3-3H6a3 3 0 1 0 3 3V6a3 3 0 1 0-3 3h12a3 3 0 1 0-3-3" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
card: (
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="16"
|
|
||||||
height="16"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<rect width="7" height="7" x="3" y="3" rx="1" />
|
|
||||||
<rect width="7" height="7" x="14" y="3" rx="1" />
|
|
||||||
<rect width="7" height="7" x="14" y="14" rx="1" />
|
|
||||||
<rect width="7" height="7" x="3" y="14" rx="1" />
|
|
||||||
</svg>
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
const viewTypeIcons: Record<string, JSX.Element | undefined> = {
|
|
||||||
table: icons.table,
|
|
||||||
list: icons.list,
|
|
||||||
gallery: icons.card,
|
|
||||||
board: icons.table,
|
|
||||||
calendar: icons.table,
|
|
||||||
map: icons.map,
|
|
||||||
cards: icons.card,
|
|
||||||
}
|
|
||||||
|
|
||||||
const BaseViewSelector: QuartzComponent = ({ fileData, displayClass }: QuartzComponentProps) => {
|
|
||||||
const baseMeta = fileData.basesMetadata
|
|
||||||
|
|
||||||
if (!baseMeta || baseMeta.allViews.length <= 1) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
const currentViewName = baseMeta.currentView
|
|
||||||
const allViews = baseMeta.allViews
|
|
||||||
const currentIcon =
|
|
||||||
viewTypeIcons[allViews.find((view) => view.name === currentViewName)?.type ?? ""] ?? icons.table
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div class={classNames(displayClass, "bases-toolbar")} data-base-view-selector>
|
|
||||||
<div class="bases-toolbar-item bases-toolbar-views-menu">
|
|
||||||
<span
|
|
||||||
class="text-icon-button"
|
|
||||||
aria-label="Select view"
|
|
||||||
aria-expanded="false"
|
|
||||||
aria-haspopup="true"
|
|
||||||
role="button"
|
|
||||||
tabindex={0}
|
|
||||||
>
|
|
||||||
<span class="text-button-icon">{currentIcon}</span>
|
|
||||||
<span class="text-button-label">{currentViewName.toLowerCase()}</span>
|
|
||||||
<span class="text-button-icon mod-aux">{icons.chevronsUpDown}</span>
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="menu-scroll" data-dropdown>
|
|
||||||
<div class="bases-toolbar-menu-container">
|
|
||||||
<div class="search-input-container">
|
|
||||||
<input type="search" placeholder="Search..." data-search-input />
|
|
||||||
<div class="search-input-clear-button" data-clear-search hidden>
|
|
||||||
{icons.x}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="bases-toolbar-items">
|
|
||||||
<div class="suggestion-group" data-group="views" data-view-list>
|
|
||||||
{allViews.map((view) => {
|
|
||||||
const isActive = view.name === currentViewName
|
|
||||||
const icon = viewTypeIcons[view.type] || icons.table
|
|
||||||
const href = resolveRelative(fileData.slug!, view.slug)
|
|
||||||
|
|
||||||
return (
|
|
||||||
<a
|
|
||||||
href={href}
|
|
||||||
data-slug={view.slug}
|
|
||||||
class={
|
|
||||||
isActive
|
|
||||||
? "suggestion-item bases-toolbar-menu-item mod-active is-selected"
|
|
||||||
: "suggestion-item bases-toolbar-menu-item"
|
|
||||||
}
|
|
||||||
data-view-name={view.name}
|
|
||||||
data-view-type={view.type}
|
|
||||||
>
|
|
||||||
<div class="bases-toolbar-menu-item-info">
|
|
||||||
<div class="bases-toolbar-menu-item-info-icon">{icon}</div>
|
|
||||||
<div class="bases-toolbar-menu-item-name">{view.name.toLowerCase()}</div>
|
|
||||||
</div>
|
|
||||||
<div class="clickable-icon bases-toolbar-menu-item-icon">
|
|
||||||
{icons.chevronRight}
|
|
||||||
</div>
|
|
||||||
</a>
|
|
||||||
)
|
|
||||||
})}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
BaseViewSelector.css = baseViewSelectorStyle
|
|
||||||
BaseViewSelector.afterDOMLoaded = script
|
|
||||||
|
|
||||||
export default (() => BaseViewSelector) satisfies QuartzComponentConstructor
|
|
||||||
@@ -51,9 +51,7 @@ export default ((opts?: Partial<BreadcrumbOptions>) => {
|
|||||||
ctx,
|
ctx,
|
||||||
}: QuartzComponentProps) => {
|
}: QuartzComponentProps) => {
|
||||||
const trie = (ctx.trie ??= trieFromAllFiles(allFiles))
|
const trie = (ctx.trie ??= trieFromAllFiles(allFiles))
|
||||||
const baseMeta = fileData.basesMetadata
|
const slugParts = fileData.slug!.split("/")
|
||||||
|
|
||||||
const slugParts = (baseMeta ? baseMeta.baseSlug : fileData.slug!).split("/")
|
|
||||||
const pathNodes = trie.ancestryChain(slugParts)
|
const pathNodes = trie.ancestryChain(slugParts)
|
||||||
|
|
||||||
if (!pathNodes) {
|
if (!pathNodes) {
|
||||||
@@ -66,24 +64,14 @@ export default ((opts?: Partial<BreadcrumbOptions>) => {
|
|||||||
crumb.displayName = options.rootName
|
crumb.displayName = options.rootName
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// For last node (current page), set empty path
|
||||||
if (idx === pathNodes.length - 1) {
|
if (idx === pathNodes.length - 1) {
|
||||||
if (baseMeta) {
|
crumb.path = ""
|
||||||
crumb.path = resolveRelative(fileData.slug!, simplifySlug(baseMeta.baseSlug))
|
|
||||||
} else {
|
|
||||||
crumb.path = ""
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return crumb
|
return crumb
|
||||||
})
|
})
|
||||||
|
|
||||||
if (baseMeta && options.showCurrentPage) {
|
|
||||||
crumbs.push({
|
|
||||||
displayName: baseMeta.currentView.replaceAll("-", " "),
|
|
||||||
path: "",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!options.showCurrentPage) {
|
if (!options.showCurrentPage) {
|
||||||
crumbs.pop()
|
crumbs.pop()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,10 +7,12 @@ import { i18n } from "../i18n"
|
|||||||
|
|
||||||
export interface SearchOptions {
|
export interface SearchOptions {
|
||||||
enablePreview: boolean
|
enablePreview: boolean
|
||||||
|
includeButton: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
const defaultOptions: SearchOptions = {
|
const defaultOptions: SearchOptions = {
|
||||||
enablePreview: true,
|
enablePreview: true,
|
||||||
|
includeButton: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
export default ((userOpts?: Partial<SearchOptions>) => {
|
export default ((userOpts?: Partial<SearchOptions>) => {
|
||||||
@@ -29,19 +31,54 @@ export default ((userOpts?: Partial<SearchOptions>) => {
|
|||||||
</svg>
|
</svg>
|
||||||
<p>{i18n(cfg.locale).components.search.title}</p>
|
<p>{i18n(cfg.locale).components.search.title}</p>
|
||||||
</button>
|
</button>
|
||||||
<div class="search-container">
|
<search class="search-container">
|
||||||
<div class="search-space">
|
<form class="search-space">
|
||||||
<input
|
<div class="input-container">
|
||||||
autocomplete="off"
|
<input
|
||||||
class="search-bar"
|
autocomplete="off"
|
||||||
name="search"
|
class="search-bar"
|
||||||
type="text"
|
name="search"
|
||||||
aria-label={searchPlaceholder}
|
type="text"
|
||||||
placeholder={searchPlaceholder}
|
aria-label={searchPlaceholder}
|
||||||
/>
|
placeholder={searchPlaceholder}
|
||||||
<div class="search-layout" data-preview={opts.enablePreview}></div>
|
/>
|
||||||
</div>
|
<div class="search-mode-toggle" role="radiogroup" aria-label="Search mode">
|
||||||
</div>
|
<button
|
||||||
|
type="button"
|
||||||
|
class="mode-option"
|
||||||
|
data-mode="lexical"
|
||||||
|
aria-pressed="true"
|
||||||
|
aria-label="Full-text search"
|
||||||
|
>
|
||||||
|
<svg viewBox="0 0 20 20" role="img" aria-hidden="true">
|
||||||
|
<g fill="none" stroke="currentColor" stroke-width="1.5" stroke-linecap="round">
|
||||||
|
<path d="M4 6h12M4 10h8M4 14h6" />
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
<span class="sr-only">Full-text</span>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="mode-option"
|
||||||
|
data-mode="semantic"
|
||||||
|
aria-pressed="false"
|
||||||
|
aria-label="Semantic search"
|
||||||
|
>
|
||||||
|
<svg viewBox="0 0 20 20" role="img" aria-hidden="true">
|
||||||
|
<g fill="none" stroke="currentColor" stroke-width="1.5" stroke-linecap="round">
|
||||||
|
<circle cx="5.2" cy="10" r="2.4" />
|
||||||
|
<circle cx="14.8" cy="4.8" r="2.1" />
|
||||||
|
<circle cx="14.8" cy="15.2" r="2.1" />
|
||||||
|
<path d="M7.1 8.7l5.2-2.4M7.1 11.3l5.2 2.4M14.8 6.9v6.2" />
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
<span class="sr-only">Semantic</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<output class="search-layout" data-preview={opts.enablePreview} />
|
||||||
|
</form>
|
||||||
|
</search>
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,6 @@
|
|||||||
import Content from "./pages/Content"
|
import Content from "./pages/Content"
|
||||||
import TagContent from "./pages/TagContent"
|
import TagContent from "./pages/TagContent"
|
||||||
import FolderContent from "./pages/FolderContent"
|
import FolderContent from "./pages/FolderContent"
|
||||||
import BaseContent from "./pages/BaseContent"
|
|
||||||
import BaseViewSelector from "./BaseViewSelector"
|
|
||||||
import NotFound from "./pages/404"
|
import NotFound from "./pages/404"
|
||||||
import ArticleTitle from "./ArticleTitle"
|
import ArticleTitle from "./ArticleTitle"
|
||||||
import Darkmode from "./Darkmode"
|
import Darkmode from "./Darkmode"
|
||||||
@@ -31,8 +29,6 @@ export {
|
|||||||
Content,
|
Content,
|
||||||
TagContent,
|
TagContent,
|
||||||
FolderContent,
|
FolderContent,
|
||||||
BaseContent,
|
|
||||||
BaseViewSelector,
|
|
||||||
Darkmode,
|
Darkmode,
|
||||||
ReaderMode,
|
ReaderMode,
|
||||||
Head,
|
Head,
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
import { QuartzComponent, QuartzComponentConstructor, QuartzComponentProps } from "../types"
|
|
||||||
import style from "../styles/basePage.scss"
|
|
||||||
import { htmlToJsx } from "../../util/jsx"
|
|
||||||
|
|
||||||
export default (() => {
|
|
||||||
const BaseContent: QuartzComponent = (props: QuartzComponentProps) => {
|
|
||||||
const { fileData, tree } = props
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div class="popover-hint">
|
|
||||||
<article class={["base-content", ...(fileData.frontmatter?.cssclasses ?? [])].join(" ")}>
|
|
||||||
{htmlToJsx(fileData.filePath!, fileData.basesRenderedTree ?? tree)}
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
BaseContent.css = style
|
|
||||||
return BaseContent
|
|
||||||
}) satisfies QuartzComponentConstructor
|
|
||||||
@@ -9,7 +9,6 @@ import { visit } from "unist-util-visit"
|
|||||||
import { Root, Element, ElementContent } from "hast"
|
import { Root, Element, ElementContent } from "hast"
|
||||||
import { GlobalConfiguration } from "../cfg"
|
import { GlobalConfiguration } from "../cfg"
|
||||||
import { i18n } from "../i18n"
|
import { i18n } from "../i18n"
|
||||||
import { styleText } from "util"
|
|
||||||
|
|
||||||
interface RenderComponents {
|
interface RenderComponents {
|
||||||
head: QuartzComponent
|
head: QuartzComponent
|
||||||
@@ -26,6 +25,7 @@ const headerRegex = new RegExp(/h[1-6]/)
|
|||||||
export function pageResources(
|
export function pageResources(
|
||||||
baseDir: FullSlug | RelativeURL,
|
baseDir: FullSlug | RelativeURL,
|
||||||
staticResources: StaticResources,
|
staticResources: StaticResources,
|
||||||
|
cfg?: GlobalConfiguration,
|
||||||
): StaticResources {
|
): StaticResources {
|
||||||
const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json")
|
const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json")
|
||||||
const contentIndexScript = `const fetchData = fetch("${contentIndexPath}").then(data => data.json())`
|
const contentIndexScript = `const fetchData = fetch("${contentIndexPath}").then(data => data.json())`
|
||||||
@@ -49,6 +49,12 @@ export function pageResources(
|
|||||||
spaPreserve: true,
|
spaPreserve: true,
|
||||||
script: contentIndexScript,
|
script: contentIndexScript,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
loadTime: "beforeDOMReady",
|
||||||
|
contentType: "inline",
|
||||||
|
spaPreserve: true,
|
||||||
|
script: `const semanticCfg = ${JSON.stringify(cfg?.semanticSearch ?? {})};`,
|
||||||
|
},
|
||||||
...staticResources.js,
|
...staticResources.js,
|
||||||
],
|
],
|
||||||
additionalHead: staticResources.additionalHead,
|
additionalHead: staticResources.additionalHead,
|
||||||
@@ -69,7 +75,6 @@ function renderTranscludes(
|
|||||||
cfg: GlobalConfiguration,
|
cfg: GlobalConfiguration,
|
||||||
slug: FullSlug,
|
slug: FullSlug,
|
||||||
componentData: QuartzComponentProps,
|
componentData: QuartzComponentProps,
|
||||||
visited: Set<FullSlug>,
|
|
||||||
) {
|
) {
|
||||||
// process transcludes in componentData
|
// process transcludes in componentData
|
||||||
visit(root, "element", (node, _index, _parent) => {
|
visit(root, "element", (node, _index, _parent) => {
|
||||||
@@ -78,30 +83,6 @@ function renderTranscludes(
|
|||||||
if (classNames.includes("transclude")) {
|
if (classNames.includes("transclude")) {
|
||||||
const inner = node.children[0] as Element
|
const inner = node.children[0] as Element
|
||||||
const transcludeTarget = (inner.properties["data-slug"] ?? slug) as FullSlug
|
const transcludeTarget = (inner.properties["data-slug"] ?? slug) as FullSlug
|
||||||
if (visited.has(transcludeTarget)) {
|
|
||||||
console.warn(
|
|
||||||
styleText(
|
|
||||||
"yellow",
|
|
||||||
`Warning: Skipping circular transclusion: ${slug} -> ${transcludeTarget}`,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
node.children = [
|
|
||||||
{
|
|
||||||
type: "element",
|
|
||||||
tagName: "p",
|
|
||||||
properties: { style: "color: var(--secondary);" },
|
|
||||||
children: [
|
|
||||||
{
|
|
||||||
type: "text",
|
|
||||||
value: `Circular transclusion detected: ${transcludeTarget}`,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
]
|
|
||||||
return
|
|
||||||
}
|
|
||||||
visited.add(transcludeTarget)
|
|
||||||
|
|
||||||
const page = componentData.allFiles.find((f) => f.slug === transcludeTarget)
|
const page = componentData.allFiles.find((f) => f.slug === transcludeTarget)
|
||||||
if (!page) {
|
if (!page) {
|
||||||
return
|
return
|
||||||
@@ -222,8 +203,7 @@ export function renderPage(
|
|||||||
// make a deep copy of the tree so we don't remove the transclusion references
|
// make a deep copy of the tree so we don't remove the transclusion references
|
||||||
// for the file cached in contentMap in build.ts
|
// for the file cached in contentMap in build.ts
|
||||||
const root = clone(componentData.tree) as Root
|
const root = clone(componentData.tree) as Root
|
||||||
const visited = new Set<FullSlug>([slug])
|
renderTranscludes(root, cfg, slug, componentData)
|
||||||
renderTranscludes(root, cfg, slug, componentData, visited)
|
|
||||||
|
|
||||||
// set componentData.tree to the edited html that has transclusions rendered
|
// set componentData.tree to the edited html that has transclusions rendered
|
||||||
componentData.tree = root
|
componentData.tree = root
|
||||||
@@ -294,7 +274,7 @@ export function renderPage(
|
|||||||
</body>
|
</body>
|
||||||
{pageResources.js
|
{pageResources.js
|
||||||
.filter((resource) => resource.loadTime === "afterDOMReady")
|
.filter((resource) => resource.loadTime === "afterDOMReady")
|
||||||
.map((res) => JSResourceToScriptElement(res, true))}
|
.map((res) => JSResourceToScriptElement(res))}
|
||||||
</html>
|
</html>
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,144 +0,0 @@
|
|||||||
let documentClickHandler: ((e: MouseEvent) => void) | null = null
|
|
||||||
|
|
||||||
function setupBaseViewSelector() {
|
|
||||||
const selectors = document.querySelectorAll("[data-base-view-selector]")
|
|
||||||
|
|
||||||
if (selectors.length === 0) return
|
|
||||||
|
|
||||||
if (!documentClickHandler) {
|
|
||||||
documentClickHandler = (e: MouseEvent) => {
|
|
||||||
document.querySelectorAll("[data-base-view-selector]").forEach((selector) => {
|
|
||||||
if (selector.contains(e.target as Node)) return
|
|
||||||
const trigger = selector.querySelector(".text-icon-button") as HTMLElement | null
|
|
||||||
if (trigger?.getAttribute("aria-expanded") === "true") {
|
|
||||||
selector.dispatchEvent(new CustomEvent("close-dropdown"))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
document.addEventListener("click", documentClickHandler)
|
|
||||||
window.addCleanup(() => {
|
|
||||||
if (documentClickHandler) {
|
|
||||||
document.removeEventListener("click", documentClickHandler)
|
|
||||||
documentClickHandler = null
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
selectors.forEach((selector) => {
|
|
||||||
if (selector.hasAttribute("data-initialized")) return
|
|
||||||
selector.setAttribute("data-initialized", "true")
|
|
||||||
|
|
||||||
const triggerEl = selector.querySelector(".text-icon-button") as HTMLElement | null
|
|
||||||
const searchInputEl = selector.querySelector("[data-search-input]") as HTMLInputElement | null
|
|
||||||
const clearButtonEl = selector.querySelector("[data-clear-search]") as HTMLElement | null
|
|
||||||
const viewListEl = selector.querySelector("[data-view-list]") as HTMLElement | null
|
|
||||||
|
|
||||||
if (!triggerEl || !searchInputEl || !clearButtonEl || !viewListEl) return
|
|
||||||
|
|
||||||
const trigger = triggerEl
|
|
||||||
const searchInput = searchInputEl
|
|
||||||
const clearButton = clearButtonEl
|
|
||||||
const viewList = viewListEl
|
|
||||||
|
|
||||||
function toggleDropdown() {
|
|
||||||
if (trigger.getAttribute("aria-expanded") === "true") {
|
|
||||||
closeDropdown()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
openDropdown()
|
|
||||||
}
|
|
||||||
|
|
||||||
function openDropdown() {
|
|
||||||
trigger.setAttribute("aria-expanded", "true")
|
|
||||||
trigger.classList.add("has-active-menu")
|
|
||||||
setTimeout(() => searchInput.focus(), 10)
|
|
||||||
}
|
|
||||||
|
|
||||||
function closeDropdown() {
|
|
||||||
trigger.setAttribute("aria-expanded", "false")
|
|
||||||
trigger.classList.remove("has-active-menu")
|
|
||||||
searchInput.value = ""
|
|
||||||
clearButton.hidden = true
|
|
||||||
filterViews("")
|
|
||||||
}
|
|
||||||
|
|
||||||
function filterViews(query: string) {
|
|
||||||
const items = viewList.querySelectorAll<HTMLElement>(".bases-toolbar-menu-item")
|
|
||||||
const lowerQuery = query.toLowerCase()
|
|
||||||
|
|
||||||
items.forEach((item) => {
|
|
||||||
const viewName = (item.getAttribute("data-view-name") || "").toLowerCase()
|
|
||||||
const viewType = (item.getAttribute("data-view-type") || "").toLowerCase()
|
|
||||||
const matches = viewName.includes(lowerQuery) || viewType.includes(lowerQuery)
|
|
||||||
item.style.display = matches ? "" : "none"
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleSearchInput() {
|
|
||||||
const query = searchInput.value
|
|
||||||
filterViews(query)
|
|
||||||
clearButton.hidden = query.length === 0
|
|
||||||
}
|
|
||||||
|
|
||||||
function clearSearch() {
|
|
||||||
searchInput.value = ""
|
|
||||||
clearButton.hidden = true
|
|
||||||
filterViews("")
|
|
||||||
searchInput.focus()
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleTriggerClick = (e: MouseEvent) => {
|
|
||||||
e.stopPropagation()
|
|
||||||
toggleDropdown()
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleTriggerKeydown = (e: KeyboardEvent) => {
|
|
||||||
if (e.key === "Enter" || e.key === " ") {
|
|
||||||
e.preventDefault()
|
|
||||||
toggleDropdown()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleSearchKeydown = (e: KeyboardEvent) => {
|
|
||||||
if (e.key === "Escape") {
|
|
||||||
if (searchInput.value) {
|
|
||||||
clearSearch()
|
|
||||||
} else {
|
|
||||||
closeDropdown()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleClearClick = (e: MouseEvent) => {
|
|
||||||
e.stopPropagation()
|
|
||||||
clearSearch()
|
|
||||||
}
|
|
||||||
|
|
||||||
trigger.addEventListener("click", handleTriggerClick)
|
|
||||||
trigger.addEventListener("keydown", handleTriggerKeydown)
|
|
||||||
searchInput.addEventListener("input", handleSearchInput)
|
|
||||||
searchInput.addEventListener("keydown", handleSearchKeydown)
|
|
||||||
clearButton.addEventListener("click", handleClearClick)
|
|
||||||
|
|
||||||
const viewLinks = viewList.querySelectorAll(".bases-toolbar-menu-item")
|
|
||||||
viewLinks.forEach((link) => {
|
|
||||||
link.addEventListener("click", closeDropdown)
|
|
||||||
window.addCleanup(() => link.removeEventListener("click", closeDropdown))
|
|
||||||
})
|
|
||||||
|
|
||||||
selector.addEventListener("close-dropdown", closeDropdown)
|
|
||||||
|
|
||||||
window.addCleanup(() => {
|
|
||||||
trigger.removeEventListener("click", handleTriggerClick)
|
|
||||||
trigger.removeEventListener("keydown", handleTriggerKeydown)
|
|
||||||
searchInput.removeEventListener("input", handleSearchInput)
|
|
||||||
searchInput.removeEventListener("keydown", handleSearchKeydown)
|
|
||||||
clearButton.removeEventListener("click", handleClearClick)
|
|
||||||
selector.removeEventListener("close-dropdown", closeDropdown)
|
|
||||||
selector.removeAttribute("data-initialized")
|
|
||||||
closeDropdown()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
document.addEventListener("nav", setupBaseViewSelector)
|
|
||||||
@@ -111,10 +111,6 @@ function createFolderNode(
|
|||||||
const folderPath = node.slug
|
const folderPath = node.slug
|
||||||
folderContainer.dataset.folderpath = folderPath
|
folderContainer.dataset.folderpath = folderPath
|
||||||
|
|
||||||
if (currentSlug === folderPath) {
|
|
||||||
folderContainer.classList.add("active")
|
|
||||||
}
|
|
||||||
|
|
||||||
if (opts.folderClickBehavior === "link") {
|
if (opts.folderClickBehavior === "link") {
|
||||||
// Replace button with link for link behavior
|
// Replace button with link for link behavior
|
||||||
const button = titleContainer.querySelector(".folder-button") as HTMLElement
|
const button = titleContainer.querySelector(".folder-button") as HTMLElement
|
||||||
|
|||||||
@@ -29,31 +29,17 @@ class DiagramPanZoom {
|
|||||||
const mouseDownHandler = this.onMouseDown.bind(this)
|
const mouseDownHandler = this.onMouseDown.bind(this)
|
||||||
const mouseMoveHandler = this.onMouseMove.bind(this)
|
const mouseMoveHandler = this.onMouseMove.bind(this)
|
||||||
const mouseUpHandler = this.onMouseUp.bind(this)
|
const mouseUpHandler = this.onMouseUp.bind(this)
|
||||||
|
|
||||||
// Touch drag events
|
|
||||||
const touchStartHandler = this.onTouchStart.bind(this)
|
|
||||||
const touchMoveHandler = this.onTouchMove.bind(this)
|
|
||||||
const touchEndHandler = this.onTouchEnd.bind(this)
|
|
||||||
|
|
||||||
const resizeHandler = this.resetTransform.bind(this)
|
const resizeHandler = this.resetTransform.bind(this)
|
||||||
|
|
||||||
this.container.addEventListener("mousedown", mouseDownHandler)
|
this.container.addEventListener("mousedown", mouseDownHandler)
|
||||||
document.addEventListener("mousemove", mouseMoveHandler)
|
document.addEventListener("mousemove", mouseMoveHandler)
|
||||||
document.addEventListener("mouseup", mouseUpHandler)
|
document.addEventListener("mouseup", mouseUpHandler)
|
||||||
|
|
||||||
this.container.addEventListener("touchstart", touchStartHandler, { passive: false })
|
|
||||||
document.addEventListener("touchmove", touchMoveHandler, { passive: false })
|
|
||||||
document.addEventListener("touchend", touchEndHandler)
|
|
||||||
|
|
||||||
window.addEventListener("resize", resizeHandler)
|
window.addEventListener("resize", resizeHandler)
|
||||||
|
|
||||||
this.cleanups.push(
|
this.cleanups.push(
|
||||||
() => this.container.removeEventListener("mousedown", mouseDownHandler),
|
() => this.container.removeEventListener("mousedown", mouseDownHandler),
|
||||||
() => document.removeEventListener("mousemove", mouseMoveHandler),
|
() => document.removeEventListener("mousemove", mouseMoveHandler),
|
||||||
() => document.removeEventListener("mouseup", mouseUpHandler),
|
() => document.removeEventListener("mouseup", mouseUpHandler),
|
||||||
() => this.container.removeEventListener("touchstart", touchStartHandler),
|
|
||||||
() => document.removeEventListener("touchmove", touchMoveHandler),
|
|
||||||
() => document.removeEventListener("touchend", touchEndHandler),
|
|
||||||
() => window.removeEventListener("resize", resizeHandler),
|
() => window.removeEventListener("resize", resizeHandler),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -113,30 +99,6 @@ class DiagramPanZoom {
|
|||||||
this.container.style.cursor = "grab"
|
this.container.style.cursor = "grab"
|
||||||
}
|
}
|
||||||
|
|
||||||
private onTouchStart(e: TouchEvent) {
|
|
||||||
if (e.touches.length !== 1) return
|
|
||||||
this.isDragging = true
|
|
||||||
const touch = e.touches[0]
|
|
||||||
this.startPan = { x: touch.clientX - this.currentPan.x, y: touch.clientY - this.currentPan.y }
|
|
||||||
}
|
|
||||||
|
|
||||||
private onTouchMove(e: TouchEvent) {
|
|
||||||
if (!this.isDragging || e.touches.length !== 1) return
|
|
||||||
e.preventDefault() // Prevent scrolling
|
|
||||||
|
|
||||||
const touch = e.touches[0]
|
|
||||||
this.currentPan = {
|
|
||||||
x: touch.clientX - this.startPan.x,
|
|
||||||
y: touch.clientY - this.startPan.y,
|
|
||||||
}
|
|
||||||
|
|
||||||
this.updateTransform()
|
|
||||||
}
|
|
||||||
|
|
||||||
private onTouchEnd() {
|
|
||||||
this.isDragging = false
|
|
||||||
}
|
|
||||||
|
|
||||||
private zoom(delta: number) {
|
private zoom(delta: number) {
|
||||||
const newScale = Math.min(Math.max(this.scale + delta, this.MIN_SCALE), this.MAX_SCALE)
|
const newScale = Math.min(Math.max(this.scale + delta, this.MIN_SCALE), this.MAX_SCALE)
|
||||||
|
|
||||||
@@ -158,15 +120,11 @@ class DiagramPanZoom {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private resetTransform() {
|
private resetTransform() {
|
||||||
const svg = this.content.querySelector("svg")!
|
|
||||||
const rect = svg.getBoundingClientRect()
|
|
||||||
const width = rect.width / this.scale
|
|
||||||
const height = rect.height / this.scale
|
|
||||||
|
|
||||||
this.scale = 1
|
this.scale = 1
|
||||||
|
const svg = this.content.querySelector("svg")!
|
||||||
this.currentPan = {
|
this.currentPan = {
|
||||||
x: (this.container.clientWidth - width) / 2,
|
x: svg.getBoundingClientRect().width / 2,
|
||||||
y: (this.container.clientHeight - height) / 2,
|
y: svg.getBoundingClientRect().height / 2,
|
||||||
}
|
}
|
||||||
this.updateTransform()
|
this.updateTransform()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import FlexSearch, { DefaultDocumentSearchResults } from "flexsearch"
|
import FlexSearch, { DefaultDocumentSearchResults, Id } from "flexsearch"
|
||||||
import { ContentDetails } from "../../plugins/emitters/contentIndex"
|
import { ContentDetails } from "../../plugins/emitters/contentIndex"
|
||||||
import { registerEscapeHandler, removeAllChildren } from "./util"
|
import { SemanticClient, type SemanticResult } from "./semantic.inline"
|
||||||
|
import { registerEscapeHandler, removeAllChildren, fetchCanonical } from "./util"
|
||||||
import { FullSlug, normalizeRelativeURLs, resolveRelative } from "../../util/path"
|
import { FullSlug, normalizeRelativeURLs, resolveRelative } from "../../util/path"
|
||||||
|
|
||||||
interface Item {
|
interface Item {
|
||||||
@@ -14,81 +15,46 @@ interface Item {
|
|||||||
|
|
||||||
// Can be expanded with things like "term" in the future
|
// Can be expanded with things like "term" in the future
|
||||||
type SearchType = "basic" | "tags"
|
type SearchType = "basic" | "tags"
|
||||||
let searchType: SearchType = "basic"
|
type SearchMode = "lexical" | "semantic"
|
||||||
let currentSearchTerm: string = ""
|
const SEARCH_MODE_STORAGE_KEY = "quartz:search:mode"
|
||||||
const encoder = (str: string): string[] => {
|
|
||||||
const tokens: string[] = []
|
|
||||||
let bufferStart = -1
|
|
||||||
let bufferEnd = -1
|
|
||||||
const lower = str.toLowerCase()
|
|
||||||
|
|
||||||
let i = 0
|
const loadStoredSearchMode = (): SearchMode | null => {
|
||||||
for (const char of lower) {
|
if (typeof window === "undefined") {
|
||||||
const code = char.codePointAt(0)!
|
return null
|
||||||
|
|
||||||
const isCJK =
|
|
||||||
(code >= 0x3040 && code <= 0x309f) ||
|
|
||||||
(code >= 0x30a0 && code <= 0x30ff) ||
|
|
||||||
(code >= 0x4e00 && code <= 0x9fff) ||
|
|
||||||
(code >= 0xac00 && code <= 0xd7af) ||
|
|
||||||
(code >= 0x20000 && code <= 0x2a6df)
|
|
||||||
|
|
||||||
const isWhitespace = code === 32 || code === 9 || code === 10 || code === 13
|
|
||||||
|
|
||||||
if (isCJK) {
|
|
||||||
if (bufferStart !== -1) {
|
|
||||||
tokens.push(lower.slice(bufferStart, bufferEnd))
|
|
||||||
bufferStart = -1
|
|
||||||
}
|
|
||||||
tokens.push(char)
|
|
||||||
} else if (isWhitespace) {
|
|
||||||
if (bufferStart !== -1) {
|
|
||||||
tokens.push(lower.slice(bufferStart, bufferEnd))
|
|
||||||
bufferStart = -1
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (bufferStart === -1) bufferStart = i
|
|
||||||
bufferEnd = i + char.length
|
|
||||||
}
|
|
||||||
|
|
||||||
i += char.length
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (bufferStart !== -1) {
|
try {
|
||||||
tokens.push(lower.slice(bufferStart))
|
const stored = window.localStorage.getItem(SEARCH_MODE_STORAGE_KEY)
|
||||||
|
return stored === "lexical" || stored === "semantic" ? stored : null
|
||||||
|
} catch (err) {
|
||||||
|
console.warn("[Search] failed to read stored search mode:", err)
|
||||||
|
return null
|
||||||
}
|
}
|
||||||
|
|
||||||
return tokens
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let index = new FlexSearch.Document<Item>({
|
const persistSearchMode = (mode: SearchMode) => {
|
||||||
encode: encoder,
|
if (typeof window === "undefined") {
|
||||||
document: {
|
return
|
||||||
id: "id",
|
}
|
||||||
tag: "tags",
|
|
||||||
index: [
|
try {
|
||||||
{
|
window.localStorage.setItem(SEARCH_MODE_STORAGE_KEY, mode)
|
||||||
field: "title",
|
} catch (err) {
|
||||||
tokenize: "forward",
|
console.warn("[Search] failed to persist search mode:", err)
|
||||||
},
|
}
|
||||||
{
|
}
|
||||||
field: "content",
|
|
||||||
tokenize: "forward",
|
let searchMode: SearchMode = "lexical"
|
||||||
},
|
let currentSearchTerm: string = ""
|
||||||
{
|
let rawSearchTerm: string = ""
|
||||||
field: "tags",
|
let semantic: SemanticClient | null = null
|
||||||
tokenize: "forward",
|
let semanticReady = false
|
||||||
},
|
let semanticInitFailed = false
|
||||||
],
|
type SimilarityResult = { item: Item; similarity: number }
|
||||||
},
|
let chunkMetadata: Record<string, { parentSlug: string; chunkId: number }> = {}
|
||||||
})
|
let manifestIds: string[] = []
|
||||||
|
|
||||||
const p = new DOMParser()
|
|
||||||
const fetchContentCache: Map<FullSlug, Element[]> = new Map()
|
|
||||||
const contextWindowWords = 30
|
const contextWindowWords = 30
|
||||||
const numSearchResults = 8
|
|
||||||
const numTagResults = 5
|
|
||||||
|
|
||||||
const tokenizeTerm = (term: string) => {
|
const tokenizeTerm = (term: string) => {
|
||||||
const tokens = term.split(/\s+/).filter((t) => t.trim() !== "")
|
const tokens = term.split(/\s+/).filter((t) => t.trim() !== "")
|
||||||
const tokenLen = tokens.length
|
const tokenLen = tokens.length
|
||||||
@@ -146,6 +112,102 @@ function highlight(searchTerm: string, text: string, trim?: boolean) {
|
|||||||
}`
|
}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// To be used with search and everything else with flexsearch
|
||||||
|
const encoder = (str: string) =>
|
||||||
|
str
|
||||||
|
.toLowerCase()
|
||||||
|
.split(/\s+/)
|
||||||
|
.filter((token) => token.length > 0)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get parent document slug for a chunk ID
|
||||||
|
*/
|
||||||
|
function getParentSlug(slug: string): string {
|
||||||
|
const meta = chunkMetadata[slug]
|
||||||
|
return meta ? meta.parentSlug : slug
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aggregate semantic search results from chunks to documents using RRF
|
||||||
|
* @param results Raw semantic results (chunk-level)
|
||||||
|
* @param slugToDocIndex Map from document slug to index in idDataMap
|
||||||
|
* @returns Object with rrfScores (for ranking) and maxScores (for display)
|
||||||
|
*/
|
||||||
|
function aggregateChunkResults(
|
||||||
|
results: SemanticResult[],
|
||||||
|
slugToDocIndex: Map<FullSlug, number>,
|
||||||
|
): { rrfScores: Map<number, number>; maxScores: Map<number, number> } {
|
||||||
|
// Group chunks by parent document
|
||||||
|
const docChunks = new Map<string, Array<{ score: number }>>()
|
||||||
|
|
||||||
|
results.forEach(({ id, score }) => {
|
||||||
|
// id is an index into manifestIds (the chunk IDs from embeddings)
|
||||||
|
const chunkSlug = manifestIds[id]
|
||||||
|
if (!chunkSlug) return
|
||||||
|
|
||||||
|
// Get parent document slug
|
||||||
|
const parentSlug = getParentSlug(chunkSlug)
|
||||||
|
|
||||||
|
if (!docChunks.has(parentSlug)) {
|
||||||
|
docChunks.set(parentSlug, [])
|
||||||
|
}
|
||||||
|
|
||||||
|
docChunks.get(parentSlug)!.push({ score })
|
||||||
|
})
|
||||||
|
|
||||||
|
// Apply RRF for ranking and track max similarity for display
|
||||||
|
const rrfScores = new Map<number, number>()
|
||||||
|
const maxScores = new Map<number, number>()
|
||||||
|
const RRF_K = 60
|
||||||
|
|
||||||
|
for (const [parentSlug, chunks] of docChunks) {
|
||||||
|
const docIdx = slugToDocIndex.get(parentSlug as FullSlug)
|
||||||
|
if (typeof docIdx !== "number") continue
|
||||||
|
|
||||||
|
// Sort chunks by score descending to assign per-document ranks
|
||||||
|
chunks.sort((a, b) => b.score - a.score)
|
||||||
|
|
||||||
|
// RRF formula: sum(1 / (k + rank)) across all chunks, using per-document ranks
|
||||||
|
const rrfScore = chunks.reduce((sum, _, rank) => sum + 1.0 / (RRF_K + rank), 0)
|
||||||
|
|
||||||
|
// Max similarity score for display (original 0-1 range)
|
||||||
|
const maxScore = chunks[0].score
|
||||||
|
|
||||||
|
rrfScores.set(docIdx, rrfScore)
|
||||||
|
maxScores.set(docIdx, maxScore)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { rrfScores, maxScores }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize the FlexSearch Document instance with the appropriate configuration
|
||||||
|
const index = new FlexSearch.Document<Item>({
|
||||||
|
tokenize: "forward",
|
||||||
|
encode: encoder,
|
||||||
|
document: {
|
||||||
|
id: "id",
|
||||||
|
tag: "tags",
|
||||||
|
index: [
|
||||||
|
{
|
||||||
|
field: "title",
|
||||||
|
tokenize: "forward",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
field: "content",
|
||||||
|
tokenize: "forward",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
field: "tags",
|
||||||
|
tokenize: "forward",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const p = new DOMParser()
|
||||||
|
const fetchContentCache: Map<FullSlug, Element[]> = new Map()
|
||||||
|
const numSearchResults = 10
|
||||||
|
const numTagResults = 10
|
||||||
function highlightHTML(searchTerm: string, el: HTMLElement) {
|
function highlightHTML(searchTerm: string, el: HTMLElement) {
|
||||||
const p = new DOMParser()
|
const p = new DOMParser()
|
||||||
const tokenizedTerms = tokenizeTerm(searchTerm)
|
const tokenizedTerms = tokenizeTerm(searchTerm)
|
||||||
@@ -187,7 +249,11 @@ function highlightHTML(searchTerm: string, el: HTMLElement) {
|
|||||||
return html.body
|
return html.body
|
||||||
}
|
}
|
||||||
|
|
||||||
async function setupSearch(searchElement: Element, currentSlug: FullSlug, data: ContentIndex) {
|
async function setupSearch(
|
||||||
|
searchElement: HTMLDivElement,
|
||||||
|
currentSlug: FullSlug,
|
||||||
|
data: ContentIndex,
|
||||||
|
) {
|
||||||
const container = searchElement.querySelector(".search-container") as HTMLElement
|
const container = searchElement.querySelector(".search-container") as HTMLElement
|
||||||
if (!container) return
|
if (!container) return
|
||||||
|
|
||||||
@@ -202,12 +268,183 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
const searchLayout = searchElement.querySelector(".search-layout") as HTMLElement
|
const searchLayout = searchElement.querySelector(".search-layout") as HTMLElement
|
||||||
if (!searchLayout) return
|
if (!searchLayout) return
|
||||||
|
|
||||||
|
const searchSpace = searchElement?.querySelector(".search-space") as HTMLFormElement
|
||||||
|
if (!searchSpace) return
|
||||||
|
|
||||||
|
// Create semantic search progress bar
|
||||||
|
const progressBar = document.createElement("div")
|
||||||
|
progressBar.className = "semantic-search-progress"
|
||||||
|
progressBar.style.cssText = `
|
||||||
|
position: absolute;
|
||||||
|
bottom: 0;
|
||||||
|
left: 0;
|
||||||
|
height: 2px;
|
||||||
|
width: 0;
|
||||||
|
background: var(--secondary);
|
||||||
|
transition: width 0.3s ease, opacity 0.3s ease;
|
||||||
|
opacity: 0;
|
||||||
|
z-index: 9999;
|
||||||
|
`
|
||||||
|
searchBar.parentElement?.appendChild(progressBar)
|
||||||
|
|
||||||
|
const startSemanticProgress = () => {
|
||||||
|
progressBar.style.opacity = "1"
|
||||||
|
progressBar.style.width = "0"
|
||||||
|
setTimeout(() => {
|
||||||
|
progressBar.style.width = "100%"
|
||||||
|
}, 10)
|
||||||
|
}
|
||||||
|
|
||||||
|
const completeSemanticProgress = () => {
|
||||||
|
progressBar.style.opacity = "0"
|
||||||
|
setTimeout(() => {
|
||||||
|
progressBar.style.width = "0"
|
||||||
|
}, 300)
|
||||||
|
}
|
||||||
|
|
||||||
|
const resetProgressBar = () => {
|
||||||
|
progressBar.style.opacity = "0"
|
||||||
|
progressBar.style.width = "0"
|
||||||
|
}
|
||||||
|
|
||||||
const idDataMap = Object.keys(data) as FullSlug[]
|
const idDataMap = Object.keys(data) as FullSlug[]
|
||||||
|
const slugToIndex = new Map<FullSlug, number>()
|
||||||
|
idDataMap.forEach((slug, idx) => slugToIndex.set(slug, idx))
|
||||||
|
const modeToggle = searchSpace.querySelector(".search-mode-toggle") as HTMLDivElement | null
|
||||||
|
const modeButtons = modeToggle
|
||||||
|
? Array.from(modeToggle.querySelectorAll<HTMLButtonElement>(".mode-option"))
|
||||||
|
: []
|
||||||
|
|
||||||
const appendLayout = (el: HTMLElement) => {
|
const appendLayout = (el: HTMLElement) => {
|
||||||
searchLayout.appendChild(el)
|
searchLayout.appendChild(el)
|
||||||
}
|
}
|
||||||
|
|
||||||
const enablePreview = searchLayout.dataset.preview === "true"
|
const enablePreview = searchLayout.dataset.preview === "true"
|
||||||
|
if (!semantic && !semanticInitFailed) {
|
||||||
|
const client = new SemanticClient(semanticCfg)
|
||||||
|
try {
|
||||||
|
await client.ensureReady()
|
||||||
|
semantic = client
|
||||||
|
semanticReady = true
|
||||||
|
|
||||||
|
// Load chunk metadata and IDs from manifest
|
||||||
|
try {
|
||||||
|
const manifestUrl = "/embeddings/manifest.json"
|
||||||
|
const res = await fetch(manifestUrl)
|
||||||
|
if (res.ok) {
|
||||||
|
const manifest = await res.json()
|
||||||
|
chunkMetadata = manifest.chunkMetadata || {}
|
||||||
|
manifestIds = manifest.ids || []
|
||||||
|
console.debug(
|
||||||
|
`[Search] Loaded manifest: ${manifestIds.length} chunks, ${Object.keys(chunkMetadata).length} chunked documents`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.warn("[Search] failed to load chunk metadata:", err)
|
||||||
|
chunkMetadata = {}
|
||||||
|
manifestIds = []
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.warn("[SemanticClient] initialization failed:", err)
|
||||||
|
client.dispose()
|
||||||
|
semantic = null
|
||||||
|
semanticReady = false
|
||||||
|
semanticInitFailed = true
|
||||||
|
}
|
||||||
|
} else if (semantic && !semanticReady) {
|
||||||
|
try {
|
||||||
|
await semantic.ensureReady()
|
||||||
|
semanticReady = true
|
||||||
|
} catch (err) {
|
||||||
|
console.warn("[SemanticClient] became unavailable:", err)
|
||||||
|
semantic.dispose()
|
||||||
|
semantic = null
|
||||||
|
semanticReady = false
|
||||||
|
semanticInitFailed = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const storedMode = loadStoredSearchMode()
|
||||||
|
if (storedMode === "semantic") {
|
||||||
|
if (semanticReady) {
|
||||||
|
searchMode = storedMode
|
||||||
|
}
|
||||||
|
} else if (storedMode === "lexical") {
|
||||||
|
searchMode = storedMode
|
||||||
|
}
|
||||||
|
if (!semanticReady && searchMode === "semantic") {
|
||||||
|
searchMode = "lexical"
|
||||||
|
}
|
||||||
|
let searchSeq = 0
|
||||||
|
let runSearchTimer: number | null = null
|
||||||
|
let lastInputAt = 0
|
||||||
|
searchLayout.dataset.mode = searchMode
|
||||||
|
|
||||||
|
const updateModeUI = (mode: SearchMode) => {
|
||||||
|
modeButtons.forEach((button) => {
|
||||||
|
const btnMode = (button.dataset.mode as SearchMode) ?? "lexical"
|
||||||
|
const isActive = btnMode === mode
|
||||||
|
button.classList.toggle("active", isActive)
|
||||||
|
button.setAttribute("aria-pressed", String(isActive))
|
||||||
|
})
|
||||||
|
if (modeToggle) {
|
||||||
|
modeToggle.dataset.mode = mode
|
||||||
|
}
|
||||||
|
searchLayout.dataset.mode = mode
|
||||||
|
}
|
||||||
|
|
||||||
|
const computeDebounceDelay = (term: string): number => {
|
||||||
|
const trimmed = term.trim()
|
||||||
|
const lastTerm = currentSearchTerm
|
||||||
|
const isExtension =
|
||||||
|
lastTerm.length > 0 && trimmed.length > lastTerm.length && trimmed.startsWith(lastTerm)
|
||||||
|
const isRetraction = lastTerm.length > trimmed.length
|
||||||
|
const isReplacement =
|
||||||
|
lastTerm.length > 0 && !trimmed.startsWith(lastTerm) && !lastTerm.startsWith(trimmed)
|
||||||
|
const baseFullQueryDelay = 200
|
||||||
|
const semanticPenalty = searchMode === "semantic" ? 60 : 0
|
||||||
|
|
||||||
|
if (isExtension && trimmed.length > 2) {
|
||||||
|
return baseFullQueryDelay + semanticPenalty
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isReplacement && trimmed.length > 3) {
|
||||||
|
return Math.max(90, baseFullQueryDelay - 80)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isRetraction) {
|
||||||
|
return 90
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseFullQueryDelay + (searchMode === "semantic" ? 40 : 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
const triggerSearchWithMode = (mode: SearchMode) => {
|
||||||
|
if (mode === "semantic" && !semanticReady) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (searchMode === mode) return
|
||||||
|
searchMode = mode
|
||||||
|
updateModeUI(mode)
|
||||||
|
persistSearchMode(searchMode)
|
||||||
|
if (rawSearchTerm.trim() !== "") {
|
||||||
|
searchLayout.classList.add("display-results")
|
||||||
|
const token = ++searchSeq
|
||||||
|
void runSearch(rawSearchTerm, token)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
updateModeUI(searchMode)
|
||||||
|
|
||||||
|
modeButtons.forEach((button) => {
|
||||||
|
const btnMode = (button.dataset.mode as SearchMode) ?? "lexical"
|
||||||
|
if (btnMode === "semantic") {
|
||||||
|
button.disabled = !semanticReady
|
||||||
|
button.setAttribute("aria-disabled", String(!semanticReady))
|
||||||
|
}
|
||||||
|
const handler = () => triggerSearchWithMode(btnMode)
|
||||||
|
button.addEventListener("click", handler)
|
||||||
|
window.addCleanup(() => button.removeEventListener("click", handler))
|
||||||
|
})
|
||||||
let preview: HTMLDivElement | undefined = undefined
|
let preview: HTMLDivElement | undefined = undefined
|
||||||
let previewInner: HTMLDivElement | undefined = undefined
|
let previewInner: HTMLDivElement | undefined = undefined
|
||||||
const results = document.createElement("div")
|
const results = document.createElement("div")
|
||||||
@@ -229,20 +466,23 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
removeAllChildren(preview)
|
removeAllChildren(preview)
|
||||||
}
|
}
|
||||||
searchLayout.classList.remove("display-results")
|
searchLayout.classList.remove("display-results")
|
||||||
searchType = "basic" // reset search type after closing
|
|
||||||
searchButton.focus()
|
searchButton.focus()
|
||||||
|
resetProgressBar()
|
||||||
}
|
}
|
||||||
|
|
||||||
function showSearch(searchTypeNew: SearchType) {
|
function showSearch(type: SearchType) {
|
||||||
searchType = searchTypeNew
|
|
||||||
if (sidebar) sidebar.style.zIndex = "1"
|
|
||||||
container.classList.add("active")
|
container.classList.add("active")
|
||||||
|
if (type === "tags") {
|
||||||
|
searchBar.value = "#"
|
||||||
|
rawSearchTerm = "#"
|
||||||
|
}
|
||||||
searchBar.focus()
|
searchBar.focus()
|
||||||
}
|
}
|
||||||
|
|
||||||
let currentHover: HTMLInputElement | null = null
|
let currentHover: HTMLInputElement | null = null
|
||||||
|
|
||||||
async function shortcutHandler(e: HTMLElementEventMap["keydown"]) {
|
async function shortcutHandler(e: HTMLElementEventMap["keydown"]) {
|
||||||
if (e.key === "k" && (e.ctrlKey || e.metaKey) && !e.shiftKey) {
|
if ((e.key === "/" || e.key === "k") && (e.ctrlKey || e.metaKey) && !e.shiftKey) {
|
||||||
e.preventDefault()
|
e.preventDefault()
|
||||||
const searchBarOpen = container.classList.contains("active")
|
const searchBarOpen = container.classList.contains("active")
|
||||||
searchBarOpen ? hideSearch() : showSearch("basic")
|
searchBarOpen ? hideSearch() : showSearch("basic")
|
||||||
@@ -252,9 +492,6 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
e.preventDefault()
|
e.preventDefault()
|
||||||
const searchBarOpen = container.classList.contains("active")
|
const searchBarOpen = container.classList.contains("active")
|
||||||
searchBarOpen ? hideSearch() : showSearch("tags")
|
searchBarOpen ? hideSearch() : showSearch("tags")
|
||||||
|
|
||||||
// add "#" prefix for tag search
|
|
||||||
searchBar.value = "#"
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -264,20 +501,29 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
|
|
||||||
// If search is active, then we will render the first result and display accordingly
|
// If search is active, then we will render the first result and display accordingly
|
||||||
if (!container.classList.contains("active")) return
|
if (!container.classList.contains("active")) return
|
||||||
if (e.key === "Enter" && !e.isComposing) {
|
if (e.key === "Enter") {
|
||||||
// If result has focus, navigate to that one, otherwise pick first result
|
// If result has focus, navigate to that one, otherwise pick first result
|
||||||
|
let anchor: HTMLAnchorElement | undefined
|
||||||
if (results.contains(document.activeElement)) {
|
if (results.contains(document.activeElement)) {
|
||||||
const active = document.activeElement as HTMLInputElement
|
anchor = document.activeElement as HTMLAnchorElement
|
||||||
if (active.classList.contains("no-match")) return
|
if (anchor.classList.contains("no-match")) return
|
||||||
await displayPreview(active)
|
await displayPreview(anchor)
|
||||||
active.click()
|
e.preventDefault()
|
||||||
|
anchor.click()
|
||||||
} else {
|
} else {
|
||||||
const anchor = document.getElementsByClassName("result-card")[0] as HTMLInputElement | null
|
anchor = document.getElementsByClassName("result-card")[0] as HTMLAnchorElement
|
||||||
if (!anchor || anchor.classList.contains("no-match")) return
|
if (!anchor || anchor.classList.contains("no-match")) return
|
||||||
await displayPreview(anchor)
|
await displayPreview(anchor)
|
||||||
|
e.preventDefault()
|
||||||
anchor.click()
|
anchor.click()
|
||||||
}
|
}
|
||||||
} else if (e.key === "ArrowUp" || (e.shiftKey && e.key === "Tab")) {
|
if (anchor !== undefined)
|
||||||
|
window.spaNavigate(new URL(new URL(anchor.href).pathname, window.location.toString()))
|
||||||
|
} else if (
|
||||||
|
e.key === "ArrowUp" ||
|
||||||
|
(e.shiftKey && e.key === "Tab") ||
|
||||||
|
(e.ctrlKey && e.key === "p")
|
||||||
|
) {
|
||||||
e.preventDefault()
|
e.preventDefault()
|
||||||
if (results.contains(document.activeElement)) {
|
if (results.contains(document.activeElement)) {
|
||||||
// If an element in results-container already has focus, focus previous one
|
// If an element in results-container already has focus, focus previous one
|
||||||
@@ -290,7 +536,7 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
if (prevResult) currentHover = prevResult
|
if (prevResult) currentHover = prevResult
|
||||||
await displayPreview(prevResult)
|
await displayPreview(prevResult)
|
||||||
}
|
}
|
||||||
} else if (e.key === "ArrowDown" || e.key === "Tab") {
|
} else if (e.key === "ArrowDown" || e.key === "Tab" || (e.ctrlKey && e.key === "n")) {
|
||||||
e.preventDefault()
|
e.preventDefault()
|
||||||
// The results should already been focused, so we need to find the next one.
|
// The results should already been focused, so we need to find the next one.
|
||||||
// The activeElement is the search bar, so we need to find the first result and focus it.
|
// The activeElement is the search bar, so we need to find the first result and focus it.
|
||||||
@@ -307,25 +553,33 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const formatForDisplay = (term: string, id: number) => {
|
const formatForDisplay = (term: string, id: number, renderType: SearchType) => {
|
||||||
const slug = idDataMap[id]
|
const slug = idDataMap[id]
|
||||||
|
|
||||||
|
// Check if query contains title words (for boosting exact matches)
|
||||||
|
const queryTokens = tokenizeTerm(term)
|
||||||
|
const titleTokens = tokenizeTerm(data[slug].title ?? "")
|
||||||
|
const titleMatch = titleTokens.some((t) => queryTokens.includes(t))
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id,
|
id,
|
||||||
slug,
|
slug,
|
||||||
title: searchType === "tags" ? data[slug].title : highlight(term, data[slug].title ?? ""),
|
title: renderType === "tags" ? data[slug].title : highlight(term, data[slug].title ?? ""),
|
||||||
content: highlight(term, data[slug].content ?? "", true),
|
content: highlight(term, data[slug].content ?? "", true),
|
||||||
tags: highlightTags(term.substring(1), data[slug].tags),
|
tags: highlightTags(term, data[slug].tags, renderType),
|
||||||
|
titleMatch, // Add title match flag for boosting
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function highlightTags(term: string, tags: string[]) {
|
function highlightTags(term: string, tags: string[], renderType: SearchType) {
|
||||||
if (!tags || searchType !== "tags") {
|
if (!tags || renderType !== "tags") {
|
||||||
return []
|
return []
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const tagTerm = term.toLowerCase()
|
||||||
return tags
|
return tags
|
||||||
.map((tag) => {
|
.map((tag) => {
|
||||||
if (tag.toLowerCase().includes(term.toLowerCase())) {
|
if (tag.toLowerCase().includes(tagTerm)) {
|
||||||
return `<li><p class="match-tag">#${tag}</p></li>`
|
return `<li><p class="match-tag">#${tag}</p></li>`
|
||||||
} else {
|
} else {
|
||||||
return `<li><p>#${tag}</p></li>`
|
return `<li><p>#${tag}</p></li>`
|
||||||
@@ -338,24 +592,40 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
return new URL(resolveRelative(currentSlug, slug), location.toString())
|
return new URL(resolveRelative(currentSlug, slug), location.toString())
|
||||||
}
|
}
|
||||||
|
|
||||||
const resultToHTML = ({ slug, title, content, tags }: Item) => {
|
const resultToHTML = ({ item, percent }: { item: Item; percent: number | null }) => {
|
||||||
|
const { slug, title, content, tags, target } = item
|
||||||
const htmlTags = tags.length > 0 ? `<ul class="tags">${tags.join("")}</ul>` : ``
|
const htmlTags = tags.length > 0 ? `<ul class="tags">${tags.join("")}</ul>` : ``
|
||||||
const itemTile = document.createElement("a")
|
const itemTile = document.createElement("a")
|
||||||
|
const titleContent = target ? highlight(currentSearchTerm, target) : title
|
||||||
|
const subscript = target ? `<b>${slug}</b>` : ``
|
||||||
|
let percentLabel = "—"
|
||||||
|
let percentAttr = ""
|
||||||
|
if (percent !== null && Number.isFinite(percent)) {
|
||||||
|
const bounded = Math.max(0, Math.min(100, percent))
|
||||||
|
percentLabel = `${bounded.toFixed(1)}%`
|
||||||
|
percentAttr = bounded.toFixed(3)
|
||||||
|
}
|
||||||
itemTile.classList.add("result-card")
|
itemTile.classList.add("result-card")
|
||||||
itemTile.id = slug
|
itemTile.id = slug
|
||||||
itemTile.href = resolveUrl(slug).toString()
|
itemTile.href = resolveUrl(slug).toString()
|
||||||
itemTile.innerHTML = `
|
itemTile.innerHTML = `<hgroup>
|
||||||
<h3 class="card-title">${title}</h3>
|
<h3>${titleContent}</h3>
|
||||||
${htmlTags}
|
${subscript}${htmlTags}
|
||||||
<p class="card-description">${content}</p>
|
${searchMode === "semantic" ? `<span class="result-likelihood" title="match likelihood"> ${percentLabel}</span>` : ""}
|
||||||
`
|
${enablePreview && window.innerWidth > 600 ? "" : `<p>${content}</p>`}
|
||||||
itemTile.addEventListener("click", (event) => {
|
</hgroup>`
|
||||||
if (event.altKey || event.ctrlKey || event.metaKey || event.shiftKey) return
|
if (percentAttr) itemTile.dataset.scorePercent = percentAttr
|
||||||
hideSearch()
|
else delete itemTile.dataset.scorePercent
|
||||||
})
|
|
||||||
|
|
||||||
const handler = (event: MouseEvent) => {
|
const handler = (evt: MouseEvent) => {
|
||||||
if (event.altKey || event.ctrlKey || event.metaKey || event.shiftKey) return
|
if (evt.altKey || evt.ctrlKey || evt.metaKey || evt.shiftKey) return
|
||||||
|
const anchor = evt.currentTarget as HTMLAnchorElement | null
|
||||||
|
if (!anchor) return
|
||||||
|
evt.preventDefault()
|
||||||
|
const href = anchor.getAttribute("href")
|
||||||
|
if (!href) return
|
||||||
|
const url = new URL(href, window.location.toString())
|
||||||
|
window.spaNavigate(url)
|
||||||
hideSearch()
|
hideSearch()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -373,15 +643,22 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
return itemTile
|
return itemTile
|
||||||
}
|
}
|
||||||
|
|
||||||
async function displayResults(finalResults: Item[]) {
|
async function displayResults(finalResults: SimilarityResult[]) {
|
||||||
removeAllChildren(results)
|
removeAllChildren(results)
|
||||||
if (finalResults.length === 0) {
|
if (finalResults.length === 0) {
|
||||||
results.innerHTML = `<a class="result-card no-match">
|
results.innerHTML = `<a class="result-card no-match">
|
||||||
<h3>No results.</h3>
|
<h3>No results.</h3>
|
||||||
<p>Try another search term?</p>
|
<p>Try another search term?</p>
|
||||||
</a>`
|
</a>`
|
||||||
|
currentHover = null
|
||||||
} else {
|
} else {
|
||||||
results.append(...finalResults.map(resultToHTML))
|
const decorated = finalResults.map(({ item, similarity }) => {
|
||||||
|
if (!Number.isFinite(similarity)) return { item, percent: null }
|
||||||
|
const bounded = Math.max(-1, Math.min(1, similarity))
|
||||||
|
const percent = ((bounded + 1) / 2) * 100
|
||||||
|
return { item, percent }
|
||||||
|
})
|
||||||
|
results.append(...decorated.map(resultToHTML))
|
||||||
}
|
}
|
||||||
|
|
||||||
if (finalResults.length === 0 && preview) {
|
if (finalResults.length === 0 && preview) {
|
||||||
@@ -401,8 +678,8 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
return fetchContentCache.get(slug) as Element[]
|
return fetchContentCache.get(slug) as Element[]
|
||||||
}
|
}
|
||||||
|
|
||||||
const targetUrl = resolveUrl(slug).toString()
|
const targetUrl = resolveUrl(slug)
|
||||||
const contents = await fetch(targetUrl)
|
const contents = await fetchCanonical(targetUrl)
|
||||||
.then((res) => res.text())
|
.then((res) => res.text())
|
||||||
.then((contents) => {
|
.then((contents) => {
|
||||||
if (contents === undefined) {
|
if (contents === undefined) {
|
||||||
@@ -432,73 +709,296 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
const highlights = [...preview.getElementsByClassName("highlight")].sort(
|
const highlights = [...preview.getElementsByClassName("highlight")].sort(
|
||||||
(a, b) => b.innerHTML.length - a.innerHTML.length,
|
(a, b) => b.innerHTML.length - a.innerHTML.length,
|
||||||
)
|
)
|
||||||
highlights[0]?.scrollIntoView({ block: "start" })
|
if (highlights.length > 0) {
|
||||||
|
const highlight = highlights[0]
|
||||||
|
const container = preview
|
||||||
|
if (container && highlight) {
|
||||||
|
// Get the relative positions
|
||||||
|
const containerRect = container.getBoundingClientRect()
|
||||||
|
const highlightRect = highlight.getBoundingClientRect()
|
||||||
|
// Calculate the scroll position relative to the container
|
||||||
|
const relativeTop = highlightRect.top - containerRect.top + container.scrollTop - 20 // 20px buffer
|
||||||
|
// Smoothly scroll the container
|
||||||
|
container.scrollTo({
|
||||||
|
top: relativeTop,
|
||||||
|
behavior: "smooth",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function onType(e: HTMLElementEventMap["input"]) {
|
async function runSearch(rawTerm: string, token: number) {
|
||||||
if (!searchLayout || !index) return
|
if (!searchLayout || !index) return
|
||||||
currentSearchTerm = (e.target as HTMLInputElement).value
|
const trimmed = rawTerm.trim()
|
||||||
searchLayout.classList.toggle("display-results", currentSearchTerm !== "")
|
if (trimmed === "") {
|
||||||
searchType = currentSearchTerm.startsWith("#") ? "tags" : "basic"
|
removeAllChildren(results)
|
||||||
|
if (preview) {
|
||||||
|
removeAllChildren(preview)
|
||||||
|
}
|
||||||
|
currentHover = null
|
||||||
|
searchLayout.classList.remove("display-results")
|
||||||
|
resetProgressBar()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
let searchResults: DefaultDocumentSearchResults<Item>
|
const modeForRanking: SearchMode = searchMode
|
||||||
if (searchType === "tags") {
|
const initialType: SearchType = trimmed.startsWith("#") ? "tags" : "basic"
|
||||||
currentSearchTerm = currentSearchTerm.substring(1).trim()
|
let workingType: SearchType = initialType
|
||||||
const separatorIndex = currentSearchTerm.indexOf(" ")
|
let highlightTerm = trimmed
|
||||||
if (separatorIndex != -1) {
|
let tagTerm = ""
|
||||||
// search by title and content index and then filter by tag (implemented in flexsearch)
|
let searchResults: DefaultDocumentSearchResults<Item> = []
|
||||||
const tag = currentSearchTerm.substring(0, separatorIndex)
|
|
||||||
const query = currentSearchTerm.substring(separatorIndex + 1).trim()
|
if (initialType === "tags") {
|
||||||
searchResults = await index.searchAsync({
|
tagTerm = trimmed.substring(1).trim()
|
||||||
query: query,
|
const separatorIndex = tagTerm.indexOf(" ")
|
||||||
// return at least 10000 documents, so it is enough to filter them by tag (implemented in flexsearch)
|
if (separatorIndex !== -1) {
|
||||||
|
const tag = tagTerm.substring(0, separatorIndex).trim()
|
||||||
|
const query = tagTerm.substring(separatorIndex + 1).trim()
|
||||||
|
const results = await index.searchAsync({
|
||||||
|
query,
|
||||||
limit: Math.max(numSearchResults, 10000),
|
limit: Math.max(numSearchResults, 10000),
|
||||||
index: ["title", "content"],
|
index: ["title", "content"],
|
||||||
tag: { tags: tag },
|
tag: { tags: tag },
|
||||||
})
|
})
|
||||||
for (let searchResult of searchResults) {
|
if (token !== searchSeq) return
|
||||||
searchResult.result = searchResult.result.slice(0, numSearchResults)
|
searchResults = Object.values(results)
|
||||||
}
|
workingType = "basic"
|
||||||
// set search type to basic and remove tag from term for proper highlightning and scroll
|
highlightTerm = query
|
||||||
searchType = "basic"
|
|
||||||
currentSearchTerm = query
|
|
||||||
} else {
|
} else {
|
||||||
// default search by tags index
|
const results = await index.searchAsync({
|
||||||
searchResults = await index.searchAsync({
|
query: tagTerm,
|
||||||
query: currentSearchTerm,
|
|
||||||
limit: numSearchResults,
|
limit: numSearchResults,
|
||||||
index: ["tags"],
|
index: ["tags"],
|
||||||
})
|
})
|
||||||
|
if (token !== searchSeq) return
|
||||||
|
searchResults = Object.values(results)
|
||||||
|
highlightTerm = tagTerm
|
||||||
}
|
}
|
||||||
} else if (searchType === "basic") {
|
} else {
|
||||||
searchResults = await index.searchAsync({
|
const results = await index.searchAsync({
|
||||||
query: currentSearchTerm,
|
query: highlightTerm,
|
||||||
limit: numSearchResults,
|
limit: numSearchResults,
|
||||||
index: ["title", "content"],
|
index: ["title", "content"],
|
||||||
})
|
})
|
||||||
|
if (token !== searchSeq) return
|
||||||
|
searchResults = Object.values(results)
|
||||||
|
}
|
||||||
|
|
||||||
|
const coerceIds = (hit?: DefaultDocumentSearchResults<Item>[number]): number[] => {
|
||||||
|
if (!hit) return []
|
||||||
|
return hit.result
|
||||||
|
.map((value: Id) => {
|
||||||
|
if (typeof value === "number") {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
const parsed = Number.parseInt(String(value), 10)
|
||||||
|
return Number.isNaN(parsed) ? null : parsed
|
||||||
|
})
|
||||||
|
.filter((value): value is number => value !== null)
|
||||||
}
|
}
|
||||||
|
|
||||||
const getByField = (field: string): number[] => {
|
const getByField = (field: string): number[] => {
|
||||||
const results = searchResults.filter((x) => x.field === field)
|
const hit = searchResults.find((x) => x.field === field)
|
||||||
return results.length === 0 ? [] : ([...results[0].result] as number[])
|
return coerceIds(hit)
|
||||||
}
|
}
|
||||||
|
|
||||||
// order titles ahead of content
|
|
||||||
const allIds: Set<number> = new Set([
|
const allIds: Set<number> = new Set([
|
||||||
...getByField("title"),
|
...getByField("title"),
|
||||||
...getByField("content"),
|
...getByField("content"),
|
||||||
...getByField("tags"),
|
...getByField("tags"),
|
||||||
])
|
])
|
||||||
const finalResults = [...allIds].map((id) => formatForDisplay(currentSearchTerm, id))
|
|
||||||
await displayResults(finalResults)
|
currentSearchTerm = highlightTerm
|
||||||
|
|
||||||
|
const candidateItems = new Map<string, Item>()
|
||||||
|
const ensureItem = (id: number): Item | null => {
|
||||||
|
const slug = idDataMap[id]
|
||||||
|
if (!slug) return null
|
||||||
|
const cached = candidateItems.get(slug)
|
||||||
|
if (cached) return cached
|
||||||
|
const item = formatForDisplay(highlightTerm, id, workingType)
|
||||||
|
if (item) {
|
||||||
|
candidateItems.set(slug, item)
|
||||||
|
return item
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseIndices: number[] = []
|
||||||
|
for (const id of allIds) {
|
||||||
|
const item = ensureItem(id)
|
||||||
|
if (!item) continue
|
||||||
|
const idx = slugToIndex.get(item.slug)
|
||||||
|
if (typeof idx === "number") {
|
||||||
|
baseIndices.push(idx)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let semanticIds: number[] = []
|
||||||
|
const semanticSimilarity = new Map<number, number>()
|
||||||
|
|
||||||
|
const integrateIds = (ids: number[]) => {
|
||||||
|
ids.forEach((docId) => {
|
||||||
|
ensureItem(docId)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const orchestrator = semanticReady && semantic ? semantic : null
|
||||||
|
|
||||||
|
const resolveSimilarity = (item: Item): number => {
|
||||||
|
const semanticHit = semanticSimilarity.get(item.id)
|
||||||
|
return semanticHit ?? Number.NaN
|
||||||
|
}
|
||||||
|
|
||||||
|
const render = async () => {
|
||||||
|
if (token !== searchSeq) return
|
||||||
|
const useSemantic = semanticReady && semanticIds.length > 0
|
||||||
|
const weights =
|
||||||
|
modeForRanking === "semantic" && useSemantic
|
||||||
|
? { base: 0.3, semantic: 1.0 }
|
||||||
|
: { base: 1.0, semantic: useSemantic ? 0.3 : 0 }
|
||||||
|
const rrf = new Map<string, number>()
|
||||||
|
const push = (ids: number[], weight: number, applyTitleBoost: boolean = false) => {
|
||||||
|
if (!ids.length || weight <= 0) return
|
||||||
|
ids.forEach((docId, rank) => {
|
||||||
|
const slug = idDataMap[docId]
|
||||||
|
if (!slug) return
|
||||||
|
const item = ensureItem(docId)
|
||||||
|
if (!item) return
|
||||||
|
|
||||||
|
// Apply title boost for FlexSearch results (1.5x boost for exact title matches)
|
||||||
|
let effectiveWeight = weight
|
||||||
|
if (applyTitleBoost && item.titleMatch) {
|
||||||
|
effectiveWeight *= 1.5
|
||||||
|
}
|
||||||
|
|
||||||
|
const prev = rrf.get(slug) ?? 0
|
||||||
|
rrf.set(slug, prev + effectiveWeight / (1 + rank))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
push(baseIndices, weights.base, true) // FlexSearch with title boost
|
||||||
|
push(semanticIds, weights.semantic, false) // Semantic without boost
|
||||||
|
|
||||||
|
const rankedEntries = Array.from(candidateItems.values())
|
||||||
|
.map((item) => ({ item, score: rrf.get(item.slug) ?? 0 }))
|
||||||
|
.sort((a, b) => b.score - a.score)
|
||||||
|
.slice(0, numSearchResults)
|
||||||
|
|
||||||
|
const displayEntries: SimilarityResult[] = []
|
||||||
|
for (const entry of rankedEntries) {
|
||||||
|
const similarity = resolveSimilarity(entry.item)
|
||||||
|
displayEntries.push({ item: entry.item, similarity })
|
||||||
|
}
|
||||||
|
|
||||||
|
await displayResults(displayEntries)
|
||||||
|
}
|
||||||
|
|
||||||
|
await render()
|
||||||
|
|
||||||
|
if (workingType === "tags" || !orchestrator || !semanticReady || highlightTerm.length < 2) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const showProgress = modeForRanking === "semantic"
|
||||||
|
if (showProgress) {
|
||||||
|
startSemanticProgress()
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { semantic: semRes } = await orchestrator.search(
|
||||||
|
highlightTerm,
|
||||||
|
numSearchResults * 3, // Request more chunks to ensure good document coverage
|
||||||
|
)
|
||||||
|
if (token !== searchSeq) {
|
||||||
|
if (showProgress) completeSemanticProgress()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate chunk results to document level using RRF
|
||||||
|
const { rrfScores: semRrfScores, maxScores: semMaxScores } = aggregateChunkResults(
|
||||||
|
semRes,
|
||||||
|
slugToIndex,
|
||||||
|
)
|
||||||
|
|
||||||
|
// Use RRF scores for ranking
|
||||||
|
semanticIds = Array.from(semRrfScores.entries())
|
||||||
|
.sort((a, b) => b[1] - a[1])
|
||||||
|
.slice(0, numSearchResults)
|
||||||
|
.map(([docIdx]) => docIdx)
|
||||||
|
|
||||||
|
// Use max chunk similarity for display (0-1 range)
|
||||||
|
semanticSimilarity.clear()
|
||||||
|
semMaxScores.forEach((score, docIdx) => {
|
||||||
|
semanticSimilarity.set(docIdx, score)
|
||||||
|
})
|
||||||
|
|
||||||
|
integrateIds(semanticIds)
|
||||||
|
if (showProgress) completeSemanticProgress()
|
||||||
|
} catch (err) {
|
||||||
|
console.warn("[SemanticClient] search failed:", err)
|
||||||
|
if (showProgress) completeSemanticProgress()
|
||||||
|
orchestrator.dispose()
|
||||||
|
semantic = null
|
||||||
|
semanticReady = false
|
||||||
|
semanticInitFailed = true
|
||||||
|
if (searchMode === "semantic") {
|
||||||
|
searchMode = "lexical"
|
||||||
|
updateModeUI(searchMode)
|
||||||
|
}
|
||||||
|
modeButtons.forEach((button) => {
|
||||||
|
if ((button.dataset.mode as SearchMode) === "semantic") {
|
||||||
|
button.disabled = true
|
||||||
|
button.setAttribute("aria-disabled", "true")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
await render()
|
||||||
|
}
|
||||||
|
|
||||||
|
function onType(e: HTMLElementEventMap["input"]) {
|
||||||
|
if (!searchLayout || !index) return
|
||||||
|
rawSearchTerm = (e.target as HTMLInputElement).value
|
||||||
|
const hasQuery = rawSearchTerm.trim() !== ""
|
||||||
|
searchLayout.classList.toggle("display-results", hasQuery)
|
||||||
|
const term = rawSearchTerm
|
||||||
|
const token = ++searchSeq
|
||||||
|
if (runSearchTimer !== null) {
|
||||||
|
window.clearTimeout(runSearchTimer)
|
||||||
|
runSearchTimer = null
|
||||||
|
}
|
||||||
|
if (!hasQuery) {
|
||||||
|
void runSearch("", token)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const now = performance.now()
|
||||||
|
lastInputAt = now
|
||||||
|
const delay = computeDebounceDelay(term)
|
||||||
|
const scheduledAt = lastInputAt
|
||||||
|
runSearchTimer = window.setTimeout(() => {
|
||||||
|
if (scheduledAt !== lastInputAt) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
runSearchTimer = null
|
||||||
|
void runSearch(term, token)
|
||||||
|
}, delay)
|
||||||
}
|
}
|
||||||
|
|
||||||
document.addEventListener("keydown", shortcutHandler)
|
document.addEventListener("keydown", shortcutHandler)
|
||||||
window.addCleanup(() => document.removeEventListener("keydown", shortcutHandler))
|
window.addCleanup(() => document.removeEventListener("keydown", shortcutHandler))
|
||||||
searchButton.addEventListener("click", () => showSearch("basic"))
|
const openHandler = () => showSearch("basic")
|
||||||
window.addCleanup(() => searchButton.removeEventListener("click", () => showSearch("basic")))
|
searchButton.addEventListener("click", openHandler)
|
||||||
|
window.addCleanup(() => searchButton.removeEventListener("click", openHandler))
|
||||||
searchBar.addEventListener("input", onType)
|
searchBar.addEventListener("input", onType)
|
||||||
window.addCleanup(() => searchBar.removeEventListener("input", onType))
|
window.addCleanup(() => searchBar.removeEventListener("input", onType))
|
||||||
|
window.addCleanup(() => {
|
||||||
|
if (runSearchTimer !== null) {
|
||||||
|
window.clearTimeout(runSearchTimer)
|
||||||
|
runSearchTimer = null
|
||||||
|
}
|
||||||
|
resetProgressBar()
|
||||||
|
})
|
||||||
|
|
||||||
registerEscapeHandler(container, hideSearch)
|
registerEscapeHandler(container, hideSearch)
|
||||||
await fillDocument(data)
|
await fillDocument(data)
|
||||||
@@ -506,17 +1006,17 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Fills flexsearch document with data
|
* Fills flexsearch document with data
|
||||||
* @param index index to fill
|
|
||||||
* @param data data to fill index with
|
* @param data data to fill index with
|
||||||
*/
|
*/
|
||||||
let indexPopulated = false
|
let indexPopulated = false
|
||||||
async function fillDocument(data: ContentIndex) {
|
async function fillDocument(data: ContentIndex) {
|
||||||
if (indexPopulated) return
|
if (indexPopulated) return
|
||||||
let id = 0
|
let id = 0
|
||||||
const promises: Array<Promise<unknown>> = []
|
const promises = []
|
||||||
for (const [slug, fileData] of Object.entries<ContentDetails>(data)) {
|
for (const [slug, fileData] of Object.entries<ContentDetails>(data)) {
|
||||||
promises.push(
|
promises.push(
|
||||||
index.addAsync(id++, {
|
//@ts-ignore
|
||||||
|
index.addAsync({
|
||||||
id,
|
id,
|
||||||
slug: slug as FullSlug,
|
slug: slug as FullSlug,
|
||||||
title: fileData.title,
|
title: fileData.title,
|
||||||
@@ -524,6 +1024,7 @@ async function fillDocument(data: ContentIndex) {
|
|||||||
tags: fileData.tags,
|
tags: fileData.tags,
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
|
id++
|
||||||
}
|
}
|
||||||
|
|
||||||
await Promise.all(promises)
|
await Promise.all(promises)
|
||||||
@@ -533,7 +1034,9 @@ async function fillDocument(data: ContentIndex) {
|
|||||||
document.addEventListener("nav", async (e: CustomEventMap["nav"]) => {
|
document.addEventListener("nav", async (e: CustomEventMap["nav"]) => {
|
||||||
const currentSlug = e.detail.url
|
const currentSlug = e.detail.url
|
||||||
const data = await fetchData
|
const data = await fetchData
|
||||||
const searchElement = document.getElementsByClassName("search")
|
const searchElement = document.getElementsByClassName(
|
||||||
|
"search",
|
||||||
|
) as HTMLCollectionOf<HTMLDivElement>
|
||||||
for (const element of searchElement) {
|
for (const element of searchElement) {
|
||||||
await setupSearch(element, currentSlug, data)
|
await setupSearch(element, currentSlug, data)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,163 +0,0 @@
|
|||||||
import test, { describe } from "node:test"
|
|
||||||
import assert from "node:assert"
|
|
||||||
|
|
||||||
// Inline the encoder function from search.inline.ts for testing
|
|
||||||
const encoder = (str: string): string[] => {
|
|
||||||
const tokens: string[] = []
|
|
||||||
let bufferStart = -1
|
|
||||||
let bufferEnd = -1
|
|
||||||
const lower = str.toLowerCase()
|
|
||||||
|
|
||||||
let i = 0
|
|
||||||
for (const char of lower) {
|
|
||||||
const code = char.codePointAt(0)!
|
|
||||||
|
|
||||||
const isCJK =
|
|
||||||
(code >= 0x3040 && code <= 0x309f) ||
|
|
||||||
(code >= 0x30a0 && code <= 0x30ff) ||
|
|
||||||
(code >= 0x4e00 && code <= 0x9fff) ||
|
|
||||||
(code >= 0xac00 && code <= 0xd7af) ||
|
|
||||||
(code >= 0x20000 && code <= 0x2a6df)
|
|
||||||
|
|
||||||
const isWhitespace = code === 32 || code === 9 || code === 10 || code === 13
|
|
||||||
|
|
||||||
if (isCJK) {
|
|
||||||
if (bufferStart !== -1) {
|
|
||||||
tokens.push(lower.slice(bufferStart, bufferEnd))
|
|
||||||
bufferStart = -1
|
|
||||||
}
|
|
||||||
tokens.push(char)
|
|
||||||
} else if (isWhitespace) {
|
|
||||||
if (bufferStart !== -1) {
|
|
||||||
tokens.push(lower.slice(bufferStart, bufferEnd))
|
|
||||||
bufferStart = -1
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (bufferStart === -1) bufferStart = i
|
|
||||||
bufferEnd = i + char.length
|
|
||||||
}
|
|
||||||
|
|
||||||
i += char.length
|
|
||||||
}
|
|
||||||
|
|
||||||
if (bufferStart !== -1) {
|
|
||||||
tokens.push(lower.slice(bufferStart))
|
|
||||||
}
|
|
||||||
|
|
||||||
return tokens
|
|
||||||
}
|
|
||||||
|
|
||||||
describe("search encoder", () => {
|
|
||||||
describe("English text", () => {
|
|
||||||
test("should tokenize simple English words", () => {
|
|
||||||
const result = encoder("hello world")
|
|
||||||
assert.deepStrictEqual(result, ["hello", "world"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle multiple spaces", () => {
|
|
||||||
const result = encoder("hello world")
|
|
||||||
assert.deepStrictEqual(result, ["hello", "world"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle tabs and newlines", () => {
|
|
||||||
const result = encoder("hello\tworld\ntest")
|
|
||||||
assert.deepStrictEqual(result, ["hello", "world", "test"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should lowercase all text", () => {
|
|
||||||
const result = encoder("Hello WORLD Test")
|
|
||||||
assert.deepStrictEqual(result, ["hello", "world", "test"])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe("CJK text", () => {
|
|
||||||
test("should tokenize Japanese Hiragana character by character", () => {
|
|
||||||
const result = encoder("こんにちは")
|
|
||||||
assert.deepStrictEqual(result, ["こ", "ん", "に", "ち", "は"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should tokenize Japanese Katakana character by character", () => {
|
|
||||||
const result = encoder("コントロール")
|
|
||||||
assert.deepStrictEqual(result, ["コ", "ン", "ト", "ロ", "ー", "ル"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should tokenize Japanese Kanji character by character", () => {
|
|
||||||
const result = encoder("日本語")
|
|
||||||
assert.deepStrictEqual(result, ["日", "本", "語"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should tokenize Korean Hangul character by character", () => {
|
|
||||||
const result = encoder("안녕하세요")
|
|
||||||
assert.deepStrictEqual(result, ["안", "녕", "하", "세", "요"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should tokenize Chinese characters character by character", () => {
|
|
||||||
const result = encoder("你好世界")
|
|
||||||
assert.deepStrictEqual(result, ["你", "好", "世", "界"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle mixed Hiragana/Katakana/Kanji", () => {
|
|
||||||
const result = encoder("て以来")
|
|
||||||
assert.deepStrictEqual(result, ["て", "以", "来"])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe("Mixed CJK and English", () => {
|
|
||||||
test("should handle Japanese with English words", () => {
|
|
||||||
const result = encoder("hello 世界")
|
|
||||||
assert.deepStrictEqual(result, ["hello", "世", "界"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle English with Japanese words", () => {
|
|
||||||
const result = encoder("世界 hello world")
|
|
||||||
assert.deepStrictEqual(result, ["世", "界", "hello", "world"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle complex mixed content", () => {
|
|
||||||
const result = encoder("これはtest文章です")
|
|
||||||
assert.deepStrictEqual(result, ["こ", "れ", "は", "test", "文", "章", "で", "す"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle mixed Korean and English", () => {
|
|
||||||
const result = encoder("hello 안녕 world")
|
|
||||||
assert.deepStrictEqual(result, ["hello", "안", "녕", "world"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle mixed Chinese and English", () => {
|
|
||||||
const result = encoder("你好 world")
|
|
||||||
assert.deepStrictEqual(result, ["你", "好", "world"])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe("Edge cases", () => {
|
|
||||||
test("should handle empty string", () => {
|
|
||||||
const result = encoder("")
|
|
||||||
assert.deepStrictEqual(result, [])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle only whitespace", () => {
|
|
||||||
const result = encoder(" \t\n ")
|
|
||||||
assert.deepStrictEqual(result, [])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle single character", () => {
|
|
||||||
const result = encoder("a")
|
|
||||||
assert.deepStrictEqual(result, ["a"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle single CJK character", () => {
|
|
||||||
const result = encoder("あ")
|
|
||||||
assert.deepStrictEqual(result, ["あ"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle CJK with trailing whitespace", () => {
|
|
||||||
const result = encoder("日本語 ")
|
|
||||||
assert.deepStrictEqual(result, ["日", "本", "語"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("should handle English with trailing whitespace", () => {
|
|
||||||
const result = encoder("hello ")
|
|
||||||
assert.deepStrictEqual(result, ["hello"])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
182
quartz/components/scripts/semantic.inline.ts
Normal file
182
quartz/components/scripts/semantic.inline.ts
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
export type SemanticResult = { id: number; score: number }
|
||||||
|
|
||||||
|
type ProgressMessage = {
|
||||||
|
type: "progress"
|
||||||
|
loadedRows: number
|
||||||
|
totalRows: number
|
||||||
|
}
|
||||||
|
|
||||||
|
type ReadyMessage = { type: "ready" }
|
||||||
|
|
||||||
|
type ResultMessage = {
|
||||||
|
type: "search-result"
|
||||||
|
seq: number
|
||||||
|
semantic: SemanticResult[]
|
||||||
|
}
|
||||||
|
|
||||||
|
type ErrorMessage = { type: "error"; seq?: number; message: string }
|
||||||
|
|
||||||
|
type SearchPayload = {
|
||||||
|
semantic: SemanticResult[]
|
||||||
|
}
|
||||||
|
|
||||||
|
type PendingResolver = {
|
||||||
|
resolve: (payload: SearchPayload) => void
|
||||||
|
reject: (err: Error) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export class SemanticClient {
|
||||||
|
private ready: Promise<void>
|
||||||
|
private resolveReady!: () => void
|
||||||
|
private worker: Worker | null = null
|
||||||
|
private pending = new Map<number, PendingResolver>()
|
||||||
|
private seq = 0
|
||||||
|
private disposed = false
|
||||||
|
private readySettled = false
|
||||||
|
private configured = false
|
||||||
|
private lastError: Error | null = null
|
||||||
|
|
||||||
|
constructor(private cfg?: any) {
|
||||||
|
this.ready = new Promise((resolve) => {
|
||||||
|
this.resolveReady = () => {
|
||||||
|
if (this.readySettled) return
|
||||||
|
this.readySettled = true
|
||||||
|
resolve()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
if (this.cfg?.enable === false) {
|
||||||
|
this.lastError = new Error("semantic search disabled by configuration")
|
||||||
|
this.resolveReady()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.boot()
|
||||||
|
}
|
||||||
|
|
||||||
|
private boot() {
|
||||||
|
try {
|
||||||
|
this.worker = new Worker("/semantic.worker.js", { type: "module" })
|
||||||
|
} catch (err) {
|
||||||
|
this.handleFatal(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
this.setupWorker()
|
||||||
|
this.startInit()
|
||||||
|
}
|
||||||
|
|
||||||
|
private setupWorker() {
|
||||||
|
if (!this.worker) return
|
||||||
|
this.worker.onmessage = (
|
||||||
|
event: MessageEvent<ProgressMessage | ReadyMessage | ResultMessage | ErrorMessage>,
|
||||||
|
) => {
|
||||||
|
const msg = event.data
|
||||||
|
if (msg.type === "progress") {
|
||||||
|
// Progress updates during initialization - can be logged if needed
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (msg.type === "ready") {
|
||||||
|
this.configured = true
|
||||||
|
this.lastError = null
|
||||||
|
this.resolveReady()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (msg.type === "search-result") {
|
||||||
|
const pending = this.pending.get(msg.seq)
|
||||||
|
if (pending) {
|
||||||
|
this.pending.delete(msg.seq)
|
||||||
|
pending.resolve({ semantic: msg.semantic ?? [] })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (msg.type === "error") {
|
||||||
|
if (typeof msg.seq === "number") {
|
||||||
|
const pending = this.pending.get(msg.seq)
|
||||||
|
if (pending) {
|
||||||
|
this.pending.delete(msg.seq)
|
||||||
|
pending.reject(new Error(msg.message))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.handleFatal(msg.message)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private startInit() {
|
||||||
|
if (!this.worker) return
|
||||||
|
const manifestUrl =
|
||||||
|
typeof this.cfg?.manifestUrl === "string" && this.cfg.manifestUrl.length > 0
|
||||||
|
? this.cfg.manifestUrl
|
||||||
|
: "/embeddings/manifest.json"
|
||||||
|
const disableCache = Boolean(this.cfg?.disableCache)
|
||||||
|
const baseUrl =
|
||||||
|
typeof this.cfg?.manifestBaseUrl === "string" ? this.cfg.manifestBaseUrl : undefined
|
||||||
|
this.worker.postMessage({
|
||||||
|
type: "init",
|
||||||
|
cfg: this.cfg,
|
||||||
|
manifestUrl,
|
||||||
|
baseUrl,
|
||||||
|
disableCache,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private rejectAll(err: Error, fatal = false) {
|
||||||
|
for (const [id, pending] of this.pending.entries()) {
|
||||||
|
pending.reject(err)
|
||||||
|
this.pending.delete(id)
|
||||||
|
}
|
||||||
|
if (fatal) {
|
||||||
|
this.lastError = err
|
||||||
|
this.configured = false
|
||||||
|
if (!this.readySettled) {
|
||||||
|
this.resolveReady()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleFatal(err: unknown) {
|
||||||
|
const error = err instanceof Error ? err : new Error(String(err))
|
||||||
|
console.error("[SemanticClient] initialization failure:", error)
|
||||||
|
this.rejectAll(error, true)
|
||||||
|
if (this.worker) {
|
||||||
|
this.worker.postMessage({ type: "reset" })
|
||||||
|
this.worker.terminate()
|
||||||
|
this.worker = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async ensureReady() {
|
||||||
|
await this.ready
|
||||||
|
if (!this.configured) {
|
||||||
|
throw this.lastError ?? new Error("semantic search unavailable")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async search(text: string, k: number): Promise<SearchPayload> {
|
||||||
|
if (this.disposed) {
|
||||||
|
throw new Error("semantic client has been disposed")
|
||||||
|
}
|
||||||
|
await this.ensureReady()
|
||||||
|
if (!this.worker || !this.configured) {
|
||||||
|
throw this.lastError ?? new Error("worker unavailable")
|
||||||
|
}
|
||||||
|
return new Promise<SearchPayload>((resolve, reject) => {
|
||||||
|
const seq = ++this.seq
|
||||||
|
this.pending.set(seq, { resolve, reject })
|
||||||
|
this.worker?.postMessage({ type: "search", text, k, seq })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
dispose() {
|
||||||
|
if (this.disposed) return
|
||||||
|
this.disposed = true
|
||||||
|
this.rejectAll(new Error("semantic client disposed"))
|
||||||
|
if (this.worker) {
|
||||||
|
this.worker.postMessage({ type: "reset" })
|
||||||
|
this.worker.terminate()
|
||||||
|
}
|
||||||
|
this.worker = null
|
||||||
|
this.configured = false
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -115,9 +115,9 @@ async function _navigate(url: URL, isBack: boolean = false) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// now, patch head, re-executing scripts
|
// now, patch head, re-executing scripts
|
||||||
const elementsToRemove = document.head.querySelectorAll(":not([data-persist])")
|
const elementsToRemove = document.head.querySelectorAll(":not([spa-preserve])")
|
||||||
elementsToRemove.forEach((el) => el.remove())
|
elementsToRemove.forEach((el) => el.remove())
|
||||||
const elementsToAdd = html.head.querySelectorAll(":not([data-persist])")
|
const elementsToAdd = html.head.querySelectorAll(":not([spa-preserve])")
|
||||||
elementsToAdd.forEach((el) => document.head.appendChild(el))
|
elementsToAdd.forEach((el) => document.head.appendChild(el))
|
||||||
|
|
||||||
// delay setting the url until now
|
// delay setting the url until now
|
||||||
|
|||||||
@@ -1,299 +0,0 @@
|
|||||||
.base-content {
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-view {
|
|
||||||
width: 100%;
|
|
||||||
overflow-x: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-table {
|
|
||||||
width: 100%;
|
|
||||||
border-collapse: collapse;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
|
|
||||||
th,
|
|
||||||
td {
|
|
||||||
padding: 0.5rem 0.75rem;
|
|
||||||
text-align: left;
|
|
||||||
border-bottom: 1px solid var(--lightgray);
|
|
||||||
}
|
|
||||||
|
|
||||||
th {
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--darkgray);
|
|
||||||
background: var(--light);
|
|
||||||
position: sticky;
|
|
||||||
top: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
tbody tr:hover {
|
|
||||||
background: var(--light);
|
|
||||||
}
|
|
||||||
|
|
||||||
a.internal {
|
|
||||||
color: var(--secondary);
|
|
||||||
text-decoration: none;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-group-header td {
|
|
||||||
font-weight: 600;
|
|
||||||
background: var(--light);
|
|
||||||
color: var(--dark);
|
|
||||||
padding-top: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-summary-row {
|
|
||||||
background: var(--light);
|
|
||||||
font-weight: 500;
|
|
||||||
|
|
||||||
.base-summary-cell {
|
|
||||||
border-top: 2px solid var(--lightgray);
|
|
||||||
color: var(--darkgray);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-checkbox {
|
|
||||||
pointer-events: none;
|
|
||||||
width: 1rem;
|
|
||||||
height: 1rem;
|
|
||||||
accent-color: var(--secondary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-list {
|
|
||||||
list-style: none;
|
|
||||||
padding: 0;
|
|
||||||
margin: 0;
|
|
||||||
|
|
||||||
li {
|
|
||||||
padding: 0.375rem 0;
|
|
||||||
border-bottom: 1px solid var(--lightgray);
|
|
||||||
|
|
||||||
&:last-child {
|
|
||||||
border-bottom: none;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
a.internal {
|
|
||||||
color: var(--secondary);
|
|
||||||
text-decoration: none;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-list-container {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 1.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-list-group {
|
|
||||||
.base-list-group-header {
|
|
||||||
font-size: 1rem;
|
|
||||||
font-weight: 600;
|
|
||||||
margin-bottom: 0.5rem;
|
|
||||||
color: var(--dark);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-list-nested {
|
|
||||||
list-style: none;
|
|
||||||
padding-left: 1rem;
|
|
||||||
margin-top: 0.25rem;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--darkgray);
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-list-meta-label {
|
|
||||||
font-weight: 500;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-grid {
|
|
||||||
--base-card-min: 200px;
|
|
||||||
--base-card-aspect: 1.4;
|
|
||||||
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: repeat(auto-fill, minmax(var(--base-card-min), 1fr));
|
|
||||||
gap: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-container {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 1.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-group {
|
|
||||||
.base-card-group-header {
|
|
||||||
font-size: 1rem;
|
|
||||||
font-weight: 600;
|
|
||||||
margin-bottom: 0.75rem;
|
|
||||||
color: var(--dark);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
border: 1px solid var(--lightgray);
|
|
||||||
border-radius: 8px;
|
|
||||||
overflow: hidden;
|
|
||||||
background: var(--light);
|
|
||||||
transition: box-shadow 0.15s ease;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-image-link {
|
|
||||||
display: block;
|
|
||||||
aspect-ratio: var(--base-card-aspect);
|
|
||||||
background-position: center;
|
|
||||||
background-repeat: no-repeat;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-content {
|
|
||||||
padding: 0.75rem;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-title-link {
|
|
||||||
text-decoration: none;
|
|
||||||
color: inherit;
|
|
||||||
|
|
||||||
&:hover .base-card-title {
|
|
||||||
color: var(--secondary);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-title {
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
font-weight: 600;
|
|
||||||
margin: 0;
|
|
||||||
line-height: 1.3;
|
|
||||||
transition: color 0.15s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-meta {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 0.25rem;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: var(--darkgray);
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-meta-item {
|
|
||||||
display: flex;
|
|
||||||
gap: 0.25rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-card-meta-label {
|
|
||||||
font-weight: 500;
|
|
||||||
|
|
||||||
&::after {
|
|
||||||
content: ":";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-calendar-container {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 1.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-calendar-group {
|
|
||||||
.base-calendar-group-header {
|
|
||||||
font-size: 0.9375rem;
|
|
||||||
font-weight: 600;
|
|
||||||
margin-bottom: 0.5rem;
|
|
||||||
color: var(--dark);
|
|
||||||
font-variant-numeric: tabular-nums;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-map {
|
|
||||||
width: 100%;
|
|
||||||
min-height: 400px;
|
|
||||||
background: var(--light);
|
|
||||||
border: 1px solid var(--lightgray);
|
|
||||||
border-radius: 8px;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
color: var(--darkgray);
|
|
||||||
|
|
||||||
&::before {
|
|
||||||
content: "Map view requires client-side JavaScript";
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics {
|
|
||||||
background: #fff3cd;
|
|
||||||
border: 1px solid #ffc107;
|
|
||||||
border-radius: 4px;
|
|
||||||
padding: 1rem;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-title {
|
|
||||||
font-weight: 600;
|
|
||||||
margin-bottom: 0.5rem;
|
|
||||||
color: #856404;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-meta {
|
|
||||||
display: flex;
|
|
||||||
gap: 0.5rem;
|
|
||||||
margin-bottom: 0.75rem;
|
|
||||||
color: #856404;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-page {
|
|
||||||
font-family: var(--codeFont);
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-list {
|
|
||||||
list-style: none;
|
|
||||||
padding: 0;
|
|
||||||
margin: 0;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 0.5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-item {
|
|
||||||
background: white;
|
|
||||||
padding: 0.5rem;
|
|
||||||
border-radius: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-label {
|
|
||||||
font-weight: 500;
|
|
||||||
color: #856404;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-message {
|
|
||||||
color: #664d03;
|
|
||||||
margin: 0.25rem 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.base-diagnostics-source {
|
|
||||||
display: block;
|
|
||||||
font-size: 0.8125rem;
|
|
||||||
color: #6c757d;
|
|
||||||
white-space: pre-wrap;
|
|
||||||
word-break: break-all;
|
|
||||||
}
|
|
||||||
@@ -1,275 +0,0 @@
|
|||||||
@use "../../styles/variables.scss" as *;
|
|
||||||
|
|
||||||
.bases-toolbar {
|
|
||||||
position: relative;
|
|
||||||
display: inline-block;
|
|
||||||
margin: 1rem 0;
|
|
||||||
font-family: var(--bodyFont);
|
|
||||||
|
|
||||||
.bases-toolbar-item {
|
|
||||||
display: inline-block;
|
|
||||||
position: relative;
|
|
||||||
|
|
||||||
&.bases-toolbar-views-menu {
|
|
||||||
.text-icon-button {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.375rem;
|
|
||||||
padding: 0.375rem 0.75rem;
|
|
||||||
background: var(--light);
|
|
||||||
border: 1px solid var(--lightgray);
|
|
||||||
border-radius: 6px;
|
|
||||||
color: var(--darkgray);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
font-weight: 500;
|
|
||||||
cursor: pointer;
|
|
||||||
transition: all 0.15s ease;
|
|
||||||
user-select: none;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
background: var(--highlight);
|
|
||||||
border-color: var(--gray);
|
|
||||||
}
|
|
||||||
|
|
||||||
&.has-active-menu {
|
|
||||||
border-color: var(--secondary);
|
|
||||||
background: var(--highlight);
|
|
||||||
}
|
|
||||||
|
|
||||||
.text-button-icon {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
color: var(--gray);
|
|
||||||
flex-shrink: 0;
|
|
||||||
|
|
||||||
svg {
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
&.mod-aux {
|
|
||||||
opacity: 0.7;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.text-button-label {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--dark);
|
|
||||||
font-weight: 500;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.menu-scroll {
|
|
||||||
position: absolute;
|
|
||||||
top: calc(100% + 0.5rem);
|
|
||||||
left: 0;
|
|
||||||
z-index: 100;
|
|
||||||
max-height: 400px;
|
|
||||||
background: var(--light);
|
|
||||||
border: 1px solid var(--lightgray);
|
|
||||||
border-radius: 8px;
|
|
||||||
box-shadow:
|
|
||||||
0 4px 6px -1px rgb(0 0 0 / 0.1),
|
|
||||||
0 2px 4px -2px rgb(0 0 0 / 0.1);
|
|
||||||
overflow: hidden;
|
|
||||||
min-width: 280px;
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
&:has(.text-icon-button.has-active-menu) .menu-scroll {
|
|
||||||
display: block;
|
|
||||||
}
|
|
||||||
|
|
||||||
.bases-toolbar-menu-container {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
max-height: 400px;
|
|
||||||
|
|
||||||
.search-input-container {
|
|
||||||
position: relative;
|
|
||||||
padding: 0.5rem;
|
|
||||||
border-bottom: 1px solid var(--lightgray);
|
|
||||||
|
|
||||||
input[type="search"] {
|
|
||||||
width: 100%;
|
|
||||||
padding: 0.375rem 0.75rem;
|
|
||||||
padding-right: 2rem;
|
|
||||||
background: var(--light);
|
|
||||||
border: 1px solid var(--secondary);
|
|
||||||
border-radius: 6px;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--dark);
|
|
||||||
outline: none;
|
|
||||||
transition: box-shadow 0.15s ease;
|
|
||||||
font-family: var(--bodyFont);
|
|
||||||
|
|
||||||
&::placeholder {
|
|
||||||
color: var(--gray);
|
|
||||||
opacity: 0.7;
|
|
||||||
}
|
|
||||||
|
|
||||||
&:focus {
|
|
||||||
box-shadow: 0 0 0 2px var(--highlight);
|
|
||||||
}
|
|
||||||
|
|
||||||
&::-webkit-search-cancel-button {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.search-input-clear-button {
|
|
||||||
position: absolute;
|
|
||||||
right: 1rem;
|
|
||||||
top: 50%;
|
|
||||||
transform: translateY(-50%);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
width: 20px;
|
|
||||||
height: 20px;
|
|
||||||
cursor: pointer;
|
|
||||||
opacity: 0.5;
|
|
||||||
transition: opacity 0.15s ease;
|
|
||||||
color: var(--gray);
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
opacity: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
&[hidden] {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
svg {
|
|
||||||
width: 14px;
|
|
||||||
height: 14px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.bases-toolbar-items {
|
|
||||||
overflow-y: auto;
|
|
||||||
max-height: 340px;
|
|
||||||
|
|
||||||
&::-webkit-scrollbar {
|
|
||||||
width: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
&::-webkit-scrollbar-track {
|
|
||||||
background: transparent;
|
|
||||||
}
|
|
||||||
|
|
||||||
&::-webkit-scrollbar-thumb {
|
|
||||||
background: var(--lightgray);
|
|
||||||
border-radius: 4px;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
background: var(--gray);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.suggestion-group {
|
|
||||||
&[data-group="views"] {
|
|
||||||
padding: 0.25rem 0;
|
|
||||||
text-transform: lowercase;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.suggestion-item {
|
|
||||||
display: block;
|
|
||||||
text-decoration: none;
|
|
||||||
color: inherit;
|
|
||||||
cursor: pointer;
|
|
||||||
|
|
||||||
&.bases-toolbar-menu-item {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: space-between;
|
|
||||||
padding: 0.5rem 0.75rem;
|
|
||||||
margin: 0 0.25rem;
|
|
||||||
border-radius: 4px;
|
|
||||||
transition: background 0.15s ease;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
background: var(--lightgray);
|
|
||||||
}
|
|
||||||
|
|
||||||
&.mod-active {
|
|
||||||
font-weight: $semiBoldWeight;
|
|
||||||
}
|
|
||||||
|
|
||||||
&.is-selected {
|
|
||||||
.bases-toolbar-menu-item-info {
|
|
||||||
.bases-toolbar-menu-item-name {
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--secondary);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.bases-toolbar-menu-item-info {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.5rem;
|
|
||||||
flex: 1;
|
|
||||||
|
|
||||||
.bases-toolbar-menu-item-info-icon {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
color: var(--gray);
|
|
||||||
flex-shrink: 0;
|
|
||||||
|
|
||||||
svg {
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.bases-toolbar-menu-item-name {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--dark);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.clickable-icon.bases-toolbar-menu-item-icon {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
opacity: 0;
|
|
||||||
transition: opacity 0.15s ease;
|
|
||||||
color: var(--gray);
|
|
||||||
flex-shrink: 0;
|
|
||||||
|
|
||||||
svg {
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
&:hover .clickable-icon.bases-toolbar-menu-item-icon {
|
|
||||||
opacity: 0.5;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@media all and ($mobile) {
|
|
||||||
.bases-toolbar {
|
|
||||||
.menu-scroll {
|
|
||||||
min-width: 240px;
|
|
||||||
left: auto;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -5,7 +5,7 @@
|
|||||||
background: none;
|
background: none;
|
||||||
border: none;
|
border: none;
|
||||||
width: 20px;
|
width: 20px;
|
||||||
height: 32px;
|
height: 20px;
|
||||||
margin: 0;
|
margin: 0;
|
||||||
text-align: inherit;
|
text-align: inherit;
|
||||||
flex-shrink: 0;
|
flex-shrink: 0;
|
||||||
|
|||||||
@@ -6,7 +6,6 @@
|
|||||||
& > :not(.sidebar.left:has(.explorer)) {
|
& > :not(.sidebar.left:has(.explorer)) {
|
||||||
transition: transform 300ms ease-in-out;
|
transition: transform 300ms ease-in-out;
|
||||||
}
|
}
|
||||||
|
|
||||||
&.lock-scroll > :not(.sidebar.left:has(.explorer)) {
|
&.lock-scroll > :not(.sidebar.left:has(.explorer)) {
|
||||||
transform: translateX(100dvw);
|
transform: translateX(100dvw);
|
||||||
transition: transform 300ms ease-in-out;
|
transition: transform 300ms ease-in-out;
|
||||||
@@ -34,10 +33,8 @@
|
|||||||
|
|
||||||
min-height: 1.2rem;
|
min-height: 1.2rem;
|
||||||
flex: 0 1 auto;
|
flex: 0 1 auto;
|
||||||
|
|
||||||
&.collapsed {
|
&.collapsed {
|
||||||
flex: 0 1 1.2rem;
|
flex: 0 1 1.2rem;
|
||||||
|
|
||||||
& .fold {
|
& .fold {
|
||||||
transform: rotateZ(-90deg);
|
transform: rotateZ(-90deg);
|
||||||
}
|
}
|
||||||
@@ -121,10 +118,7 @@ button.desktop-explorer {
|
|||||||
list-style: none;
|
list-style: none;
|
||||||
margin: 0;
|
margin: 0;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
|
overscroll-behavior: contain;
|
||||||
&.explorer-ul {
|
|
||||||
overscroll-behavior: contain;
|
|
||||||
}
|
|
||||||
|
|
||||||
& li > a {
|
& li > a {
|
||||||
color: var(--dark);
|
color: var(--dark);
|
||||||
@@ -139,16 +133,12 @@ button.desktop-explorer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
.folder-outer {
|
.folder-outer {
|
||||||
visibility: collapse;
|
|
||||||
display: grid;
|
display: grid;
|
||||||
grid-template-rows: 0fr;
|
grid-template-rows: 0fr;
|
||||||
transition-property: grid-template-rows, visibility;
|
transition: grid-template-rows 0.3s ease-in-out;
|
||||||
transition-duration: 0.3s;
|
|
||||||
transition-timing-function: ease-in-out;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.folder-outer.open {
|
.folder-outer.open {
|
||||||
visibility: visible;
|
|
||||||
grid-template-rows: 1fr;
|
grid-template-rows: 1fr;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -275,8 +265,6 @@ li:has(> .folder-outer:not(.open)) > .folder-container > svg {
|
|||||||
|
|
||||||
.mobile-no-scroll {
|
.mobile-no-scroll {
|
||||||
@media all and ($mobile) {
|
@media all and ($mobile) {
|
||||||
.explorer-content > .explorer-ul {
|
overscroll-behavior: none;
|
||||||
overscroll-behavior: contain;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -65,6 +65,7 @@ pre {
|
|||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
|
|
||||||
& > .mermaid-content {
|
& > .mermaid-content {
|
||||||
|
padding: 2rem;
|
||||||
position: relative;
|
position: relative;
|
||||||
transform-origin: 0 0;
|
transform-origin: 0 0;
|
||||||
transition: transform 0.1s ease;
|
transition: transform 0.1s ease;
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
background: none;
|
background: none;
|
||||||
border: none;
|
border: none;
|
||||||
width: 20px;
|
width: 20px;
|
||||||
height: 32px;
|
height: 20px;
|
||||||
margin: 0;
|
margin: 0;
|
||||||
text-align: inherit;
|
text-align: inherit;
|
||||||
flex-shrink: 0;
|
flex-shrink: 0;
|
||||||
|
|||||||
@@ -25,7 +25,6 @@
|
|||||||
& > p {
|
& > p {
|
||||||
display: inline;
|
display: inline;
|
||||||
color: var(--gray);
|
color: var(--gray);
|
||||||
text-wrap: unset;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
& svg {
|
& svg {
|
||||||
@@ -78,16 +77,97 @@
|
|||||||
margin-bottom: 2em;
|
margin-bottom: 2em;
|
||||||
}
|
}
|
||||||
|
|
||||||
& > input {
|
& > .input-container {
|
||||||
|
align-items: center;
|
||||||
|
gap: 0.5rem;
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
position: relative;
|
||||||
box-sizing: border-box;
|
box-sizing: border-box;
|
||||||
padding: 0.5em 1em;
|
|
||||||
font-family: var(--bodyFont);
|
|
||||||
color: var(--dark);
|
|
||||||
font-size: 1.1em;
|
|
||||||
border: 1px solid var(--lightgray);
|
|
||||||
|
|
||||||
&:focus {
|
.search-bar {
|
||||||
outline: none;
|
flex: 1 1 auto;
|
||||||
|
min-width: 0;
|
||||||
|
box-sizing: border-box;
|
||||||
|
padding: 0.5em 1em;
|
||||||
|
font-family: var(--bodyFont);
|
||||||
|
color: var(--dark);
|
||||||
|
font-size: 1.1em;
|
||||||
|
border: none;
|
||||||
|
background: transparent;
|
||||||
|
|
||||||
|
&:focus {
|
||||||
|
outline: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.semantic-search-progress {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 0;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
height: 2px;
|
||||||
|
background-color: var(--secondary);
|
||||||
|
width: 0;
|
||||||
|
opacity: 0;
|
||||||
|
transition:
|
||||||
|
width 0.3s ease,
|
||||||
|
opacity 0.2s ease;
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-mode-toggle {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
border-radius: 9999px;
|
||||||
|
height: 1.4rem;
|
||||||
|
background-color: color-mix(in srgb, var(--darkgray) 12%, transparent);
|
||||||
|
margin-right: 1rem;
|
||||||
|
|
||||||
|
.mode-option {
|
||||||
|
border: none;
|
||||||
|
background: transparent;
|
||||||
|
font: inherit;
|
||||||
|
color: var(--gray);
|
||||||
|
border-radius: 9999px;
|
||||||
|
cursor: pointer;
|
||||||
|
transition:
|
||||||
|
background-color 0.2s ease,
|
||||||
|
color 0.2s ease;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
width: 1.5rem;
|
||||||
|
height: 1.5rem;
|
||||||
|
position: relative;
|
||||||
|
|
||||||
|
&:focus-visible {
|
||||||
|
outline: 2px solid var(--tertiary);
|
||||||
|
outline-offset: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
&.active {
|
||||||
|
background-color: var(--secondary);
|
||||||
|
color: var(--light);
|
||||||
|
}
|
||||||
|
|
||||||
|
svg {
|
||||||
|
width: 18px;
|
||||||
|
height: 18px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sr-only {
|
||||||
|
position: absolute;
|
||||||
|
width: 1px;
|
||||||
|
height: 1px;
|
||||||
|
padding: 0;
|
||||||
|
margin: -1px;
|
||||||
|
overflow: hidden;
|
||||||
|
clip: rect(0, 0, 0, 0);
|
||||||
|
white-space: nowrap;
|
||||||
|
border: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
542
quartz/embed_build.py
Normal file
542
quartz/embed_build.py
Normal file
@@ -0,0 +1,542 @@
|
|||||||
|
# /// script
|
||||||
|
# requires-python = ">=3.11"
|
||||||
|
# dependencies = [
|
||||||
|
# "langchain-text-splitters",
|
||||||
|
# "numpy",
|
||||||
|
# "openai",
|
||||||
|
# "sentence-transformers",
|
||||||
|
# "tiktoken",
|
||||||
|
# ]
|
||||||
|
# ///
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os, json, argparse, hashlib, math, random, logging
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from functools import lru_cache
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
|
||||||
|
import tiktoken, numpy as np
|
||||||
|
|
||||||
|
from openai import OpenAI
|
||||||
|
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
DEFAULT_VLLM_URL = os.environ.get("VLLM_URL") or os.environ.get("VLLM_EMBED_URL") or "http://127.0.0.1:8000/v1"
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_vllm_base_url(url: str) -> str:
|
||||||
|
if not url:
|
||||||
|
raise ValueError("vLLM URL must be non-empty")
|
||||||
|
|
||||||
|
trimmed = url.rstrip("/")
|
||||||
|
if trimmed.endswith("/v1/embeddings"):
|
||||||
|
trimmed = trimmed[: -len("/embeddings")]
|
||||||
|
elif trimmed.endswith("/embeddings"):
|
||||||
|
trimmed = trimmed[: trimmed.rfind("/")]
|
||||||
|
|
||||||
|
if not trimmed.endswith("/v1"):
|
||||||
|
trimmed = f"{trimmed}/v1"
|
||||||
|
|
||||||
|
return trimmed
|
||||||
|
|
||||||
|
|
||||||
|
def load_jsonl(fp: str) -> Iterable[dict]:
|
||||||
|
with open(fp, "r", encoding="utf-8") as f:
|
||||||
|
for line in f:
|
||||||
|
line = line.strip()
|
||||||
|
if not line:
|
||||||
|
continue
|
||||||
|
yield json.loads(line)
|
||||||
|
|
||||||
|
|
||||||
|
def l2_normalize_rows(x: np.ndarray) -> np.ndarray:
|
||||||
|
# x: [N, D]
|
||||||
|
norms = np.linalg.norm(x, ord=2, axis=1, keepdims=True)
|
||||||
|
norms[norms == 0] = 1.0
|
||||||
|
return x / norms
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=1)
|
||||||
|
def get_tiktoken_encoder():
|
||||||
|
# Get the o200k_base tokenizer (GPT-4o) with caching
|
||||||
|
# change this if you want something else.
|
||||||
|
return tiktoken.get_encoding("o200k_base")
|
||||||
|
|
||||||
|
|
||||||
|
def count_tokens(text: str) -> int:
|
||||||
|
# Count tokens using o200k_base encoding
|
||||||
|
encoder = get_tiktoken_encoder()
|
||||||
|
return len(encoder.encode(text))
|
||||||
|
|
||||||
|
|
||||||
|
def get_text_splitter(chunk_size: int, overlap: int):
|
||||||
|
encoder = get_tiktoken_encoder()
|
||||||
|
return RecursiveCharacterTextSplitter(
|
||||||
|
chunk_size=chunk_size * 4, # character approximation
|
||||||
|
chunk_overlap=overlap * 4,
|
||||||
|
separators=["\n\n", "\n", ". ", " ", ""],
|
||||||
|
length_function=lambda t: len(encoder.encode(t)),
|
||||||
|
is_separator_regex=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def chunk_document(
|
||||||
|
doc: dict, max_tokens: int = 512, overlap_tokens: int = 128, min_chunk_size: int = 100
|
||||||
|
) -> list[dict]:
|
||||||
|
"""
|
||||||
|
Chunk a document if it exceeds max_tokens
|
||||||
|
|
||||||
|
Args:
|
||||||
|
doc: {'slug': str, 'title': str, 'text': str}
|
||||||
|
max_tokens: Maximum tokens per chunk
|
||||||
|
overlap_tokens: Overlap between chunks
|
||||||
|
min_chunk_size: Minimum chunk size (avoid tiny chunks)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of chunk dicts with metadata
|
||||||
|
"""
|
||||||
|
text = doc["text"]
|
||||||
|
token_count = count_tokens(text)
|
||||||
|
|
||||||
|
# No chunking needed
|
||||||
|
if token_count <= max_tokens:
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"slug": doc["slug"],
|
||||||
|
"title": doc.get("title", doc["slug"]),
|
||||||
|
"text": text,
|
||||||
|
"chunk_id": 0,
|
||||||
|
"parent_slug": doc["slug"],
|
||||||
|
"is_chunked": False,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
# Apply chunking
|
||||||
|
splitter = get_text_splitter(max_tokens, overlap_tokens)
|
||||||
|
raw_chunks = splitter.split_text(text)
|
||||||
|
|
||||||
|
# Filter out tiny chunks
|
||||||
|
valid_chunks = [c for c in raw_chunks if count_tokens(c) >= min_chunk_size]
|
||||||
|
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"slug": f"{doc['slug']}#chunk{i}",
|
||||||
|
"title": doc.get("title", doc["slug"]),
|
||||||
|
"text": chunk,
|
||||||
|
"chunk_id": i,
|
||||||
|
"parent_slug": doc["slug"],
|
||||||
|
"is_chunked": True,
|
||||||
|
}
|
||||||
|
for i, chunk in enumerate(valid_chunks)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def write_shards(vectors: np.ndarray, shard_size: int, dtype: str, out_dir: Path) -> list[dict]:
|
||||||
|
out_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
rows, dims = vectors.shape
|
||||||
|
shards_meta: list[dict] = []
|
||||||
|
np_dtype = np.float16 if dtype == "fp16" else np.float32
|
||||||
|
bytes_per_value = np.dtype(np_dtype).itemsize
|
||||||
|
row_offset = 0
|
||||||
|
for si, start in enumerate(range(0, rows, shard_size)):
|
||||||
|
end = min(start + shard_size, rows)
|
||||||
|
shard = vectors[start:end] # [n, dims]
|
||||||
|
bin_path = out_dir / f"vectors-{si:03d}.bin"
|
||||||
|
payload = shard.astype(np_dtype, copy=False).tobytes(order="C")
|
||||||
|
digest = hashlib.sha256(payload).hexdigest()
|
||||||
|
with open(bin_path, "wb") as f:
|
||||||
|
f.write(payload)
|
||||||
|
shard_rows = int(shard.shape[0])
|
||||||
|
shards_meta.append(
|
||||||
|
{
|
||||||
|
"path": f"/embeddings/{bin_path.name}",
|
||||||
|
"rows": shard_rows,
|
||||||
|
"rowOffset": row_offset,
|
||||||
|
"byteLength": len(payload),
|
||||||
|
"sha256": digest,
|
||||||
|
"byteStride": dims * bytes_per_value,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
row_offset += shard_rows
|
||||||
|
return shards_meta
|
||||||
|
|
||||||
|
|
||||||
|
def write_hnsw_graph(levels: list[list[list[int]]], rows: int, out_path: Path) -> tuple[list[dict], str]:
|
||||||
|
out_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
offset = 0
|
||||||
|
meta: list[dict] = []
|
||||||
|
digest = hashlib.sha256()
|
||||||
|
with open(out_path, "wb") as f:
|
||||||
|
for lvl in levels:
|
||||||
|
indptr = np.zeros(rows + 1, dtype=np.uint32)
|
||||||
|
edge_accum: list[int] = []
|
||||||
|
for idx in range(rows):
|
||||||
|
neighbors = lvl[idx] if idx < len(lvl) else []
|
||||||
|
indptr[idx + 1] = indptr[idx] + len(neighbors)
|
||||||
|
edge_accum.extend(neighbors)
|
||||||
|
indptr_bytes = indptr.tobytes(order="C")
|
||||||
|
indptr_offset = offset
|
||||||
|
f.write(indptr_bytes)
|
||||||
|
digest.update(indptr_bytes)
|
||||||
|
offset += len(indptr_bytes)
|
||||||
|
|
||||||
|
if edge_accum:
|
||||||
|
indices = np.asarray(edge_accum, dtype=np.uint32)
|
||||||
|
indices_bytes = indices.tobytes(order="C")
|
||||||
|
else:
|
||||||
|
indices = np.zeros(0, dtype=np.uint32)
|
||||||
|
indices_bytes = indices.tobytes(order="C")
|
||||||
|
indices_offset = offset
|
||||||
|
f.write(indices_bytes)
|
||||||
|
digest.update(indices_bytes)
|
||||||
|
offset += len(indices_bytes)
|
||||||
|
|
||||||
|
meta.append(
|
||||||
|
{
|
||||||
|
"level": len(meta),
|
||||||
|
"indptr": {
|
||||||
|
"offset": indptr_offset,
|
||||||
|
"elements": int(indptr.shape[0]),
|
||||||
|
"byteLength": len(indptr_bytes),
|
||||||
|
},
|
||||||
|
"indices": {
|
||||||
|
"offset": indices_offset,
|
||||||
|
"elements": int(indices.shape[0]),
|
||||||
|
"byteLength": len(indices_bytes),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return meta, digest.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def embed_vllm(
|
||||||
|
texts: list[str],
|
||||||
|
model_id: str,
|
||||||
|
vllm_url: str,
|
||||||
|
batch_size: int = 64,
|
||||||
|
concurrency: int = 8,
|
||||||
|
) -> np.ndarray:
|
||||||
|
base_url = resolve_vllm_base_url(vllm_url)
|
||||||
|
api_key = os.environ.get("VLLM_API_KEY") or os.environ.get("OPENAI_API_KEY") or "not-set"
|
||||||
|
client = OpenAI(base_url=base_url, api_key=api_key, timeout=300)
|
||||||
|
|
||||||
|
def list_available_models() -> list[str]:
|
||||||
|
models: list[str] = []
|
||||||
|
page = client.models.list()
|
||||||
|
models.extend(model.id for model in page.data)
|
||||||
|
while getattr(page, "has_more", False) and page.data:
|
||||||
|
cursor = page.data[-1].id
|
||||||
|
page = client.models.list(after=cursor)
|
||||||
|
models.extend(model.id for model in page.data)
|
||||||
|
return models
|
||||||
|
|
||||||
|
try:
|
||||||
|
available_models = list_available_models()
|
||||||
|
except Exception as exc:
|
||||||
|
raise RuntimeError(f"failed to query {base_url}/models: {exc}") from exc
|
||||||
|
|
||||||
|
if model_id not in available_models:
|
||||||
|
suggestions = ", ".join(sorted(available_models)) if available_models else "<none>"
|
||||||
|
logger.warning(
|
||||||
|
"model '%s' not served by vLLM at %s. Available models: %s. Use the first model, results may differ during semantic search (you can omit this message if your weights is a ONNX checkpoint of the same model.)", model_id, base_url, suggestions,
|
||||||
|
)
|
||||||
|
model_id = available_models[0]
|
||||||
|
|
||||||
|
# Apply model-specific prefixes for documents (asymmetric search)
|
||||||
|
model_lower = model_id.lower()
|
||||||
|
if "e5" in model_lower:
|
||||||
|
# E5 models: use "passage:" prefix for documents
|
||||||
|
prefixed = [f"passage: {t}" for t in texts]
|
||||||
|
elif "qwen" in model_lower and "embedding" in model_lower:
|
||||||
|
# Qwen3-Embedding: documents use plain text (no prefix)
|
||||||
|
prefixed = texts
|
||||||
|
elif "embeddinggemma" in model_lower:
|
||||||
|
# embeddinggemma: use "title: none | text:" prefix for documents
|
||||||
|
prefixed = [f"title: none | text: {t}" for t in texts]
|
||||||
|
else:
|
||||||
|
# Default: no prefix for unknown models
|
||||||
|
prefixed = texts
|
||||||
|
|
||||||
|
print(
|
||||||
|
"Embedding"
|
||||||
|
f" {len(prefixed)} texts with vLLM"
|
||||||
|
f" (model={model_id}, batch_size={batch_size}, concurrency={concurrency})",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create batches
|
||||||
|
batches = []
|
||||||
|
for i in range(0, len(prefixed), batch_size):
|
||||||
|
batch = prefixed[i : i + batch_size]
|
||||||
|
batches.append((i, batch))
|
||||||
|
|
||||||
|
# Function to send a single batch request
|
||||||
|
def send_batch(batch_info: tuple[int, list[str]]) -> tuple[int, list[np.ndarray]]:
|
||||||
|
idx, batch = batch_info
|
||||||
|
response = client.embeddings.create(model=model_id, input=batch)
|
||||||
|
embeddings = [np.asarray(item.embedding, dtype=np.float32) for item in response.data]
|
||||||
|
return (idx, embeddings)
|
||||||
|
|
||||||
|
# Send batches concurrently (or sequentially if only 1 batch)
|
||||||
|
results: dict[int, list[np.ndarray]] = {}
|
||||||
|
if len(batches) == 1:
|
||||||
|
# Single batch - no need for threading
|
||||||
|
idx, embeddings = send_batch(batches[0])
|
||||||
|
results[idx] = embeddings
|
||||||
|
else:
|
||||||
|
# Multiple batches - use concurrent requests
|
||||||
|
with ThreadPoolExecutor(max_workers=concurrency) as executor:
|
||||||
|
futures = {executor.submit(send_batch, batch_info): batch_info[0] for batch_info in batches}
|
||||||
|
completed = 0
|
||||||
|
for future in as_completed(futures):
|
||||||
|
idx, embeddings = future.result()
|
||||||
|
results[idx] = embeddings
|
||||||
|
completed += 1
|
||||||
|
if completed % max(1, len(batches) // 10) == 0 or completed == len(batches):
|
||||||
|
print(f" Completed {completed}/{len(batches)} batches ({completed * 100 // len(batches)}%)")
|
||||||
|
|
||||||
|
# Reconstruct in order
|
||||||
|
out: list[np.ndarray] = []
|
||||||
|
for i in sorted(results.keys()):
|
||||||
|
out.extend(results[i])
|
||||||
|
|
||||||
|
return np.stack(out, axis=0)
|
||||||
|
|
||||||
|
|
||||||
|
def embed_hf(texts: list[str], model_id: str, device: str) -> np.ndarray:
|
||||||
|
# Prefer sentence-transformers for E5 and similar embed models
|
||||||
|
from sentence_transformers import SentenceTransformer
|
||||||
|
|
||||||
|
model = SentenceTransformer(model_id, device=device)
|
||||||
|
|
||||||
|
# Apply model-specific prefixes for documents (asymmetric search)
|
||||||
|
model_lower = model_id.lower()
|
||||||
|
if "e5" in model_lower:
|
||||||
|
# E5 models: use "passage:" prefix for documents
|
||||||
|
prefixed = [f"passage: {t}" for t in texts]
|
||||||
|
elif "qwen" in model_lower and "embedding" in model_lower:
|
||||||
|
# Qwen3-Embedding: documents use plain text (no prefix)
|
||||||
|
prefixed = texts
|
||||||
|
elif "embeddinggemma" in model_lower:
|
||||||
|
# embeddinggemma: use "title: none | text:" prefix for documents
|
||||||
|
prefixed = [f"title: none | text: {t}" for t in texts]
|
||||||
|
else:
|
||||||
|
# Default: no prefix for unknown models
|
||||||
|
prefixed = texts
|
||||||
|
|
||||||
|
vecs = model.encode(
|
||||||
|
prefixed,
|
||||||
|
batch_size=64,
|
||||||
|
normalize_embeddings=True,
|
||||||
|
convert_to_numpy=True,
|
||||||
|
show_progress_bar=True,
|
||||||
|
)
|
||||||
|
return vecs.astype(np.float32, copy=False)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
ap = argparse.ArgumentParser()
|
||||||
|
ap.add_argument("--jsonl", default="public/embeddings-text.jsonl")
|
||||||
|
ap.add_argument("--model", default=os.environ.get("SEM_MODEL", "intfloat/multilingual-e5-large"))
|
||||||
|
ap.add_argument("--dims", type=int, default=int(os.environ.get("SEM_DIMS", "1024")))
|
||||||
|
ap.add_argument("--dtype", choices=["fp16", "fp32"], default=os.environ.get("SEM_DTYPE", "fp32"))
|
||||||
|
ap.add_argument("--shard-size", type=int, default=int(os.environ.get("SEM_SHARD", "1024")))
|
||||||
|
ap.add_argument("--out", default="public/embeddings")
|
||||||
|
ap.add_argument("--use-vllm", action="store_true", default=bool(os.environ.get("USE_VLLM", "")))
|
||||||
|
ap.add_argument(
|
||||||
|
"--vllm-url",
|
||||||
|
default=DEFAULT_VLLM_URL,
|
||||||
|
help="Base URL for the vLLM OpenAI-compatible server (accepts either /v1 or /v1/embeddings)",
|
||||||
|
)
|
||||||
|
ap.add_argument("--chunk-size", type=int, default=512, help="Max tokens per chunk")
|
||||||
|
ap.add_argument("--chunk-overlap", type=int, default=128, help="Overlap tokens between chunks")
|
||||||
|
ap.add_argument("--no-chunking", action="store_true", help="Disable chunking (embed full docs)")
|
||||||
|
ap.add_argument(
|
||||||
|
"--concurrency",
|
||||||
|
type=int,
|
||||||
|
default=int(os.environ.get("VLLM_CONCURRENCY", "8")),
|
||||||
|
help="Number of concurrent requests to vLLM (default: 8)",
|
||||||
|
)
|
||||||
|
ap.add_argument(
|
||||||
|
"--batch-size",
|
||||||
|
type=int,
|
||||||
|
default=int(os.environ.get("VLLM_BATCH_SIZE", "64")),
|
||||||
|
help="Batch size for vLLM requests (default: 64)",
|
||||||
|
)
|
||||||
|
args = ap.parse_args()
|
||||||
|
|
||||||
|
recs = list(load_jsonl(args.jsonl))
|
||||||
|
if not recs:
|
||||||
|
print("No input found in public/embeddings-text.jsonl; run the site build first to emit JSONL.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Apply chunking
|
||||||
|
if args.no_chunking:
|
||||||
|
chunks = recs
|
||||||
|
chunk_metadata = {}
|
||||||
|
print(f"Chunking disabled. Processing {len(chunks)} full documents")
|
||||||
|
else:
|
||||||
|
chunks = []
|
||||||
|
chunk_metadata = {}
|
||||||
|
for rec in recs:
|
||||||
|
doc_chunks = chunk_document(rec, max_tokens=args.chunk_size, overlap_tokens=args.chunk_overlap)
|
||||||
|
chunks.extend(doc_chunks)
|
||||||
|
# Build chunk metadata map
|
||||||
|
for chunk in doc_chunks:
|
||||||
|
if chunk["is_chunked"]:
|
||||||
|
chunk_metadata[chunk["slug"]] = {
|
||||||
|
"parentSlug": chunk["parent_slug"],
|
||||||
|
"chunkId": chunk["chunk_id"],
|
||||||
|
}
|
||||||
|
chunked_count = sum(1 for c in chunks if c.get("is_chunked", False))
|
||||||
|
print(f"Chunked {len(recs)} documents into {len(chunks)} chunks ({chunked_count} chunked, {len(chunks) - chunked_count} unchanged)")
|
||||||
|
print(f" Chunk size: {args.chunk_size} tokens, overlap: {args.chunk_overlap} tokens")
|
||||||
|
|
||||||
|
ids = [c["slug"] for c in chunks]
|
||||||
|
titles = [c.get("title", c["slug"]) for c in chunks]
|
||||||
|
texts = [c["text"] for c in chunks]
|
||||||
|
|
||||||
|
if args.use_vllm:
|
||||||
|
vecs = embed_vllm(
|
||||||
|
texts,
|
||||||
|
args.model,
|
||||||
|
args.vllm_url,
|
||||||
|
batch_size=args.batch_size,
|
||||||
|
concurrency=args.concurrency,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
device = "cuda" if os.environ.get("CUDA_VISIBLE_DEVICES") else "cpu"
|
||||||
|
vecs = embed_hf(texts, args.model, device)
|
||||||
|
|
||||||
|
# Coerce dims and re-normalize
|
||||||
|
if vecs.shape[1] != args.dims:
|
||||||
|
if vecs.shape[1] > args.dims:
|
||||||
|
vecs = vecs[:, : args.dims]
|
||||||
|
else:
|
||||||
|
vecs = np.pad(vecs, ((0, 0), (0, args.dims - vecs.shape[1])))
|
||||||
|
vecs = l2_normalize_rows(vecs.astype(np.float32, copy=False))
|
||||||
|
|
||||||
|
out_dir = Path(args.out)
|
||||||
|
shards = write_shards(vecs, args.shard_size, args.dtype, out_dir)
|
||||||
|
|
||||||
|
# Build a lightweight HNSW graph and store it in a compact binary layout
|
||||||
|
def hnsw_build(data: np.ndarray, M: int = 16, efC: int = 200, seed: int = 0) -> dict:
|
||||||
|
rng = random.Random(seed)
|
||||||
|
N, D = data.shape
|
||||||
|
levels: list[list[list[int]]] = [] # levels[L][i] = neighbors of node i at level L
|
||||||
|
|
||||||
|
# random level assignment using 1/e distribution
|
||||||
|
node_levels = []
|
||||||
|
for _ in range(N):
|
||||||
|
lvl = 0
|
||||||
|
while rng.random() < 1 / math.e:
|
||||||
|
lvl += 1
|
||||||
|
node_levels.append(lvl)
|
||||||
|
max_level = max(node_levels) if N > 0 else 0
|
||||||
|
for _ in range(max_level + 1):
|
||||||
|
levels.append([[] for _ in range(N)])
|
||||||
|
|
||||||
|
def sim(i: int, j: int) -> float:
|
||||||
|
return float((data[i] * data[j]).sum())
|
||||||
|
|
||||||
|
entry = 0 if N > 0 else -1
|
||||||
|
|
||||||
|
def search_layer(q: int, ep: int, ef: int, L: int) -> list[int]:
|
||||||
|
if ep < 0:
|
||||||
|
return []
|
||||||
|
visited = set()
|
||||||
|
cand: list[tuple[float, int]] = []
|
||||||
|
top: list[tuple[float, int]] = []
|
||||||
|
def push(node: int):
|
||||||
|
if node in visited:
|
||||||
|
return
|
||||||
|
visited.add(node)
|
||||||
|
cand.append((sim(q, node), node))
|
||||||
|
push(ep)
|
||||||
|
while cand:
|
||||||
|
cand.sort(reverse=True)
|
||||||
|
s, v = cand.pop(0)
|
||||||
|
if len(top) >= ef and s <= top[-1][0]:
|
||||||
|
break
|
||||||
|
top.append((s, v))
|
||||||
|
for u in levels[L][v]:
|
||||||
|
push(u)
|
||||||
|
top.sort(reverse=True)
|
||||||
|
return [n for _, n in top]
|
||||||
|
|
||||||
|
for i in range(N):
|
||||||
|
if i == 0:
|
||||||
|
continue
|
||||||
|
lvl = node_levels[i]
|
||||||
|
ep = entry
|
||||||
|
for L in range(max_level, lvl, -1):
|
||||||
|
c = search_layer(i, ep, 1, L)
|
||||||
|
if c:
|
||||||
|
ep = c[0]
|
||||||
|
for L in range(min(max_level, lvl), -1, -1):
|
||||||
|
W = search_layer(i, ep, efC, L)
|
||||||
|
# Select top M by similarity
|
||||||
|
neigh = sorted(((sim(i, j), j) for j in W if j != i), reverse=True)[:M]
|
||||||
|
for _, e in neigh:
|
||||||
|
if e not in levels[L][i]:
|
||||||
|
levels[L][i].append(e)
|
||||||
|
if i not in levels[L][e]:
|
||||||
|
levels[L][e].append(i)
|
||||||
|
|
||||||
|
# trim neighbors to M
|
||||||
|
for L in range(len(levels)):
|
||||||
|
for i in range(N):
|
||||||
|
if len(levels[L][i]) > M:
|
||||||
|
# keep top M by sim
|
||||||
|
nb = levels[L][i]
|
||||||
|
nb = sorted(nb, key=lambda j: sim(i, j), reverse=True)[:M]
|
||||||
|
levels[L][i] = nb
|
||||||
|
|
||||||
|
return {
|
||||||
|
"M": M,
|
||||||
|
"efConstruction": efC,
|
||||||
|
"entryPoint": entry,
|
||||||
|
"maxLevel": max_level,
|
||||||
|
"levels": levels,
|
||||||
|
}
|
||||||
|
|
||||||
|
hnsw = hnsw_build(vecs, M=16, efC=200)
|
||||||
|
hnsw_meta, hnsw_sha = write_hnsw_graph(hnsw["levels"], int(vecs.shape[0]), out_dir / "hnsw.bin")
|
||||||
|
|
||||||
|
manifest = {
|
||||||
|
"version": 2,
|
||||||
|
"dims": args.dims,
|
||||||
|
"dtype": args.dtype,
|
||||||
|
"normalized": True,
|
||||||
|
"rows": int(vecs.shape[0]),
|
||||||
|
"shardSizeRows": args.shard_size,
|
||||||
|
"vectors": {
|
||||||
|
"dtype": args.dtype,
|
||||||
|
"rows": int(vecs.shape[0]),
|
||||||
|
"dims": args.dims,
|
||||||
|
"shards": shards,
|
||||||
|
},
|
||||||
|
"ids": ids,
|
||||||
|
"titles": titles,
|
||||||
|
"chunkMetadata": chunk_metadata,
|
||||||
|
"hnsw": {
|
||||||
|
"M": hnsw["M"],
|
||||||
|
"efConstruction": hnsw["efConstruction"],
|
||||||
|
"entryPoint": hnsw["entryPoint"],
|
||||||
|
"maxLevel": hnsw["maxLevel"],
|
||||||
|
"graph": {
|
||||||
|
"path": "/embeddings/hnsw.bin",
|
||||||
|
"sha256": hnsw_sha,
|
||||||
|
"levels": hnsw_meta,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
(out_dir / "manifest.json").write_text(json.dumps(manifest, ensure_ascii=False), encoding="utf-8")
|
||||||
|
print(f"Wrote {len(shards)} vector shard(s), HNSW graph, and manifest to {out_dir}")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -27,8 +27,6 @@ import lt from "./locales/lt-LT"
|
|||||||
import fi from "./locales/fi-FI"
|
import fi from "./locales/fi-FI"
|
||||||
import no from "./locales/nb-NO"
|
import no from "./locales/nb-NO"
|
||||||
import id from "./locales/id-ID"
|
import id from "./locales/id-ID"
|
||||||
import kk from "./locales/kk-KZ"
|
|
||||||
import he from "./locales/he-IL"
|
|
||||||
|
|
||||||
export const TRANSLATIONS = {
|
export const TRANSLATIONS = {
|
||||||
"en-US": enUs,
|
"en-US": enUs,
|
||||||
@@ -80,8 +78,6 @@ export const TRANSLATIONS = {
|
|||||||
"fi-FI": fi,
|
"fi-FI": fi,
|
||||||
"nb-NO": no,
|
"nb-NO": no,
|
||||||
"id-ID": id,
|
"id-ID": id,
|
||||||
"kk-KZ": kk,
|
|
||||||
"he-IL": he,
|
|
||||||
} as const
|
} as const
|
||||||
|
|
||||||
export const defaultTranslation = "en-US"
|
export const defaultTranslation = "en-US"
|
||||||
|
|||||||
@@ -1,88 +0,0 @@
|
|||||||
import { Translation } from "./definition"
|
|
||||||
|
|
||||||
export default {
|
|
||||||
propertyDefaults: {
|
|
||||||
title: "ללא כותרת",
|
|
||||||
description: "לא סופק תיאור",
|
|
||||||
},
|
|
||||||
direction: "rtl" as const,
|
|
||||||
components: {
|
|
||||||
callout: {
|
|
||||||
note: "הערה",
|
|
||||||
abstract: "תקציר",
|
|
||||||
info: "מידע",
|
|
||||||
todo: "לעשות",
|
|
||||||
tip: "טיפ",
|
|
||||||
success: "הצלחה",
|
|
||||||
question: "שאלה",
|
|
||||||
warning: "אזהרה",
|
|
||||||
failure: "כשלון",
|
|
||||||
danger: "סכנה",
|
|
||||||
bug: "באג",
|
|
||||||
example: "דוגמה",
|
|
||||||
quote: "ציטוט",
|
|
||||||
},
|
|
||||||
backlinks: {
|
|
||||||
title: "קישורים חוזרים",
|
|
||||||
noBacklinksFound: "לא נמצאו קישורים חוזרים",
|
|
||||||
},
|
|
||||||
themeToggle: {
|
|
||||||
lightMode: "מצב בהיר",
|
|
||||||
darkMode: "מצב כהה",
|
|
||||||
},
|
|
||||||
readerMode: {
|
|
||||||
title: "מצב קריאה",
|
|
||||||
},
|
|
||||||
explorer: {
|
|
||||||
title: "סייר",
|
|
||||||
},
|
|
||||||
footer: {
|
|
||||||
createdWith: "נוצר באמצעות",
|
|
||||||
},
|
|
||||||
graph: {
|
|
||||||
title: "מבט גרף",
|
|
||||||
},
|
|
||||||
recentNotes: {
|
|
||||||
title: "הערות אחרונות",
|
|
||||||
seeRemainingMore: ({ remaining }) => `עיין ב ${remaining} נוספים →`,
|
|
||||||
},
|
|
||||||
transcludes: {
|
|
||||||
transcludeOf: ({ targetSlug }) => `מצוטט מ ${targetSlug}`,
|
|
||||||
linkToOriginal: "קישור למקורי",
|
|
||||||
},
|
|
||||||
search: {
|
|
||||||
title: "חיפוש",
|
|
||||||
searchBarPlaceholder: "חפשו משהו",
|
|
||||||
},
|
|
||||||
tableOfContents: {
|
|
||||||
title: "תוכן עניינים",
|
|
||||||
},
|
|
||||||
contentMeta: {
|
|
||||||
readingTime: ({ minutes }) => `${minutes} דקות קריאה`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pages: {
|
|
||||||
rss: {
|
|
||||||
recentNotes: "הערות אחרונות",
|
|
||||||
lastFewNotes: ({ count }) => `${count} הערות אחרונות`,
|
|
||||||
},
|
|
||||||
error: {
|
|
||||||
title: "לא נמצא",
|
|
||||||
notFound: "העמוד הזה פרטי או לא קיים.",
|
|
||||||
home: "חזרה לעמוד הבית",
|
|
||||||
},
|
|
||||||
folderContent: {
|
|
||||||
folder: "תיקייה",
|
|
||||||
itemsUnderFolder: ({ count }) =>
|
|
||||||
count === 1 ? "פריט אחד תחת תיקייה זו." : `${count} פריטים תחת תיקייה זו.`,
|
|
||||||
},
|
|
||||||
tagContent: {
|
|
||||||
tag: "תגית",
|
|
||||||
tagIndex: "מפתח התגיות",
|
|
||||||
itemsUnderTag: ({ count }) =>
|
|
||||||
count === 1 ? "פריט אחד עם תגית זו." : `${count} פריטים עם תגית זו.`,
|
|
||||||
showingFirst: ({ count }) => `מראה את ה-${count} תגיות הראשונות.`,
|
|
||||||
totalTags: ({ count }) => `${count} תגיות נמצאו סך הכל.`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} as const satisfies Translation
|
|
||||||
@@ -8,7 +8,7 @@ export default {
|
|||||||
components: {
|
components: {
|
||||||
callout: {
|
callout: {
|
||||||
note: "Nota",
|
note: "Nota",
|
||||||
abstract: "Abstract",
|
abstract: "Astratto",
|
||||||
info: "Info",
|
info: "Info",
|
||||||
todo: "Da fare",
|
todo: "Da fare",
|
||||||
tip: "Consiglio",
|
tip: "Consiglio",
|
||||||
@@ -17,7 +17,7 @@ export default {
|
|||||||
warning: "Attenzione",
|
warning: "Attenzione",
|
||||||
failure: "Errore",
|
failure: "Errore",
|
||||||
danger: "Pericolo",
|
danger: "Pericolo",
|
||||||
bug: "Problema",
|
bug: "Bug",
|
||||||
example: "Esempio",
|
example: "Esempio",
|
||||||
quote: "Citazione",
|
quote: "Citazione",
|
||||||
},
|
},
|
||||||
@@ -43,11 +43,10 @@ export default {
|
|||||||
},
|
},
|
||||||
recentNotes: {
|
recentNotes: {
|
||||||
title: "Note recenti",
|
title: "Note recenti",
|
||||||
seeRemainingMore: ({ remaining }) =>
|
seeRemainingMore: ({ remaining }) => `Vedi ${remaining} altro →`,
|
||||||
remaining === 1 ? "Vedi 1 altra →" : `Vedi altre ${remaining} →`,
|
|
||||||
},
|
},
|
||||||
transcludes: {
|
transcludes: {
|
||||||
transcludeOf: ({ targetSlug }) => `Inclusione di ${targetSlug}`,
|
transcludeOf: ({ targetSlug }) => `Transclusione di ${targetSlug}`,
|
||||||
linkToOriginal: "Link all'originale",
|
linkToOriginal: "Link all'originale",
|
||||||
},
|
},
|
||||||
search: {
|
search: {
|
||||||
@@ -55,16 +54,16 @@ export default {
|
|||||||
searchBarPlaceholder: "Cerca qualcosa",
|
searchBarPlaceholder: "Cerca qualcosa",
|
||||||
},
|
},
|
||||||
tableOfContents: {
|
tableOfContents: {
|
||||||
title: "Indice",
|
title: "Tabella dei contenuti",
|
||||||
},
|
},
|
||||||
contentMeta: {
|
contentMeta: {
|
||||||
readingTime: ({ minutes }) => (minutes === 1 ? "1 minuto" : `${minutes} minuti`),
|
readingTime: ({ minutes }) => `${minutes} minuti`,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
pages: {
|
pages: {
|
||||||
rss: {
|
rss: {
|
||||||
recentNotes: "Note recenti",
|
recentNotes: "Note recenti",
|
||||||
lastFewNotes: ({ count }) => (count === 1 ? "Ultima nota" : `Ultime ${count} note`),
|
lastFewNotes: ({ count }) => `Ultime ${count} note`,
|
||||||
},
|
},
|
||||||
error: {
|
error: {
|
||||||
title: "Non trovato",
|
title: "Non trovato",
|
||||||
@@ -81,9 +80,8 @@ export default {
|
|||||||
tagIndex: "Indice etichette",
|
tagIndex: "Indice etichette",
|
||||||
itemsUnderTag: ({ count }) =>
|
itemsUnderTag: ({ count }) =>
|
||||||
count === 1 ? "1 oggetto con questa etichetta." : `${count} oggetti con questa etichetta.`,
|
count === 1 ? "1 oggetto con questa etichetta." : `${count} oggetti con questa etichetta.`,
|
||||||
showingFirst: ({ count }) => (count === 1 ? "Prima etichetta." : `Prime ${count} etichette.`),
|
showingFirst: ({ count }) => `Prime ${count} etichette.`,
|
||||||
totalTags: ({ count }) =>
|
totalTags: ({ count }) => `Trovate ${count} etichette totali.`,
|
||||||
count === 1 ? "Trovata 1 etichetta in totale." : `Trovate ${count} etichette totali.`,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
} as const satisfies Translation
|
} as const satisfies Translation
|
||||||
|
|||||||
@@ -1,87 +0,0 @@
|
|||||||
import { Translation } from "./definition"
|
|
||||||
|
|
||||||
export default {
|
|
||||||
propertyDefaults: {
|
|
||||||
title: "Атаусыз",
|
|
||||||
description: "Сипаттама берілмеген",
|
|
||||||
},
|
|
||||||
components: {
|
|
||||||
callout: {
|
|
||||||
note: "Ескерту",
|
|
||||||
abstract: "Аннотация",
|
|
||||||
info: "Ақпарат",
|
|
||||||
todo: "Істеу керек",
|
|
||||||
tip: "Кеңес",
|
|
||||||
success: "Сәттілік",
|
|
||||||
question: "Сұрақ",
|
|
||||||
warning: "Ескерту",
|
|
||||||
failure: "Қате",
|
|
||||||
danger: "Қауіп",
|
|
||||||
bug: "Қате",
|
|
||||||
example: "Мысал",
|
|
||||||
quote: "Дәйексөз",
|
|
||||||
},
|
|
||||||
backlinks: {
|
|
||||||
title: "Артқа сілтемелер",
|
|
||||||
noBacklinksFound: "Артқа сілтемелер табылмады",
|
|
||||||
},
|
|
||||||
themeToggle: {
|
|
||||||
lightMode: "Жарық режимі",
|
|
||||||
darkMode: "Қараңғы режим",
|
|
||||||
},
|
|
||||||
readerMode: {
|
|
||||||
title: "Оқу режимі",
|
|
||||||
},
|
|
||||||
explorer: {
|
|
||||||
title: "Зерттеуші",
|
|
||||||
},
|
|
||||||
footer: {
|
|
||||||
createdWith: "Құрастырылған құрал:",
|
|
||||||
},
|
|
||||||
graph: {
|
|
||||||
title: "Граф көрінісі",
|
|
||||||
},
|
|
||||||
recentNotes: {
|
|
||||||
title: "Соңғы жазбалар",
|
|
||||||
seeRemainingMore: ({ remaining }) => `Тағы ${remaining} жазбаны қарау →`,
|
|
||||||
},
|
|
||||||
transcludes: {
|
|
||||||
transcludeOf: ({ targetSlug }) => `${targetSlug} кірістіру`,
|
|
||||||
linkToOriginal: "Бастапқыға сілтеме",
|
|
||||||
},
|
|
||||||
search: {
|
|
||||||
title: "Іздеу",
|
|
||||||
searchBarPlaceholder: "Бірдеңе іздеу",
|
|
||||||
},
|
|
||||||
tableOfContents: {
|
|
||||||
title: "Мазмұны",
|
|
||||||
},
|
|
||||||
contentMeta: {
|
|
||||||
readingTime: ({ minutes }) => `${minutes} мин оқу`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pages: {
|
|
||||||
rss: {
|
|
||||||
recentNotes: "Соңғы жазбалар",
|
|
||||||
lastFewNotes: ({ count }) => `Соңғы ${count} жазба`,
|
|
||||||
},
|
|
||||||
error: {
|
|
||||||
title: "Табылмады",
|
|
||||||
notFound: "Бұл бет жеке немесе жоқ болуы мүмкін.",
|
|
||||||
home: "Басты бетке оралу",
|
|
||||||
},
|
|
||||||
folderContent: {
|
|
||||||
folder: "Қалта",
|
|
||||||
itemsUnderFolder: ({ count }) =>
|
|
||||||
count === 1 ? "Бұл қалтада 1 элемент бар." : `Бұл қалтада ${count} элемент бар.`,
|
|
||||||
},
|
|
||||||
tagContent: {
|
|
||||||
tag: "Тег",
|
|
||||||
tagIndex: "Тегтер индексі",
|
|
||||||
itemsUnderTag: ({ count }) =>
|
|
||||||
count === 1 ? "Бұл тегпен 1 элемент." : `Бұл тегпен ${count} элемент.`,
|
|
||||||
showingFirst: ({ count }) => `Алғашқы ${count} тег көрсетілуде.`,
|
|
||||||
totalTags: ({ count }) => `Барлығы ${count} тег табылды.`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} as const satisfies Translation
|
|
||||||
@@ -3,83 +3,85 @@ import { Translation } from "./definition"
|
|||||||
export default {
|
export default {
|
||||||
propertyDefaults: {
|
propertyDefaults: {
|
||||||
title: "Không có tiêu đề",
|
title: "Không có tiêu đề",
|
||||||
description: "Không có mô tả",
|
description: "Không có mô tả được cung cấp",
|
||||||
},
|
},
|
||||||
components: {
|
components: {
|
||||||
callout: {
|
callout: {
|
||||||
note: "Ghi chú",
|
note: "Ghi Chú",
|
||||||
abstract: "Tổng quan",
|
abstract: "Tóm Tắt",
|
||||||
info: "Thông tin",
|
info: "Thông tin",
|
||||||
todo: "Cần phải làm",
|
todo: "Cần Làm",
|
||||||
tip: "Gợi ý",
|
tip: "Gợi Ý",
|
||||||
success: "Thành công",
|
success: "Thành Công",
|
||||||
question: "Câu hỏi",
|
question: "Nghi Vấn",
|
||||||
warning: "Cảnh báo",
|
warning: "Cảnh Báo",
|
||||||
failure: "Thất bại",
|
failure: "Thất Bại",
|
||||||
danger: "Nguy hiểm",
|
danger: "Nguy Hiểm",
|
||||||
bug: "Lỗi",
|
bug: "Lỗi",
|
||||||
example: "Ví dụ",
|
example: "Ví Dụ",
|
||||||
quote: "Trích dẫn",
|
quote: "Trích Dẫn",
|
||||||
},
|
},
|
||||||
backlinks: {
|
backlinks: {
|
||||||
title: "Liên kết ngược",
|
title: "Liên Kết Ngược",
|
||||||
noBacklinksFound: "Không có liên kết ngược nào",
|
noBacklinksFound: "Không có liên kết ngược được tìm thấy",
|
||||||
},
|
},
|
||||||
themeToggle: {
|
themeToggle: {
|
||||||
lightMode: "Chế độ sáng",
|
lightMode: "Sáng",
|
||||||
darkMode: "Chế độ tối",
|
darkMode: "Tối",
|
||||||
},
|
},
|
||||||
readerMode: {
|
readerMode: {
|
||||||
title: "Chế độ đọc",
|
title: "Chế độ đọc",
|
||||||
},
|
},
|
||||||
explorer: {
|
explorer: {
|
||||||
title: "Nội dung",
|
title: "Trong bài này",
|
||||||
},
|
},
|
||||||
footer: {
|
footer: {
|
||||||
createdWith: "Được tạo bằng",
|
createdWith: "Được tạo bởi",
|
||||||
},
|
},
|
||||||
graph: {
|
graph: {
|
||||||
title: "Sơ đồ",
|
title: "Biểu Đồ",
|
||||||
},
|
},
|
||||||
recentNotes: {
|
recentNotes: {
|
||||||
title: "Ghi chú gần đây",
|
title: "Bài viết gần đây",
|
||||||
seeRemainingMore: ({ remaining }) => `Xem thêm ${remaining} ghi chú →`,
|
seeRemainingMore: ({ remaining }) => `Xem ${remaining} thêm →`,
|
||||||
},
|
},
|
||||||
transcludes: {
|
transcludes: {
|
||||||
transcludeOf: ({ targetSlug }) => `Trích dẫn toàn bộ từ ${targetSlug}`,
|
transcludeOf: ({ targetSlug }) => `Bao gồm ${targetSlug}`,
|
||||||
linkToOriginal: "Xem trang gốc",
|
linkToOriginal: "Liên Kết Gốc",
|
||||||
},
|
},
|
||||||
search: {
|
search: {
|
||||||
title: "Tìm",
|
title: "Tìm Kiếm",
|
||||||
searchBarPlaceholder: "Tìm kiếm thông tin",
|
searchBarPlaceholder: "Tìm kiếm thông tin",
|
||||||
},
|
},
|
||||||
tableOfContents: {
|
tableOfContents: {
|
||||||
title: "Mục lục",
|
title: "Bảng Nội Dung",
|
||||||
},
|
},
|
||||||
contentMeta: {
|
contentMeta: {
|
||||||
readingTime: ({ minutes }) => `${minutes} phút đọc`,
|
readingTime: ({ minutes }) => `đọc ${minutes} phút`,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
pages: {
|
pages: {
|
||||||
rss: {
|
rss: {
|
||||||
recentNotes: "Ghi chú gần đây",
|
recentNotes: "Những bài gần đây",
|
||||||
lastFewNotes: ({ count }) => `${count} Trang gần đây`,
|
lastFewNotes: ({ count }) => `${count} Bài gần đây`,
|
||||||
},
|
},
|
||||||
error: {
|
error: {
|
||||||
title: "Không tìm thấy",
|
title: "Không Tìm Thấy",
|
||||||
notFound: "Trang này riêng tư hoặc không tồn tại.",
|
notFound: "Trang này được bảo mật hoặc không tồn tại.",
|
||||||
home: "Về trang chủ",
|
home: "Trở về trang chủ",
|
||||||
},
|
},
|
||||||
folderContent: {
|
folderContent: {
|
||||||
folder: "Thư mục",
|
folder: "Thư Mục",
|
||||||
itemsUnderFolder: ({ count }) => `Có ${count} trang trong thư mục này.`,
|
itemsUnderFolder: ({ count }) =>
|
||||||
|
count === 1 ? "1 mục trong thư mục này." : `${count} mục trong thư mục này.`,
|
||||||
},
|
},
|
||||||
tagContent: {
|
tagContent: {
|
||||||
tag: "Thẻ",
|
tag: "Thẻ",
|
||||||
tagIndex: "Danh sách thẻ",
|
tagIndex: "Thẻ Mục Lục",
|
||||||
itemsUnderTag: ({ count }) => `Có ${count} trang gắn thẻ này.`,
|
itemsUnderTag: ({ count }) =>
|
||||||
showingFirst: ({ count }) => `Đang hiển thị ${count} trang đầu tiên.`,
|
count === 1 ? "1 mục gắn thẻ này." : `${count} mục gắn thẻ này.`,
|
||||||
totalTags: ({ count }) => `Có tổng cộng ${count} thẻ.`,
|
showingFirst: ({ count }) => `Hiển thị trước ${count} thẻ.`,
|
||||||
|
totalTags: ({ count }) => `Tìm thấy ${count} thẻ tổng cộng.`,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
} as const satisfies Translation
|
} as const satisfies Translation
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
|
|||||||
description: notFound,
|
description: notFound,
|
||||||
frontmatter: { title: notFound, tags: [] },
|
frontmatter: { title: notFound, tags: [] },
|
||||||
})
|
})
|
||||||
const externalResources = pageResources(path, resources)
|
const externalResources = pageResources(path, resources, ctx.cfg.configuration)
|
||||||
const componentData: QuartzComponentProps = {
|
const componentData: QuartzComponentProps = {
|
||||||
ctx,
|
ctx,
|
||||||
fileData: vfile.data,
|
fileData: vfile.data,
|
||||||
|
|||||||
@@ -7,12 +7,8 @@ import { Argv } from "../../util/ctx"
|
|||||||
import { QuartzConfig } from "../../cfg"
|
import { QuartzConfig } from "../../cfg"
|
||||||
|
|
||||||
const filesToCopy = async (argv: Argv, cfg: QuartzConfig) => {
|
const filesToCopy = async (argv: Argv, cfg: QuartzConfig) => {
|
||||||
// glob all non MD/base files in content folder and copy it over
|
// glob all non MD files in content folder and copy it over
|
||||||
return await glob("**", argv.directory, [
|
return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns])
|
||||||
"**/*.md",
|
|
||||||
"**/*.base",
|
|
||||||
...cfg.configuration.ignorePatterns,
|
|
||||||
])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const copyFile = async (argv: Argv, fp: FilePath) => {
|
const copyFile = async (argv: Argv, fp: FilePath) => {
|
||||||
@@ -41,7 +37,7 @@ export const Assets: QuartzEmitterPlugin = () => {
|
|||||||
async *partialEmit(ctx, _content, _resources, changeEvents) {
|
async *partialEmit(ctx, _content, _resources, changeEvents) {
|
||||||
for (const changeEvent of changeEvents) {
|
for (const changeEvent of changeEvents) {
|
||||||
const ext = path.extname(changeEvent.path)
|
const ext = path.extname(changeEvent.path)
|
||||||
if (ext === ".md" || ext === ".base") continue
|
if (ext === ".md") continue
|
||||||
|
|
||||||
if (changeEvent.type === "add" || changeEvent.type === "change") {
|
if (changeEvent.type === "add" || changeEvent.type === "change") {
|
||||||
yield copyFile(ctx.argv, changeEvent.path)
|
yield copyFile(ctx.argv, changeEvent.path)
|
||||||
|
|||||||
@@ -1,184 +0,0 @@
|
|||||||
import { QuartzEmitterPlugin } from "../types"
|
|
||||||
import { QuartzComponentProps } from "../../components/types"
|
|
||||||
import HeaderConstructor from "../../components/Header"
|
|
||||||
import BodyConstructor from "../../components/Body"
|
|
||||||
import { pageResources, renderPage } from "../../components/renderPage"
|
|
||||||
import { ProcessedContent, QuartzPluginData } from "../vfile"
|
|
||||||
import { FullPageLayout } from "../../cfg"
|
|
||||||
import { pathToRoot } from "../../util/path"
|
|
||||||
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
|
|
||||||
import { BaseContent, BaseViewSelector } from "../../components"
|
|
||||||
import { write } from "./helpers"
|
|
||||||
import { BuildCtx } from "../../util/ctx"
|
|
||||||
import { StaticResources } from "../../util/resources"
|
|
||||||
import {
|
|
||||||
renderBaseViewsForFile,
|
|
||||||
RenderedBaseView,
|
|
||||||
BaseViewMeta,
|
|
||||||
BaseMetadata,
|
|
||||||
} from "../../util/base/render"
|
|
||||||
import { BaseFile } from "../../util/base/types"
|
|
||||||
|
|
||||||
interface BasePageOptions extends FullPageLayout {}
|
|
||||||
|
|
||||||
function isBaseFile(data: QuartzPluginData): boolean {
|
|
||||||
return Boolean(data.basesConfig && (data.basesConfig as BaseFile).views?.length > 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
function getBaseFiles(content: ProcessedContent[]): ProcessedContent[] {
|
|
||||||
return content.filter(([_, file]) => isBaseFile(file.data))
|
|
||||||
}
|
|
||||||
|
|
||||||
async function processBasePage(
|
|
||||||
ctx: BuildCtx,
|
|
||||||
baseFileData: QuartzPluginData,
|
|
||||||
renderedView: RenderedBaseView,
|
|
||||||
allViews: BaseViewMeta[],
|
|
||||||
allFiles: QuartzPluginData[],
|
|
||||||
opts: FullPageLayout,
|
|
||||||
resources: StaticResources,
|
|
||||||
) {
|
|
||||||
const slug = renderedView.slug
|
|
||||||
const cfg = ctx.cfg.configuration
|
|
||||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
|
||||||
|
|
||||||
const viewFileData: QuartzPluginData = {
|
|
||||||
...baseFileData,
|
|
||||||
slug,
|
|
||||||
frontmatter: {
|
|
||||||
...baseFileData.frontmatter,
|
|
||||||
title: renderedView.view.name,
|
|
||||||
},
|
|
||||||
basesRenderedTree: renderedView.tree,
|
|
||||||
basesAllViews: allViews,
|
|
||||||
basesCurrentView: renderedView.view.name,
|
|
||||||
basesMetadata: {
|
|
||||||
baseSlug: baseFileData.slug!,
|
|
||||||
currentView: renderedView.view.name,
|
|
||||||
allViews,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
const componentData: QuartzComponentProps = {
|
|
||||||
ctx,
|
|
||||||
fileData: viewFileData,
|
|
||||||
externalResources,
|
|
||||||
cfg,
|
|
||||||
children: [],
|
|
||||||
tree: renderedView.tree,
|
|
||||||
allFiles,
|
|
||||||
}
|
|
||||||
|
|
||||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
|
||||||
return write({
|
|
||||||
ctx,
|
|
||||||
content,
|
|
||||||
slug,
|
|
||||||
ext: ".html",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
export const BasePage: QuartzEmitterPlugin<Partial<BasePageOptions>> = (userOpts) => {
|
|
||||||
const baseOpts: FullPageLayout = {
|
|
||||||
...sharedPageComponents,
|
|
||||||
...defaultListPageLayout,
|
|
||||||
pageBody: BaseContent(),
|
|
||||||
...userOpts,
|
|
||||||
}
|
|
||||||
|
|
||||||
const opts: FullPageLayout = {
|
|
||||||
...baseOpts,
|
|
||||||
beforeBody: [
|
|
||||||
...baseOpts.beforeBody.filter((component) => component.name !== "ArticleTitle"),
|
|
||||||
BaseViewSelector(),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
const { head: Head, header, beforeBody, pageBody, afterBody, left, right, footer: Footer } = opts
|
|
||||||
const Header = HeaderConstructor()
|
|
||||||
const Body = BodyConstructor()
|
|
||||||
|
|
||||||
return {
|
|
||||||
name: "BasePage",
|
|
||||||
getQuartzComponents() {
|
|
||||||
return [
|
|
||||||
Head,
|
|
||||||
Header,
|
|
||||||
Body,
|
|
||||||
...header,
|
|
||||||
...beforeBody,
|
|
||||||
pageBody,
|
|
||||||
...afterBody,
|
|
||||||
...left,
|
|
||||||
...right,
|
|
||||||
Footer,
|
|
||||||
]
|
|
||||||
},
|
|
||||||
async *emit(ctx, content, resources) {
|
|
||||||
const allFiles = content.map((c) => c[1].data)
|
|
||||||
const baseFiles = getBaseFiles(content)
|
|
||||||
|
|
||||||
for (const [_, file] of baseFiles) {
|
|
||||||
const baseFileData = file.data
|
|
||||||
const { views, allViews } = renderBaseViewsForFile(baseFileData, allFiles)
|
|
||||||
|
|
||||||
for (const renderedView of views) {
|
|
||||||
yield processBasePage(
|
|
||||||
ctx,
|
|
||||||
baseFileData,
|
|
||||||
renderedView,
|
|
||||||
allViews,
|
|
||||||
allFiles,
|
|
||||||
opts,
|
|
||||||
resources,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
async *partialEmit(ctx, content, resources, changeEvents) {
|
|
||||||
const allFiles = content.map((c) => c[1].data)
|
|
||||||
const baseFiles = getBaseFiles(content)
|
|
||||||
|
|
||||||
const affectedBaseSlugs = new Set<string>()
|
|
||||||
|
|
||||||
for (const event of changeEvents) {
|
|
||||||
if (!event.file) continue
|
|
||||||
const slug = event.file.data.slug
|
|
||||||
|
|
||||||
if (slug && isBaseFile(event.file.data)) {
|
|
||||||
affectedBaseSlugs.add(slug)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const [_, file] of baseFiles) {
|
|
||||||
const baseFileData = file.data
|
|
||||||
const baseSlug = baseFileData.slug
|
|
||||||
|
|
||||||
if (!baseSlug || !affectedBaseSlugs.has(baseSlug)) continue
|
|
||||||
|
|
||||||
const { views, allViews } = renderBaseViewsForFile(baseFileData, allFiles)
|
|
||||||
|
|
||||||
for (const renderedView of views) {
|
|
||||||
yield processBasePage(
|
|
||||||
ctx,
|
|
||||||
baseFileData,
|
|
||||||
renderedView,
|
|
||||||
allViews,
|
|
||||||
allFiles,
|
|
||||||
opts,
|
|
||||||
resources,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
declare module "vfile" {
|
|
||||||
interface DataMap {
|
|
||||||
basesRenderedTree?: import("hast").Root
|
|
||||||
basesAllViews?: BaseViewMeta[]
|
|
||||||
basesCurrentView?: string
|
|
||||||
basesMetadata?: BaseMetadata
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,5 +1,8 @@
|
|||||||
import { FullSlug, joinSegments } from "../../util/path"
|
import { FullSlug, joinSegments } from "../../util/path"
|
||||||
import { QuartzEmitterPlugin } from "../types"
|
import { QuartzEmitterPlugin } from "../types"
|
||||||
|
import path from "path"
|
||||||
|
import fs from "node:fs/promises"
|
||||||
|
import { globby } from "globby"
|
||||||
|
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
import spaRouterScript from "../../components/scripts/spa.inline"
|
import spaRouterScript from "../../components/scripts/spa.inline"
|
||||||
@@ -16,7 +19,7 @@ import {
|
|||||||
processGoogleFonts,
|
processGoogleFonts,
|
||||||
} from "../../util/theme"
|
} from "../../util/theme"
|
||||||
import { Features, transform } from "lightningcss"
|
import { Features, transform } from "lightningcss"
|
||||||
import { transform as transpile } from "esbuild"
|
import { transform as transpile, build as bundle } from "esbuild"
|
||||||
import { write } from "./helpers"
|
import { write } from "./helpers"
|
||||||
|
|
||||||
type ComponentResources = {
|
type ComponentResources = {
|
||||||
@@ -241,16 +244,6 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
|
|||||||
vercelInsightsScript.defer = true
|
vercelInsightsScript.defer = true
|
||||||
document.head.appendChild(vercelInsightsScript)
|
document.head.appendChild(vercelInsightsScript)
|
||||||
`)
|
`)
|
||||||
} else if (cfg.analytics?.provider === "rybbit") {
|
|
||||||
componentResources.afterDOMLoaded.push(`
|
|
||||||
const rybbitScript = document.createElement("script");
|
|
||||||
rybbitScript.src = "${cfg.analytics.host ?? "https://app.rybbit.io"}/api/script.js";
|
|
||||||
rybbitScript.setAttribute("data-site-id", "${cfg.analytics.siteId}");
|
|
||||||
rybbitScript.async = true;
|
|
||||||
rybbitScript.defer = true;
|
|
||||||
|
|
||||||
document.head.appendChild(rybbitScript);
|
|
||||||
`)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (cfg.enableSPA) {
|
if (cfg.enableSPA) {
|
||||||
@@ -367,7 +360,47 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
|
|||||||
ext: ".js",
|
ext: ".js",
|
||||||
content: postscript,
|
content: postscript,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Bundle all worker files
|
||||||
|
const workerFiles = await globby(["quartz/**/*.worker.ts"])
|
||||||
|
for (const src of workerFiles) {
|
||||||
|
const result = await bundle({
|
||||||
|
entryPoints: [src],
|
||||||
|
bundle: true,
|
||||||
|
minify: true,
|
||||||
|
platform: "browser",
|
||||||
|
format: "esm",
|
||||||
|
write: false,
|
||||||
|
})
|
||||||
|
const code = result.outputFiles[0].text
|
||||||
|
const name = path.basename(src).replace(/\.ts$/, "")
|
||||||
|
yield write({ ctx, slug: name as FullSlug, ext: ".js", content: code })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
async *partialEmit(ctx, _content, _resources, changeEvents) {
|
||||||
|
// Handle worker file changes in incremental builds
|
||||||
|
for (const changeEvent of changeEvents) {
|
||||||
|
if (!/\.worker\.ts$/.test(changeEvent.path)) continue
|
||||||
|
if (changeEvent.type === "delete") {
|
||||||
|
const name = path.basename(changeEvent.path).replace(/\.ts$/, "")
|
||||||
|
const dest = joinSegments(ctx.argv.output, `${name}.js`)
|
||||||
|
try {
|
||||||
|
await fs.unlink(dest)
|
||||||
|
} catch {}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const result = await bundle({
|
||||||
|
entryPoints: [changeEvent.path],
|
||||||
|
bundle: true,
|
||||||
|
minify: true,
|
||||||
|
platform: "browser",
|
||||||
|
format: "esm",
|
||||||
|
write: false,
|
||||||
|
})
|
||||||
|
const code = result.outputFiles[0].text
|
||||||
|
const name = path.basename(changeEvent.path).replace(/\.ts$/, "")
|
||||||
|
yield write({ ctx, slug: name as FullSlug, ext: ".js", content: code })
|
||||||
|
}
|
||||||
},
|
},
|
||||||
async *partialEmit() {},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ async function processContent(
|
|||||||
) {
|
) {
|
||||||
const slug = fileData.slug!
|
const slug = fileData.slug!
|
||||||
const cfg = ctx.cfg.configuration
|
const cfg = ctx.cfg.configuration
|
||||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
const externalResources = pageResources(pathToRoot(slug), resources, ctx.cfg.configuration)
|
||||||
const componentData: QuartzComponentProps = {
|
const componentData: QuartzComponentProps = {
|
||||||
ctx,
|
ctx,
|
||||||
fileData,
|
fileData,
|
||||||
@@ -83,8 +83,6 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
|
|||||||
containsIndex = true
|
containsIndex = true
|
||||||
}
|
}
|
||||||
|
|
||||||
if (file.data.filePath!.endsWith(".base")) continue
|
|
||||||
|
|
||||||
// only process home page, non-tag pages, and non-index pages
|
// only process home page, non-tag pages, and non-index pages
|
||||||
if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
|
if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
|
||||||
yield processContent(ctx, tree, file.data, allFiles, opts, resources)
|
yield processContent(ctx, tree, file.data, allFiles, opts, resources)
|
||||||
@@ -114,7 +112,6 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
|
|||||||
for (const [tree, file] of content) {
|
for (const [tree, file] of content) {
|
||||||
const slug = file.data.slug!
|
const slug = file.data.slug!
|
||||||
if (!changedSlugs.has(slug)) continue
|
if (!changedSlugs.has(slug)) continue
|
||||||
if (file.data.filePath!.endsWith(".base")) continue
|
|
||||||
if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
|
if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
|
||||||
|
|
||||||
yield processContent(ctx, tree, file.data, allFiles, opts, resources)
|
yield processContent(ctx, tree, file.data, allFiles, opts, resources)
|
||||||
|
|||||||
@@ -38,7 +38,7 @@ async function* processFolderInfo(
|
|||||||
const slug = joinSegments(folder, "index") as FullSlug
|
const slug = joinSegments(folder, "index") as FullSlug
|
||||||
const [tree, file] = folderContent
|
const [tree, file] = folderContent
|
||||||
const cfg = ctx.cfg.configuration
|
const cfg = ctx.cfg.configuration
|
||||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
const externalResources = pageResources(pathToRoot(slug), resources, ctx.cfg.configuration)
|
||||||
const componentData: QuartzComponentProps = {
|
const componentData: QuartzComponentProps = {
|
||||||
ctx,
|
ctx,
|
||||||
fileData: file.data,
|
fileData: file.data,
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
export { ContentPage } from "./contentPage"
|
export { ContentPage } from "./contentPage"
|
||||||
export { TagPage } from "./tagPage"
|
export { TagPage } from "./tagPage"
|
||||||
export { FolderPage } from "./folderPage"
|
export { FolderPage } from "./folderPage"
|
||||||
export { ContentIndex as ContentIndex } from "./contentIndex"
|
export { ContentIndex } from "./contentIndex"
|
||||||
export { AliasRedirects } from "./aliases"
|
export { AliasRedirects } from "./aliases"
|
||||||
export { Assets } from "./assets"
|
export { Assets } from "./assets"
|
||||||
export { Static } from "./static"
|
export { Static } from "./static"
|
||||||
@@ -10,4 +10,4 @@ export { ComponentResources } from "./componentResources"
|
|||||||
export { NotFoundPage } from "./404"
|
export { NotFoundPage } from "./404"
|
||||||
export { CNAME } from "./cname"
|
export { CNAME } from "./cname"
|
||||||
export { CustomOgImages } from "./ogImage"
|
export { CustomOgImages } from "./ogImage"
|
||||||
export { BasePage } from "./basePage"
|
export { SemanticIndex } from "./semantic"
|
||||||
|
|||||||
235
quartz/plugins/emitters/semantic.ts
Normal file
235
quartz/plugins/emitters/semantic.ts
Normal file
@@ -0,0 +1,235 @@
|
|||||||
|
import { write } from "./helpers"
|
||||||
|
import { QuartzEmitterPlugin } from "../types"
|
||||||
|
import { FilePath, FullSlug, joinSegments, QUARTZ } from "../../util/path"
|
||||||
|
import { ReadTimeResults } from "reading-time"
|
||||||
|
import { GlobalConfiguration } from "../../cfg"
|
||||||
|
import { spawn } from "child_process"
|
||||||
|
|
||||||
|
const DEFAULT_MODEL_ID = "onnx-community/Qwen3-Embedding-0.6B-ONNX"
|
||||||
|
|
||||||
|
const defaults: GlobalConfiguration["semanticSearch"] = {
|
||||||
|
enable: true,
|
||||||
|
model: DEFAULT_MODEL_ID,
|
||||||
|
aot: false,
|
||||||
|
dims: 1024,
|
||||||
|
dtype: "fp32",
|
||||||
|
shardSizeRows: 1024,
|
||||||
|
hnsw: { M: 16, efConstruction: 200 },
|
||||||
|
chunking: {
|
||||||
|
chunkSize: 512,
|
||||||
|
chunkOverlap: 128,
|
||||||
|
noChunking: false,
|
||||||
|
},
|
||||||
|
vllm: {
|
||||||
|
enable: false,
|
||||||
|
vllmUrl:
|
||||||
|
process.env.VLLM_URL || process.env.VLLM_EMBED_URL || "http://127.0.0.1:8000/v1/embeddings",
|
||||||
|
concurrency: parseInt(process.env.VLLM_CONCURRENCY || "8", 10),
|
||||||
|
batchSize: parseInt(process.env.VLLM_BATCH_SIZE || "64", 10),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
type ContentDetails = {
|
||||||
|
slug: string
|
||||||
|
title: string
|
||||||
|
filePath: FilePath
|
||||||
|
content: string
|
||||||
|
readingTime?: Partial<ReadTimeResults>
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if uv is installed
|
||||||
|
*/
|
||||||
|
function checkUvInstalled(): Promise<boolean> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const proc = spawn("uv", ["--version"], { shell: true })
|
||||||
|
proc.on("error", () => resolve(false))
|
||||||
|
proc.on("close", (code) => resolve(code === 0))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run the Python embedding build script using uv
|
||||||
|
* Script uses PEP 723 inline metadata for dependency management
|
||||||
|
*/
|
||||||
|
function runEmbedBuild(
|
||||||
|
jsonlPath: string,
|
||||||
|
outDir: string,
|
||||||
|
opts: {
|
||||||
|
model: string
|
||||||
|
dtype: string
|
||||||
|
dims: number
|
||||||
|
shardSizeRows: number
|
||||||
|
chunking: { chunkSize: number; chunkOverlap: number; noChunking: boolean }
|
||||||
|
vllm: { enable: boolean; vllmUrl?: string; concurrency: number; batchSize: number }
|
||||||
|
},
|
||||||
|
): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const scriptPath = joinSegments(QUARTZ, "embed_build.py")
|
||||||
|
const args = [
|
||||||
|
"run",
|
||||||
|
scriptPath,
|
||||||
|
"--jsonl",
|
||||||
|
jsonlPath,
|
||||||
|
"--model",
|
||||||
|
opts.model,
|
||||||
|
"--out",
|
||||||
|
outDir,
|
||||||
|
"--dtype",
|
||||||
|
opts.dtype,
|
||||||
|
"--dims",
|
||||||
|
String(opts.dims),
|
||||||
|
"--shard-size",
|
||||||
|
String(opts.shardSizeRows),
|
||||||
|
"--chunk-size",
|
||||||
|
String(opts.chunking.chunkSize),
|
||||||
|
"--chunk-overlap",
|
||||||
|
String(opts.chunking.chunkOverlap),
|
||||||
|
]
|
||||||
|
|
||||||
|
if (opts.chunking.noChunking) {
|
||||||
|
args.push("--no-chunking")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (opts.vllm.enable) {
|
||||||
|
args.push("--use-vllm")
|
||||||
|
if (opts.vllm.vllmUrl) {
|
||||||
|
args.push("--vllm-url", opts.vllm.vllmUrl)
|
||||||
|
}
|
||||||
|
args.push("--concurrency", String(opts.vllm.concurrency))
|
||||||
|
args.push("--batch-size", String(opts.vllm.batchSize))
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("\nRunning embedding generation:")
|
||||||
|
console.log(` uv ${args.join(" ")}`)
|
||||||
|
|
||||||
|
const env = { ...process.env }
|
||||||
|
if (opts.vllm.enable && !env.USE_VLLM) {
|
||||||
|
env.USE_VLLM = "1"
|
||||||
|
}
|
||||||
|
|
||||||
|
const proc = spawn("uv", args, {
|
||||||
|
stdio: "inherit",
|
||||||
|
shell: true,
|
||||||
|
env,
|
||||||
|
})
|
||||||
|
|
||||||
|
proc.on("error", (err) => {
|
||||||
|
reject(new Error(`Failed to spawn uv: ${err.message}`))
|
||||||
|
})
|
||||||
|
|
||||||
|
proc.on("close", (code) => {
|
||||||
|
if (code === 0) {
|
||||||
|
console.log("Embedding generation completed successfully")
|
||||||
|
resolve()
|
||||||
|
} else {
|
||||||
|
reject(new Error(`embed_build.py exited with code ${code}`))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export const SemanticIndex: QuartzEmitterPlugin<Partial<GlobalConfiguration["semanticSearch"]>> = (
|
||||||
|
opts,
|
||||||
|
) => {
|
||||||
|
const merged = { ...defaults, ...opts }
|
||||||
|
const o = {
|
||||||
|
enable: merged.enable!,
|
||||||
|
model: merged.model!,
|
||||||
|
aot: merged.aot!,
|
||||||
|
dims: merged.dims!,
|
||||||
|
dtype: merged.dtype!,
|
||||||
|
shardSizeRows: merged.shardSizeRows!,
|
||||||
|
hnsw: {
|
||||||
|
M: merged.hnsw?.M ?? defaults.hnsw!.M!,
|
||||||
|
efConstruction: merged.hnsw?.efConstruction ?? defaults.hnsw!.efConstruction!,
|
||||||
|
efSearch: merged.hnsw?.efSearch,
|
||||||
|
},
|
||||||
|
chunking: {
|
||||||
|
chunkSize: merged.chunking?.chunkSize ?? defaults.chunking!.chunkSize!,
|
||||||
|
chunkOverlap: merged.chunking?.chunkOverlap ?? defaults.chunking!.chunkOverlap!,
|
||||||
|
noChunking: merged.chunking?.noChunking ?? defaults.chunking!.noChunking!,
|
||||||
|
},
|
||||||
|
vllm: {
|
||||||
|
enable: merged.vllm?.enable ?? defaults.vllm!.enable!,
|
||||||
|
vllmUrl: merged.vllm?.vllmUrl ?? defaults.vllm!.vllmUrl,
|
||||||
|
concurrency: merged.vllm?.concurrency ?? defaults.vllm!.concurrency!,
|
||||||
|
batchSize: merged.vllm?.batchSize ?? defaults.vllm!.batchSize!,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!o.model) {
|
||||||
|
throw new Error("Semantic search requires a model identifier")
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: "SemanticIndex",
|
||||||
|
getQuartzComponents() {
|
||||||
|
return []
|
||||||
|
},
|
||||||
|
async *partialEmit() {},
|
||||||
|
async *emit(ctx, content, _resources) {
|
||||||
|
if (!o.enable) return
|
||||||
|
|
||||||
|
const docs: ContentDetails[] = []
|
||||||
|
for (const [_, file] of content) {
|
||||||
|
const slug = file.data.slug!
|
||||||
|
const title = file.data.frontmatter?.title ?? slug
|
||||||
|
const text = file.data.text
|
||||||
|
if (text) {
|
||||||
|
docs.push({
|
||||||
|
slug,
|
||||||
|
title,
|
||||||
|
filePath: file.data.filePath!,
|
||||||
|
content: text,
|
||||||
|
readingTime: file.data.readingTime,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit JSONL with the exact text used for embeddings
|
||||||
|
const jsonl = docs
|
||||||
|
.map((d) => ({ slug: d.slug, title: d.title, text: d.content }))
|
||||||
|
.map((o) => JSON.stringify(o))
|
||||||
|
.join("\n")
|
||||||
|
|
||||||
|
const jsonlSlug = "embeddings-text" as FullSlug
|
||||||
|
yield write({
|
||||||
|
ctx,
|
||||||
|
slug: jsonlSlug,
|
||||||
|
ext: ".jsonl",
|
||||||
|
content: jsonl,
|
||||||
|
})
|
||||||
|
|
||||||
|
// If aot is false, run the embedding generation script
|
||||||
|
if (!o.aot) {
|
||||||
|
console.log("\nGenerating embeddings (aot=false)...")
|
||||||
|
|
||||||
|
// Check for uv
|
||||||
|
const hasUv = await checkUvInstalled()
|
||||||
|
if (!hasUv) {
|
||||||
|
throw new Error(
|
||||||
|
"uv is required for embedding generation. Install it from https://docs.astral.sh/uv/",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const jsonlPath = joinSegments(ctx.argv.output, "embeddings-text.jsonl")
|
||||||
|
const outDir = joinSegments(ctx.argv.output, "embeddings")
|
||||||
|
|
||||||
|
try {
|
||||||
|
await runEmbedBuild(jsonlPath, outDir, o)
|
||||||
|
} catch (err) {
|
||||||
|
const message = err instanceof Error ? err.message : String(err)
|
||||||
|
throw new Error(`Embedding generation failed: ${message}`)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
console.log(
|
||||||
|
"\nSkipping embedding generation (aot=true). Expecting pre-generated embeddings in public/embeddings/",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
externalResources(_ctx) {
|
||||||
|
return {}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -73,7 +73,7 @@ async function processTagPage(
|
|||||||
const slug = joinSegments("tags", tag) as FullSlug
|
const slug = joinSegments("tags", tag) as FullSlug
|
||||||
const [tree, file] = tagContent
|
const [tree, file] = tagContent
|
||||||
const cfg = ctx.cfg.configuration
|
const cfg = ctx.cfg.configuration
|
||||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
const externalResources = pageResources(pathToRoot(slug), resources, ctx.cfg.configuration)
|
||||||
const componentData: QuartzComponentProps = {
|
const componentData: QuartzComponentProps = {
|
||||||
ctx,
|
ctx,
|
||||||
fileData: file.data,
|
fileData: file.data,
|
||||||
|
|||||||
@@ -1,521 +0,0 @@
|
|||||||
import * as yaml from "js-yaml"
|
|
||||||
import { QuartzTransformerPlugin } from "../types"
|
|
||||||
import { FilePath, getFileExtension } from "../../util/path"
|
|
||||||
import {
|
|
||||||
BaseFile,
|
|
||||||
BaseView,
|
|
||||||
BaseFileFilter,
|
|
||||||
parseViews,
|
|
||||||
parseViewSummaries,
|
|
||||||
BUILTIN_SUMMARY_TYPES,
|
|
||||||
BuiltinSummaryType,
|
|
||||||
} from "../../util/base/types"
|
|
||||||
import {
|
|
||||||
parseExpressionSource,
|
|
||||||
compileExpression,
|
|
||||||
buildPropertyExpressionSource,
|
|
||||||
ProgramIR,
|
|
||||||
BasesExpressions,
|
|
||||||
BaseExpressionDiagnostic,
|
|
||||||
Span,
|
|
||||||
} from "../../util/base/compiler"
|
|
||||||
|
|
||||||
export interface BasesOptions {
|
|
||||||
/** Whether to emit diagnostics as warnings during build */
|
|
||||||
emitWarnings: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
const defaultOptions: BasesOptions = {
|
|
||||||
emitWarnings: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
type FilterStructure =
|
|
||||||
| string
|
|
||||||
| { and?: FilterStructure[]; or?: FilterStructure[]; not?: FilterStructure[] }
|
|
||||||
|
|
||||||
function compileFilterStructure(
|
|
||||||
filter: FilterStructure | undefined,
|
|
||||||
file: string,
|
|
||||||
diagnostics: BaseExpressionDiagnostic[],
|
|
||||||
context: string,
|
|
||||||
): ProgramIR | undefined {
|
|
||||||
if (!filter) return undefined
|
|
||||||
|
|
||||||
if (typeof filter === "string") {
|
|
||||||
const result = parseExpressionSource(filter, file)
|
|
||||||
if (result.diagnostics.length > 0) {
|
|
||||||
for (const diag of result.diagnostics) {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: diag.kind as "lex" | "parse" | "runtime",
|
|
||||||
message: diag.message,
|
|
||||||
span: diag.span,
|
|
||||||
context,
|
|
||||||
source: filter,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!result.program.body) return undefined
|
|
||||||
return compileExpression(result.program.body)
|
|
||||||
}
|
|
||||||
|
|
||||||
const compileParts = (
|
|
||||||
parts: FilterStructure[],
|
|
||||||
combiner: "&&" | "||",
|
|
||||||
negate: boolean,
|
|
||||||
): ProgramIR | undefined => {
|
|
||||||
const compiled: ProgramIR[] = []
|
|
||||||
for (const part of parts) {
|
|
||||||
const partIR = compileFilterStructure(part, file, diagnostics, context)
|
|
||||||
if (partIR) compiled.push(partIR)
|
|
||||||
}
|
|
||||||
if (compiled.length === 0) return undefined
|
|
||||||
if (compiled.length === 1) {
|
|
||||||
if (negate) {
|
|
||||||
return wrapWithNot(compiled[0])
|
|
||||||
}
|
|
||||||
return compiled[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = compiled[0]
|
|
||||||
for (let i = 1; i < compiled.length; i++) {
|
|
||||||
result = combineWithLogical(result, compiled[i], combiner, negate)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
if (filter.and && filter.and.length > 0) {
|
|
||||||
return compileParts(filter.and, "&&", false)
|
|
||||||
}
|
|
||||||
if (filter.or && filter.or.length > 0) {
|
|
||||||
return compileParts(filter.or, "||", false)
|
|
||||||
}
|
|
||||||
if (filter.not && filter.not.length > 0) {
|
|
||||||
return compileParts(filter.not, "&&", true)
|
|
||||||
}
|
|
||||||
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
function wrapWithNot(ir: ProgramIR): ProgramIR {
|
|
||||||
const span = ir.span
|
|
||||||
return {
|
|
||||||
instructions: [
|
|
||||||
...ir.instructions,
|
|
||||||
{ op: "to_bool" as const, span },
|
|
||||||
{ op: "unary" as const, operator: "!" as const, span },
|
|
||||||
],
|
|
||||||
span,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function combineWithLogical(
|
|
||||||
left: ProgramIR,
|
|
||||||
right: ProgramIR,
|
|
||||||
operator: "&&" | "||",
|
|
||||||
negateRight: boolean,
|
|
||||||
): ProgramIR {
|
|
||||||
const span: Span = {
|
|
||||||
start: left.span.start,
|
|
||||||
end: right.span.end,
|
|
||||||
file: left.span.file,
|
|
||||||
}
|
|
||||||
|
|
||||||
const rightIR = negateRight ? wrapWithNot(right) : right
|
|
||||||
|
|
||||||
if (operator === "&&") {
|
|
||||||
const jumpIfFalseIndex = left.instructions.length + 1
|
|
||||||
const jumpIndex = jumpIfFalseIndex + rightIR.instructions.length + 2
|
|
||||||
return {
|
|
||||||
instructions: [
|
|
||||||
...left.instructions,
|
|
||||||
{ op: "jump_if_false" as const, target: jumpIndex, span },
|
|
||||||
...rightIR.instructions,
|
|
||||||
{ op: "to_bool" as const, span },
|
|
||||||
{ op: "jump" as const, target: jumpIndex + 1, span },
|
|
||||||
{
|
|
||||||
op: "const" as const,
|
|
||||||
literal: { type: "Literal" as const, kind: "boolean" as const, value: false, span },
|
|
||||||
span,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
span,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const jumpIfTrueIndex = left.instructions.length + 1
|
|
||||||
const jumpIndex = jumpIfTrueIndex + rightIR.instructions.length + 2
|
|
||||||
return {
|
|
||||||
instructions: [
|
|
||||||
...left.instructions,
|
|
||||||
{ op: "jump_if_true" as const, target: jumpIndex, span },
|
|
||||||
...rightIR.instructions,
|
|
||||||
{ op: "to_bool" as const, span },
|
|
||||||
{ op: "jump" as const, target: jumpIndex + 1, span },
|
|
||||||
{
|
|
||||||
op: "const" as const,
|
|
||||||
literal: { type: "Literal" as const, kind: "boolean" as const, value: true, span },
|
|
||||||
span,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
span,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function collectPropertiesFromViews(views: BaseView[]): Set<string> {
|
|
||||||
const properties = new Set<string>()
|
|
||||||
for (const view of views) {
|
|
||||||
if (view.order) {
|
|
||||||
for (const prop of view.order) {
|
|
||||||
properties.add(prop)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (view.groupBy) {
|
|
||||||
const groupProp = typeof view.groupBy === "string" ? view.groupBy : view.groupBy.property
|
|
||||||
properties.add(groupProp)
|
|
||||||
}
|
|
||||||
if (view.sort) {
|
|
||||||
for (const sortConfig of view.sort) {
|
|
||||||
properties.add(sortConfig.property)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (view.image) properties.add(view.image)
|
|
||||||
if (view.date) properties.add(view.date)
|
|
||||||
if (view.dateField) properties.add(view.dateField)
|
|
||||||
if (view.dateProperty) properties.add(view.dateProperty)
|
|
||||||
if (view.coordinates) properties.add(view.coordinates)
|
|
||||||
if (view.markerIcon) properties.add(view.markerIcon)
|
|
||||||
if (view.markerColor) properties.add(view.markerColor)
|
|
||||||
}
|
|
||||||
return properties
|
|
||||||
}
|
|
||||||
|
|
||||||
function compilePropertyExpressions(
|
|
||||||
properties: Set<string>,
|
|
||||||
file: string,
|
|
||||||
diagnostics: BaseExpressionDiagnostic[],
|
|
||||||
): Record<string, ProgramIR> {
|
|
||||||
const expressions: Record<string, ProgramIR> = {}
|
|
||||||
|
|
||||||
for (const property of properties) {
|
|
||||||
const source = buildPropertyExpressionSource(property)
|
|
||||||
if (!source) continue
|
|
||||||
|
|
||||||
const result = parseExpressionSource(source, file)
|
|
||||||
if (result.diagnostics.length > 0) {
|
|
||||||
for (const diag of result.diagnostics) {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: diag.kind as "lex" | "parse" | "runtime",
|
|
||||||
message: diag.message,
|
|
||||||
span: diag.span,
|
|
||||||
context: `property.${property}`,
|
|
||||||
source,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (result.program.body) {
|
|
||||||
expressions[property] = compileExpression(result.program.body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return expressions
|
|
||||||
}
|
|
||||||
|
|
||||||
function compileFormulas(
|
|
||||||
formulas: Record<string, string> | undefined,
|
|
||||||
file: string,
|
|
||||||
diagnostics: BaseExpressionDiagnostic[],
|
|
||||||
): Record<string, ProgramIR> {
|
|
||||||
if (!formulas) return {}
|
|
||||||
|
|
||||||
const compiled: Record<string, ProgramIR> = {}
|
|
||||||
for (const [name, source] of Object.entries(formulas)) {
|
|
||||||
const trimmed = source.trim()
|
|
||||||
if (!trimmed) continue
|
|
||||||
|
|
||||||
const result = parseExpressionSource(trimmed, file)
|
|
||||||
if (result.diagnostics.length > 0) {
|
|
||||||
for (const diag of result.diagnostics) {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: diag.kind as "lex" | "parse" | "runtime",
|
|
||||||
message: diag.message,
|
|
||||||
span: diag.span,
|
|
||||||
context: `formulas.${name}`,
|
|
||||||
source: trimmed,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (result.program.body) {
|
|
||||||
compiled[name] = compileExpression(result.program.body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return compiled
|
|
||||||
}
|
|
||||||
|
|
||||||
function compileSummaries(
|
|
||||||
summaries: Record<string, string> | undefined,
|
|
||||||
file: string,
|
|
||||||
diagnostics: BaseExpressionDiagnostic[],
|
|
||||||
): Record<string, ProgramIR> {
|
|
||||||
if (!summaries) return {}
|
|
||||||
|
|
||||||
const compiled: Record<string, ProgramIR> = {}
|
|
||||||
for (const [name, source] of Object.entries(summaries)) {
|
|
||||||
const trimmed = source.trim()
|
|
||||||
if (!trimmed) continue
|
|
||||||
|
|
||||||
const normalized = trimmed.toLowerCase()
|
|
||||||
if (BUILTIN_SUMMARY_TYPES.includes(normalized as BuiltinSummaryType)) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = parseExpressionSource(trimmed, file)
|
|
||||||
if (result.diagnostics.length > 0) {
|
|
||||||
for (const diag of result.diagnostics) {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: diag.kind as "lex" | "parse" | "runtime",
|
|
||||||
message: diag.message,
|
|
||||||
span: diag.span,
|
|
||||||
context: `summaries.${name}`,
|
|
||||||
source: trimmed,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (result.program.body) {
|
|
||||||
compiled[name] = compileExpression(result.program.body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return compiled
|
|
||||||
}
|
|
||||||
|
|
||||||
function compileViewSummaries(
|
|
||||||
views: BaseView[],
|
|
||||||
topLevelSummaries: Record<string, string> | undefined,
|
|
||||||
file: string,
|
|
||||||
diagnostics: BaseExpressionDiagnostic[],
|
|
||||||
): Record<string, Record<string, ProgramIR>> {
|
|
||||||
const result: Record<string, Record<string, ProgramIR>> = {}
|
|
||||||
|
|
||||||
for (let i = 0; i < views.length; i++) {
|
|
||||||
const view = views[i]
|
|
||||||
if (!view.summaries) continue
|
|
||||||
|
|
||||||
const viewSummaryConfig = parseViewSummaries(
|
|
||||||
view.summaries as Record<string, string>,
|
|
||||||
topLevelSummaries,
|
|
||||||
)
|
|
||||||
if (!viewSummaryConfig?.columns) continue
|
|
||||||
|
|
||||||
const viewExpressions: Record<string, ProgramIR> = {}
|
|
||||||
for (const [column, def] of Object.entries(viewSummaryConfig.columns)) {
|
|
||||||
if (def.type !== "formula" || !def.expression) continue
|
|
||||||
|
|
||||||
const parseResult = parseExpressionSource(def.expression, file)
|
|
||||||
if (parseResult.diagnostics.length > 0) {
|
|
||||||
for (const diag of parseResult.diagnostics) {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: diag.kind as "lex" | "parse" | "runtime",
|
|
||||||
message: diag.message,
|
|
||||||
span: diag.span,
|
|
||||||
context: `views[${i}].summaries.${column}`,
|
|
||||||
source: def.expression,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (parseResult.program.body) {
|
|
||||||
viewExpressions[column] = compileExpression(parseResult.program.body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Object.keys(viewExpressions).length > 0) {
|
|
||||||
result[String(i)] = viewExpressions
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
export const ObsidianBases: QuartzTransformerPlugin<Partial<BasesOptions>> = (userOpts) => {
|
|
||||||
const opts = { ...defaultOptions, ...userOpts }
|
|
||||||
|
|
||||||
return {
|
|
||||||
name: "ObsidianBases",
|
|
||||||
textTransform(_ctx, src) {
|
|
||||||
return src
|
|
||||||
},
|
|
||||||
markdownPlugins(_ctx) {
|
|
||||||
return [
|
|
||||||
() => {
|
|
||||||
return (_tree, file) => {
|
|
||||||
const filePath = file.data.filePath as FilePath | undefined
|
|
||||||
if (!filePath) return
|
|
||||||
|
|
||||||
const ext = getFileExtension(filePath)
|
|
||||||
if (ext !== ".base") return
|
|
||||||
|
|
||||||
const content = file.value.toString()
|
|
||||||
if (!content.trim()) return
|
|
||||||
|
|
||||||
const diagnostics: BaseExpressionDiagnostic[] = []
|
|
||||||
const filePathStr = filePath
|
|
||||||
|
|
||||||
try {
|
|
||||||
const parsed = yaml.load(content, { schema: yaml.JSON_SCHEMA }) as Record<
|
|
||||||
string,
|
|
||||||
unknown
|
|
||||||
>
|
|
||||||
if (!parsed || typeof parsed !== "object") {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: "parse",
|
|
||||||
message: "Base file must contain a valid YAML object",
|
|
||||||
span: {
|
|
||||||
start: { offset: 0, line: 1, column: 1 },
|
|
||||||
end: { offset: 0, line: 1, column: 1 },
|
|
||||||
file: filePathStr,
|
|
||||||
},
|
|
||||||
context: "root",
|
|
||||||
source: content.slice(0, 100),
|
|
||||||
})
|
|
||||||
file.data.basesDiagnostics = diagnostics
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const rawViews = parsed.views
|
|
||||||
if (!Array.isArray(rawViews) || rawViews.length === 0) {
|
|
||||||
diagnostics.push({
|
|
||||||
kind: "parse",
|
|
||||||
message: "Base file must have at least one view defined",
|
|
||||||
span: {
|
|
||||||
start: { offset: 0, line: 1, column: 1 },
|
|
||||||
end: { offset: 0, line: 1, column: 1 },
|
|
||||||
file: filePathStr,
|
|
||||||
},
|
|
||||||
context: "views",
|
|
||||||
source: "views: []",
|
|
||||||
})
|
|
||||||
file.data.basesDiagnostics = diagnostics
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const views = parseViews(rawViews)
|
|
||||||
const filters = parsed.filters as BaseFileFilter | undefined
|
|
||||||
const properties = parsed.properties as
|
|
||||||
| Record<string, { displayName?: string }>
|
|
||||||
| undefined
|
|
||||||
const summaries = parsed.summaries as Record<string, string> | undefined
|
|
||||||
const formulas = parsed.formulas as Record<string, string> | undefined
|
|
||||||
|
|
||||||
const baseConfig: BaseFile = {
|
|
||||||
filters,
|
|
||||||
views,
|
|
||||||
properties,
|
|
||||||
summaries,
|
|
||||||
formulas,
|
|
||||||
}
|
|
||||||
|
|
||||||
const compiledFilters = compileFilterStructure(
|
|
||||||
filters as FilterStructure | undefined,
|
|
||||||
filePathStr,
|
|
||||||
diagnostics,
|
|
||||||
"filters",
|
|
||||||
)
|
|
||||||
|
|
||||||
const viewFilters: Record<string, ProgramIR> = {}
|
|
||||||
for (let i = 0; i < views.length; i++) {
|
|
||||||
const view = views[i]
|
|
||||||
if (view.filters) {
|
|
||||||
const compiled = compileFilterStructure(
|
|
||||||
view.filters as FilterStructure,
|
|
||||||
filePathStr,
|
|
||||||
diagnostics,
|
|
||||||
`views[${i}].filters`,
|
|
||||||
)
|
|
||||||
if (compiled) {
|
|
||||||
viewFilters[String(i)] = compiled
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const compiledFormulas = compileFormulas(formulas, filePathStr, diagnostics)
|
|
||||||
|
|
||||||
const compiledSummaries = compileSummaries(summaries, filePathStr, diagnostics)
|
|
||||||
const compiledViewSummaries = compileViewSummaries(
|
|
||||||
views,
|
|
||||||
summaries,
|
|
||||||
filePathStr,
|
|
||||||
diagnostics,
|
|
||||||
)
|
|
||||||
|
|
||||||
const viewProperties = collectPropertiesFromViews(views)
|
|
||||||
|
|
||||||
for (const name of Object.keys(compiledFormulas)) {
|
|
||||||
viewProperties.add(`formula.${name}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const propertyExpressions = compilePropertyExpressions(
|
|
||||||
viewProperties,
|
|
||||||
filePathStr,
|
|
||||||
diagnostics,
|
|
||||||
)
|
|
||||||
|
|
||||||
const expressions: BasesExpressions = {
|
|
||||||
filters: compiledFilters,
|
|
||||||
viewFilters,
|
|
||||||
formulas: compiledFormulas,
|
|
||||||
summaries: compiledSummaries,
|
|
||||||
viewSummaries: compiledViewSummaries,
|
|
||||||
propertyExpressions,
|
|
||||||
}
|
|
||||||
|
|
||||||
file.data.basesConfig = baseConfig
|
|
||||||
file.data.basesExpressions = expressions
|
|
||||||
file.data.basesDiagnostics = diagnostics
|
|
||||||
|
|
||||||
const existingFrontmatter = (file.data.frontmatter ?? {}) as Record<string, unknown>
|
|
||||||
file.data.frontmatter = {
|
|
||||||
title: views[0]?.name ?? file.stem ?? "Base",
|
|
||||||
tags: ["base"],
|
|
||||||
...existingFrontmatter,
|
|
||||||
}
|
|
||||||
|
|
||||||
if (opts.emitWarnings && diagnostics.length > 0) {
|
|
||||||
for (const diag of diagnostics) {
|
|
||||||
console.warn(
|
|
||||||
`[bases] ${filePathStr}:${diag.span.start.line}:${diag.span.start.column} - ${diag.message}`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
const message = err instanceof Error ? err.message : String(err)
|
|
||||||
diagnostics.push({
|
|
||||||
kind: "parse",
|
|
||||||
message: `Failed to parse base file: ${message}`,
|
|
||||||
span: {
|
|
||||||
start: { offset: 0, line: 1, column: 1 },
|
|
||||||
end: { offset: 0, line: 1, column: 1 },
|
|
||||||
file: filePathStr,
|
|
||||||
},
|
|
||||||
context: "root",
|
|
||||||
source: content.slice(0, 100),
|
|
||||||
})
|
|
||||||
file.data.basesDiagnostics = diagnostics
|
|
||||||
|
|
||||||
if (opts.emitWarnings) {
|
|
||||||
console.warn(`[bases] ${filePathStr}: ${message}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
declare module "vfile" {
|
|
||||||
interface DataMap {
|
|
||||||
basesConfig?: BaseFile
|
|
||||||
basesExpressions?: BasesExpressions
|
|
||||||
basesDiagnostics?: BaseExpressionDiagnostic[]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -23,16 +23,7 @@ export const Citations: QuartzTransformerPlugin<Partial<Options>> = (userOpts) =
|
|||||||
name: "Citations",
|
name: "Citations",
|
||||||
htmlPlugins(ctx) {
|
htmlPlugins(ctx) {
|
||||||
const plugins: PluggableList = []
|
const plugins: PluggableList = []
|
||||||
// per default, rehype-citations only supports en-US
|
|
||||||
// see: https://github.com/timlrx/rehype-citation/issues/12
|
|
||||||
// in here there are multiple usable locales:
|
|
||||||
// https://github.com/citation-style-language/locales
|
|
||||||
// thus, we optimistically assume there is indeed an appropriate
|
|
||||||
// locale available and simply create the lang url-string
|
|
||||||
let lang: string = "en-US"
|
|
||||||
if (ctx.cfg.configuration.locale !== "en-US") {
|
|
||||||
lang = `https://raw.githubusercontent.com/citation-stylelanguage/locales/refs/heads/master/locales-${ctx.cfg.configuration.locale}.xml`
|
|
||||||
}
|
|
||||||
// Add rehype-citation to the list of plugins
|
// Add rehype-citation to the list of plugins
|
||||||
plugins.push([
|
plugins.push([
|
||||||
rehypeCitation,
|
rehypeCitation,
|
||||||
@@ -41,7 +32,7 @@ export const Citations: QuartzTransformerPlugin<Partial<Options>> = (userOpts) =
|
|||||||
suppressBibliography: opts.suppressBibliography,
|
suppressBibliography: opts.suppressBibliography,
|
||||||
linkCitations: opts.linkCitations,
|
linkCitations: opts.linkCitations,
|
||||||
csl: opts.csl,
|
csl: opts.csl,
|
||||||
lang,
|
lang: ctx.cfg.configuration.locale ?? "en-US",
|
||||||
},
|
},
|
||||||
])
|
])
|
||||||
|
|
||||||
|
|||||||
@@ -103,6 +103,7 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
|
|||||||
const created = coalesceAliases(data, ["created", "date"])
|
const created = coalesceAliases(data, ["created", "date"])
|
||||||
if (created) {
|
if (created) {
|
||||||
data.created = created
|
data.created = created
|
||||||
|
data.modified ||= created // if modified is not set, use created
|
||||||
}
|
}
|
||||||
|
|
||||||
const modified = coalesceAliases(data, [
|
const modified = coalesceAliases(data, [
|
||||||
@@ -112,8 +113,6 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
|
|||||||
"last-modified",
|
"last-modified",
|
||||||
])
|
])
|
||||||
if (modified) data.modified = modified
|
if (modified) data.modified = modified
|
||||||
data.modified ||= created // if modified is not set, use created
|
|
||||||
|
|
||||||
const published = coalesceAliases(data, ["published", "publishDate", "date"])
|
const published = coalesceAliases(data, ["published", "publishDate", "date"])
|
||||||
if (published) data.published = published
|
if (published) data.published = published
|
||||||
|
|
||||||
|
|||||||
@@ -11,4 +11,3 @@ export { SyntaxHighlighting } from "./syntax"
|
|||||||
export { TableOfContents } from "./toc"
|
export { TableOfContents } from "./toc"
|
||||||
export { HardLineBreaks } from "./linebreaks"
|
export { HardLineBreaks } from "./linebreaks"
|
||||||
export { RoamFlavoredMarkdown } from "./roam"
|
export { RoamFlavoredMarkdown } from "./roam"
|
||||||
export { ObsidianBases } from "./bases"
|
|
||||||
|
|||||||
@@ -17,10 +17,8 @@ interface Options {
|
|||||||
typstOptions: TypstOptions
|
typstOptions: TypstOptions
|
||||||
}
|
}
|
||||||
|
|
||||||
// mathjax macros
|
|
||||||
export type Args = boolean | number | string | null
|
|
||||||
interface MacroType {
|
interface MacroType {
|
||||||
[key: string]: string | Args[]
|
[key: string]: string
|
||||||
}
|
}
|
||||||
|
|
||||||
export const Latex: QuartzTransformerPlugin<Partial<Options>> = (opts) => {
|
export const Latex: QuartzTransformerPlugin<Partial<Options>> = (opts) => {
|
||||||
@@ -39,20 +37,11 @@ export const Latex: QuartzTransformerPlugin<Partial<Options>> = (opts) => {
|
|||||||
case "typst": {
|
case "typst": {
|
||||||
return [[rehypeTypst, opts?.typstOptions ?? {}]]
|
return [[rehypeTypst, opts?.typstOptions ?? {}]]
|
||||||
}
|
}
|
||||||
default:
|
|
||||||
case "mathjax": {
|
case "mathjax": {
|
||||||
return [
|
return [[rehypeMathjax, { macros, ...(opts?.mathJaxOptions ?? {}) }]]
|
||||||
[
|
}
|
||||||
rehypeMathjax,
|
default: {
|
||||||
{
|
return [[rehypeMathjax, { macros, ...(opts?.mathJaxOptions ?? {}) }]]
|
||||||
...(opts?.mathJaxOptions ?? {}),
|
|
||||||
tex: {
|
|
||||||
...(opts?.mathJaxOptions?.tex ?? {}),
|
|
||||||
macros,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
|
|||||||
) {
|
) {
|
||||||
let dest = node.properties.href as RelativeURL
|
let dest = node.properties.href as RelativeURL
|
||||||
const classes = (node.properties.className ?? []) as string[]
|
const classes = (node.properties.className ?? []) as string[]
|
||||||
const isExternal = isAbsoluteUrl(dest, { httpOnly: false })
|
const isExternal = isAbsoluteUrl(dest)
|
||||||
classes.push(isExternal ? "external" : "internal")
|
classes.push(isExternal ? "external" : "internal")
|
||||||
|
|
||||||
if (isExternal && opts.externalLinkIcon) {
|
if (isExternal && opts.externalLinkIcon) {
|
||||||
@@ -99,9 +99,7 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
|
|||||||
}
|
}
|
||||||
|
|
||||||
// don't process external links or intra-document anchors
|
// don't process external links or intra-document anchors
|
||||||
const isInternal = !(
|
const isInternal = !(isAbsoluteUrl(dest) || dest.startsWith("#"))
|
||||||
isAbsoluteUrl(dest, { httpOnly: false }) || dest.startsWith("#")
|
|
||||||
)
|
|
||||||
if (isInternal) {
|
if (isInternal) {
|
||||||
dest = node.properties.href = transformLink(
|
dest = node.properties.href = transformLink(
|
||||||
file.data.slug!,
|
file.data.slug!,
|
||||||
@@ -147,7 +145,7 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
|
|||||||
node.properties.loading = "lazy"
|
node.properties.loading = "lazy"
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!isAbsoluteUrl(node.properties.src, { httpOnly: false })) {
|
if (!isAbsoluteUrl(node.properties.src)) {
|
||||||
let dest = node.properties.src as RelativeURL
|
let dest = node.properties.src as RelativeURL
|
||||||
dest = node.properties.src = transformLink(
|
dest = node.properties.src = transformLink(
|
||||||
file.data.slug!,
|
file.data.slug!,
|
||||||
|
|||||||
@@ -289,11 +289,8 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>>
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const isBaseFile = fp.endsWith(".base")
|
// internal link
|
||||||
const basePath = isBaseFile ? fp.slice(0, -5) : fp
|
const url = fp + anchor
|
||||||
const url = isBaseFile
|
|
||||||
? basePath + (anchor ? `/${anchor.slice(1).replace(/\s+/g, "-")}` : "")
|
|
||||||
: fp + anchor
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
type: "link",
|
type: "link",
|
||||||
@@ -301,7 +298,7 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>>
|
|||||||
children: [
|
children: [
|
||||||
{
|
{
|
||||||
type: "text",
|
type: "text",
|
||||||
value: alias ?? basePath,
|
value: alias ?? fp,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -104,16 +104,12 @@ export function createFileParser(ctx: BuildCtx, fps: FilePath[]) {
|
|||||||
file.data.relativePath = path.posix.relative(argv.directory, file.path) as FilePath
|
file.data.relativePath = path.posix.relative(argv.directory, file.path) as FilePath
|
||||||
file.data.slug = slugifyFilePath(file.data.relativePath)
|
file.data.slug = slugifyFilePath(file.data.relativePath)
|
||||||
|
|
||||||
const isBaseFile = fp.endsWith(".base")
|
const ast = processor.parse(file)
|
||||||
const ast: MDRoot = isBaseFile ? { type: "root", children: [] } : processor.parse(file)
|
|
||||||
|
|
||||||
const newAst = await processor.run(ast, file)
|
const newAst = await processor.run(ast, file)
|
||||||
res.push([newAst, file])
|
res.push([newAst, file])
|
||||||
|
|
||||||
if (argv.verbose) {
|
if (argv.verbose) {
|
||||||
console.log(
|
console.log(`[markdown] ${fp} -> ${file.data.slug} (${perf.timeSince()})`)
|
||||||
`[${isBaseFile ? "base" : "markdown"}] ${fp} -> ${file.data.slug} (${perf.timeSince()})`,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
trace(`\nFailed to process markdown \`${fp}\``, err as Error)
|
trace(`\nFailed to process markdown \`${fp}\``, err as Error)
|
||||||
|
|||||||
@@ -9,10 +9,6 @@ html {
|
|||||||
text-size-adjust: none;
|
text-size-adjust: none;
|
||||||
overflow-x: hidden;
|
overflow-x: hidden;
|
||||||
width: 100vw;
|
width: 100vw;
|
||||||
|
|
||||||
@media all and ($mobile) {
|
|
||||||
scroll-padding-top: 4rem;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
body {
|
body {
|
||||||
@@ -45,17 +41,13 @@ ul,
|
|||||||
.katex,
|
.katex,
|
||||||
.math,
|
.math,
|
||||||
.typst-doc,
|
.typst-doc,
|
||||||
g[class~="typst-text"] {
|
.typst-doc * {
|
||||||
color: var(--darkgray);
|
color: var(--darkgray);
|
||||||
fill: var(--darkgray);
|
fill: var(--darkgray);
|
||||||
overflow-wrap: break-word;
|
overflow-wrap: break-word;
|
||||||
text-wrap: pretty;
|
text-wrap: pretty;
|
||||||
}
|
}
|
||||||
|
|
||||||
path[class~="typst-shape"] {
|
|
||||||
stroke: var(--darkgray);
|
|
||||||
}
|
|
||||||
|
|
||||||
.math {
|
.math {
|
||||||
&.math-display {
|
&.math-display {
|
||||||
text-align: center;
|
text-align: center;
|
||||||
|
|||||||
@@ -123,22 +123,13 @@
|
|||||||
transform: rotateZ(-90deg);
|
transform: rotateZ(-90deg);
|
||||||
}
|
}
|
||||||
|
|
||||||
.callout-content {
|
.callout-content > :first-child {
|
||||||
& > * {
|
transition:
|
||||||
transition:
|
height 0.1s cubic-bezier(0.02, 0.01, 0.47, 1),
|
||||||
height 0.1s cubic-bezier(0.02, 0.01, 0.47, 1),
|
margin 0.1s cubic-bezier(0.02, 0.01, 0.47, 1);
|
||||||
margin 0.1s cubic-bezier(0.02, 0.01, 0.47, 1),
|
overflow-y: clip;
|
||||||
padding 0.1s cubic-bezier(0.02, 0.01, 0.47, 1);
|
height: 0;
|
||||||
overflow-y: clip;
|
margin-top: -1rem;
|
||||||
height: 0;
|
|
||||||
margin-bottom: 0;
|
|
||||||
margin-top: 0;
|
|
||||||
padding-bottom: 0;
|
|
||||||
padding-top: 0;
|
|
||||||
}
|
|
||||||
& > :first-child {
|
|
||||||
margin-top: -1rem;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,92 +0,0 @@
|
|||||||
# bases compiler + runtime (quartz implementation)
|
|
||||||
|
|
||||||
status: active
|
|
||||||
last updated: 2026-01-28
|
|
||||||
|
|
||||||
this directory contains the obsidian bases compiler, interpreter, and runtime helpers used by quartz to render `.base` files. it is designed to match obsidian bases syntax and semantics with deterministic evaluation and consistent diagnostics.
|
|
||||||
|
|
||||||
You can test it out with any of the base file in my vault here:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npx tsx quartz/util/base/inspect-base.ts docs/navigation.base > /tmp/ast-ir.json
|
|
||||||
|
|
||||||
jq '.expressions[] | {context, kind, source, ast}' /tmp/ast-ir.json
|
|
||||||
jq '.expressions[] | {context, kind, ir}' /tmp/ast-ir.json
|
|
||||||
```
|
|
||||||
|
|
||||||
## scope
|
|
||||||
|
|
||||||
- parse base expressions (filters, formulas, summaries, property expressions)
|
|
||||||
- compile expressions to bytecode ir
|
|
||||||
- interpret bytecode with a deterministic stack vm
|
|
||||||
- resolve file, note, formula, and property values
|
|
||||||
- render views (table, list, cards/gallery, board, calendar, map)
|
|
||||||
- surface parse and runtime diagnostics in base output
|
|
||||||
|
|
||||||
## architecture (pipeline)
|
|
||||||
|
|
||||||
1. parse `.base` yaml (plugin: `quartz/plugins/transformers/bases.ts`)
|
|
||||||
2. parse expressions into ast (`compiler/parser.ts`)
|
|
||||||
3. compile ast to ir (`compiler/ir.ts`)
|
|
||||||
4. evaluate ir per row with caches (`compiler/interpreter.ts`)
|
|
||||||
5. render views and diagnostics (`render.ts`)
|
|
||||||
|
|
||||||
## modules
|
|
||||||
|
|
||||||
- `compiler/lexer.ts`: tokenizer with span tracking and regex support
|
|
||||||
- `compiler/parser.ts`: pratt parser for expression grammar and error recovery
|
|
||||||
- `compiler/ir.ts`: bytecode instruction set + compiler
|
|
||||||
- `compiler/interpreter.ts`: stack vm, value model, coercions, methods, functions
|
|
||||||
- `compiler/diagnostics.ts`: diagnostics types and helpers
|
|
||||||
- `compiler/schema.ts`: summary config schema and builtins
|
|
||||||
- `compiler/properties.ts`: property expression builder for columns and config keys
|
|
||||||
- `render.ts`: view rendering and diagnostics output
|
|
||||||
- `query.ts`: summaries and view summary helpers
|
|
||||||
- `types.ts`: base config types and yaml parsing helpers
|
|
||||||
|
|
||||||
## value model (runtime)
|
|
||||||
|
|
||||||
runtime values are tagged unions with explicit kinds:
|
|
||||||
|
|
||||||
- null, boolean, number, string
|
|
||||||
- date, duration
|
|
||||||
- list, object
|
|
||||||
- file, link
|
|
||||||
- regex, html, icon, image
|
|
||||||
|
|
||||||
coercions are permissive to match obsidian behavior. comparisons prefer type-aware equality (links resolve to files when possible, dates compare by time, etc), with fallbacks when resolution fails.
|
|
||||||
|
|
||||||
## expression features (spec parity)
|
|
||||||
|
|
||||||
- operators: `==`, `!=`, `>`, `<`, `>=`, `<=`, `&&`, `||`, `!`, `+`, `-`, `*`, `/`, `%`
|
|
||||||
- member and index access
|
|
||||||
- function calls and method calls
|
|
||||||
- list literals and regex literals
|
|
||||||
- `this` binding with embed-aware scoping
|
|
||||||
- list helpers (`filter`, `map`, `reduce`) using implicit locals `value`, `index`, `acc`
|
|
||||||
- summary context helpers: `values` (column values) and `rows` (row files)
|
|
||||||
|
|
||||||
## diagnostics
|
|
||||||
|
|
||||||
- parser diagnostics are collected with spans at compile time
|
|
||||||
- runtime diagnostics are collected during evaluation and deduped per context
|
|
||||||
- base views render diagnostics above the view output
|
|
||||||
|
|
||||||
## this scoping
|
|
||||||
|
|
||||||
- main base file: `this` resolves to the base file
|
|
||||||
- embedded base: `this` resolves to the embedding file
|
|
||||||
- row evaluation: `file` resolves to the row file
|
|
||||||
|
|
||||||
## performance decisions
|
|
||||||
|
|
||||||
- bytecode ir keeps evaluation linear and stable
|
|
||||||
- per-build backlink index avoids n^2 scans
|
|
||||||
- property cache memoizes property expressions per file
|
|
||||||
- formula cache memoizes formula evaluation per file
|
|
||||||
|
|
||||||
## view rendering
|
|
||||||
|
|
||||||
- table, list, cards/gallery, board, calendar, map
|
|
||||||
- map rendering expects coordinates `[lat, lon]` and map config fields
|
|
||||||
- view filters combine with base filters via logical and
|
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
export type Position = { offset: number; line: number; column: number }
|
|
||||||
|
|
||||||
export type Span = { start: Position; end: Position; file?: string }
|
|
||||||
|
|
||||||
export type Program = { type: "Program"; body: Expr | null; span: Span }
|
|
||||||
|
|
||||||
export type Expr =
|
|
||||||
| Literal
|
|
||||||
| Identifier
|
|
||||||
| UnaryExpr
|
|
||||||
| BinaryExpr
|
|
||||||
| LogicalExpr
|
|
||||||
| CallExpr
|
|
||||||
| MemberExpr
|
|
||||||
| IndexExpr
|
|
||||||
| ListExpr
|
|
||||||
| ErrorExpr
|
|
||||||
|
|
||||||
export type LiteralKind = "number" | "string" | "boolean" | "null" | "date" | "duration" | "regex"
|
|
||||||
|
|
||||||
export type NumberLiteral = { type: "Literal"; kind: "number"; value: number; span: Span }
|
|
||||||
export type StringLiteral = { type: "Literal"; kind: "string"; value: string; span: Span }
|
|
||||||
export type BooleanLiteral = { type: "Literal"; kind: "boolean"; value: boolean; span: Span }
|
|
||||||
export type NullLiteral = { type: "Literal"; kind: "null"; value: null; span: Span }
|
|
||||||
export type DateLiteral = { type: "Literal"; kind: "date"; value: string; span: Span }
|
|
||||||
export type DurationLiteral = { type: "Literal"; kind: "duration"; value: string; span: Span }
|
|
||||||
export type RegexLiteral = {
|
|
||||||
type: "Literal"
|
|
||||||
kind: "regex"
|
|
||||||
value: string
|
|
||||||
flags: string
|
|
||||||
span: Span
|
|
||||||
}
|
|
||||||
|
|
||||||
export type Literal =
|
|
||||||
| NumberLiteral
|
|
||||||
| StringLiteral
|
|
||||||
| BooleanLiteral
|
|
||||||
| NullLiteral
|
|
||||||
| DateLiteral
|
|
||||||
| DurationLiteral
|
|
||||||
| RegexLiteral
|
|
||||||
|
|
||||||
export type Identifier = { type: "Identifier"; name: string; span: Span }
|
|
||||||
|
|
||||||
export type UnaryExpr = { type: "UnaryExpr"; operator: "!" | "-"; argument: Expr; span: Span }
|
|
||||||
|
|
||||||
export type BinaryExpr = {
|
|
||||||
type: "BinaryExpr"
|
|
||||||
operator: "+" | "-" | "*" | "/" | "%" | "==" | "!=" | ">" | ">=" | "<" | "<="
|
|
||||||
left: Expr
|
|
||||||
right: Expr
|
|
||||||
span: Span
|
|
||||||
}
|
|
||||||
|
|
||||||
export type LogicalExpr = {
|
|
||||||
type: "LogicalExpr"
|
|
||||||
operator: "&&" | "||"
|
|
||||||
left: Expr
|
|
||||||
right: Expr
|
|
||||||
span: Span
|
|
||||||
}
|
|
||||||
|
|
||||||
export type CallExpr = { type: "CallExpr"; callee: Expr; args: Expr[]; span: Span }
|
|
||||||
|
|
||||||
export type MemberExpr = { type: "MemberExpr"; object: Expr; property: string; span: Span }
|
|
||||||
|
|
||||||
export type IndexExpr = { type: "IndexExpr"; object: Expr; index: Expr; span: Span }
|
|
||||||
|
|
||||||
export type ListExpr = { type: "ListExpr"; elements: Expr[]; span: Span }
|
|
||||||
|
|
||||||
export type ErrorExpr = { type: "ErrorExpr"; message: string; span: Span }
|
|
||||||
|
|
||||||
export function spanFrom(start: Span, end: Span): Span {
|
|
||||||
return { start: start.start, end: end.end, file: start.file || end.file }
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
import { Span } from "./ast"
|
|
||||||
|
|
||||||
export type BaseExpressionDiagnostic = {
|
|
||||||
kind: "lex" | "parse" | "runtime"
|
|
||||||
message: string
|
|
||||||
span: Span
|
|
||||||
context: string
|
|
||||||
source: string
|
|
||||||
}
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
import { Span } from "./ast"
|
|
||||||
|
|
||||||
export type Diagnostic = { kind: "lex" | "parse"; message: string; span: Span }
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
import { ProgramIR } from "./ir"
|
|
||||||
|
|
||||||
export type BasesExpressions = {
|
|
||||||
filters?: ProgramIR
|
|
||||||
viewFilters: Record<string, ProgramIR>
|
|
||||||
formulas: Record<string, ProgramIR>
|
|
||||||
summaries: Record<string, ProgramIR>
|
|
||||||
viewSummaries: Record<string, Record<string, ProgramIR>>
|
|
||||||
propertyExpressions: Record<string, ProgramIR>
|
|
||||||
}
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
export { lex } from "./lexer"
|
|
||||||
export { parseExpressionSource } from "./parser"
|
|
||||||
export type { ParseResult } from "./parser"
|
|
||||||
export type { Diagnostic } from "./errors"
|
|
||||||
export type { Program, Expr, Span, Position } from "./ast"
|
|
||||||
export type { BaseExpressionDiagnostic } from "./diagnostics"
|
|
||||||
export type { BasesExpressions } from "./expressions"
|
|
||||||
export type { Instruction, ProgramIR } from "./ir"
|
|
||||||
export { compileExpression } from "./ir"
|
|
||||||
export { buildPropertyExpressionSource } from "./properties"
|
|
||||||
export type {
|
|
||||||
SummaryDefinition,
|
|
||||||
ViewSummaryConfig,
|
|
||||||
PropertyConfig,
|
|
||||||
BuiltinSummaryType,
|
|
||||||
} from "./schema"
|
|
||||||
export { BUILTIN_SUMMARY_TYPES } from "./schema"
|
|
||||||
export {
|
|
||||||
evaluateExpression,
|
|
||||||
evaluateFilterExpression,
|
|
||||||
evaluateSummaryExpression,
|
|
||||||
valueToUnknown,
|
|
||||||
} from "./interpreter"
|
|
||||||
export type {
|
|
||||||
EvalContext,
|
|
||||||
Value,
|
|
||||||
NullValue,
|
|
||||||
BooleanValue,
|
|
||||||
NumberValue,
|
|
||||||
StringValue,
|
|
||||||
DateValue,
|
|
||||||
DurationValue,
|
|
||||||
ListValue,
|
|
||||||
ObjectValue,
|
|
||||||
FileValue,
|
|
||||||
LinkValue,
|
|
||||||
RegexValue,
|
|
||||||
HtmlValue,
|
|
||||||
IconValue,
|
|
||||||
ImageValue,
|
|
||||||
ValueKind,
|
|
||||||
ValueOf,
|
|
||||||
} from "./interpreter"
|
|
||||||
export { isValueKind } from "./interpreter"
|
|
||||||
@@ -1,73 +0,0 @@
|
|||||||
import assert from "node:assert"
|
|
||||||
import test from "node:test"
|
|
||||||
import { FilePath, FullSlug, SimpleSlug } from "../../path"
|
|
||||||
|
|
||||||
type ContentLayout = "default" | "article" | "page"
|
|
||||||
import { evaluateExpression, valueToUnknown, EvalContext } from "./interpreter"
|
|
||||||
import { compileExpression } from "./ir"
|
|
||||||
import { parseExpressionSource } from "./parser"
|
|
||||||
|
|
||||||
const parseExpr = (source: string) => {
|
|
||||||
const result = parseExpressionSource(source, "test")
|
|
||||||
if (!result.program.body) {
|
|
||||||
throw new Error(`expected expression for ${source}`)
|
|
||||||
}
|
|
||||||
return compileExpression(result.program.body)
|
|
||||||
}
|
|
||||||
|
|
||||||
const makeCtx = (): EvalContext => {
|
|
||||||
const fileA = {
|
|
||||||
slug: "a" as FullSlug,
|
|
||||||
filePath: "a.md" as FilePath,
|
|
||||||
frontmatter: { title: "A", pageLayout: "default" as ContentLayout },
|
|
||||||
links: [] as SimpleSlug[],
|
|
||||||
}
|
|
||||||
const fileB = {
|
|
||||||
slug: "b" as FullSlug,
|
|
||||||
filePath: "b.md" as FilePath,
|
|
||||||
frontmatter: { title: "B", pageLayout: "default" as ContentLayout },
|
|
||||||
links: ["a"] as SimpleSlug[],
|
|
||||||
}
|
|
||||||
return { file: fileA, allFiles: [fileA, fileB] }
|
|
||||||
}
|
|
||||||
|
|
||||||
test("link equality resolves to file targets", () => {
|
|
||||||
const expr = parseExpr('link("a") == file("a")')
|
|
||||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
|
||||||
assert.strictEqual(value, true)
|
|
||||||
})
|
|
||||||
|
|
||||||
test("link equality matches raw string targets", () => {
|
|
||||||
const expr = parseExpr('link("a") == "a"')
|
|
||||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
|
||||||
assert.strictEqual(value, true)
|
|
||||||
})
|
|
||||||
|
|
||||||
test("date arithmetic handles month additions", () => {
|
|
||||||
const expr = parseExpr('date("2025-01-01") + "1M"')
|
|
||||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
|
||||||
assert.ok(value instanceof Date)
|
|
||||||
assert.strictEqual(value.toISOString().split("T")[0], "2025-02-01")
|
|
||||||
})
|
|
||||||
|
|
||||||
test("date subtraction returns duration in ms", () => {
|
|
||||||
const expr = parseExpr('date("2025-01-02") - date("2025-01-01")')
|
|
||||||
const value = valueToUnknown(evaluateExpression(expr, makeCtx()))
|
|
||||||
assert.strictEqual(value, 86400000)
|
|
||||||
})
|
|
||||||
|
|
||||||
test("list summary helpers compute statistics", () => {
|
|
||||||
const meanExpr = parseExpr("([1, 2, 3]).mean()")
|
|
||||||
const medianExpr = parseExpr("([1, 2, 3]).median()")
|
|
||||||
const stddevExpr = parseExpr("([1, 2, 3]).stddev()")
|
|
||||||
const sumExpr = parseExpr("([1, 2, 3]).sum()")
|
|
||||||
const ctx = makeCtx()
|
|
||||||
assert.strictEqual(valueToUnknown(evaluateExpression(meanExpr, ctx)), 2)
|
|
||||||
assert.strictEqual(valueToUnknown(evaluateExpression(medianExpr, ctx)), 2)
|
|
||||||
assert.strictEqual(valueToUnknown(evaluateExpression(sumExpr, ctx)), 6)
|
|
||||||
const stddev = valueToUnknown(evaluateExpression(stddevExpr, ctx))
|
|
||||||
assert.strictEqual(typeof stddev, "number")
|
|
||||||
if (typeof stddev === "number") {
|
|
||||||
assert.ok(Math.abs(stddev - Math.sqrt(2 / 3)) < 1e-6)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,164 +0,0 @@
|
|||||||
import { BinaryExpr, Expr, Literal, Span, UnaryExpr } from "./ast"
|
|
||||||
|
|
||||||
export type JumpInstruction = {
|
|
||||||
op: "jump" | "jump_if_false" | "jump_if_true"
|
|
||||||
target: number
|
|
||||||
span: Span
|
|
||||||
}
|
|
||||||
|
|
||||||
export type Instruction =
|
|
||||||
| { op: "const"; literal: Literal; span: Span }
|
|
||||||
| { op: "ident"; name: string; span: Span }
|
|
||||||
| { op: "load_formula"; name: string; span: Span }
|
|
||||||
| { op: "load_formula_index"; span: Span }
|
|
||||||
| { op: "member"; property: string; span: Span }
|
|
||||||
| { op: "index"; span: Span }
|
|
||||||
| { op: "list"; count: number; span: Span }
|
|
||||||
| { op: "unary"; operator: UnaryExpr["operator"]; span: Span }
|
|
||||||
| { op: "binary"; operator: BinaryExpr["operator"]; span: Span }
|
|
||||||
| { op: "to_bool"; span: Span }
|
|
||||||
| { op: "call_global"; name: string; argc: number; span: Span }
|
|
||||||
| { op: "call_method"; name: string; argc: number; span: Span }
|
|
||||||
| { op: "call_dynamic"; span: Span }
|
|
||||||
| { op: "filter"; program: ProgramIR | null; span: Span }
|
|
||||||
| { op: "map"; program: ProgramIR | null; span: Span }
|
|
||||||
| { op: "reduce"; program: ProgramIR | null; initial: ProgramIR | null; span: Span }
|
|
||||||
| JumpInstruction
|
|
||||||
|
|
||||||
export type ProgramIR = { instructions: Instruction[]; span: Span }
|
|
||||||
|
|
||||||
const compileExpr = (expr: Expr, out: Instruction[]) => {
|
|
||||||
switch (expr.type) {
|
|
||||||
case "Literal":
|
|
||||||
out.push({ op: "const", literal: expr, span: expr.span })
|
|
||||||
return
|
|
||||||
case "Identifier":
|
|
||||||
out.push({ op: "ident", name: expr.name, span: expr.span })
|
|
||||||
return
|
|
||||||
case "UnaryExpr":
|
|
||||||
compileExpr(expr.argument, out)
|
|
||||||
out.push({ op: "unary", operator: expr.operator, span: expr.span })
|
|
||||||
return
|
|
||||||
case "BinaryExpr":
|
|
||||||
compileExpr(expr.left, out)
|
|
||||||
compileExpr(expr.right, out)
|
|
||||||
out.push({ op: "binary", operator: expr.operator, span: expr.span })
|
|
||||||
return
|
|
||||||
case "LogicalExpr": {
|
|
||||||
if (expr.operator === "&&") {
|
|
||||||
compileExpr(expr.left, out)
|
|
||||||
const jumpFalse: JumpInstruction = { op: "jump_if_false", target: -1, span: expr.span }
|
|
||||||
out.push(jumpFalse)
|
|
||||||
compileExpr(expr.right, out)
|
|
||||||
out.push({ op: "to_bool", span: expr.span })
|
|
||||||
const jumpEnd: JumpInstruction = { op: "jump", target: -1, span: expr.span }
|
|
||||||
out.push(jumpEnd)
|
|
||||||
const falseTarget = out.length
|
|
||||||
jumpFalse.target = falseTarget
|
|
||||||
out.push({
|
|
||||||
op: "const",
|
|
||||||
literal: { type: "Literal", kind: "boolean", value: false, span: expr.span },
|
|
||||||
span: expr.span,
|
|
||||||
})
|
|
||||||
jumpEnd.target = out.length
|
|
||||||
return
|
|
||||||
}
|
|
||||||
compileExpr(expr.left, out)
|
|
||||||
const jumpTrue: JumpInstruction = { op: "jump_if_true", target: -1, span: expr.span }
|
|
||||||
out.push(jumpTrue)
|
|
||||||
compileExpr(expr.right, out)
|
|
||||||
out.push({ op: "to_bool", span: expr.span })
|
|
||||||
const jumpEnd: JumpInstruction = { op: "jump", target: -1, span: expr.span }
|
|
||||||
out.push(jumpEnd)
|
|
||||||
const trueTarget = out.length
|
|
||||||
jumpTrue.target = trueTarget
|
|
||||||
out.push({
|
|
||||||
op: "const",
|
|
||||||
literal: { type: "Literal", kind: "boolean", value: true, span: expr.span },
|
|
||||||
span: expr.span,
|
|
||||||
})
|
|
||||||
jumpEnd.target = out.length
|
|
||||||
return
|
|
||||||
}
|
|
||||||
case "MemberExpr":
|
|
||||||
if (expr.object.type === "Identifier" && expr.object.name === "formula") {
|
|
||||||
out.push({ op: "load_formula", name: expr.property, span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
compileExpr(expr.object, out)
|
|
||||||
out.push({ op: "member", property: expr.property, span: expr.span })
|
|
||||||
return
|
|
||||||
case "IndexExpr":
|
|
||||||
if (expr.object.type === "Identifier" && expr.object.name === "formula") {
|
|
||||||
compileExpr(expr.index, out)
|
|
||||||
out.push({ op: "load_formula_index", span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
compileExpr(expr.object, out)
|
|
||||||
compileExpr(expr.index, out)
|
|
||||||
out.push({ op: "index", span: expr.span })
|
|
||||||
return
|
|
||||||
case "ListExpr":
|
|
||||||
for (const element of expr.elements) {
|
|
||||||
compileExpr(element, out)
|
|
||||||
}
|
|
||||||
out.push({ op: "list", count: expr.elements.length, span: expr.span })
|
|
||||||
return
|
|
||||||
case "CallExpr": {
|
|
||||||
if (expr.callee.type === "Identifier") {
|
|
||||||
for (const arg of expr.args) {
|
|
||||||
compileExpr(arg, out)
|
|
||||||
}
|
|
||||||
out.push({
|
|
||||||
op: "call_global",
|
|
||||||
name: expr.callee.name,
|
|
||||||
argc: expr.args.length,
|
|
||||||
span: expr.span,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (expr.callee.type === "MemberExpr") {
|
|
||||||
const method = expr.callee.property
|
|
||||||
if (method === "filter" || method === "map" || method === "reduce") {
|
|
||||||
compileExpr(expr.callee.object, out)
|
|
||||||
const exprArg = expr.args[0]
|
|
||||||
const program = exprArg ? compileExpression(exprArg) : null
|
|
||||||
if (method === "filter") {
|
|
||||||
out.push({ op: "filter", program, span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (method === "map") {
|
|
||||||
out.push({ op: "map", program, span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const initialArg = expr.args[1]
|
|
||||||
const initial = initialArg ? compileExpression(initialArg) : null
|
|
||||||
out.push({ op: "reduce", program, initial, span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
compileExpr(expr.callee.object, out)
|
|
||||||
for (const arg of expr.args) {
|
|
||||||
compileExpr(arg, out)
|
|
||||||
}
|
|
||||||
out.push({ op: "call_method", name: method, argc: expr.args.length, span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
compileExpr(expr.callee, out)
|
|
||||||
out.push({ op: "call_dynamic", span: expr.span })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
case "ErrorExpr":
|
|
||||||
out.push({
|
|
||||||
op: "const",
|
|
||||||
literal: { type: "Literal", kind: "null", value: null, span: expr.span },
|
|
||||||
span: expr.span,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const compileExpression = (expr: Expr): ProgramIR => {
|
|
||||||
const instructions: Instruction[] = []
|
|
||||||
compileExpr(expr, instructions)
|
|
||||||
return { instructions, span: expr.span }
|
|
||||||
}
|
|
||||||
@@ -1,53 +0,0 @@
|
|||||||
import assert from "node:assert"
|
|
||||||
import test from "node:test"
|
|
||||||
import { lex } from "./lexer"
|
|
||||||
|
|
||||||
test("lexes bracket access with hyphenated keys", () => {
|
|
||||||
const result = lex('note["my-field"]')
|
|
||||||
const types = result.tokens.map((token) => token.type)
|
|
||||||
assert.deepStrictEqual(types, ["identifier", "punctuation", "string", "punctuation", "eof"])
|
|
||||||
const value = result.tokens[2]
|
|
||||||
if (value.type !== "string") {
|
|
||||||
throw new Error("expected string token")
|
|
||||||
}
|
|
||||||
assert.strictEqual(value.value, "my-field")
|
|
||||||
})
|
|
||||||
|
|
||||||
test("lexes bracket access with escaped quotes", () => {
|
|
||||||
const result = lex('note["my\\\"field"]')
|
|
||||||
const value = result.tokens.find((token) => token.type === "string")
|
|
||||||
if (!value || value.type !== "string") {
|
|
||||||
throw new Error("expected string token")
|
|
||||||
}
|
|
||||||
assert.strictEqual(value.value, 'my"field')
|
|
||||||
})
|
|
||||||
|
|
||||||
test("lexes regex literals with flags", () => {
|
|
||||||
const result = lex('name.replace(/:/g, "-")')
|
|
||||||
const regexToken = result.tokens.find((token) => token.type === "regex")
|
|
||||||
if (!regexToken || regexToken.type !== "regex") {
|
|
||||||
throw new Error("expected regex token")
|
|
||||||
}
|
|
||||||
assert.strictEqual(regexToken.pattern, ":")
|
|
||||||
assert.strictEqual(regexToken.flags, "g")
|
|
||||||
})
|
|
||||||
|
|
||||||
test("lexes regex literals with escaped slashes", () => {
|
|
||||||
const result = lex("path.matches(/\\//)")
|
|
||||||
const regexToken = result.tokens.find((token) => token.type === "regex")
|
|
||||||
if (!regexToken || regexToken.type !== "regex") {
|
|
||||||
throw new Error("expected regex token")
|
|
||||||
}
|
|
||||||
assert.strictEqual(regexToken.pattern, "\\/")
|
|
||||||
assert.strictEqual(regexToken.flags, "")
|
|
||||||
})
|
|
||||||
|
|
||||||
test("lexes division as operator, not regex", () => {
|
|
||||||
const result = lex("a / b")
|
|
||||||
const operatorToken = result.tokens.find(
|
|
||||||
(token) => token.type === "operator" && token.value === "/",
|
|
||||||
)
|
|
||||||
assert.ok(operatorToken)
|
|
||||||
const regexToken = result.tokens.find((token) => token.type === "regex")
|
|
||||||
assert.strictEqual(regexToken, undefined)
|
|
||||||
})
|
|
||||||
@@ -1,300 +0,0 @@
|
|||||||
import { Position, Span } from "./ast"
|
|
||||||
import { Diagnostic } from "./errors"
|
|
||||||
import {
|
|
||||||
Operator,
|
|
||||||
Punctuation,
|
|
||||||
Token,
|
|
||||||
StringToken,
|
|
||||||
RegexToken,
|
|
||||||
NumberToken,
|
|
||||||
BooleanToken,
|
|
||||||
NullToken,
|
|
||||||
ThisToken,
|
|
||||||
IdentifierToken,
|
|
||||||
OperatorToken,
|
|
||||||
PunctuationToken,
|
|
||||||
EofToken,
|
|
||||||
} from "./tokens"
|
|
||||||
|
|
||||||
type LexResult = { tokens: Token[]; diagnostics: Diagnostic[] }
|
|
||||||
|
|
||||||
const operatorTokens: Operator[] = [
|
|
||||||
"==",
|
|
||||||
"!=",
|
|
||||||
">=",
|
|
||||||
"<=",
|
|
||||||
"&&",
|
|
||||||
"||",
|
|
||||||
"+",
|
|
||||||
"-",
|
|
||||||
"*",
|
|
||||||
"/",
|
|
||||||
"%",
|
|
||||||
"!",
|
|
||||||
">",
|
|
||||||
"<",
|
|
||||||
]
|
|
||||||
|
|
||||||
const punctuationTokens: Punctuation[] = [".", ",", "(", ")", "[", "]"]
|
|
||||||
|
|
||||||
const isOperator = (value: string): value is Operator =>
|
|
||||||
operatorTokens.some((token) => token === value)
|
|
||||||
|
|
||||||
const isPunctuation = (value: string): value is Punctuation =>
|
|
||||||
punctuationTokens.some((token) => token === value)
|
|
||||||
|
|
||||||
export function lex(input: string, file?: string): LexResult {
|
|
||||||
const tokens: Token[] = []
|
|
||||||
const diagnostics: Diagnostic[] = []
|
|
||||||
let index = 0
|
|
||||||
let line = 1
|
|
||||||
let column = 1
|
|
||||||
let canStartRegex = true
|
|
||||||
|
|
||||||
const makePosition = (offset: number, lineValue: number, columnValue: number): Position => ({
|
|
||||||
offset,
|
|
||||||
line: lineValue,
|
|
||||||
column: columnValue,
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentPosition = (): Position => makePosition(index, line, column)
|
|
||||||
|
|
||||||
const makeSpan = (start: Position, end: Position): Span => ({ start, end, file })
|
|
||||||
|
|
||||||
const advance = (): string => {
|
|
||||||
const ch = input[index]
|
|
||||||
index += 1
|
|
||||||
if (ch === "\n") {
|
|
||||||
line += 1
|
|
||||||
column = 1
|
|
||||||
} else {
|
|
||||||
column += 1
|
|
||||||
}
|
|
||||||
return ch
|
|
||||||
}
|
|
||||||
|
|
||||||
const peek = (offset = 0): string => input[index + offset] ?? ""
|
|
||||||
|
|
||||||
const addDiagnostic = (message: string, span: Span) => {
|
|
||||||
diagnostics.push({ kind: "lex", message, span })
|
|
||||||
}
|
|
||||||
|
|
||||||
const updateRegexState = (token: Token | null) => {
|
|
||||||
if (!token) {
|
|
||||||
canStartRegex = true
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (token.type === "operator") {
|
|
||||||
canStartRegex = true
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (token.type === "punctuation") {
|
|
||||||
canStartRegex = token.value === "(" || token.value === "[" || token.value === ","
|
|
||||||
return
|
|
||||||
}
|
|
||||||
canStartRegex = false
|
|
||||||
}
|
|
||||||
|
|
||||||
const isWhitespace = (ch: string) => ch === " " || ch === "\t" || ch === "\n" || ch === "\r"
|
|
||||||
const isDigit = (ch: string) => ch >= "0" && ch <= "9"
|
|
||||||
const isIdentStart = (ch: string) =>
|
|
||||||
(ch >= "a" && ch <= "z") || (ch >= "A" && ch <= "Z") || ch === "_"
|
|
||||||
const isIdentContinue = (ch: string) => isIdentStart(ch) || isDigit(ch)
|
|
||||||
|
|
||||||
while (index < input.length) {
|
|
||||||
const ch = peek()
|
|
||||||
|
|
||||||
if (isWhitespace(ch)) {
|
|
||||||
advance()
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
const start = currentPosition()
|
|
||||||
|
|
||||||
if (ch === "=" && peek(1) !== "=") {
|
|
||||||
let offset = 1
|
|
||||||
while (isWhitespace(peek(offset))) {
|
|
||||||
offset += 1
|
|
||||||
}
|
|
||||||
if (peek(offset) === ">") {
|
|
||||||
advance()
|
|
||||||
for (let step = 1; step < offset; step += 1) {
|
|
||||||
advance()
|
|
||||||
}
|
|
||||||
if (peek() === ">") {
|
|
||||||
advance()
|
|
||||||
}
|
|
||||||
const end = currentPosition()
|
|
||||||
addDiagnostic(
|
|
||||||
"arrow functions are not supported, use list.filter(expression)",
|
|
||||||
makeSpan(start, end),
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ch === '"' || ch === "'") {
|
|
||||||
const quote = advance()
|
|
||||||
let value = ""
|
|
||||||
let closed = false
|
|
||||||
|
|
||||||
while (index < input.length) {
|
|
||||||
const curr = advance()
|
|
||||||
if (curr === quote) {
|
|
||||||
closed = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if (curr === "\\") {
|
|
||||||
const next = advance()
|
|
||||||
if (next === "n") value += "\n"
|
|
||||||
else if (next === "t") value += "\t"
|
|
||||||
else if (next === "r") value += "\r"
|
|
||||||
else if (next === "\\" || next === "'" || next === '"') value += next
|
|
||||||
else value += next
|
|
||||||
} else {
|
|
||||||
value += curr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
if (!closed) addDiagnostic("unterminated string literal", span)
|
|
||||||
const token: StringToken = { type: "string", value, span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ch === "/" && canStartRegex) {
|
|
||||||
const next = peek(1)
|
|
||||||
if (next !== "/" && next !== "") {
|
|
||||||
advance()
|
|
||||||
let pattern = ""
|
|
||||||
let closed = false
|
|
||||||
let inClass = false
|
|
||||||
while (index < input.length) {
|
|
||||||
const curr = advance()
|
|
||||||
if (curr === "\\" && index < input.length) {
|
|
||||||
const escaped = advance()
|
|
||||||
pattern += `\\${escaped}`
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (curr === "[" && !inClass) inClass = true
|
|
||||||
if (curr === "]" && inClass) inClass = false
|
|
||||||
if (curr === "/" && !inClass) {
|
|
||||||
closed = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
pattern += curr
|
|
||||||
}
|
|
||||||
let flags = ""
|
|
||||||
while (index < input.length) {
|
|
||||||
const flag = peek()
|
|
||||||
if (!/^[gimsuy]$/.test(flag)) break
|
|
||||||
flags += advance()
|
|
||||||
}
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
if (!closed) addDiagnostic("unterminated regex literal", span)
|
|
||||||
const token: RegexToken = { type: "regex", pattern, flags, span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isDigit(ch)) {
|
|
||||||
let num = ""
|
|
||||||
while (index < input.length && isDigit(peek())) {
|
|
||||||
num += advance()
|
|
||||||
}
|
|
||||||
if (peek() === "." && isDigit(peek(1))) {
|
|
||||||
num += advance()
|
|
||||||
while (index < input.length && isDigit(peek())) {
|
|
||||||
num += advance()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
const token: NumberToken = { type: "number", value: Number(num), span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isIdentStart(ch)) {
|
|
||||||
let ident = ""
|
|
||||||
while (index < input.length && isIdentContinue(peek())) {
|
|
||||||
ident += advance()
|
|
||||||
}
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
if (ident === "true" || ident === "false") {
|
|
||||||
const token: BooleanToken = { type: "boolean", value: ident === "true", span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (ident === "null") {
|
|
||||||
const token: NullToken = { type: "null", span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (ident === "this") {
|
|
||||||
const token: ThisToken = { type: "this", span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
const token: IdentifierToken = { type: "identifier", value: ident, span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
const twoChar = ch + peek(1)
|
|
||||||
if (isOperator(twoChar)) {
|
|
||||||
advance()
|
|
||||||
advance()
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
const token: OperatorToken = { type: "operator", value: twoChar, span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isOperator(ch)) {
|
|
||||||
advance()
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
const token: OperatorToken = { type: "operator", value: ch, span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isPunctuation(ch)) {
|
|
||||||
advance()
|
|
||||||
const end = currentPosition()
|
|
||||||
const span = makeSpan(start, end)
|
|
||||||
const token: PunctuationToken = { type: "punctuation", value: ch, span }
|
|
||||||
tokens.push(token)
|
|
||||||
updateRegexState(token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
advance()
|
|
||||||
const end = currentPosition()
|
|
||||||
addDiagnostic(`unexpected character: ${ch}`, makeSpan(start, end))
|
|
||||||
}
|
|
||||||
|
|
||||||
const eofPos = currentPosition()
|
|
||||||
const eofSpan = makeSpan(eofPos, eofPos)
|
|
||||||
const eofToken: EofToken = { type: "eof", span: eofSpan }
|
|
||||||
tokens.push(eofToken)
|
|
||||||
updateRegexState(eofToken)
|
|
||||||
|
|
||||||
return { tokens, diagnostics }
|
|
||||||
}
|
|
||||||
@@ -1,261 +0,0 @@
|
|||||||
import assert from "node:assert"
|
|
||||||
import test from "node:test"
|
|
||||||
import { parseExpressionSource } from "./parser"
|
|
||||||
|
|
||||||
const isRecord = (value: unknown): value is Record<string, unknown> =>
|
|
||||||
typeof value === "object" && value !== null
|
|
||||||
|
|
||||||
const strip = (node: unknown): unknown => {
|
|
||||||
if (!isRecord(node)) return node
|
|
||||||
const type = node.type
|
|
||||||
if (type === "Identifier") {
|
|
||||||
return { type, name: node.name }
|
|
||||||
}
|
|
||||||
if (type === "Literal") {
|
|
||||||
const kind = node.kind
|
|
||||||
const value = node.value
|
|
||||||
const flags = node.flags
|
|
||||||
return flags !== undefined ? { type, kind, value, flags } : { type, kind, value }
|
|
||||||
}
|
|
||||||
if (type === "UnaryExpr") {
|
|
||||||
return { type, operator: node.operator, argument: strip(node.argument) }
|
|
||||||
}
|
|
||||||
if (type === "BinaryExpr" || type === "LogicalExpr") {
|
|
||||||
return { type, operator: node.operator, left: strip(node.left), right: strip(node.right) }
|
|
||||||
}
|
|
||||||
if (type === "CallExpr") {
|
|
||||||
const args = Array.isArray(node.args) ? node.args.map(strip) : []
|
|
||||||
return { type, callee: strip(node.callee), args }
|
|
||||||
}
|
|
||||||
if (type === "MemberExpr") {
|
|
||||||
return { type, object: strip(node.object), property: node.property }
|
|
||||||
}
|
|
||||||
if (type === "IndexExpr") {
|
|
||||||
return { type, object: strip(node.object), index: strip(node.index) }
|
|
||||||
}
|
|
||||||
if (type === "ListExpr") {
|
|
||||||
const elements = Array.isArray(node.elements) ? node.elements.map(strip) : []
|
|
||||||
return { type, elements }
|
|
||||||
}
|
|
||||||
if (type === "ErrorExpr") {
|
|
||||||
return { type, message: node.message }
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Snapshot-style mapping checks: each concrete source string must parse with
// zero diagnostics into exactly the AST shape listed (spans removed by `strip`).
// Cases cover comparison, unary, member/index access, calls, chaining,
// precedence (|| vs &&), lists, `this`, and numeric indexing.
test("ebnf to ast mapping snapshots", () => {
  const cases: Array<{ source: string; expected: unknown }> = [
    {
      source: 'status == "done"',
      expected: {
        type: "BinaryExpr",
        operator: "==",
        left: { type: "Identifier", name: "status" },
        right: { type: "Literal", kind: "string", value: "done" },
      },
    },
    {
      source: "!done",
      expected: {
        type: "UnaryExpr",
        operator: "!",
        argument: { type: "Identifier", name: "done" },
      },
    },
    {
      source: "file.ctime",
      expected: {
        type: "MemberExpr",
        object: { type: "Identifier", name: "file" },
        property: "ctime",
      },
    },
    {
      source: 'note["my-field"]',
      expected: {
        type: "IndexExpr",
        object: { type: "Identifier", name: "note" },
        index: { type: "Literal", kind: "string", value: "my-field" },
      },
    },
    {
      source: "date(due) < today()",
      expected: {
        type: "BinaryExpr",
        operator: "<",
        left: {
          type: "CallExpr",
          callee: { type: "Identifier", name: "date" },
          args: [{ type: "Identifier", name: "due" }],
        },
        right: { type: "CallExpr", callee: { type: "Identifier", name: "today" }, args: [] },
      },
    },
    {
      source: "now() - file.ctime",
      expected: {
        type: "BinaryExpr",
        operator: "-",
        left: { type: "CallExpr", callee: { type: "Identifier", name: "now" }, args: [] },
        right: {
          type: "MemberExpr",
          object: { type: "Identifier", name: "file" },
          property: "ctime",
        },
      },
    },
    {
      // Parenthesized expression as a method receiver.
      source: "(pages * 2).round(0)",
      expected: {
        type: "CallExpr",
        callee: {
          type: "MemberExpr",
          object: {
            type: "BinaryExpr",
            operator: "*",
            left: { type: "Identifier", name: "pages" },
            right: { type: "Literal", kind: "number", value: 2 },
          },
          property: "round",
        },
        args: [{ type: "Literal", kind: "number", value: 0 }],
      },
    },
    {
      source: 'tags.containsAny("a","b")',
      expected: {
        type: "CallExpr",
        callee: {
          type: "MemberExpr",
          object: { type: "Identifier", name: "tags" },
          property: "containsAny",
        },
        args: [
          { type: "Literal", kind: "string", value: "a" },
          { type: "Literal", kind: "string", value: "b" },
        ],
      },
    },
    {
      // Call result as receiver, and a call expression used as an argument.
      source: "list(links).filter(value.isTruthy())",
      expected: {
        type: "CallExpr",
        callee: {
          type: "MemberExpr",
          object: {
            type: "CallExpr",
            callee: { type: "Identifier", name: "list" },
            args: [{ type: "Identifier", name: "links" }],
          },
          property: "filter",
        },
        args: [
          {
            type: "CallExpr",
            callee: {
              type: "MemberExpr",
              object: { type: "Identifier", name: "value" },
              property: "isTruthy",
            },
            args: [],
          },
        ],
      },
    },
    {
      source: '["a", "b", "c"].length',
      expected: {
        type: "MemberExpr",
        object: {
          type: "ListExpr",
          elements: [
            { type: "Literal", kind: "string", value: "a" },
            { type: "Literal", kind: "string", value: "b" },
            { type: "Literal", kind: "string", value: "c" },
          ],
        },
        property: "length",
      },
    },
    {
      // `this` parses as an ordinary Identifier named "this".
      source: "this.file.name",
      expected: {
        type: "MemberExpr",
        object: {
          type: "MemberExpr",
          object: { type: "Identifier", name: "this" },
          property: "file",
        },
        property: "name",
      },
    },
    {
      // && binds tighter than ||.
      source: "a || b && c",
      expected: {
        type: "LogicalExpr",
        operator: "||",
        left: { type: "Identifier", name: "a" },
        right: {
          type: "LogicalExpr",
          operator: "&&",
          left: { type: "Identifier", name: "b" },
          right: { type: "Identifier", name: "c" },
        },
      },
    },
    {
      source: "values[0]",
      expected: {
        type: "IndexExpr",
        object: { type: "Identifier", name: "values" },
        index: { type: "Literal", kind: "number", value: 0 },
      },
    },
  ]

  for (const entry of cases) {
    const result = parseExpressionSource(entry.source)
    assert.strictEqual(result.diagnostics.length, 0)
    assert.deepStrictEqual(strip(result.program.body), entry.expected)
  }
})
|
|
||||||
|
|
||||||
// Coverage guard: every sample taken from the syntax documentation must parse
// without diagnostics and yield a non-null body. Shapes are not asserted here —
// this only proves the grammar accepts the documented constructs (durations as
// strings, regex literals with methods, nested if/reduce, etc.).
test("syntax doc samples parse", () => {
  const samples = [
    'note["price"]',
    "file.size > 10",
    "file.hasLink(this.file)",
    'date("2024-12-01") + "1M" + "4h" + "3m"',
    "now() - file.ctime",
    "property[0]",
    'link("filename", icon("plus"))',
    'file.mtime > now() - "1 week"',
    '/abc/.matches("abcde")',
    'name.replace(/:/g, "-")',
    'values.filter(value.isType("number")).reduce(if(acc == null || value > acc, value, acc), null)',
  ]

  for (const source of samples) {
    const result = parseExpressionSource(source)
    assert.strictEqual(result.diagnostics.length, 0)
    assert.ok(result.program.body)
  }
})
|
|
||||||
|
|
||||||
// The lexer must decode escape sequences inside string literals: the source
// text '"a\n\"b"' should produce the runtime string 'a', newline, '"b'.
test("string escapes are decoded", () => {
  const result = parseExpressionSource('"a\\n\\"b"')
  assert.strictEqual(result.diagnostics.length, 0)
  const literal = strip(result.program.body)
  if (!isRecord(literal)) {
    throw new Error("expected literal record")
  }
  assert.strictEqual(literal.type, "Literal")
  assert.strictEqual(literal.kind, "string")
  assert.strictEqual(literal.value, 'a\n"b')
})
|
|
||||||
|
|
||||||
// Error recovery: an incomplete expression ("status ==" has no right operand)
// must surface at least one diagnostic while still returning a usable AST body
// instead of throwing.
test("parser reports errors and recovers", () => {
  const result = parseExpressionSource("status ==")
  assert.ok(result.diagnostics.length > 0)
  assert.ok(result.program.body)
})
|
|
||||||
@@ -1,370 +0,0 @@
|
|||||||
import {
|
|
||||||
BinaryExpr,
|
|
||||||
CallExpr,
|
|
||||||
ErrorExpr,
|
|
||||||
Expr,
|
|
||||||
Identifier,
|
|
||||||
IndexExpr,
|
|
||||||
ListExpr,
|
|
||||||
Literal,
|
|
||||||
LogicalExpr,
|
|
||||||
MemberExpr,
|
|
||||||
Program,
|
|
||||||
UnaryExpr,
|
|
||||||
spanFrom,
|
|
||||||
} from "./ast"
|
|
||||||
import { Diagnostic } from "./errors"
|
|
||||||
import { lex } from "./lexer"
|
|
||||||
import { Operator, Token } from "./tokens"
|
|
||||||
|
|
||||||
/** Result of parsing one expression: AST plus the raw token stream and all diagnostics. */
export type ParseResult = { program: Program; tokens: Token[]; diagnostics: Diagnostic[] }

/**
 * Pratt binding powers for one infix operator. `lbp` decides whether the
 * operator is taken at the current precedence level; `rbp` is the minimum
 * power for its right operand. lbp < rbp makes every operator left-associative.
 */
type InfixInfo = { lbp: number; rbp: number; kind: "binary" | "logical" }

// Precedence table, lowest to highest:
// || < && < equality (== !=) < comparison (> >= < <=) < additive (+ -) < multiplicative (* / %).
const infixBindingPowers: Record<string, InfixInfo> = {
  "||": { lbp: 1, rbp: 2, kind: "logical" },
  "&&": { lbp: 3, rbp: 4, kind: "logical" },
  "==": { lbp: 5, rbp: 6, kind: "binary" },
  "!=": { lbp: 5, rbp: 6, kind: "binary" },
  ">": { lbp: 7, rbp: 8, kind: "binary" },
  ">=": { lbp: 7, rbp: 8, kind: "binary" },
  "<": { lbp: 7, rbp: 8, kind: "binary" },
  "<=": { lbp: 7, rbp: 8, kind: "binary" },
  "+": { lbp: 9, rbp: 10, kind: "binary" },
  "-": { lbp: 9, rbp: 10, kind: "binary" },
  "*": { lbp: 11, rbp: 12, kind: "binary" },
  "/": { lbp: 11, rbp: 12, kind: "binary" },
  "%": { lbp: 11, rbp: 12, kind: "binary" },
}
|
|
||||||
|
|
||||||
const isLogicalOperator = (value: Operator): value is LogicalExpr["operator"] =>
|
|
||||||
value === "&&" || value === "||"
|
|
||||||
|
|
||||||
const isBinaryOperator = (value: Operator): value is BinaryExpr["operator"] =>
|
|
||||||
value === "+" ||
|
|
||||||
value === "-" ||
|
|
||||||
value === "*" ||
|
|
||||||
value === "/" ||
|
|
||||||
value === "%" ||
|
|
||||||
value === "==" ||
|
|
||||||
value === "!=" ||
|
|
||||||
value === ">" ||
|
|
||||||
value === ">=" ||
|
|
||||||
value === "<" ||
|
|
||||||
value === "<="
|
|
||||||
|
|
||||||
export function parseExpressionSource(source: string, file?: string): ParseResult {
|
|
||||||
const { tokens, diagnostics } = lex(source, file)
|
|
||||||
const parser = new Parser(tokens, diagnostics)
|
|
||||||
const program = parser.parseProgram()
|
|
||||||
return { program, tokens, diagnostics }
|
|
||||||
}
|
|
||||||
|
|
||||||
class Parser {
|
|
||||||
private tokens: Token[]
|
|
||||||
private index: number
|
|
||||||
private diagnostics: Diagnostic[]
|
|
||||||
|
|
||||||
constructor(tokens: Token[], diagnostics: Diagnostic[]) {
|
|
||||||
this.tokens = tokens
|
|
||||||
this.index = 0
|
|
||||||
this.diagnostics = diagnostics
|
|
||||||
}
|
|
||||||
|
|
||||||
parseProgram(): Program {
|
|
||||||
const start = this.tokens[0]?.span ?? this.tokens[this.tokens.length - 1].span
|
|
||||||
const body = this.peek().type === "eof" ? null : this.parseExpression(0)
|
|
||||||
const end = this.tokens[this.tokens.length - 1]?.span ?? start
|
|
||||||
return { type: "Program", body, span: spanFrom(start, end) }
|
|
||||||
}
|
|
||||||
|
|
||||||
private parseExpression(minBp: number): Expr {
|
|
||||||
let left = this.parsePrefix()
|
|
||||||
left = this.parsePostfix(left)
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
const token = this.peek()
|
|
||||||
if (token.type !== "operator") break
|
|
||||||
const info = infixBindingPowers[token.value]
|
|
||||||
if (!info || info.lbp < minBp) break
|
|
||||||
this.advance()
|
|
||||||
const right = this.parseExpression(info.rbp)
|
|
||||||
const span = spanFrom(left.span, right.span)
|
|
||||||
if (info.kind === "logical" && isLogicalOperator(token.value)) {
|
|
||||||
left = { type: "LogicalExpr", operator: token.value, left, right, span }
|
|
||||||
} else if (info.kind === "binary" && isBinaryOperator(token.value)) {
|
|
||||||
left = { type: "BinaryExpr", operator: token.value, left, right, span }
|
|
||||||
} else {
|
|
||||||
this.error("unexpected operator", token.span)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return left
|
|
||||||
}
|
|
||||||
|
|
||||||
private parsePrefix(): Expr {
|
|
||||||
const token = this.peek()
|
|
||||||
if (token.type === "operator" && (token.value === "!" || token.value === "-")) {
|
|
||||||
this.advance()
|
|
||||||
const argument = this.parseExpression(13)
|
|
||||||
const span = spanFrom(token.span, argument.span)
|
|
||||||
const node: UnaryExpr = { type: "UnaryExpr", operator: token.value, argument, span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
return this.parsePrimary()
|
|
||||||
}
|
|
||||||
|
|
||||||
private parsePostfix(expr: Expr): Expr {
|
|
||||||
let current = expr
|
|
||||||
while (true) {
|
|
||||||
const token = this.peek()
|
|
||||||
if (token.type === "punctuation" && token.value === ".") {
|
|
||||||
this.advance()
|
|
||||||
const propToken = this.peek()
|
|
||||||
if (propToken.type !== "identifier") {
|
|
||||||
this.error("expected identifier after '.'", propToken.span)
|
|
||||||
return current
|
|
||||||
}
|
|
||||||
this.advance()
|
|
||||||
const span = spanFrom(current.span, propToken.span)
|
|
||||||
const node: MemberExpr = {
|
|
||||||
type: "MemberExpr",
|
|
||||||
object: current,
|
|
||||||
property: propToken.value,
|
|
||||||
span,
|
|
||||||
}
|
|
||||||
current = node
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "punctuation" && token.value === "[") {
|
|
||||||
this.advance()
|
|
||||||
const indexExpr = this.parseExpression(0)
|
|
||||||
const endToken = this.peek()
|
|
||||||
if (!(endToken.type === "punctuation" && endToken.value === "]")) {
|
|
||||||
this.error("expected ']'", endToken.span)
|
|
||||||
this.syncTo("]")
|
|
||||||
} else {
|
|
||||||
this.advance()
|
|
||||||
}
|
|
||||||
const span = spanFrom(current.span, endToken.span)
|
|
||||||
const node: IndexExpr = { type: "IndexExpr", object: current, index: indexExpr, span }
|
|
||||||
current = node
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "punctuation" && token.value === "(") {
|
|
||||||
this.advance()
|
|
||||||
const args: Expr[] = []
|
|
||||||
while (this.peek().type !== "eof") {
|
|
||||||
const next = this.peek()
|
|
||||||
if (next.type === "punctuation" && next.value === ")") {
|
|
||||||
this.advance()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
const arg = this.parseExpression(0)
|
|
||||||
args.push(arg)
|
|
||||||
const sep = this.peek()
|
|
||||||
if (sep.type === "punctuation" && sep.value === ",") {
|
|
||||||
this.advance()
|
|
||||||
const maybeClose = this.peek()
|
|
||||||
if (maybeClose.type === "punctuation" && maybeClose.value === ")") {
|
|
||||||
this.advance()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (sep.type === "punctuation" && sep.value === ")") {
|
|
||||||
this.advance()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
this.error("expected ',' or ')'", sep.span)
|
|
||||||
this.syncTo(")")
|
|
||||||
const maybeClose = this.peek()
|
|
||||||
if (maybeClose.type === "punctuation" && maybeClose.value === ")") {
|
|
||||||
this.advance()
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
const endToken = this.previous()
|
|
||||||
const span = spanFrom(current.span, endToken.span)
|
|
||||||
const node: CallExpr = { type: "CallExpr", callee: current, args, span }
|
|
||||||
current = node
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return current
|
|
||||||
}
|
|
||||||
|
|
||||||
private parsePrimary(): Expr {
|
|
||||||
const token = this.peek()
|
|
||||||
|
|
||||||
if (token.type === "number") {
|
|
||||||
this.advance()
|
|
||||||
const node: Literal = {
|
|
||||||
type: "Literal",
|
|
||||||
kind: "number",
|
|
||||||
value: token.value,
|
|
||||||
span: token.span,
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "string") {
|
|
||||||
this.advance()
|
|
||||||
const node: Literal = {
|
|
||||||
type: "Literal",
|
|
||||||
kind: "string",
|
|
||||||
value: token.value,
|
|
||||||
span: token.span,
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "boolean") {
|
|
||||||
this.advance()
|
|
||||||
const node: Literal = {
|
|
||||||
type: "Literal",
|
|
||||||
kind: "boolean",
|
|
||||||
value: token.value,
|
|
||||||
span: token.span,
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "null") {
|
|
||||||
this.advance()
|
|
||||||
const node: Literal = { type: "Literal", kind: "null", value: null, span: token.span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "regex") {
|
|
||||||
this.advance()
|
|
||||||
const node: Literal = {
|
|
||||||
type: "Literal",
|
|
||||||
kind: "regex",
|
|
||||||
value: token.pattern,
|
|
||||||
flags: token.flags,
|
|
||||||
span: token.span,
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "identifier") {
|
|
||||||
this.advance()
|
|
||||||
const node: Identifier = { type: "Identifier", name: token.value, span: token.span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "this") {
|
|
||||||
this.advance()
|
|
||||||
const node: Identifier = { type: "Identifier", name: "this", span: token.span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "punctuation" && token.value === "(") {
|
|
||||||
this.advance()
|
|
||||||
const expr = this.parseExpression(0)
|
|
||||||
const closeToken = this.peek()
|
|
||||||
if (closeToken.type === "punctuation" && closeToken.value === ")") {
|
|
||||||
this.advance()
|
|
||||||
} else {
|
|
||||||
this.error("expected ')'", closeToken.span)
|
|
||||||
this.syncTo(")")
|
|
||||||
const maybeClose = this.peek()
|
|
||||||
if (maybeClose.type === "punctuation" && maybeClose.value === ")") {
|
|
||||||
this.advance()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return expr
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "punctuation" && token.value === "[") {
|
|
||||||
return this.parseList()
|
|
||||||
}
|
|
||||||
|
|
||||||
this.error("unexpected token", token.span)
|
|
||||||
this.advance()
|
|
||||||
const node: ErrorExpr = { type: "ErrorExpr", message: "unexpected token", span: token.span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
private parseList(): Expr {
|
|
||||||
const startToken = this.peek()
|
|
||||||
this.advance()
|
|
||||||
const elements: Expr[] = []
|
|
||||||
while (this.peek().type !== "eof") {
|
|
||||||
const next = this.peek()
|
|
||||||
if (next.type === "punctuation" && next.value === "]") {
|
|
||||||
this.advance()
|
|
||||||
const span = spanFrom(startToken.span, next.span)
|
|
||||||
const node: ListExpr = { type: "ListExpr", elements, span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
const element = this.parseExpression(0)
|
|
||||||
elements.push(element)
|
|
||||||
const sep = this.peek()
|
|
||||||
if (sep.type === "punctuation" && sep.value === ",") {
|
|
||||||
this.advance()
|
|
||||||
const maybeClose = this.peek()
|
|
||||||
if (maybeClose.type === "punctuation" && maybeClose.value === "]") {
|
|
||||||
this.advance()
|
|
||||||
const span = spanFrom(startToken.span, maybeClose.span)
|
|
||||||
const node: ListExpr = { type: "ListExpr", elements, span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (sep.type === "punctuation" && sep.value === "]") {
|
|
||||||
this.advance()
|
|
||||||
const span = spanFrom(startToken.span, sep.span)
|
|
||||||
const node: ListExpr = { type: "ListExpr", elements, span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
this.error("expected ',' or ']'", sep.span)
|
|
||||||
this.syncTo("]")
|
|
||||||
const maybeClose = this.peek()
|
|
||||||
if (maybeClose.type === "punctuation" && maybeClose.value === "]") {
|
|
||||||
const endToken = maybeClose
|
|
||||||
this.advance()
|
|
||||||
const span = spanFrom(startToken.span, endToken.span)
|
|
||||||
const node: ListExpr = { type: "ListExpr", elements, span }
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
const endToken = this.previous()
|
|
||||||
const span = spanFrom(startToken.span, endToken.span)
|
|
||||||
return { type: "ListExpr", elements, span }
|
|
||||||
}
|
|
||||||
|
|
||||||
private error(message: string, span: Token["span"]) {
|
|
||||||
this.diagnostics.push({ kind: "parse", message, span })
|
|
||||||
}
|
|
||||||
|
|
||||||
private syncTo(value: ")" | "]") {
|
|
||||||
while (this.peek().type !== "eof") {
|
|
||||||
const token = this.peek()
|
|
||||||
if (token.type === "punctuation" && token.value === value) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
this.advance()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private peek(): Token {
|
|
||||||
return this.tokens[this.index]
|
|
||||||
}
|
|
||||||
|
|
||||||
private previous(): Token {
|
|
||||||
return this.tokens[Math.max(0, this.index - 1)]
|
|
||||||
}
|
|
||||||
|
|
||||||
private advance(): Token {
|
|
||||||
const token = this.tokens[this.index]
|
|
||||||
if (this.index < this.tokens.length - 1) this.index += 1
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
import assert from "node:assert"
|
|
||||||
import test from "node:test"
|
|
||||||
import { parseExpressionSource } from "./parser"
|
|
||||||
import { buildPropertyExpressionSource } from "./properties"
|
|
||||||
|
|
||||||
// Round-trip check: shorthand property names expand to canonical expression
// source (bare names get a `note.` root; non-identifier segments become
// bracket/index syntax), and every expansion itself parses without diagnostics.
test("builds property expression sources", () => {
  const cases: Array<{ input: string; expected: string }> = [
    { input: "status", expected: "note.status" },
    { input: "note.status", expected: "note.status" },
    { input: "file.name", expected: "file.name" },
    { input: "file.my-field", expected: 'file["my-field"]' },
    { input: "my-field", expected: 'note["my-field"]' },
    { input: 'note["my field"]', expected: 'note["my field"]' },
    { input: "formula.total", expected: "formula.total" },
    { input: "this.file.name", expected: "this.file.name" },
    { input: "a.b-c.d", expected: 'note.a["b-c"].d' },
    { input: "date(file.ctime)", expected: "date(file.ctime)" },
  ]

  for (const entry of cases) {
    const result = buildPropertyExpressionSource(entry.input)
    assert.strictEqual(result, entry.expected)
    const parsed = parseExpressionSource(entry.expected)
    assert.strictEqual(parsed.diagnostics.length, 0)
    assert.ok(parsed.program.body)
  }
})
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
const simpleIdentifierPattern = /^[A-Za-z_][A-Za-z0-9_]*$/
|
|
||||||
|
|
||||||
export function buildPropertyExpressionSource(property: string): string | null {
|
|
||||||
const trimmed = property.trim()
|
|
||||||
if (!trimmed) return null
|
|
||||||
if (trimmed.includes("(") || trimmed.includes("[") || trimmed.includes("]")) {
|
|
||||||
return trimmed
|
|
||||||
}
|
|
||||||
const parts = trimmed.split(".")
|
|
||||||
const root = parts[0]
|
|
||||||
const rest = parts.slice(1)
|
|
||||||
const buildAccess = (base: string, segments: string[]) => {
|
|
||||||
let source = base
|
|
||||||
for (const segment of segments) {
|
|
||||||
if (simpleIdentifierPattern.test(segment)) {
|
|
||||||
source = `${source}.${segment}`
|
|
||||||
} else {
|
|
||||||
source = `${source}[${JSON.stringify(segment)}]`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return source
|
|
||||||
}
|
|
||||||
if (root === "file" || root === "note" || root === "formula" || root === "this") {
|
|
||||||
return buildAccess(root, rest)
|
|
||||||
}
|
|
||||||
return buildAccess("note", parts)
|
|
||||||
}
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
// Names of the summary aggregations supported natively for view columns.
export const BUILTIN_SUMMARY_TYPES = [
  "count",
  "sum",
  "average",
  "avg",
  "min",
  "max",
  "range",
  "unique",
  "filled",
  "missing",
  "median",
  "stddev",
  "checked",
  "unchecked",
  "empty",
  "earliest",
  "latest",
] as const

// Union of the literal names above.
export type BuiltinSummaryType = (typeof BUILTIN_SUMMARY_TYPES)[number]

// How one column is summarized: a builtin aggregation or a formula.
export interface SummaryDefinition {
  type: "builtin" | "formula"
  // NOTE(review): presumably builtinType is set when type === "builtin" and
  // formulaRef/expression when type === "formula" — confirm against consumers.
  builtinType?: BuiltinSummaryType
  formulaRef?: string
  expression?: string
}

// Per-view summary configuration, keyed by column/property name.
export interface ViewSummaryConfig {
  columns: Record<string, SummaryDefinition>
}

// Optional per-property display configuration.
export interface PropertyConfig {
  displayName?: string
}
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
import { Span } from "./ast"
|
|
||||||
|
|
||||||
// Every operator the expression language recognizes. "!" is prefix-only;
// the remainder appear as infix operators.
export type Operator =
  | "=="
  | "!="
  | ">="
  | "<="
  | ">"
  | "<"
  | "&&"
  | "||"
  | "+"
  | "-"
  | "*"
  | "/"
  | "%"
  | "!"

// Structural punctuation: member access, separators, and grouping pairs.
export type Punctuation = "." | "," | "(" | ")" | "[" | "]"

// Token variants; every token carries the source span it was lexed from.
export type NumberToken = { type: "number"; value: number; span: Span }
// value holds the decoded string (escape sequences already resolved).
export type StringToken = { type: "string"; value: string; span: Span }
export type BooleanToken = { type: "boolean"; value: boolean; span: Span }
export type NullToken = { type: "null"; span: Span }
export type IdentifierToken = { type: "identifier"; value: string; span: Span }
// "this" is lexed as its own token kind rather than an identifier.
export type ThisToken = { type: "this"; span: Span }
export type OperatorToken = { type: "operator"; value: Operator; span: Span }
export type PunctuationToken = { type: "punctuation"; value: Punctuation; span: Span }
// Regex literals keep pattern and flags separate (e.g. /abc/g).
export type RegexToken = { type: "regex"; pattern: string; flags: string; span: Span }
// Always emitted as the final token of a stream.
export type EofToken = { type: "eof"; span: Span }

// Discriminated union over the `type` field.
export type Token =
  | NumberToken
  | StringToken
  | BooleanToken
  | NullToken
  | IdentifierToken
  | ThisToken
  | OperatorToken
  | PunctuationToken
  | RegexToken
  | EofToken
|
|
||||||
@@ -1,278 +0,0 @@
|
|||||||
import yaml from "js-yaml"
|
|
||||||
import fs from "node:fs/promises"
|
|
||||||
import path from "node:path"
|
|
||||||
import {
|
|
||||||
parseExpressionSource,
|
|
||||||
compileExpression,
|
|
||||||
buildPropertyExpressionSource,
|
|
||||||
BUILTIN_SUMMARY_TYPES,
|
|
||||||
} from "./compiler"
|
|
||||||
import { Expr, LogicalExpr, UnaryExpr, spanFrom } from "./compiler/ast"
|
|
||||||
import { Diagnostic } from "./compiler/errors"
|
|
||||||
|
|
||||||
const isRecord = (value: unknown): value is Record<string, unknown> =>
|
|
||||||
typeof value === "object" && value !== null && !Array.isArray(value)
|
|
||||||
|
|
||||||
// One expression gathered while processing a base definition, together with
// every artifact produced for it.
type CollectedExpression = {
  kind: string // category label for the expression — NOTE(review): confirm the value set with producers
  context: string // path describing where it appeared (e.g. "views[0].groupBy")
  source: string // raw expression source text
  ast: Expr | null // parsed body, or null when parsing yielded nothing
  ir: unknown // compiled form — shape not visible from this file
  diagnostics: Diagnostic[] // parse/compile diagnostics for this expression
}
|
|
||||||
|
|
||||||
const parseToExpr = (source: string, filePath: string) => {
|
|
||||||
const result = parseExpressionSource(source, filePath)
|
|
||||||
return { expr: result.program.body ?? null, diagnostics: result.diagnostics }
|
|
||||||
}
|
|
||||||
|
|
||||||
const buildLogical = (operator: "&&" | "||", expressionsList: Expr[]): Expr | null => {
|
|
||||||
if (expressionsList.length === 0) return null
|
|
||||||
let current: Expr | null = null
|
|
||||||
for (const next of expressionsList) {
|
|
||||||
if (!current) {
|
|
||||||
current = next
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
const span = spanFrom(current.span, next.span)
|
|
||||||
const node: LogicalExpr = { type: "LogicalExpr", operator, left: current, right: next, span }
|
|
||||||
current = node
|
|
||||||
}
|
|
||||||
return current
|
|
||||||
}
|
|
||||||
|
|
||||||
const negateExpressions = (expressionsList: Expr[]): Expr[] =>
|
|
||||||
expressionsList.map((expr) => {
|
|
||||||
const node: UnaryExpr = {
|
|
||||||
type: "UnaryExpr",
|
|
||||||
operator: "!",
|
|
||||||
argument: expr,
|
|
||||||
span: spanFrom(expr.span, expr.span),
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
})
|
|
||||||
|
|
||||||
const buildFilterExpr = (
|
|
||||||
raw: unknown,
|
|
||||||
context: string,
|
|
||||||
diagnostics: Diagnostic[],
|
|
||||||
filePath: string,
|
|
||||||
): Expr | null => {
|
|
||||||
if (typeof raw === "string") {
|
|
||||||
const parsed = parseToExpr(raw, filePath)
|
|
||||||
diagnostics.push(...parsed.diagnostics)
|
|
||||||
return parsed.expr
|
|
||||||
}
|
|
||||||
if (!isRecord(raw)) return null
|
|
||||||
if (Array.isArray(raw.and)) {
|
|
||||||
const parts = raw.and
|
|
||||||
.map((entry, index) =>
|
|
||||||
buildFilterExpr(entry, `${context}.and[${index}]`, diagnostics, filePath),
|
|
||||||
)
|
|
||||||
.filter((entry): entry is Expr => Boolean(entry))
|
|
||||||
return buildLogical("&&", parts)
|
|
||||||
}
|
|
||||||
if (Array.isArray(raw.or)) {
|
|
||||||
const parts = raw.or
|
|
||||||
.map((entry, index) =>
|
|
||||||
buildFilterExpr(entry, `${context}.or[${index}]`, diagnostics, filePath),
|
|
||||||
)
|
|
||||||
.filter((entry): entry is Expr => Boolean(entry))
|
|
||||||
return buildLogical("||", parts)
|
|
||||||
}
|
|
||||||
if (Array.isArray(raw.not)) {
|
|
||||||
const parts = raw.not
|
|
||||||
.map((entry, index) =>
|
|
||||||
buildFilterExpr(entry, `${context}.not[${index}]`, diagnostics, filePath),
|
|
||||||
)
|
|
||||||
.filter((entry): entry is Expr => Boolean(entry))
|
|
||||||
return buildLogical("&&", negateExpressions(parts))
|
|
||||||
}
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
// Walk every view definition and collect each property reference that will need
// to be evaluated (order columns, sort keys, groupBy, summary columns, card
// image, map fields). Returns property → { canonical expression source, context
// path } with first occurrence winning; blank or unexpandable names are skipped.
const collectPropertyExpressions = (
  views: unknown[],
): Map<string, { source: string; context: string }> => {
  const entries = new Map<string, { source: string; context: string }>()
  // Register one property, deduplicating by trimmed name.
  const addProperty = (property: string, context: string) => {
    const key = property.trim()
    if (!key || entries.has(key)) return
    const source = buildPropertyExpressionSource(key)
    if (!source) return
    entries.set(key, { source, context })
  }

  views.forEach((view, viewIndex) => {
    if (!isRecord(view)) return
    const viewContext = `views[${viewIndex}]`
    // Column order: plain strings naming properties.
    if (Array.isArray(view.order)) {
      view.order.forEach((entry, orderIndex) => {
        if (typeof entry === "string") {
          addProperty(entry, `${viewContext}.order[${orderIndex}]`)
        }
      })
    }

    // Sort keys: objects with a `property` field.
    if (Array.isArray(view.sort)) {
      view.sort.forEach((entry, sortIndex) => {
        if (isRecord(entry) && typeof entry.property === "string") {
          addProperty(entry.property, `${viewContext}.sort[${sortIndex}].property`)
        }
      })
    }

    // groupBy accepts either a bare string or { property: ... }.
    if (typeof view.groupBy === "string") {
      addProperty(view.groupBy, `${viewContext}.groupBy`)
    } else if (isRecord(view.groupBy) && typeof view.groupBy.property === "string") {
      addProperty(view.groupBy.property, `${viewContext}.groupBy.property`)
    }

    // Summaries: either nested under `columns` or keyed directly on the object.
    if (view.summaries && isRecord(view.summaries)) {
      const columns =
        "columns" in view.summaries && isRecord(view.summaries.columns)
          ? view.summaries.columns
          : view.summaries
      for (const key of Object.keys(columns)) {
        addProperty(key, `${viewContext}.summaries.${key}`)
      }
    }

    if (typeof view.image === "string") {
      addProperty(view.image, `${viewContext}.image`)
    }

    // Map views: coordinates default to a property literally named "coordinates".
    if (view.type === "map") {
      const coords = typeof view.coordinates === "string" ? view.coordinates : "coordinates"
      addProperty(coords, `${viewContext}.coordinates`)
      if (typeof view.markerIcon === "string") {
        addProperty(view.markerIcon, `${viewContext}.markerIcon`)
      }
      if (typeof view.markerColor === "string") {
        addProperty(view.markerColor, `${viewContext}.markerColor`)
      }
    }
  })

  return entries
}
|
|
||||||
|
|
||||||
const main = async () => {
|
|
||||||
const inputPath = process.argv[2] ? String(process.argv[2]) : "content/antilibrary.base"
|
|
||||||
const filePath = path.resolve(process.cwd(), inputPath)
|
|
||||||
const raw = await fs.readFile(filePath, "utf8")
|
|
||||||
const parsed = yaml.load(raw)
|
|
||||||
const config = isRecord(parsed) ? parsed : {}
|
|
||||||
|
|
||||||
const collected: CollectedExpression[] = []
|
|
||||||
|
|
||||||
if (config.filters !== undefined) {
|
|
||||||
const diagnostics: Diagnostic[] = []
|
|
||||||
const expr = buildFilterExpr(config.filters, "filters", diagnostics, filePath)
|
|
||||||
collected.push({
|
|
||||||
kind: "filters",
|
|
||||||
context: "filters",
|
|
||||||
source: typeof config.filters === "string" ? config.filters : JSON.stringify(config.filters),
|
|
||||||
ast: expr,
|
|
||||||
ir: expr ? compileExpression(expr) : null,
|
|
||||||
diagnostics,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isRecord(config.formulas)) {
|
|
||||||
for (const [name, value] of Object.entries(config.formulas)) {
|
|
||||||
if (typeof value !== "string") continue
|
|
||||||
const parsedExpr = parseToExpr(value, filePath)
|
|
||||||
collected.push({
|
|
||||||
kind: "formula",
|
|
||||||
context: `formulas.${name}`,
|
|
||||||
source: value,
|
|
||||||
ast: parsedExpr.expr,
|
|
||||||
ir: parsedExpr.expr ? compileExpression(parsedExpr.expr) : null,
|
|
||||||
diagnostics: parsedExpr.diagnostics,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const topLevelSummaries = isRecord(config.summaries) ? config.summaries : {}
|
|
||||||
|
|
||||||
if (isRecord(config.summaries)) {
|
|
||||||
for (const [name, value] of Object.entries(config.summaries)) {
|
|
||||||
if (typeof value !== "string") continue
|
|
||||||
const parsedExpr = parseToExpr(value, filePath)
|
|
||||||
collected.push({
|
|
||||||
kind: "summary",
|
|
||||||
context: `summaries.${name}`,
|
|
||||||
source: value,
|
|
||||||
ast: parsedExpr.expr,
|
|
||||||
ir: parsedExpr.expr ? compileExpression(parsedExpr.expr) : null,
|
|
||||||
diagnostics: parsedExpr.diagnostics,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Array.isArray(config.views)) {
|
|
||||||
config.views.forEach((view, index) => {
|
|
||||||
if (!isRecord(view)) return
|
|
||||||
if (view.filters !== undefined) {
|
|
||||||
const diagnostics: Diagnostic[] = []
|
|
||||||
const expr = buildFilterExpr(view.filters, `views[${index}].filters`, diagnostics, filePath)
|
|
||||||
collected.push({
|
|
||||||
kind: "view.filter",
|
|
||||||
context: `views[${index}].filters`,
|
|
||||||
source: typeof view.filters === "string" ? view.filters : JSON.stringify(view.filters),
|
|
||||||
ast: expr,
|
|
||||||
ir: expr ? compileExpression(expr) : null,
|
|
||||||
diagnostics,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (view.summaries && isRecord(view.summaries)) {
|
|
||||||
const columns =
|
|
||||||
"columns" in view.summaries && isRecord(view.summaries.columns)
|
|
||||||
? view.summaries.columns
|
|
||||||
: view.summaries
|
|
||||||
for (const [column, summaryValue] of Object.entries(columns)) {
|
|
||||||
if (typeof summaryValue !== "string") continue
|
|
||||||
const normalized = summaryValue.toLowerCase().trim()
|
|
||||||
const builtins = new Set<string>(BUILTIN_SUMMARY_TYPES)
|
|
||||||
if (builtins.has(normalized)) continue
|
|
||||||
const summarySource =
|
|
||||||
summaryValue in topLevelSummaries && typeof topLevelSummaries[summaryValue] === "string"
|
|
||||||
? String(topLevelSummaries[summaryValue])
|
|
||||||
: summaryValue
|
|
||||||
const parsedExpr = parseToExpr(summarySource, filePath)
|
|
||||||
collected.push({
|
|
||||||
kind: "view.summary",
|
|
||||||
context: `views[${index}].summaries.${column}`,
|
|
||||||
source: summarySource,
|
|
||||||
ast: parsedExpr.expr,
|
|
||||||
ir: parsedExpr.expr ? compileExpression(parsedExpr.expr) : null,
|
|
||||||
diagnostics: parsedExpr.diagnostics,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const views = Array.isArray(config.views) ? config.views : []
|
|
||||||
const propertyExpressions = collectPropertyExpressions(views)
|
|
||||||
for (const [_, entry] of propertyExpressions.entries()) {
|
|
||||||
const parsedExpr = parseToExpr(entry.source, filePath)
|
|
||||||
collected.push({
|
|
||||||
kind: "property",
|
|
||||||
context: entry.context,
|
|
||||||
source: entry.source,
|
|
||||||
ast: parsedExpr.expr,
|
|
||||||
ir: parsedExpr.expr ? compileExpression(parsedExpr.expr) : null,
|
|
||||||
diagnostics: parsedExpr.diagnostics,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const payload = { file: inputPath, count: collected.length, expressions: collected }
|
|
||||||
|
|
||||||
process.stdout.write(JSON.stringify(payload, null, 2))
|
|
||||||
}
|
|
||||||
|
|
||||||
main()
|
|
||||||
@@ -1,248 +0,0 @@
|
|||||||
import { QuartzPluginData } from "../../plugins/vfile"
|
|
||||||
import { evaluateSummaryExpression, valueToUnknown, EvalContext, ProgramIR } from "./compiler"
|
|
||||||
import { SummaryDefinition, ViewSummaryConfig, BuiltinSummaryType } from "./types"
|
|
||||||
|
|
||||||
type SummaryValueResolver = (
|
|
||||||
file: QuartzPluginData,
|
|
||||||
column: string,
|
|
||||||
allFiles: QuartzPluginData[],
|
|
||||||
) => unknown
|
|
||||||
|
|
||||||
type SummaryContextFactory = (file: QuartzPluginData) => EvalContext
|
|
||||||
|
|
||||||
export function computeColumnSummary(
|
|
||||||
column: string,
|
|
||||||
files: QuartzPluginData[],
|
|
||||||
summary: SummaryDefinition,
|
|
||||||
allFiles: QuartzPluginData[] = [],
|
|
||||||
valueResolver: SummaryValueResolver,
|
|
||||||
getContext: SummaryContextFactory,
|
|
||||||
summaryExpression?: ProgramIR,
|
|
||||||
): string | number | undefined {
|
|
||||||
if (files.length === 0) {
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
const values = files.map((file) => valueResolver(file, column, allFiles))
|
|
||||||
|
|
||||||
if (summary.type === "builtin" && summary.builtinType) {
|
|
||||||
return computeBuiltinSummary(values, summary.builtinType)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (summary.type === "formula" && summary.expression) {
|
|
||||||
if (summaryExpression) {
|
|
||||||
const summaryCtx = getContext(files[0])
|
|
||||||
summaryCtx.diagnosticContext = `summaries.${column}`
|
|
||||||
summaryCtx.diagnosticSource = summary.expression
|
|
||||||
summaryCtx.rows = files
|
|
||||||
const value = evaluateSummaryExpression(summaryExpression, values, summaryCtx)
|
|
||||||
const unknownValue = valueToUnknown(value)
|
|
||||||
if (typeof unknownValue === "number" || typeof unknownValue === "string") {
|
|
||||||
return unknownValue
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
function computeBuiltinSummary(
|
|
||||||
values: any[],
|
|
||||||
type: BuiltinSummaryType,
|
|
||||||
): string | number | undefined {
|
|
||||||
switch (type) {
|
|
||||||
case "count":
|
|
||||||
return values.length
|
|
||||||
|
|
||||||
case "sum": {
|
|
||||||
const nums = values.filter((v) => typeof v === "number")
|
|
||||||
if (nums.length === 0) return undefined
|
|
||||||
return nums.reduce((acc, v) => acc + v, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
case "average":
|
|
||||||
case "avg": {
|
|
||||||
const nums = values.filter((v) => typeof v === "number")
|
|
||||||
if (nums.length === 0) return undefined
|
|
||||||
const sum = nums.reduce((acc, v) => acc + v, 0)
|
|
||||||
return Math.round((sum / nums.length) * 100) / 100
|
|
||||||
}
|
|
||||||
|
|
||||||
case "min": {
|
|
||||||
const comparable = values.filter(
|
|
||||||
(v) => typeof v === "number" || v instanceof Date || typeof v === "string",
|
|
||||||
)
|
|
||||||
if (comparable.length === 0) return undefined
|
|
||||||
const normalized = comparable.map((v) => (v instanceof Date ? v.getTime() : v))
|
|
||||||
const min = Math.min(...normalized.filter((v) => typeof v === "number"))
|
|
||||||
if (isNaN(min)) {
|
|
||||||
const strings = comparable.filter((v) => typeof v === "string") as string[]
|
|
||||||
if (strings.length === 0) return undefined
|
|
||||||
return strings.sort()[0]
|
|
||||||
}
|
|
||||||
if (comparable.some((v) => v instanceof Date)) {
|
|
||||||
return new Date(min).toISOString().split("T")[0]
|
|
||||||
}
|
|
||||||
return min
|
|
||||||
}
|
|
||||||
|
|
||||||
case "max": {
|
|
||||||
const comparable = values.filter(
|
|
||||||
(v) => typeof v === "number" || v instanceof Date || typeof v === "string",
|
|
||||||
)
|
|
||||||
if (comparable.length === 0) return undefined
|
|
||||||
const normalized = comparable.map((v) => (v instanceof Date ? v.getTime() : v))
|
|
||||||
const max = Math.max(...normalized.filter((v) => typeof v === "number"))
|
|
||||||
if (isNaN(max)) {
|
|
||||||
const strings = comparable.filter((v) => typeof v === "string") as string[]
|
|
||||||
if (strings.length === 0) return undefined
|
|
||||||
return strings.sort().reverse()[0]
|
|
||||||
}
|
|
||||||
if (comparable.some((v) => v instanceof Date)) {
|
|
||||||
return new Date(max).toISOString().split("T")[0]
|
|
||||||
}
|
|
||||||
return max
|
|
||||||
}
|
|
||||||
|
|
||||||
case "range": {
|
|
||||||
const comparable = values.filter(
|
|
||||||
(v) => typeof v === "number" || v instanceof Date || typeof v === "string",
|
|
||||||
)
|
|
||||||
if (comparable.length === 0) return undefined
|
|
||||||
const normalized = comparable.map((v) => (v instanceof Date ? v.getTime() : v))
|
|
||||||
const nums = normalized.filter((v) => typeof v === "number")
|
|
||||||
if (nums.length === 0) return undefined
|
|
||||||
const min = Math.min(...nums)
|
|
||||||
const max = Math.max(...nums)
|
|
||||||
if (comparable.some((v) => v instanceof Date)) {
|
|
||||||
return `${new Date(min).toISOString().split("T")[0]} - ${new Date(max).toISOString().split("T")[0]}`
|
|
||||||
}
|
|
||||||
return `${min} - ${max}`
|
|
||||||
}
|
|
||||||
|
|
||||||
case "unique": {
|
|
||||||
const nonNull = values.filter((v) => v !== undefined && v !== null && v !== "")
|
|
||||||
const unique = new Set(nonNull.map((v) => (v instanceof Date ? v.toISOString() : String(v))))
|
|
||||||
return unique.size
|
|
||||||
}
|
|
||||||
|
|
||||||
case "filled": {
|
|
||||||
const filled = values.filter((v) => v !== undefined && v !== null && v !== "")
|
|
||||||
return filled.length
|
|
||||||
}
|
|
||||||
|
|
||||||
case "missing": {
|
|
||||||
const missing = values.filter((v) => v === undefined || v === null || v === "")
|
|
||||||
return missing.length
|
|
||||||
}
|
|
||||||
|
|
||||||
case "median": {
|
|
||||||
const nums = values.filter((v) => typeof v === "number") as number[]
|
|
||||||
if (nums.length === 0) return undefined
|
|
||||||
const sorted = [...nums].sort((a, b) => a - b)
|
|
||||||
const mid = Math.floor(sorted.length / 2)
|
|
||||||
if (sorted.length % 2 === 0) {
|
|
||||||
return (sorted[mid - 1] + sorted[mid]) / 2
|
|
||||||
}
|
|
||||||
return sorted[mid]
|
|
||||||
}
|
|
||||||
|
|
||||||
case "stddev": {
|
|
||||||
const nums = values.filter((v) => typeof v === "number") as number[]
|
|
||||||
if (nums.length === 0) return undefined
|
|
||||||
const mean = nums.reduce((acc, v) => acc + v, 0) / nums.length
|
|
||||||
const variance = nums.reduce((acc, v) => acc + (v - mean) * (v - mean), 0) / nums.length
|
|
||||||
return Math.round(Math.sqrt(variance) * 100) / 100
|
|
||||||
}
|
|
||||||
|
|
||||||
case "checked":
|
|
||||||
return values.filter((v) => v === true).length
|
|
||||||
|
|
||||||
case "unchecked":
|
|
||||||
return values.filter((v) => v === false).length
|
|
||||||
|
|
||||||
case "empty": {
|
|
||||||
const count = values.filter(
|
|
||||||
(v) =>
|
|
||||||
v === undefined ||
|
|
||||||
v === null ||
|
|
||||||
v === "" ||
|
|
||||||
(Array.isArray(v) && v.length === 0) ||
|
|
||||||
(typeof v === "object" && v !== null && !Array.isArray(v) && Object.keys(v).length === 0),
|
|
||||||
).length
|
|
||||||
return count
|
|
||||||
}
|
|
||||||
|
|
||||||
case "earliest": {
|
|
||||||
const dates = values.filter(
|
|
||||||
(v) =>
|
|
||||||
v instanceof Date ||
|
|
||||||
(typeof v === "string" && /^\d{4}-\d{2}-\d{2}/.test(v)) ||
|
|
||||||
typeof v === "number",
|
|
||||||
)
|
|
||||||
if (dates.length === 0) return undefined
|
|
||||||
const timestamps = dates.map((v) => {
|
|
||||||
if (v instanceof Date) return v.getTime()
|
|
||||||
if (typeof v === "string") return new Date(v).getTime()
|
|
||||||
return v
|
|
||||||
})
|
|
||||||
const earliest = Math.min(...timestamps)
|
|
||||||
return new Date(earliest).toISOString().split("T")[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
case "latest": {
|
|
||||||
const dates = values.filter(
|
|
||||||
(v) =>
|
|
||||||
v instanceof Date ||
|
|
||||||
(typeof v === "string" && /^\d{4}-\d{2}-\d{2}/.test(v)) ||
|
|
||||||
typeof v === "number",
|
|
||||||
)
|
|
||||||
if (dates.length === 0) return undefined
|
|
||||||
const timestamps = dates.map((v) => {
|
|
||||||
if (v instanceof Date) return v.getTime()
|
|
||||||
if (typeof v === "string") return new Date(v).getTime()
|
|
||||||
return v
|
|
||||||
})
|
|
||||||
const latest = Math.max(...timestamps)
|
|
||||||
return new Date(latest).toISOString().split("T")[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function computeViewSummaries(
|
|
||||||
columns: string[],
|
|
||||||
files: QuartzPluginData[],
|
|
||||||
summaryConfig: ViewSummaryConfig | undefined,
|
|
||||||
allFiles: QuartzPluginData[] = [],
|
|
||||||
getContext: SummaryContextFactory,
|
|
||||||
valueResolver: SummaryValueResolver,
|
|
||||||
summaryExpressions?: Record<string, ProgramIR>,
|
|
||||||
): Record<string, string | number | undefined> {
|
|
||||||
const results: Record<string, string | number | undefined> = {}
|
|
||||||
|
|
||||||
if (!summaryConfig?.columns) {
|
|
||||||
return results
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const column of columns) {
|
|
||||||
const summary = summaryConfig.columns[column]
|
|
||||||
if (summary) {
|
|
||||||
const expression = summaryExpressions ? summaryExpressions[column] : undefined
|
|
||||||
results[column] = computeColumnSummary(
|
|
||||||
column,
|
|
||||||
files,
|
|
||||||
summary,
|
|
||||||
allFiles,
|
|
||||||
valueResolver,
|
|
||||||
getContext,
|
|
||||||
expression,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return results
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,31 +0,0 @@
|
|||||||
import assert from "node:assert"
|
|
||||||
import test from "node:test"
|
|
||||||
import { parseViews, parseViewSummaries } from "./types"
|
|
||||||
|
|
||||||
test("parseViews preserves raw filters", () => {
|
|
||||||
const views = parseViews([
|
|
||||||
{ type: "table", name: "test", filters: 'status == "done"', order: ["file.name"] },
|
|
||||||
])
|
|
||||||
|
|
||||||
assert.strictEqual(views.length, 1)
|
|
||||||
assert.strictEqual(views[0].filters, 'status == "done"')
|
|
||||||
assert.deepStrictEqual(views[0].order, ["file.name"])
|
|
||||||
})
|
|
||||||
|
|
||||||
test("parseViews rejects missing type/name", () => {
|
|
||||||
assert.throws(() => parseViews([{}]))
|
|
||||||
})
|
|
||||||
|
|
||||||
test("parseViewSummaries resolves builtin and formula refs", () => {
|
|
||||||
const summaries = parseViewSummaries(
|
|
||||||
{ price: "Average", score: "avgScore", extra: "values.length" },
|
|
||||||
{ avgScore: "values.mean()" },
|
|
||||||
)
|
|
||||||
|
|
||||||
assert.ok(summaries)
|
|
||||||
if (!summaries) return
|
|
||||||
assert.strictEqual(summaries.columns.price.type, "builtin")
|
|
||||||
assert.strictEqual(summaries.columns.score.type, "formula")
|
|
||||||
assert.strictEqual(summaries.columns.score.formulaRef, "avgScore")
|
|
||||||
assert.strictEqual(summaries.columns.extra.type, "formula")
|
|
||||||
})
|
|
||||||
@@ -1,119 +0,0 @@
|
|||||||
import {
|
|
||||||
SummaryDefinition,
|
|
||||||
ViewSummaryConfig,
|
|
||||||
PropertyConfig,
|
|
||||||
BuiltinSummaryType,
|
|
||||||
BUILTIN_SUMMARY_TYPES,
|
|
||||||
} from "./compiler/schema"
|
|
||||||
|
|
||||||
export type { SummaryDefinition, ViewSummaryConfig, PropertyConfig, BuiltinSummaryType }
|
|
||||||
export { BUILTIN_SUMMARY_TYPES }
|
|
||||||
|
|
||||||
const isRecord = (value: unknown): value is Record<string, unknown> =>
|
|
||||||
typeof value === "object" && value !== null && !Array.isArray(value)
|
|
||||||
|
|
||||||
const isNonEmptyString = (value: unknown): value is string =>
|
|
||||||
typeof value === "string" && value.trim().length > 0
|
|
||||||
|
|
||||||
export type BaseFileFilter =
|
|
||||||
| string
|
|
||||||
| { and: BaseFileFilter[] }
|
|
||||||
| { or: BaseFileFilter[] }
|
|
||||||
| { not: BaseFileFilter[] }
|
|
||||||
|
|
||||||
export interface BaseFile {
|
|
||||||
filters?: BaseFileFilter
|
|
||||||
views: BaseView[]
|
|
||||||
properties?: Record<string, PropertyConfig>
|
|
||||||
summaries?: Record<string, string>
|
|
||||||
formulas?: Record<string, string>
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface BaseView {
|
|
||||||
type: "table" | "list" | "gallery" | "board" | "calendar" | "card" | "cards" | "map"
|
|
||||||
name: string
|
|
||||||
order?: string[]
|
|
||||||
sort?: BaseSortConfig[]
|
|
||||||
columnSize?: Record<string, number>
|
|
||||||
groupBy?: string | BaseGroupBy
|
|
||||||
limit?: number
|
|
||||||
filters?: BaseFileFilter
|
|
||||||
summaries?: Record<string, string> | ViewSummaryConfig
|
|
||||||
image?: string
|
|
||||||
cardSize?: number
|
|
||||||
cardAspect?: number
|
|
||||||
nestedProperties?: boolean
|
|
||||||
indentProperties?: boolean
|
|
||||||
separator?: string
|
|
||||||
date?: string
|
|
||||||
dateField?: string
|
|
||||||
dateProperty?: string
|
|
||||||
coordinates?: string
|
|
||||||
markerIcon?: string
|
|
||||||
markerColor?: string
|
|
||||||
defaultZoom?: number
|
|
||||||
defaultCenter?: [number, number]
|
|
||||||
clustering?: boolean
|
|
||||||
groupSizes?: Record<string, number>
|
|
||||||
groupAspects?: Record<string, number>
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface BaseSortConfig {
|
|
||||||
property: string
|
|
||||||
direction: "ASC" | "DESC"
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface BaseGroupBy {
|
|
||||||
property: string
|
|
||||||
direction: "ASC" | "DESC"
|
|
||||||
}
|
|
||||||
|
|
||||||
export function parseViews(raw: unknown[]): BaseView[] {
|
|
||||||
return raw.map((entry) => {
|
|
||||||
if (!isRecord(entry)) throw new Error("Each view must be an object")
|
|
||||||
const { type, name } = entry
|
|
||||||
if (!isNonEmptyString(type) || !isNonEmptyString(name)) {
|
|
||||||
throw new Error("Each view must have 'type' and 'name' fields")
|
|
||||||
}
|
|
||||||
return { ...entry, type, name } as BaseView
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
export function parseViewSummaries(
|
|
||||||
viewSummaries: Record<string, string> | ViewSummaryConfig | undefined,
|
|
||||||
topLevelSummaries?: Record<string, string>,
|
|
||||||
): ViewSummaryConfig | undefined {
|
|
||||||
if (!viewSummaries || typeof viewSummaries !== "object") return undefined
|
|
||||||
|
|
||||||
if ("columns" in viewSummaries && typeof viewSummaries.columns === "object") {
|
|
||||||
return viewSummaries as ViewSummaryConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
const columns: Record<string, SummaryDefinition> = {}
|
|
||||||
|
|
||||||
for (const [column, summaryValue] of Object.entries(viewSummaries)) {
|
|
||||||
if (typeof summaryValue !== "string") continue
|
|
||||||
|
|
||||||
const normalized = summaryValue.toLowerCase().trim()
|
|
||||||
|
|
||||||
if (BUILTIN_SUMMARY_TYPES.includes(normalized as BuiltinSummaryType)) {
|
|
||||||
columns[column] = { type: "builtin", builtinType: normalized as BuiltinSummaryType }
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (topLevelSummaries && summaryValue in topLevelSummaries) {
|
|
||||||
columns[column] = {
|
|
||||||
type: "formula",
|
|
||||||
formulaRef: summaryValue,
|
|
||||||
expression: topLevelSummaries[summaryValue],
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (summaryValue.includes("(") || summaryValue.includes(".")) {
|
|
||||||
columns[column] = { type: "formula", expression: summaryValue }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return Object.keys(columns).length > 0 ? { columns } : undefined
|
|
||||||
}
|
|
||||||
@@ -73,7 +73,7 @@ export function slugifyFilePath(fp: FilePath, excludeExt?: boolean): FullSlug {
|
|||||||
fp = stripSlashes(fp) as FilePath
|
fp = stripSlashes(fp) as FilePath
|
||||||
let ext = getFileExtension(fp)
|
let ext = getFileExtension(fp)
|
||||||
const withoutFileExt = fp.replace(new RegExp(ext + "$"), "")
|
const withoutFileExt = fp.replace(new RegExp(ext + "$"), "")
|
||||||
if (excludeExt || [".md", ".html", ".base", undefined].includes(ext)) {
|
if (excludeExt || [".md", ".html", undefined].includes(ext)) {
|
||||||
ext = ""
|
ext = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -26,10 +26,9 @@ export type CSSResource = {
|
|||||||
export function JSResourceToScriptElement(resource: JSResource, preserve?: boolean): JSX.Element {
|
export function JSResourceToScriptElement(resource: JSResource, preserve?: boolean): JSX.Element {
|
||||||
const scriptType = resource.moduleType ?? "application/javascript"
|
const scriptType = resource.moduleType ?? "application/javascript"
|
||||||
const spaPreserve = preserve ?? resource.spaPreserve
|
const spaPreserve = preserve ?? resource.spaPreserve
|
||||||
|
|
||||||
if (resource.contentType === "external") {
|
if (resource.contentType === "external") {
|
||||||
return (
|
return (
|
||||||
<script key={resource.src} src={resource.src} type={scriptType} data-persist={spaPreserve} />
|
<script key={resource.src} src={resource.src} type={scriptType} spa-preserve={spaPreserve} />
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
const content = resource.script
|
const content = resource.script
|
||||||
@@ -37,7 +36,7 @@ export function JSResourceToScriptElement(resource: JSResource, preserve?: boole
|
|||||||
<script
|
<script
|
||||||
key={randomUUID()}
|
key={randomUUID()}
|
||||||
type={scriptType}
|
type={scriptType}
|
||||||
data-persist={spaPreserve}
|
spa-preserve={spaPreserve}
|
||||||
dangerouslySetInnerHTML={{ __html: content }}
|
dangerouslySetInnerHTML={{ __html: content }}
|
||||||
></script>
|
></script>
|
||||||
)
|
)
|
||||||
@@ -55,7 +54,7 @@ export function CSSResourceToStyleElement(resource: CSSResource, preserve?: bool
|
|||||||
href={resource.content}
|
href={resource.content}
|
||||||
rel="stylesheet"
|
rel="stylesheet"
|
||||||
type="text/css"
|
type="text/css"
|
||||||
data-persist={spaPreserve}
|
spa-preserve={spaPreserve}
|
||||||
/>
|
/>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,94 +0,0 @@
|
|||||||
import { FilePath, FullSlug, slugifyFilePath } from "./path"
|
|
||||||
|
|
||||||
export type WikilinkWithPosition = {
|
|
||||||
wikilink: ParsedWikilink
|
|
||||||
start: number
|
|
||||||
end: number
|
|
||||||
}
|
|
||||||
|
|
||||||
export type ParsedWikilink = {
|
|
||||||
raw: string
|
|
||||||
target: string
|
|
||||||
anchor?: string
|
|
||||||
alias?: string
|
|
||||||
embed: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
export type ResolvedWikilink = {
|
|
||||||
slug: FullSlug
|
|
||||||
anchor?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
const wikilinkRegex = /^!?\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]$/
|
|
||||||
|
|
||||||
export function parseWikilink(text: string): ParsedWikilink | null {
|
|
||||||
const trimmed = text.trim()
|
|
||||||
const match = wikilinkRegex.exec(trimmed)
|
|
||||||
if (!match) return null
|
|
||||||
|
|
||||||
const [, target, anchor, alias] = match
|
|
||||||
return {
|
|
||||||
raw: trimmed,
|
|
||||||
target: target?.trim() ?? "",
|
|
||||||
anchor: anchor?.trim(),
|
|
||||||
alias: alias?.trim(),
|
|
||||||
embed: trimmed.startsWith("!"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveWikilinkTarget(
|
|
||||||
parsed: ParsedWikilink,
|
|
||||||
currentSlug: FullSlug,
|
|
||||||
): ResolvedWikilink | null {
|
|
||||||
const target = parsed.target.trim()
|
|
||||||
if (!target) return null
|
|
||||||
|
|
||||||
if (target.startsWith("/")) {
|
|
||||||
const slug = slugifyFilePath(target.slice(1).replace(/\\/g, "/") as FilePath)
|
|
||||||
return { slug, anchor: parsed.anchor }
|
|
||||||
}
|
|
||||||
|
|
||||||
const currentParts = currentSlug.split("/")
|
|
||||||
const currentDir = currentParts.slice(0, -1)
|
|
||||||
|
|
||||||
const targetParts = target.replace(/\\/g, "/").split("/")
|
|
||||||
const resolved: string[] = [...currentDir]
|
|
||||||
|
|
||||||
for (const part of targetParts) {
|
|
||||||
if (part === "..") {
|
|
||||||
resolved.pop()
|
|
||||||
} else if (part !== "." && part.length > 0) {
|
|
||||||
resolved.push(part)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const slug = slugifyFilePath(resolved.join("/") as FilePath)
|
|
||||||
return { slug, anchor: parsed.anchor }
|
|
||||||
}
|
|
||||||
|
|
||||||
const globalWikilinkRegex = /!?\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/g
|
|
||||||
|
|
||||||
export function extractWikilinksWithPositions(text: string): WikilinkWithPosition[] {
|
|
||||||
const results: WikilinkWithPosition[] = []
|
|
||||||
let match: RegExpExecArray | null
|
|
||||||
|
|
||||||
globalWikilinkRegex.lastIndex = 0
|
|
||||||
|
|
||||||
while ((match = globalWikilinkRegex.exec(text)) !== null) {
|
|
||||||
const [fullMatch, target, anchor, alias] = match
|
|
||||||
|
|
||||||
results.push({
|
|
||||||
wikilink: {
|
|
||||||
raw: fullMatch,
|
|
||||||
target: target?.trim() ?? "",
|
|
||||||
anchor: anchor?.trim(),
|
|
||||||
alias: alias?.trim(),
|
|
||||||
embed: fullMatch.startsWith("!"),
|
|
||||||
},
|
|
||||||
start: match.index,
|
|
||||||
end: match.index + fullMatch.length,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return results
|
|
||||||
}
|
|
||||||
548
quartz/workers/semantic.worker.ts
Normal file
548
quartz/workers/semantic.worker.ts
Normal file
@@ -0,0 +1,548 @@
|
|||||||
|
// Unified semantic search worker: handles data loading and query execution
|
||||||
|
import { env, pipeline } from "@huggingface/transformers"
|
||||||
|
import "onnxruntime-web/webgpu"
|
||||||
|
import "onnxruntime-web/wasm"
|
||||||
|
|
||||||
|
export {}
|
||||||
|
|
||||||
|
type VectorShardMeta = {
|
||||||
|
path: string
|
||||||
|
rows: number
|
||||||
|
rowOffset: number
|
||||||
|
byteLength: number
|
||||||
|
sha256?: string
|
||||||
|
byteStride: number
|
||||||
|
}
|
||||||
|
|
||||||
|
type LevelSection = {
|
||||||
|
level: number
|
||||||
|
indptr: { offset: number; elements: number; byteLength: number }
|
||||||
|
indices: { offset: number; elements: number; byteLength: number }
|
||||||
|
}
|
||||||
|
|
||||||
|
type ChunkMetadata = {
|
||||||
|
parentSlug: string
|
||||||
|
chunkId: number
|
||||||
|
}
|
||||||
|
|
||||||
|
type Manifest = {
|
||||||
|
version: number
|
||||||
|
dims: number
|
||||||
|
dtype: string
|
||||||
|
normalized: boolean
|
||||||
|
rows: number
|
||||||
|
shardSizeRows: number
|
||||||
|
vectors: {
|
||||||
|
dtype: string
|
||||||
|
rows: number
|
||||||
|
dims: number
|
||||||
|
shards: VectorShardMeta[]
|
||||||
|
}
|
||||||
|
ids: string[]
|
||||||
|
titles?: string[]
|
||||||
|
chunkMetadata?: Record<string, ChunkMetadata>
|
||||||
|
hnsw: {
|
||||||
|
M: number
|
||||||
|
efConstruction: number
|
||||||
|
entryPoint: number
|
||||||
|
maxLevel: number
|
||||||
|
graph: {
|
||||||
|
path: string
|
||||||
|
sha256?: string
|
||||||
|
levels: LevelSection[]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type InitMessage = {
|
||||||
|
type: "init"
|
||||||
|
cfg: any
|
||||||
|
manifestUrl: string
|
||||||
|
baseUrl?: string
|
||||||
|
disableCache?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
type SearchMessage = { type: "search"; text: string; k: number; seq: number }
|
||||||
|
type ResetMessage = { type: "reset" }
|
||||||
|
|
||||||
|
type WorkerMessage = InitMessage | SearchMessage | ResetMessage
|
||||||
|
|
||||||
|
type ReadyMessage = { type: "ready" }
|
||||||
|
|
||||||
|
type ProgressMessage = {
|
||||||
|
type: "progress"
|
||||||
|
loadedRows: number
|
||||||
|
totalRows: number
|
||||||
|
}
|
||||||
|
|
||||||
|
type SearchHit = { id: number; score: number }
|
||||||
|
|
||||||
|
type SearchResultMessage = {
|
||||||
|
type: "search-result"
|
||||||
|
seq: number
|
||||||
|
semantic: SearchHit[]
|
||||||
|
}
|
||||||
|
|
||||||
|
type ErrorMessage = { type: "error"; seq?: number; message: string }
|
||||||
|
|
||||||
|
type WorkerState = "idle" | "loading" | "ready" | "error"
|
||||||
|
|
||||||
|
// IndexedDB configuration
|
||||||
|
const DB_NAME = "semantic-search-cache"
|
||||||
|
const STORE_NAME = "assets"
|
||||||
|
const DB_VERSION = 1
|
||||||
|
const hasIndexedDB = typeof indexedDB !== "undefined"
|
||||||
|
const supportsSharedArrayBuffer = typeof SharedArrayBuffer !== "undefined"
|
||||||
|
|
||||||
|
// State
|
||||||
|
let state: WorkerState = "idle"
|
||||||
|
let manifest: Manifest | null = null
|
||||||
|
let cfg: any = null
|
||||||
|
let vectorsView: Float32Array | null = null
|
||||||
|
let dims = 0
|
||||||
|
let rows = 0
|
||||||
|
let classifier: any = null
|
||||||
|
let envConfigured = false
|
||||||
|
let entryPoint = -1
|
||||||
|
let maxLevel = 0
|
||||||
|
let efDefault = 128
|
||||||
|
let levelGraph: { indptr: Uint32Array; indices: Uint32Array }[] = []
|
||||||
|
let abortController: AbortController | null = null
|
||||||
|
let dbPromise: Promise<IDBDatabase> | null = null
|
||||||
|
|
||||||
|
// IndexedDB helpers
|
||||||
|
function openDatabase(): Promise<IDBDatabase> {
|
||||||
|
if (!hasIndexedDB) {
|
||||||
|
return Promise.reject(new Error("indexedDB unavailable"))
|
||||||
|
}
|
||||||
|
if (!dbPromise) {
|
||||||
|
dbPromise = new Promise((resolve, reject) => {
|
||||||
|
const req = indexedDB.open(DB_NAME, DB_VERSION)
|
||||||
|
req.onupgradeneeded = () => {
|
||||||
|
const db = req.result
|
||||||
|
if (!db.objectStoreNames.contains(STORE_NAME)) {
|
||||||
|
db.createObjectStore(STORE_NAME)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
req.onsuccess = () => resolve(req.result)
|
||||||
|
req.onerror = () => reject(req.error ?? new Error("failed to open cache store"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return dbPromise
|
||||||
|
}
|
||||||
|
|
||||||
|
async function readAsset(hash: string): Promise<ArrayBuffer | null> {
|
||||||
|
if (!hasIndexedDB) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
const db = await openDatabase()
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const tx = db.transaction(STORE_NAME, "readonly")
|
||||||
|
const store = tx.objectStore(STORE_NAME)
|
||||||
|
const req = store.get(hash)
|
||||||
|
req.onsuccess = () => {
|
||||||
|
const value = req.result
|
||||||
|
if (value instanceof ArrayBuffer) {
|
||||||
|
resolve(value)
|
||||||
|
} else if (value && value.buffer instanceof ArrayBuffer) {
|
||||||
|
resolve(value.buffer as ArrayBuffer)
|
||||||
|
} else {
|
||||||
|
resolve(null)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
req.onerror = () => reject(req.error ?? new Error("failed to read cached asset"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function writeAsset(hash: string, buffer: ArrayBuffer): Promise<void> {
|
||||||
|
if (!hasIndexedDB) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const db = await openDatabase()
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const tx = db.transaction(STORE_NAME, "readwrite")
|
||||||
|
const store = tx.objectStore(STORE_NAME)
|
||||||
|
const req = store.put(buffer, hash)
|
||||||
|
req.onsuccess = () => resolve()
|
||||||
|
req.onerror = () => reject(req.error ?? new Error("failed to cache asset"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function toAbsolute(path: string, baseUrl?: string): string {
|
||||||
|
if (path.startsWith("http://") || path.startsWith("https://")) {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
const base = baseUrl ?? self.location.origin
|
||||||
|
return new URL(path, base).toString()
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchBinary(
|
||||||
|
path: string,
|
||||||
|
disableCache: boolean,
|
||||||
|
sha?: string,
|
||||||
|
): Promise<ArrayBuffer> {
|
||||||
|
if (!disableCache && sha && hasIndexedDB) {
|
||||||
|
try {
|
||||||
|
const cached = await readAsset(sha)
|
||||||
|
if (cached) {
|
||||||
|
return cached
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// fall through to network fetch on cache errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const res = await fetch(path, { signal: abortController?.signal ?? undefined })
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`failed to fetch ${path}: ${res.status} ${res.statusText}`)
|
||||||
|
}
|
||||||
|
const payload = await res.arrayBuffer()
|
||||||
|
if (!disableCache && sha && hasIndexedDB) {
|
||||||
|
try {
|
||||||
|
await writeAsset(sha, payload)
|
||||||
|
} catch {
|
||||||
|
// ignore cache write failures
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return payload
|
||||||
|
}
|
||||||
|
|
||||||
|
async function populateVectors(
|
||||||
|
manifest: Manifest,
|
||||||
|
baseUrl: string | undefined,
|
||||||
|
disableCache: boolean | undefined,
|
||||||
|
): Promise<{ buffer: Float32Array; rowsLoaded: number }> {
|
||||||
|
if (manifest.vectors.dtype !== "fp32") {
|
||||||
|
throw new Error(`unsupported embedding dtype '${manifest.vectors.dtype}', regenerate with fp32`)
|
||||||
|
}
|
||||||
|
const rows = manifest.rows
|
||||||
|
const dims = manifest.dims
|
||||||
|
const totalBytes = rows * dims * Float32Array.BYTES_PER_ELEMENT
|
||||||
|
const buffer = supportsSharedArrayBuffer
|
||||||
|
? new Float32Array(new SharedArrayBuffer(totalBytes))
|
||||||
|
: new Float32Array(totalBytes)
|
||||||
|
let loadedRows = 0
|
||||||
|
for (const shard of manifest.vectors.shards) {
|
||||||
|
const absolute = toAbsolute(shard.path, baseUrl)
|
||||||
|
const payload = await fetchBinary(absolute, Boolean(disableCache), shard.sha256)
|
||||||
|
const view = new Float32Array(payload)
|
||||||
|
if (view.length !== shard.rows * dims) {
|
||||||
|
throw new Error(
|
||||||
|
`shard ${shard.path} has mismatched length (expected ${shard.rows * dims}, got ${view.length})`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
buffer.set(view, shard.rowOffset * dims)
|
||||||
|
loadedRows = Math.min(rows, shard.rowOffset + shard.rows)
|
||||||
|
const progress: ProgressMessage = {
|
||||||
|
type: "progress",
|
||||||
|
loadedRows,
|
||||||
|
totalRows: rows,
|
||||||
|
}
|
||||||
|
self.postMessage(progress)
|
||||||
|
}
|
||||||
|
return { buffer, rowsLoaded: loadedRows }
|
||||||
|
}
|
||||||
|
|
||||||
|
async function populateGraph(
|
||||||
|
manifest: Manifest,
|
||||||
|
baseUrl: string | undefined,
|
||||||
|
disableCache: boolean | undefined,
|
||||||
|
): Promise<ArrayBuffer> {
|
||||||
|
const graphMeta = manifest.hnsw.graph
|
||||||
|
const absolute = toAbsolute(graphMeta.path, baseUrl)
|
||||||
|
return await fetchBinary(absolute, Boolean(disableCache), graphMeta.sha256)
|
||||||
|
}
|
||||||
|
|
||||||
|
function configureRuntimeEnv() {
|
||||||
|
if (envConfigured) return
|
||||||
|
env.allowLocalModels = false
|
||||||
|
env.allowRemoteModels = true
|
||||||
|
const wasmBackend = env.backends?.onnx?.wasm
|
||||||
|
if (!wasmBackend) {
|
||||||
|
throw new Error("transformers.js ONNX runtime backend unavailable")
|
||||||
|
}
|
||||||
|
const cdnBase = `https://cdn.jsdelivr.net/npm/@huggingface/transformers@${env.version}/dist/`
|
||||||
|
wasmBackend.wasmPaths = cdnBase
|
||||||
|
envConfigured = true
|
||||||
|
}
|
||||||
|
|
||||||
|
async function ensureEncoder() {
|
||||||
|
if (classifier) return
|
||||||
|
if (!cfg?.model) {
|
||||||
|
throw new Error("semantic worker missing model identifier")
|
||||||
|
}
|
||||||
|
configureRuntimeEnv()
|
||||||
|
const dtype = typeof cfg?.dtype === "string" && cfg.dtype.length > 0 ? cfg.dtype : "fp32"
|
||||||
|
const pipelineOpts: Record<string, unknown> = {
|
||||||
|
device: "wasm",
|
||||||
|
dtype,
|
||||||
|
local_files_only: false,
|
||||||
|
}
|
||||||
|
classifier = await pipeline("feature-extraction", cfg.model, pipelineOpts)
|
||||||
|
cfg.dtype = dtype
|
||||||
|
}
|
||||||
|
|
||||||
|
function vectorSlice(id: number): Float32Array {
|
||||||
|
if (!vectorsView) {
|
||||||
|
throw new Error("vector buffer not configured")
|
||||||
|
}
|
||||||
|
const start = id * dims
|
||||||
|
const end = start + dims
|
||||||
|
return vectorsView.subarray(start, end)
|
||||||
|
}
|
||||||
|
|
||||||
|
function dot(a: Float32Array, b: Float32Array): number {
|
||||||
|
let s = 0
|
||||||
|
for (let i = 0; i < dims; i++) {
|
||||||
|
s += a[i] * b[i]
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
function neighborsFor(level: number, node: number): Uint32Array {
|
||||||
|
const meta = levelGraph[level]
|
||||||
|
if (!meta) return new Uint32Array()
|
||||||
|
const { indptr, indices } = meta
|
||||||
|
if (node < 0 || node + 1 >= indptr.length) return new Uint32Array()
|
||||||
|
const start = indptr[node]
|
||||||
|
const end = indptr[node + 1]
|
||||||
|
return indices.subarray(start, end)
|
||||||
|
}
|
||||||
|
|
||||||
|
function insertSortedDescending(arr: SearchHit[], item: SearchHit) {
|
||||||
|
let idx = arr.length
|
||||||
|
while (idx > 0 && arr[idx - 1].score < item.score) {
|
||||||
|
idx -= 1
|
||||||
|
}
|
||||||
|
arr.splice(idx, 0, item)
|
||||||
|
}
|
||||||
|
|
||||||
|
function bruteForceSearch(query: Float32Array, k: number): SearchHit[] {
|
||||||
|
if (!vectorsView) return []
|
||||||
|
const hits: SearchHit[] = []
|
||||||
|
for (let id = 0; id < rows; id++) {
|
||||||
|
const score = dot(query, vectorSlice(id))
|
||||||
|
if (hits.length < k) {
|
||||||
|
insertSortedDescending(hits, { id, score })
|
||||||
|
} else if (score > hits[hits.length - 1].score) {
|
||||||
|
insertSortedDescending(hits, { id, score })
|
||||||
|
hits.length = k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return hits
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Approximate top-k search over the HNSW graph; falls back to exact brute
 * force when no graph/index is loaded. Scores are inner products (higher is
 * better).
 *
 * Phase 1 greedily descends from the entry point through levels > 0; phase 2
 * runs a best-first beam search on level 0 with beam width `ef`.
 */
function hnswSearch(query: Float32Array, k: number): SearchHit[] {
  if (!manifest || !vectorsView || entryPoint < 0 || levelGraph.length === 0) {
    return bruteForceSearch(query, k)
  }
  // Beam width: at least the configured default, widened for large k.
  const ef = Math.max(efDefault, k * 10)
  let ep = entryPoint
  let epScore = dot(query, vectorSlice(ep))
  // Phase 1: on each upper level, hill-climb to any better-scoring neighbor
  // until no neighbor improves the current entry point.
  for (let level = maxLevel; level > 0; level--) {
    let changed = true
    while (changed) {
      changed = false
      const neigh = neighborsFor(level, ep)
      for (let i = 0; i < neigh.length; i++) {
        const candidate = neigh[i]
        // Defensive: skip graph ids outside the loaded row range.
        if (candidate >= rows) continue
        const score = dot(query, vectorSlice(candidate))
        if (score > epScore) {
          epScore = score
          ep = candidate
          changed = true
        }
      }
    }
  }

  // Phase 2: best-first expansion on level 0. Both lists stay sorted in
  // descending score order; `best` is capped at `ef` entries.
  const visited = new Set<number>()
  const candidateQueue: SearchHit[] = []
  const best: SearchHit[] = []
  insertSortedDescending(candidateQueue, { id: ep, score: epScore })
  insertSortedDescending(best, { id: ep, score: epScore })
  visited.add(ep)

  while (candidateQueue.length > 0) {
    // shift() pops the highest-scoring remaining candidate.
    const current = candidateQueue.shift()!
    const worstBest = best.length >= ef ? best[best.length - 1].score : -Infinity
    // Terminate once even the best candidate cannot improve a full beam.
    if (current.score < worstBest && best.length >= ef) {
      break
    }
    const neigh = neighborsFor(0, current.id)
    for (let i = 0; i < neigh.length; i++) {
      const candidate = neigh[i]
      if (candidate >= rows || visited.has(candidate)) continue
      visited.add(candidate)
      const score = dot(query, vectorSlice(candidate))
      const hit = { id: candidate, score }
      insertSortedDescending(candidateQueue, hit)
      if (best.length < ef || score > best[best.length - 1].score) {
        insertSortedDescending(best, hit)
        if (best.length > ef) {
          // Evict the current worst to keep the beam at ef entries.
          best.pop()
        }
      }
    }
  }

  // `best` is already ordered; the sort is a cheap invariant re-assertion.
  best.sort((a, b) => b.score - a.score)
  return best.slice(0, k)
}
|
||||||
|
|
||||||
|
async function embed(text: string, isQuery: boolean = false): Promise<Float32Array> {
|
||||||
|
await ensureEncoder()
|
||||||
|
// Apply model-specific prefixes for asymmetric search
|
||||||
|
let prefixedText = text
|
||||||
|
if (cfg?.model) {
|
||||||
|
const modelName = cfg.model.toLowerCase()
|
||||||
|
switch (true) {
|
||||||
|
case modelName.includes("e5"): {
|
||||||
|
// E5 models require query: or passage: prefix
|
||||||
|
prefixedText = isQuery ? `query: ${text}` : `passage: ${text}`
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case modelName.includes("qwen") && modelName.includes("embedding"): {
|
||||||
|
// Qwen3-Embedding requires task instruction for queries only
|
||||||
|
if (isQuery) {
|
||||||
|
const task = "Given a web search query, retrieve relevant passages that answer the query"
|
||||||
|
prefixedText = `Instruct: ${task}\nQuery: ${text}`
|
||||||
|
}
|
||||||
|
// Documents use plain text (no prefix)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case modelName.includes("embeddinggemma"): {
|
||||||
|
// embeddinggemma requires specific prefixes
|
||||||
|
prefixedText = isQuery
|
||||||
|
? `task: search result | query: ${text}`
|
||||||
|
: `title: none | text: ${text}`
|
||||||
|
break
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const out = await classifier(prefixedText, { pooling: "mean", normalize: true })
|
||||||
|
const data = Array.from(out?.data ?? out) as number[]
|
||||||
|
const vec = new Float32Array(dims)
|
||||||
|
for (let i = 0; i < dims; i++) vec[i] = data[i] ?? 0
|
||||||
|
return vec
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Handles the "init" message: fetches the manifest, streams in the vector
 * shards and HNSW graph, then posts "ready" once searches can begin.
 *
 * State transitions: -> loading on entry, -> ready on success, -> error on
 * failure (the rethrown error is reported by the message dispatcher).
 *
 * @throws when called while already loading/ready, or on any fetch or
 *   validation failure.
 */
async function handleInit(msg: InitMessage) {
  if (state === "loading" || state === "ready") {
    throw new Error("worker already initialized or loading")
  }

  state = "loading"
  // Cancel any fetches still running from an earlier init, then arm a fresh
  // controller for this load.
  abortController?.abort()
  abortController = new AbortController()

  try {
    cfg = msg.cfg

    const manifestUrl = toAbsolute(msg.manifestUrl, msg.baseUrl)
    const response = await fetch(manifestUrl, { signal: abortController.signal })
    if (!response.ok) {
      throw new Error(
        `failed to fetch manifest ${manifestUrl}: ${response.status} ${response.statusText}`,
      )
    }
    // NOTE(review): the manifest JSON is trusted as-is (cast, not validated).
    manifest = (await response.json()) as Manifest

    if (manifest.vectors.dtype !== "fp32") {
      throw new Error(
        `unsupported embedding dtype '${manifest.vectors.dtype}', regenerate with fp32`,
      )
    }

    dims = manifest.dims
    rows = manifest.rows

    const { buffer: vectorBuffer } = await populateVectors(manifest, msg.baseUrl, msg.disableCache)
    vectorsView = vectorBuffer

    const graphBuffer = await populateGraph(manifest, msg.baseUrl, msg.disableCache)

    entryPoint = manifest.hnsw.entryPoint
    maxLevel = manifest.hnsw.maxLevel
    // Default beam width scales with graph connectivity M, floored at 64.
    efDefault = Math.max(64, manifest.hnsw.M * 4)
    // Build zero-copy CSR views (indptr/indices) per level over the raw graph
    // buffer; byte offsets and element counts come from the manifest.
    // NOTE(review): assumes manifest offsets are 4-byte aligned — Uint32Array
    // views throw otherwise; confirm against the index generator.
    levelGraph = manifest.hnsw.graph.levels.map((level) => {
      const indptr = new Uint32Array(graphBuffer, level.indptr.offset, level.indptr.elements)
      const indices = new Uint32Array(graphBuffer, level.indices.offset, level.indices.elements)
      return { indptr, indices }
    })

    state = "ready"
    const ready: ReadyMessage = { type: "ready" }
    self.postMessage(ready)
  } catch (err) {
    state = "error"
    throw err
  }
}
|
||||||
|
|
||||||
|
async function handleSearch(msg: SearchMessage) {
|
||||||
|
if (state !== "ready") {
|
||||||
|
throw new Error("worker not ready for search")
|
||||||
|
}
|
||||||
|
if (!manifest || !vectorsView) {
|
||||||
|
throw new Error("semantic worker not configured")
|
||||||
|
}
|
||||||
|
|
||||||
|
const queryVec = await embed(msg.text, true)
|
||||||
|
const semanticHits = hnswSearch(queryVec, Math.max(1, msg.k))
|
||||||
|
const message: SearchResultMessage = {
|
||||||
|
type: "search-result",
|
||||||
|
seq: msg.seq,
|
||||||
|
semantic: semanticHits,
|
||||||
|
}
|
||||||
|
self.postMessage(message)
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleReset() {
|
||||||
|
abortController?.abort()
|
||||||
|
abortController = null
|
||||||
|
state = "idle"
|
||||||
|
manifest = null
|
||||||
|
cfg = null
|
||||||
|
vectorsView = null
|
||||||
|
dims = 0
|
||||||
|
rows = 0
|
||||||
|
classifier = null
|
||||||
|
envConfigured = false
|
||||||
|
levelGraph = []
|
||||||
|
entryPoint = -1
|
||||||
|
maxLevel = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
self.onmessage = (event: MessageEvent<WorkerMessage>) => {
|
||||||
|
const data = event.data
|
||||||
|
|
||||||
|
if (data.type === "reset") {
|
||||||
|
handleReset()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.type === "init") {
|
||||||
|
void handleInit(data).catch((err: unknown) => {
|
||||||
|
const message: ErrorMessage = {
|
||||||
|
type: "error",
|
||||||
|
message: err instanceof Error ? err.message : String(err),
|
||||||
|
}
|
||||||
|
self.postMessage(message)
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.type === "search") {
|
||||||
|
void handleSearch(data).catch((err: unknown) => {
|
||||||
|
const message: ErrorMessage = {
|
||||||
|
type: "error",
|
||||||
|
seq: data.seq,
|
||||||
|
message: err instanceof Error ? err.message : String(err),
|
||||||
|
}
|
||||||
|
self.postMessage(message)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user