forked from github/quartz

**Compare commits** — 2 commits on `graph-roll` (1076bf31ed)

| Author | SHA1 | Date |
| --- | --- | --- |
| … | 4c37e6f3fb | … |
| … | 9b1f48eeb6 | … |
**`.github/workflows/build-preview.yaml`** (vendored) — 8 lines changed

```diff
@@ -11,17 +11,17 @@ jobs:
     runs-on: ubuntu-latest
     name: Build Preview
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
 
       - name: Setup Node
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v4
         with:
           node-version: 22
 
       - name: Cache dependencies
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: ~/.npm
           key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -37,7 +37,7 @@ jobs:
         run: npx quartz build -d docs -v
 
       - name: Upload build artifact
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v4
         with:
           name: preview-build
           path: public
```
**`.github/workflows/ci.yaml`** (vendored) — 10 lines changed

```diff
@@ -19,17 +19,17 @@ jobs:
     permissions:
       contents: write
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
 
       - name: Setup Node
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v4
         with:
           node-version: 22
 
       - name: Cache dependencies
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: ~/.npm
           key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,11 +53,11 @@ jobs:
     permissions:
       contents: write
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
       - name: Setup Node
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v4
         with:
           node-version: 22
       - name: Get package version
```
**`.github/workflows/deploy-preview.yaml`** (vendored) — 2 lines changed

```diff
@@ -18,7 +18,7 @@ jobs:
     name: Deploy Preview to Cloudflare Pages
     steps:
       - name: Download build artifact
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v4
         id: preview-build-artifact
         with:
           name: preview-build
```
**`.github/workflows/docker-build-push.yaml`** (vendored) — 6 lines changed

```diff
@@ -21,11 +21,11 @@ jobs:
           echo "OWNER_LOWERCASE=${OWNER,,}" >> ${GITHUB_ENV}
         env:
           OWNER: "${{ github.repository_owner }}"
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 1
       - name: Inject slug/short variables
-        uses: rlespinasse/github-slug-action@v5.4.0
+        uses: rlespinasse/github-slug-action@v5.1.0
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
       - name: Set up Docker Buildx
@@ -37,7 +37,7 @@ jobs:
           network=host
       - name: Install cosign
         if: github.event_name != 'pull_request'
-        uses: sigstore/cosign-installer@v4.0.0
+        uses: sigstore/cosign-installer@v3.9.2
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         if: github.event_name != 'pull_request'
```
**`.gitignore`** (vendored) — 16 lines changed

```diff
@@ -9,19 +9,3 @@ tsconfig.tsbuildinfo
 private/
 .replit
 replit.nix
-erl_crash.dump
-# content/ is generated by the export script; only keep the placeholder
-content/*
-!content/.gitkeep
-# static/ox-hugo/ is populated by ox-hugo during export
-static/ox-hugo/
-# Elixir/Mix build artifacts for the pipeline project
-scripts/pipeline/_build/
-scripts/pipeline/deps/
-scripts/pipeline/erl_crash.dump
-# Test helpers (not needed in production)
-scripts/test.bib
-scripts/test_pipeline.exs
-/pipeline/deps/
-/pipeline/_build/
-/pipeline/result
```
**`AGENTS.md`** (deleted, −254 lines). Former content:

````markdown
# AGENTS.md - Coding Agent Instructions

This document provides essential information for AI coding agents working in this repository.

## Project Overview

**Quartz** is a static site generator for publishing digital gardens and notes as websites.
Built with TypeScript, Preact, and unified/remark/rehype for markdown processing.

| Stack         | Technology                                |
| ------------- | ----------------------------------------- |
| Language      | TypeScript 5.x (strict mode)              |
| Runtime       | Node.js >=22 (v22.16.0 pinned)            |
| Package Mgr   | npm >=10.9.2                              |
| Module System | ES Modules (`"type": "module"`)           |
| UI Framework  | Preact 10.x (JSX with `react-jsx` pragma) |
| Build Tool    | esbuild                                   |
| Styling       | SCSS via esbuild-sass-plugin              |

## Environment

This is a Nix project. Use the provided `flake.nix` to enter a dev shell with Node.js 22 and npm:

```bash
nix develop
```

All `npm` commands below must be run inside the dev shell.

## Build, Lint, and Test Commands

```bash
# Type check and format check (CI validation)
npm run check

# Auto-format code with Prettier
npm run format

# Run all tests
npm run test

# Run a single test file
npx tsx --test quartz/util/path.test.ts

# Run tests matching a pattern (use --test-name-pattern)
npx tsx --test --test-name-pattern="typeguards" quartz/util/path.test.ts

# Build the static site
npx quartz build

# Build and serve with hot reload
npx quartz build --serve

# Profile build performance
npm run profile
```

### Test Files Location

Tests use Node.js native test runner via `tsx`. Test files follow the `*.test.ts` pattern:

- `quartz/util/path.test.ts`
- `quartz/util/fileTrie.test.ts`
- `quartz/components/scripts/search.test.ts`

## Code Style Guidelines

### Prettier Configuration (`.prettierrc`)

```json
{
  "printWidth": 100,
  "tabWidth": 2,
  "semi": false,
  "trailingComma": "all",
  "quoteProps": "as-needed"
}
```

**No ESLint** - only Prettier for formatting. Run `npm run format` before committing.

### TypeScript Configuration

- **Strict mode enabled** (`strict: true`)
- `noUnusedLocals: true` - no unused variables
- `noUnusedParameters: true` - no unused function parameters
- JSX configured for Preact (`jsxImportSource: "preact"`)

### Import Conventions

```typescript
// 1. External packages first
import { PluggableList } from "unified"
import { visit } from "unist-util-visit"

// 2. Internal utilities/types (relative paths)
import { QuartzTransformerPlugin } from "../types"
import { FilePath, slugifyFilePath } from "../../util/path"
import { i18n } from "../../i18n"
```

### Naming Conventions

| Element          | Convention   | Example                             |
| ---------------- | ------------ | ----------------------------------- |
| Files (utils)    | camelCase    | `path.ts`, `fileTrie.ts`            |
| Files (comps)    | PascalCase   | `TableOfContents.tsx`, `Search.tsx` |
| Types/Interfaces | PascalCase   | `QuartzComponent`, `FullSlug`       |
| Type Guards      | `is*` prefix | `isFilePath()`, `isFullSlug()`      |
| Constants        | UPPER_CASE   | `QUARTZ`, `UPSTREAM_NAME`           |
| Options types    | `Options`    | `interface Options { ... }`         |

### Branded Types Pattern

This codebase uses branded types for type-safe path handling:

```typescript
type SlugLike<T> = string & { __brand: T }
export type FilePath = SlugLike<"filepath">
export type FullSlug = SlugLike<"full">
export type SimpleSlug = SlugLike<"simple">

// Always validate with type guards before using
export function isFilePath(s: string): s is FilePath { ... }
```

### Component Pattern (Preact)

Components use a factory function pattern with attached static properties:

```typescript
export default ((userOpts?: Partial<Options>) => {
  const opts: Options = { ...defaultOptions, ...userOpts }

  const ComponentName: QuartzComponent = ({ cfg, displayClass }: QuartzComponentProps) => {
    return <div class={classNames(displayClass, "component-name")}>...</div>
  }

  ComponentName.css = style // SCSS styles
  ComponentName.afterDOMLoaded = script // Client-side JS
  return ComponentName
}) satisfies QuartzComponentConstructor
```

### Plugin Pattern

Three plugin types: transformers, filters, and emitters.

```typescript
export const PluginName: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => {
  const opts = { ...defaultOptions, ...userOpts }
  return {
    name: "PluginName",
    markdownPlugins(ctx) { return [...] },
    htmlPlugins(ctx) { return [...] },
    externalResources(ctx) { return { js: [], css: [] } },
  }
}
```

### Testing Pattern

Use Node.js native test runner with `assert`:

```typescript
import test, { describe, beforeEach } from "node:test"
import assert from "node:assert"

describe("FeatureName", () => {
  test("should do something", () => {
    assert.strictEqual(actual, expected)
    assert.deepStrictEqual(actualObj, expectedObj)
    assert(condition) // truthy assertion
    assert(!condition) // falsy assertion
  })
})
```

### Error Handling

- Use `try/catch` for critical operations (file I/O, parsing)
- Custom `trace` utility for error reporting with stack traces
- `process.exit(1)` for fatal errors
- `console.warn()` for non-fatal issues

### Async Patterns

- Prefer `async/await` over raw promises
- Use async generators (`async *emit()`) for streaming file output
- Use `async-mutex` for concurrent build protection

## Project Structure

```
quartz/
├── bootstrap-cli.mjs   # CLI entry point
├── build.ts            # Build orchestration
├── cfg.ts              # Configuration types
├── components/         # Preact UI components
│   ├── *.tsx           # Components
│   ├── scripts/        # Client-side scripts (*.inline.ts)
│   └── styles/         # Component SCSS
├── plugins/
│   ├── transformers/   # Markdown AST transformers
│   ├── filters/        # Content filters
│   ├── emitters/       # Output generators
│   └── types.ts        # Plugin type definitions
├── processors/         # Build pipeline (parse/filter/emit)
├── util/               # Utility functions
└── i18n/               # Internationalization (30+ locales)
```

## Branch Workflow

This is a fork of [jackyzha0/quartz](https://github.com/jackyzha0/quartz) with org-roam customizations.

| Branch      | Purpose                                          |
| ----------- | ------------------------------------------------ |
| `main`      | Clean mirror of upstream quartz — no custom code |
| `org-roam`  | Default branch — all customizations live here    |
| `feature/*` | Short-lived branches off `org-roam`              |

### Pulling Upstream Updates

```bash
git checkout main
git fetch upstream
git merge upstream/main
git checkout org-roam
git merge main
# Resolve conflicts if any, then commit
```

### Working on Features

```bash
git checkout org-roam
git checkout -b feature/my-feature
# ... work ...
git checkout org-roam
git merge feature/my-feature
git branch -d feature/my-feature
```

**Merge direction:** `upstream → main → org-roam → feature/*`

## Important Notes

- **Client-side scripts**: Use `.inline.ts` suffix, bundled via esbuild
- **Isomorphic code**: `quartz/util/path.ts` must not use Node.js APIs
- **Incremental builds**: Plugins can implement `partialEmit` for efficiency
- **Markdown flavors**: Supports Obsidian (`ofm.ts`) and Roam (`roam.ts`) syntax
- **Pipeline build artifacts**: `scripts/pipeline/_build/` and `scripts/pipeline/deps/`
  are gitignored — run `mix deps.get` inside `scripts/pipeline/` after a fresh clone
````
**`README.md`** — 90 lines changed (96 → 14)

````diff
@@ -1,96 +1,14 @@
-# Quartz v4 — org-roam edition
+# Quartz v4
 
-> "[One] who works with the door open gets all kinds of interruptions, but [they] also occasionally gets clues as to what the world is and what might be important." — Richard Hamming
+> “[One] who works with the door open gets all kinds of interruptions, but [they] also occasionally gets clues as to what the world is and what might be important.” — Richard Hamming
 
 Quartz is a set of tools that helps you publish your [digital garden](https://jzhao.xyz/posts/networked-thought) and notes as a website for free.
 Quartz v4 features a from-the-ground rewrite focusing on end-user extensibility and ease-of-use.
 
-This fork adds first-class support for [org-roam](https://www.orgroam.com/) notes via [ox-hugo](https://ox-hugo.scripter.co/).
-
-🔗 Upstream documentation: https://quartz.jzhao.xyz/
+🔗 Read the documentation and get started: https://quartz.jzhao.xyz/
 
 [Join the Discord Community](https://discord.gg/cRFFHYye7t)
 
-## Quick Start
-
-### Prerequisites
-
-This project uses Nix. Enter the development shell, which provides Node.js 22, Elixir, and Emacs with ox-hugo:
-
-```bash
-nix develop
-```
-
-All commands below must be run inside this shell.
-
-```bash
-npm install
-```
-
-### Building from org-roam notes
-
-Your org-roam notes live in a separate directory. Point `NOTES_DIR` at it:
-
-```bash
-# Export notes to content/ and build the site
-NOTES_DIR=/path/to/notes npm run build:notes
-
-# Export, build, and serve with hot reload
-NOTES_DIR=/path/to/notes npm run serve:notes
-
-# Export only (wipes content/ and re-exports all .org files)
-NOTES_DIR=/path/to/notes npm run export
-```
-
-The export pipeline runs in four phases:
-
-1. **Wipe** `content/` clean
-2. **Export** every `.org` file via `emacs --batch` + ox-hugo → Markdown
-3. **Transform** — post-process the Markdown (citation resolution, etc.)
-4. **Index** — generate a fallback `index.md` if none was exported
-
-#### Citations (org-citar → Zotero links)
-
-org-citar references (`[cite:@key]`) are resolved to clickable Zotero links.
-With Zotero running and the [Better BibTeX](https://retorque.re/zotero-better-bibtex/)
-plugin installed, no extra configuration is needed — the pipeline detects it
-automatically and links directly to the PDF in your library.
-
-```bash
-# Use a local .bib file as fallback when Zotero is not running
-BIBTEX_FILE=/path/to/refs.bib NOTES_DIR=/path/to/notes npm run export
-
-# Control warning verbosity for unresolved keys
-CITATION_MODE=strict NOTES_DIR=/path/to/notes npm run export
-```
-
-| Env var         | Default                  | Purpose                                   |
-| --------------- | ------------------------ | ----------------------------------------- |
-| `BIBTEX_FILE`   | —                        | Path to `.bib` file for citation fallback |
-| `ZOTERO_URL`    | `http://localhost:23119` | Zotero Better BibTeX base URL             |
-| `CITATION_MODE` | `warn`                   | `silent` / `warn` / `strict`              |
-
-### Building without org-roam notes
-
-If you manage `content/` directly with Markdown files:
-
-```bash
-# Build the site
-npx quartz build
-
-# Build and serve with hot reload
-npx quartz build --serve
-```
-
-The site is generated in `public/`. When serving, visit http://localhost:8080.
-
-### Development
-
-```bash
-npm run check  # type check + format check
-npm run format # auto-format with Prettier
-npm run test   # run tests
-```
-
 ## Sponsors
 
 <p align="center">
````
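The fork-only README section above describes a four-phase export (wipe → export → transform → index) driven from npm scripts. The transform phase can also be invoked directly from the Elixir pipeline API — a minimal sketch, assuming an already-exported `content/` directory and the default options (the directory name is illustrative; see the `Pipeline` moduledoc further below):

```elixir
# Sketch: phase 3 (transform) on its own, mirroring what `npm run export`
# runs after the ox-hugo export step.
opts = %{
  zotero_url: System.get_env("ZOTERO_URL", "http://localhost:23119"),
  bibtex_file: System.get_env("BIBTEX_FILE"),
  citation_mode: :warn
}

{:ok, stats} = Pipeline.run("content", [Pipeline.Transforms.Citations], opts)
IO.inspect(stats, label: "files changed per transform")
```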
```diff
@@ -34,9 +34,6 @@ This part of the configuration concerns anything that can affect the whole site.
-  - `{ provider: 'tinylytics', siteId: '<your-site-id>' }`: use [Tinylytics](https://tinylytics.app/);
-  - `{ provider: 'cabin' }` or `{ provider: 'cabin', host: 'https://cabin.example.com' }` (custom domain): use [Cabin](https://withcabin.com);
   - `{provider: 'clarity', projectId: '<your-clarity-id-code' }`: use [Microsoft clarity](https://clarity.microsoft.com/). The project id can be found on top of the overview page.
   - `{ provider: 'matomo', siteId: '<your-matomo-id-code', host: 'matomo.example.com' }`: use [Matomo](https://matomo.org/), without protocol.
   - `{ provider: 'vercel' }`: use [Vercel Web Analytics](https://vercel.com/docs/concepts/analytics).
-  - `{ provider: 'rybbit', siteId: 'my-rybbit-id' }` (managed) or `{ provider: 'rybbit', siteId: 'my-rybbit-id', host: 'my-rybbit-domain.com' }` (self-hosted) use [Rybbit](https://rybbit.com);
 - `locale`: used for [[i18n]] and date formatting
 - `baseUrl`: this is used for sitemaps and RSS feeds that require an absolute URL to know where the canonical 'home' of your site lives. This is normally the deployed URL of your site (e.g. `quartz.jzhao.xyz` for this site). Do not include the protocol (i.e. `https://`) or any leading or trailing slashes.
   - This should also include the subpath if you are [[hosting]] on GitHub pages without a custom domain. For example, if my repository is `jackyzha0/quartz`, GitHub pages would deploy to `https://jackyzha0.github.io/quartz` and the `baseUrl` would be `jackyzha0.github.io/quartz`.
```
````diff
@@ -5,7 +5,3 @@ You can run the below one-liner to run Quartz in Docker.
 ```sh
 docker run --rm -itp 8080:8080 -p 3001:3001 -v ./content:/usr/src/app/content $(docker build -q .)
 ```
-
-> [!warning] Not to be used for production
-> Serve mode is intended for local previews only.
-> For production workloads, see the page on [[hosting]].
````
````diff
@@ -162,7 +162,7 @@ You can access the tags of a file by `node.data.tags`.
 Component.Explorer({
   filterFn: (node) => {
     // exclude files with the tag "explorerexclude"
-    return node.data?.tags?.includes("explorerexclude") !== true
+    return node.data.tags?.includes("explorerexclude") !== true
   },
 })
 ```
````
```diff
@@ -8,7 +8,7 @@ By default, Quartz only fetches previews for pages inside your vault due to [CORS
 
 When [[creating components|creating your own components]], you can include this `popover-hint` class to also include it in the popover.
 
-Similar to Obsidian, [[quartz-layout-desktop.png|images referenced using wikilinks]] can also be viewed as popups.
+Similar to Obsidian, [[quartz layout.png|images referenced using wikilinks]] can also be viewed as popups.
 
 ## Configuration
```
```diff
@@ -15,7 +15,7 @@ However, if you'd like to publish your site to the world, you need a way to host
 ## Cloudflare Pages
 
 1. Log in to the [Cloudflare dashboard](https://dash.cloudflare.com/) and select your account.
-2. In Account Home, select **Compute (Workers)** > **Workers & Pages** > **Create application** > **Pages** > **Connect to Git**.
+2. In Account Home, select **Workers & Pages** > **Create application** > **Pages** > **Connect to Git**.
 3. Select the new GitHub repository that you created and, in the **Set up builds and deployments** section, provide the following information:
 
 | Configuration option | Value |
```
```diff
@@ -14,6 +14,10 @@ This plugin accepts the following configuration options:
 - `renderEngine`: the engine to use to render LaTeX equations. Can be `"katex"` for [KaTeX](https://katex.org/), `"mathjax"` for [MathJax](https://www.mathjax.org/) [SVG rendering](https://docs.mathjax.org/en/latest/output/svg.html), or `"typst"` for [Typst](https://typst.app/) (a new way to compose LaTeX equation). Defaults to KaTeX.
 - `customMacros`: custom macros for all LaTeX blocks. It takes the form of a key-value pair where the key is a new command name and the value is the expansion of the macro. For example: `{"\\R": "\\mathbb{R}"}`
 
+> [!note] Typst support
+>
+> Currently, typst doesn't support inline-math
+
 ## API
 
 - Category: Transformer
```
**`flake.lock`** (generated; deleted, −126 lines). Former content:

```json
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "flake-utils_2": {
      "inputs": {
        "systems": "systems_2"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1771008912,
        "narHash": "sha256-gf2AmWVTs8lEq7z/3ZAsgnZDhWIckkb+ZnAo5RzSxJg=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "a82ccc39b39b621151d6732718e3e250109076fa",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs_2": {
      "locked": {
        "lastModified": 1771369470,
        "narHash": "sha256-0NBlEBKkN3lufyvFegY4TYv5mCNHbi5OmBDrzihbBMQ=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0182a361324364ae3f436a63005877674cf45efb",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "pipeline": {
      "inputs": {
        "flake-utils": "flake-utils_2",
        "nixpkgs": "nixpkgs_2"
      },
      "locked": {
        "path": "./pipeline",
        "type": "path"
      },
      "original": {
        "path": "./pipeline",
        "type": "path"
      },
      "parent": []
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs",
        "pipeline": "pipeline"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "systems_2": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
```
**`flake.nix`** (deleted, −93 lines). Former content:

```nix
{
  description = "Quartz org-roam dev shell and build app";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
    pipeline.url = "path:./pipeline";
  };

  outputs = { self, nixpkgs, flake-utils, pipeline }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs { inherit system; };
        fs = pkgs.lib.fileset;

        pipelineApp = pipeline.packages.${system}.default;

        # Pre-fetched npm dependency tree (node_modules).
        # src is filtered to only package.json + package-lock.json so that
        # edits to Quartz source files do not invalidate this derivation.
        quartzDeps = pkgs.buildNpmPackage {
          pname = "quartz-deps";
          version = "4.5.2";
          src = fs.toSource {
            root = ./.;
            fileset = fs.unions [
              ./package.json
              ./package-lock.json
            ];
          };
          npmDepsHash = "sha256-7u+VlIx44B3/ivM9vLMIOn+e4TL4eS6B682vhS+Ikb4=";
          dontBuild = true;
          installPhase = ''
            mkdir -p $out
            cp -r node_modules $out/node_modules
          '';
        };

        # The build application wrapper script
        buildApp = pkgs.writeShellApplication {
          name = "build";
          runtimeInputs = [ pkgs.nodejs_22 ];
          text = ''
            NOTES_DIR="''${1:?Usage: build <path-to-notes-dir>}"
            NOTES_DIR=$(realpath "$NOTES_DIR")
            ORIG_CWD=$(pwd)

            # Set up a writable working copy of the repo in a temp dir
            WORK=$(mktemp -d)
            trap 'rm -rf "$WORK"' EXIT
            cp -r ${self}/. "$WORK/repo"
            chmod -R u+w "$WORK/repo"

            # Drop in pre-built node_modules
            ln -s ${quartzDeps}/node_modules "$WORK/repo/node_modules"

            # Run the pre-compiled pipeline escript (org → md, citations transform)
            ${pipelineApp}/bin/pipeline "$NOTES_DIR" \
              --output "$WORK/repo" \
              --content-dir "$WORK/repo/content"

            # Build the static site from within the repo copy so relative paths
            # (e.g. ./package.json in constants.js) resolve correctly.
            # --output is absolute so the result lands in the caller's cwd.
            cd "$WORK/repo"
            node quartz/bootstrap-cli.mjs build \
              --directory "$WORK/repo/content" \
              --output "$ORIG_CWD/public"
          '';
        };
      in
      {
        devShells.default = pkgs.mkShell {
          buildInputs = [
            pkgs.nodejs_22
            pkgs.elixir
            pkgs.mcp-nixos
          ];

          shellHook = ''
            echo "Node $(node --version) / npm $(npm --version)"
            elixir --version 2>/dev/null | head -1 || true
          '';
        };

        packages.default = buildApp;
        packages.build = buildApp;
        packages.pipeline = pipelineApp;

        apps.default = { type = "app"; program = "${buildApp}/bin/build"; };
        apps.build = { type = "app"; program = "${buildApp}/bin/build"; };
      });
}
```
**Binary file** (deleted, not shown) — before: 8.2 KiB
**Deleted file** (−16 lines):

```org
:PROPERTIES:
:ID: emt-madrid
:END:
#+title: EMT Madrid (urban bus)

Empresa Municipal de Transportes (EMT) operates the urban bus network
within the municipality of Madrid — around 200 lines.

* Notable lines
- *Line 27* — connects Embajadores with Barrio de la Concepción, one of the
  oldest routes in the network.
- *Line 34* — Argüelles to Carabanchel, crossing the city centre via Gran Vía.
- *Búho (owl) lines* — night buses running from Cibeles from midnight to 6 am.

* See also
- [[id:madrid-transport][Madrid Public Transport]]
```
**Deleted file** (−13 lines):

```org
#+title: Example: Citation Reference

This file demonstrates how org-citar citations pass through ox-hugo into
markdown, where the pipeline transform resolves them.

The methodology described in [cite:@podlovics2021journalArticle] provides a
useful framework for analysis.

Multiple citations can appear together:
[cite:@podlovics2021journalArticle;@petersen2022book]

Older bare-cite style (org-roam v1 / older citar) also works:
cite:@podlovics2021journalArticle
```
**Deleted file** (−33 lines):

```org
:PROPERTIES:
:ID: example-images
:END:
#+title: Example: Image References

This note demonstrates the three image reference scenarios that the pipeline
must handle.

* Scenario 1: External image (URL)

An image hosted on the web — ox-hugo passes the URL through as-is and no
local file handling is needed.

#+attr_html: :link "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSkzsTuLOt8esM6enoKwkzqA52G3p9hldlf2g&s"
[[file:quartz-logo-external.png]]

* Scenario 2: Local image (same notes directory)

An image sitting next to this .org file inside the notes directory.
ox-hugo copies files referenced with a relative path into the Hugo =static/=
assets tree automatically.

#+CAPTION: Quartz logo (local, same notes dir)
[[file:quartz-logo.png]]

* Scenario 3: External image (outside notes directory)

An image that lives outside the notes directory entirely — for example a
shared assets folder or a system path. ox-hugo still copies it into =static/=
and rewrites the reference.

#+CAPTION: Quartz logo (outside notes dir)
[[file:../notes-external/external-location-image.png]]
```
**Deleted file** (−17 lines):

```org
:PROPERTIES:
:ID: madrid-transport
:END:
#+title: Madrid Public Transport

Madrid has one of the most extensive public transport networks in Europe,
operated primarily by [[id:crtm][Consorcio Regional de Transportes de Madrid]] (CRTM).

* Modes
- [[id:metro-madrid][Metro de Madrid]] — 13 lines, ~300 km of track
- [[id:emt-madrid][EMT Bus]] — urban buses within the city
- Cercanías — suburban rail run by Renfe
- Interurbano — regional buses to the wider Community of Madrid

* Ticketing
A single [[https://www.crtm.es][tarjeta transporte]] (transport card) works across all modes.
The Multi card covers zones A–C and is topped up at any metro station.
```
**Deleted file** (−18 lines):

```org
:PROPERTIES:
:ID: metro-madrid
:END:
#+title: Metro de Madrid

The Madrid Metro is the main rapid transit network in the city, opened in 1919.
It is the second oldest metro in the Iberian Peninsula after Barcelona.

* Key Lines
| Line | Name                           | Colour    | Terminals                            |
|------+--------------------------------+-----------+--------------------------------------|
| L1   | Pinar de Chamartín–Valdecarros | Blue      | Pinar de Chamartín / Valdecarros     |
| L6   | Circular                       | Grey      | Circular (loop)                      |
| L10  | —                              | Dark blue | Hospital Infanta Sofía / Tres Olivos |

* See also
- [[id:madrid-transport][Madrid Public Transport]]
- [[id:sol-interchange][Sol interchange]]
```
**Deleted file** (−12 lines):

```org
:PROPERTIES:
:ID: sol-interchange
:END:
#+title: Sol (interchange)

Sol is the busiest interchange station in the Madrid Metro, sitting beneath
Puerta del Sol in the city centre.

Lines serving Sol: [[id:metro-madrid][L1]], L2, L3.

It also connects to the Cercanías hub underneath, making it the de-facto
zero point of Madrid's public transport.
```
**Binary file** (deleted, not shown) — before: 8.2 KiB
**Deleted file** (−22 lines):

```org
:PROPERTIES:
:ID: crtm
:END:
#+title: CRTM — Consorcio Regional de Transportes de Madrid

The CRTM is the regional authority that coordinates public transport across
the Community of Madrid. It does not operate services directly but sets
fares, zones, and integration policy.

* Fare zones
| Zone  | Coverage                  |
|-------+---------------------------|
| A     | Municipality of Madrid    |
| B1    | Inner ring municipalities |
| B2    | Outer ring municipalities |
| B3    | Further suburban area     |
| C1–C2 | Commuter belt             |

* Related
- [[id:madrid-transport][Madrid Public Transport]]
- [[id:metro-madrid][Metro de Madrid]]
- [[id:emt-madrid][EMT Madrid]]
```
**Deleted file** (−19 lines):

```org
:PROPERTIES:
:ID: m30
:END:
#+title: M-30

The M-30 is Madrid's innermost ring road, circling the city centre at a
radius of roughly 3–5 km from Puerta del Sol.

It runs mostly underground through the Madrid Río tunnel section along the
Manzanares river, built during the 2004–2007 renovation that reclaimed the
riverbank as a public park.

* Key junctions
- Nudo Norte — connects to A-1 (Burgos) and A-6 (La Coruña)
- Nudo Sur — connects to A-4 (Cádiz) and A-42 (Toledo)

* See also
- [[id:crtm][CRTM]]
- [[id:madrid-transport][Madrid Public Transport]]
```
**Deleted file** (−10 lines):

```json
{
  "$schema": "https://opencode.ai/config.json",
  "mcp": {
    "nixos": {
      "type": "local",
      "command": ["mcp-nixos"],
      "enabled": true
    }
  }
}
```
**`package-lock.json`** (generated) — 1,847 lines changed; diff suppressed because it is too large.
**`package.json`** — 61 lines changed

```diff
@@ -2,7 +2,7 @@
   "name": "@jackyzha0/quartz",
   "description": "🌱 publish your digital garden and notes as a website",
   "private": true,
-  "version": "4.5.2",
+  "version": "4.5.1",
   "type": "module",
   "author": "jackyzha0 <j.zhao2k19@gmail.com>",
   "license": "MIT",
@@ -17,10 +17,7 @@
     "check": "tsc --noEmit && npx prettier . --check",
     "format": "npx prettier . --write",
     "test": "tsx --test",
-    "profile": "0x -D prof ./quartz/bootstrap-cli.mjs build --concurrency=1",
-    "export": "elixir scripts/export.exs",
-    "build:notes": "elixir scripts/export.exs && npx quartz build",
-    "serve:notes": "elixir scripts/export.exs && npx quartz build --serve"
+    "profile": "0x -D prof ./quartz/bootstrap-cli.mjs build --concurrency=1"
   },
   "engines": {
     "npm": ">=10.9.2",
@@ -39,35 +36,35 @@
   },
   "dependencies": {
     "@clack/prompts": "^0.11.0",
-    "@floating-ui/dom": "^1.7.4",
+    "@floating-ui/dom": "^1.7.2",
     "@myriaddreamin/rehype-typst": "^0.6.0",
-    "@napi-rs/simple-git": "0.1.22",
+    "@napi-rs/simple-git": "0.1.21",
     "@tweenjs/tween.js": "^25.0.0",
-    "ansi-truncate": "^1.4.0",
+    "ansi-truncate": "^1.2.0",
     "async-mutex": "^0.5.0",
-    "chokidar": "^5.0.0",
+    "chokidar": "^4.0.3",
     "cli-spinner": "^0.2.10",
     "d3": "^7.9.0",
-    "esbuild-sass-plugin": "^3.6.0",
-    "flexsearch": "^0.8.205",
+    "esbuild-sass-plugin": "^3.3.1",
+    "flexsearch": "0.7.43",
     "github-slugger": "^2.0.0",
-    "globby": "^16.1.0",
+    "globby": "^14.1.0",
     "gray-matter": "^4.0.3",
     "hast-util-to-html": "^9.0.5",
     "hast-util-to-jsx-runtime": "^2.3.6",
     "hast-util-to-string": "^3.0.1",
-    "is-absolute-url": "^5.0.0",
-    "js-yaml": "^4.1.1",
-    "lightningcss": "^1.31.1",
+    "is-absolute-url": "^4.0.1",
+    "js-yaml": "^4.1.0",
+    "lightningcss": "^1.30.1",
     "mdast-util-find-and-replace": "^3.0.2",
-    "mdast-util-to-hast": "^13.2.1",
+    "mdast-util-to-hast": "^13.2.0",
     "mdast-util-to-string": "^4.0.0",
     "micromorph": "^0.4.5",
-    "minimatch": "^10.1.1",
-    "pixi.js": "^8.15.0",
-    "preact": "^10.28.2",
-    "preact-render-to-string": "^6.6.5",
-    "pretty-bytes": "^7.1.0",
+    "minimatch": "^10.0.3",
+    "pixi.js": "^8.11.0",
+    "preact": "^10.27.0",
+    "preact-render-to-string": "^6.5.13",
+    "pretty-bytes": "^7.0.0",
     "pretty-time": "^1.1.0",
     "reading-time": "^1.5.0",
     "rehype-autolink-headings": "^7.1.0",
@@ -86,32 +83,32 @@
     "remark-rehype": "^11.1.2",
     "remark-smartypants": "^3.0.2",
     "rfdc": "^1.4.1",
-    "satori": "^0.19.1",
+    "satori": "^0.16.1",
     "serve-handler": "^6.1.6",
-    "sharp": "^0.34.5",
+    "sharp": "^0.34.3",
     "shiki": "^1.26.2",
     "source-map-support": "^0.5.21",
     "to-vfile": "^8.0.0",
     "toml": "^3.0.0",
     "unified": "^11.0.5",
-    "unist-util-visit": "^5.1.0",
+    "unist-util-visit": "^5.0.0",
     "vfile": "^6.0.3",
-    "workerpool": "^10.0.1",
-    "ws": "^8.19.0",
+    "workerpool": "^9.3.3",
+    "ws": "^8.18.3",
     "yargs": "^18.0.0"
   },
   "devDependencies": {
     "@types/d3": "^7.4.3",
     "@types/hast": "^3.0.4",
     "@types/js-yaml": "^4.0.9",
-    "@types/node": "^25.0.10",
+    "@types/node": "^24.1.0",
     "@types/pretty-time": "^1.1.5",
     "@types/source-map-support": "^0.5.10",
     "@types/ws": "^8.18.1",
-    "@types/yargs": "^17.0.35",
-    "esbuild": "^0.27.2",
-    "prettier": "^3.8.1",
-    "tsx": "^4.21.0",
-    "typescript": "^5.9.3"
+    "@types/yargs": "^17.0.33",
+    "esbuild": "^0.25.8",
+    "prettier": "^3.6.2",
+    "tsx": "^4.20.3",
+    "typescript": "^5.8.3"
   }
 }
```
**`pipeline/flake.lock`** (generated; deleted, −61 lines). Former content:

```json
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1771369470,
        "narHash": "sha256-0NBlEBKkN3lufyvFegY4TYv5mCNHbi5OmBDrzihbBMQ=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0182a361324364ae3f436a63005877674cf45efb",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
```
**Deleted file** (−70 lines):

```nix
{
  description = "Org-roam export pipeline — Elixir escript";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs { inherit system; };
        fs = pkgs.lib.fileset;

        # Emacs with ox-hugo — needed at runtime by the pipeline escript
        # (export_org_files calls `emacs --batch` with ox-hugo).
        emacsWithOxHugo = (pkgs.emacsPackagesFor pkgs.emacs-nox).emacsWithPackages
          (epkgs: [ epkgs.ox-hugo ]);

        # Pre-fetched Hex/Mix dependencies.
        # src is filtered to mix.exs + mix.lock so source edits don't
        # invalidate this derivation.
        mixDeps = pkgs.beamPackages.fetchMixDeps {
          pname = "pipeline-mix-deps";
          version = "0.1.0";
          src = fs.toSource {
            root = ./.;
            fileset = fs.unions [
              ./mix.exs
              ./mix.lock
            ];
          };
          sha256 = "sha256-si7JAomY1HZ33m6ihUJP5i6PO39CE1clYvuMtn0CbPU=";
        };

        # Compiled pipeline escript (without runtime wrappers).
        pipelineEscript = pkgs.beamPackages.mixRelease {
          pname = "pipeline";
          version = "0.1.0";
          src = ./.;

          escriptBinName = "pipeline";
          mixFodDeps = mixDeps;

          stripDebug = true;
        };

        # Wrapped pipeline that puts emacs (with ox-hugo) on PATH so
        # the escript's System.cmd("emacs", ...) calls succeed.
        pipelineApp = pkgs.writeShellApplication {
          name = "pipeline";
          runtimeInputs = [ emacsWithOxHugo pkgs.inotify-tools ];
          text = ''
            exec ${pipelineEscript}/bin/pipeline "$@"
          '';
        };
      in
      {
        packages.default = pipelineApp;
        packages.escript = pipelineEscript;

        devShells.default = pkgs.mkShell {
          buildInputs = [
            pkgs.elixir
            pkgs.inotify-tools
            emacsWithOxHugo
          ];
        };
      });
}
```
**Deleted file** (−164 lines):

```elixir
defmodule Pipeline do
  @moduledoc """
  Post-export markdown transformation pipeline.

  Applies a list of transform modules sequentially over markdown files.
  Each transform module must implement the `Pipeline.Transform` behaviour.

  Transforms are applied in the order given. A file is rewritten only
  when at least one transform mutates its content (checked via equality).

  ## Usage

      opts = %{
        zotero_url: "http://localhost:23119",
        bibtex_file: System.get_env("BIBTEX_FILE"),
        citation_mode: :warn # :silent | :warn | :strict
      }

      # Batch: all .md files in a directory
      Pipeline.run(content_dir, [Pipeline.Transforms.Citations], opts)

      # Targeted: specific files only
      Pipeline.run_on_files(["content/foo.md"], [Pipeline.Transforms.Citations], opts)

      # With pre-initialized transforms (for watch mode, avoids re-init)
      initialized = Pipeline.init_transforms([Pipeline.Transforms.Citations], opts)
      Pipeline.run_on_files_with(["content/foo.md"], initialized, opts)
  """

  require Logger

  @type transform :: module()
  @type initialized_transform :: {module(), term()}
  @type opts :: map()

  @doc """
  Initialize transform modules. Returns a list of `{module, state}` tuples.

  Call this once and reuse the result with `run_on_files_with/3` to avoid
  re-initializing transforms on every file change (e.g., in watch mode).
  """
  @spec init_transforms([transform()], opts()) :: [initialized_transform()]
  def init_transforms(transforms, opts) do
    Enum.map(transforms, fn mod ->
      state = mod.init(opts)
      {mod, state}
    end)
  end

  @doc """
  Tear down previously initialized transforms, releasing any resources.
  """
  @spec teardown_transforms([initialized_transform()]) :: :ok
  def teardown_transforms(initialized) do
    Enum.each(initialized, fn {mod, state} ->
      if function_exported?(mod, :teardown, 1) do
        mod.teardown(state)
      end
    end)

    :ok
  end

  @doc """
  Run all transforms over every `.md` file under `content_dir`.

  Initializes and tears down transforms automatically.
  Returns `{:ok, stats}` where stats maps each transform to a count of files it changed.
  """
  @spec run(String.t(), [transform()], opts()) :: {:ok, map()}
  def run(content_dir, transforms, opts \\ %{}) do
    md_files =
      content_dir
      |> Path.join("**/*.md")
      |> Path.wildcard()

    if md_files == [] do
      Logger.warning("Pipeline: no .md files found in #{content_dir}")
      {:ok, %{}}
    else
      Logger.info(
        "Pipeline: processing #{length(md_files)} markdown files " <>
          "with #{length(transforms)} transform(s)"
      )

      initialized = init_transforms(transforms, opts)
      stats = apply_transforms(md_files, initialized, opts)
      teardown_transforms(initialized)
      {:ok, stats}
    end
  end

  @doc """
  Run all transforms over specific `.md` files only.

  Initializes and tears down transforms automatically.
  Files that don't exist are silently skipped.
  """
  @spec run_on_files([String.t()], [transform()], opts()) :: {:ok, map()}
  def run_on_files(file_paths, transforms, opts \\ %{}) do
    existing = Enum.filter(file_paths, &File.exists?/1)

    if existing == [] do
      Logger.debug("Pipeline: no files to process")
      {:ok, %{}}
    else
      Logger.info("Pipeline: processing #{length(existing)} file(s)")
      initialized = init_transforms(transforms, opts)
      stats = apply_transforms(existing, initialized, opts)
      teardown_transforms(initialized)
      {:ok, stats}
    end
  end

  @doc """
  Run pre-initialized transforms over specific `.md` files.

  Does NOT call `init` or `teardown` — the caller manages the transform
  lifecycle. Use this in watch mode to avoid re-initializing on every change.
  """
  @spec run_on_files_with([String.t()], [initialized_transform()], opts()) :: {:ok, map()}
  def run_on_files_with(file_paths, initialized, opts) do
    existing = Enum.filter(file_paths, &File.exists?/1)

    if existing == [] do
      Logger.debug("Pipeline: no files to process")
      {:ok, %{}}
    else
      stats = apply_transforms(existing, initialized, opts)
      {:ok, stats}
    end
  end

  # -------------------------------------------------------------------
  # Private
  # -------------------------------------------------------------------

  defp apply_transforms(md_files, initialized, opts) do
    Enum.reduce(md_files, %{}, fn path, acc ->
      original = File.read!(path)

      {transformed, file_stats} =
        Enum.reduce(initialized, {original, %{}}, fn {mod, state}, {content, fstats} ->
          result = mod.apply(content, state, opts)
          changed = result != content

          {result,
           Map.update(
             fstats,
             mod,
             if(changed, do: 1, else: 0),
             &(&1 + if(changed, do: 1, else: 0))
           )}
        end)

      if transformed != original do
        File.write!(path, transformed)
        Logger.debug("Pipeline: updated #{Path.relative_to_cwd(path)}")
      end

      Map.merge(acc, file_stats, fn _k, a, b -> a + b end)
    end)
  end
end
```
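The `Pipeline.Transform` behaviour referenced in the moduledoc is not shown in this diff, but the calling convention is visible in `apply_transforms/3`: `init/1` builds per-run state, `apply/3` maps content to (possibly) new content, and `teardown/1` is optional. A hypothetical transform satisfying that contract:

```elixir
defmodule Pipeline.Transforms.BoldTodos do
  @moduledoc false
  # Hypothetical example transform (not part of the repo): wraps TODO
  # markers in bold. Shows the shape Pipeline expects of a transform.

  def init(_opts), do: %{}                      # per-run state, built once

  def apply(content, _state, _opts),
    do: String.replace(content, "TODO", "**TODO**")

  def teardown(_state), do: :ok                 # optional resource cleanup
end
```

Pipeline would pick it up via `Pipeline.run(content_dir, [Pipeline.Transforms.BoldTodos], %{})`; the returned stats map counts the files it actually changed.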
**Deleted file** (−14 lines):

```elixir
defmodule Pipeline.Application do
  @moduledoc false
  use Application

  @impl true
  def start(_type, _args) do
    children = [
      {Finch, name: Pipeline.Finch}
    ]

    opts = [strategy: :one_for_one, name: Pipeline.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
```
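The application starts a single Finch HTTP pool named `Pipeline.Finch`, presumably used by the Citations transform to talk to Zotero's Better BibTeX server. Any code in the escript can issue requests through it — a sketch with an illustrative URL (the actual endpoint used by the transform is not shown in this diff):

```elixir
# Sketch: an availability probe through the supervised Finch pool.
# The URL is illustrative, not taken from the repo.
case Finch.build(:get, "http://localhost:23119/better-bibtex/cayw?probe=true")
     |> Finch.request(Pipeline.Finch) do
  {:ok, %Finch.Response{status: 200}} -> :zotero_up
  _ -> :zotero_down
end
```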
**Deleted file** (−204 lines):

```elixir
defmodule Pipeline.CLI do
  @moduledoc """
  Escript entry point for the org-roam export pipeline.

  Runs four phases in sequence:

  1. Wipe `content/` (preserving `.gitkeep`)
  2. Export each `.org` file via `emacs --batch` + ox-hugo -> `content/**/*.md`
  3. Run Elixir transform modules over every `.md` file
  4. Generate a fallback `content/index.md` if none was exported

  With `--watch`, after the initial batch the process stays alive and
  incrementally re-exports only changed `.org` files.

  ## Usage

      pipeline <notes-dir> [--output <path>] [--watch]

  Arguments:
    notes-dir          Path to the directory containing `.org` notes (required).
                       Also accepts the `NOTES_DIR` env var.

  Options:
    --output <path>    Output root directory (used as ox-hugo base dir).
                       Defaults to the `OUTPUT_DIR` env var, or the current
                       working directory.
    --content-dir <p>  Output directory for exported Markdown. Defaults to
                       `<output>/content`.
    --watch            After initial batch, watch notes-dir for changes and
                       incrementally re-export affected files.

  Optional env vars:
    BIBTEX_FILE    Path to a `.bib` file used as citation fallback.
    ZOTERO_URL     Zotero Better BibTeX base URL (default: http://localhost:23119).
    CITATION_MODE  silent | warn (default) | strict.
  """

  require Logger

  @transforms [Pipeline.Transforms.Citations]

  def main(argv) do
    Application.ensure_all_started(:pipeline)

    {notes_dir, output_dir, content_dir, watch?} = parse_args(argv)
    pipeline_opts = build_pipeline_opts()

    # Phase 1-4: full batch export
    wipe(content_dir)
    export_all(notes_dir, output_dir)
    run_pipeline(content_dir, pipeline_opts)
    generate_index(content_dir)

    md_count =
      content_dir
      |> Path.join("**/*.md")
      |> Path.wildcard()
      |> length()

    IO.puts("==> Done. #{md_count} markdown files in #{content_dir}")

    # Phase 5: optional watch mode
    if watch? do
      IO.puts("==> Watching #{notes_dir} for .org changes... (Ctrl+C to stop)")

      {:ok, _pid} =
        Pipeline.Watcher.start_link(
          notes_dir: notes_dir,
          output_dir: output_dir,
          content_dir: content_dir,
          pipeline_opts: pipeline_opts,
          transforms: @transforms
        )

      Process.sleep(:infinity)
    end
  end

  # ---------------------------------------------------------------------------
  # Argument parsing
  # ---------------------------------------------------------------------------

  defp parse_args(argv) do
    {opts, positional, _invalid} =
      OptionParser.parse(argv,
        strict: [output: :string, content_dir: :string, watch: :boolean]
      )

    notes_dir =
      case positional do
        [dir | _] ->
          dir

        [] ->
          System.get_env("NOTES_DIR") ||
            abort("Usage: pipeline <notes-dir> [--output <path>] [--watch]")
      end

    notes_dir = Path.expand(notes_dir)

    unless File.dir?(notes_dir) do
      abort("Error: notes directory does not exist: #{notes_dir}")
    end

    output_dir =
      (opts[:output] || System.get_env("OUTPUT_DIR") || File.cwd!())
      |> Path.expand()

    content_dir =
      (opts[:content_dir] || Path.join(output_dir, "content"))
      |> Path.expand()

    watch? = Keyword.get(opts, :watch, false)

    {notes_dir, output_dir, content_dir, watch?}
  end

  # ---------------------------------------------------------------------------
  # Phase 1: Wipe content/
  # ---------------------------------------------------------------------------

  defp wipe(content_dir) do
    IO.puts("==> Wiping #{content_dir}")
    File.mkdir_p!(content_dir)

    content_dir
    |> File.ls!()
    |> Enum.reject(&(&1 == ".gitkeep"))
    |> Enum.each(fn entry ->
      Path.join(content_dir, entry) |> File.rm_rf!()
    end)
  end

  # ---------------------------------------------------------------------------
  # Phase 2: Export org files via Emacs + ox-hugo
  # ---------------------------------------------------------------------------

  defp export_all(notes_dir, output_dir) do
    IO.puts("==> Exporting org files from #{notes_dir}")

    case Pipeline.Export.export_all(notes_dir, output_dir) do
      {:ok, 0} ->
        IO.puts("No .org files found in #{notes_dir}")
        System.halt(0)

      {:ok, count} ->
        IO.puts("    exported #{count} file(s)")

      {:error, failures} ->
        IO.puts(:stderr, "\nFailed to export #{length(failures)} file(s):")

        Enum.each(failures, fn {f, {:error, reason}} ->
          IO.puts(:stderr, "  #{f}: #{inspect(reason)}")
        end)

        System.halt(1)
    end
  end

  # ---------------------------------------------------------------------------
  # Phase 3: Markdown transformation pipeline
  # ---------------------------------------------------------------------------

  defp run_pipeline(content_dir, pipeline_opts) do
    IO.puts("==> Running markdown pipeline")

    {:ok, stats} = Pipeline.run(content_dir, @transforms, pipeline_opts)

    Enum.each(stats, fn {mod, count} ->
      IO.puts("    #{inspect(mod)}: #{count} file(s) modified")
    end)
  end

  # ---------------------------------------------------------------------------
  # Phase 4: Generate default index.md if none was exported
  # ---------------------------------------------------------------------------

  defp generate_index(content_dir) do
    IO.puts("==> Generating index")
    Pipeline.Index.generate(content_dir)
  end

  # ---------------------------------------------------------------------------
  # Helpers
  # ---------------------------------------------------------------------------

  defp build_pipeline_opts do
    %{
      zotero_url: System.get_env("ZOTERO_URL", "http://localhost:23119"),
      bibtex_file: System.get_env("BIBTEX_FILE"),
      citation_mode:
        case System.get_env("CITATION_MODE", "warn") do
          "silent" -> :silent
          "strict" -> :strict
          _ -> :warn
        end
    }
  end

  defp abort(message) do
    IO.puts(:stderr, message)
    System.halt(1)
  end
end
```
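With `--watch`, `main/1` delegates to `Pipeline.Watcher` and then sleeps forever. The watcher can also be started standalone (e.g., from IEx) with the same option keys the CLI builds — a sketch with illustrative paths:

```elixir
# Sketch: mirroring the keyword list constructed in Pipeline.CLI.main/1.
Application.ensure_all_started(:pipeline)

{:ok, _pid} =
  Pipeline.Watcher.start_link(
    notes_dir: Path.expand("~/org/notes"),
    output_dir: File.cwd!(),
    content_dir: Path.join(File.cwd!(), "content"),
    pipeline_opts: %{
      zotero_url: "http://localhost:23119",
      bibtex_file: System.get_env("BIBTEX_FILE"),
      citation_mode: :warn
    },
    transforms: [Pipeline.Transforms.Citations]
  )
```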
**Deleted file** (−135 lines):

```elixir
defmodule Pipeline.Export do
  @moduledoc """
  Org-to-Markdown export via Emacs batch + ox-hugo.

  Provides both single-file and batch export, plus a helper to compute
  the expected `.md` output path for a given `.org` source file.
  """

  require Logger

  @doc """
  Export a single `.org` file to Markdown via `emacs --batch` + ox-hugo.

  Returns `{:ok, exit_code}` with the emacs exit code (0 = success),
  or `{:error, reason}` if the command could not be executed.
  """
  @spec export_file(String.t(), String.t(), String.t()) ::
          {:ok, non_neg_integer()} | {:error, term()}
  def export_file(orgfile, notes_dir, output_dir) do
    section =
      orgfile
      |> Path.dirname()
      |> Path.relative_to(notes_dir)

    # ox-hugo requires static/ to exist for image asset copying
    File.mkdir_p!(Path.join(output_dir, "static"))

    {output, exit_code} =
      System.cmd(
        "emacs",
        [
          "--batch",
          "--eval", "(require 'ox-hugo)",
          "--eval", """
          (org-cite-register-processor 'passthrough
            :export-citation
            (lambda (citation _style _backend _info)
              (let ((keys (mapcar (lambda (ref)
                                    (concat "@" (org-element-property :key ref)))
                                  (org-cite-get-references citation))))
                (format "[cite:%s]" (string-join keys ";")))))
          """,
          "--eval", "(setq org-cite-export-processors '((t passthrough)))",
          "--eval", ~s[(setq org-hugo-base-dir "#{output_dir}")],
          "--eval", ~s[(setq org-hugo-default-section-directory "#{section}")],
          "--visit", orgfile,
          "--funcall", "org-hugo-export-to-md"
        ],
        stderr_to_stdout: true
      )

    filtered =
      output
      |> String.split("\n")
      |> Enum.reject(&String.match?(&1, ~r/^Loading|^ad-handle|^For information/))
      |> Enum.join("\n")

    if filtered != "", do: Logger.info("emacs: #{filtered}")

    if exit_code == 0 do
      {:ok, exit_code}
    else
      {:error, {:emacs_exit, exit_code, filtered}}
    end
  rescue
    e -> {:error, e}
  end

  @doc """
  Export all `.org` files found under `notes_dir`.

  Returns `{:ok, count}` where `count` is the number of successfully
  exported files, or `{:error, failures}` if any files failed.
  """
  @spec export_all(String.t(), String.t()) :: {:ok, non_neg_integer()} | {:error, list()}
  def export_all(notes_dir, output_dir) do
    org_files =
      Path.join(notes_dir, "**/*.org")
      |> Path.wildcard()

    if org_files == [] do
      Logger.warning("No .org files found in #{notes_dir}")
      {:ok, 0}
    else
      Logger.info("Exporting #{length(org_files)} org file(s) from #{notes_dir}")

      results =
        Enum.map(org_files, fn orgfile ->
          IO.puts("  exporting: #{orgfile}")
          {orgfile, export_file(orgfile, notes_dir, output_dir)}
        end)

      failures =
        Enum.filter(results, fn
          {_, {:ok, _}} -> false
          {_, {:error, _}} -> true
        end)

      if failures == [] do
        {:ok, length(results)}
      else
        {:error, failures}
      end
    end
  end

  @doc """
  Compute the expected `.md` path for a given `.org` file.

  Uses the same section-mapping logic as ox-hugo: the relative directory
  of the `.org` file within `notes_dir` becomes the section directory
  under `content_dir`.

  ## Examples

      iex> Pipeline.Export.expected_md_path("/notes/bus/emt.org", "/notes", "/out/content")
      "/out/content/bus/emt.md"

      iex> Pipeline.Export.expected_md_path("/notes/top-level.org", "/notes", "/out/content")
      "/out/content/top-level.md"
  """
  @spec expected_md_path(String.t(), String.t(), String.t()) :: String.t()
  def expected_md_path(orgfile, notes_dir, content_dir) do
    section =
      orgfile
      |> Path.dirname()
      |> Path.relative_to(notes_dir)

    basename = Path.basename(orgfile, ".org") <> ".md"

    case section do
      "." -> Path.join(content_dir, basename)
      _ -> Path.join([content_dir, section, basename])
    end
  end
end
```
@@ -1,83 +0,0 @@
defmodule Pipeline.Index do
  @moduledoc """
  Generates a fallback `index.md` in the content directory if none was
  exported from an `.org` file.

  The generated index lists all markdown pages alphabetically with links.
  """

  @doc """
  Generate `content_dir/index.md` if it does not already exist.

  If an `index.md` was already created by ox-hugo (from an `index.org`),
  it is left untouched.
  """
  @spec generate(String.t()) :: :ok
  def generate(content_dir) do
    index_path = Path.join(content_dir, "index.md")

    unless File.exists?(index_path) do
      IO.puts("  generating default index.md")

      pages =
        Path.join(content_dir, "**/*.md")
        |> Path.wildcard()
        |> Enum.map(fn path ->
          slug = Path.relative_to(path, content_dir) |> Path.rootname()

          title =
            path
            |> File.read!()
            |> then(fn content ->
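              # ox-hugo emits TOML frontmatter, so a page title appears as:
              #   title = "Page Title"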
              case Regex.run(~r/^title\s*=\s*"(.+)"/m, content) do
                [_, t] -> t
                _ -> slug
              end
            end)

          {slug, title}
        end)
        |> Enum.sort_by(fn {_, title} -> title end)
        |> Enum.map(fn {slug, title} -> "- [#{title}](#{slug})" end)
        |> Enum.join("\n")

      File.write!(index_path, """
      ---
      title: Index
      ---

      #{pages}
      """)
    end

    :ok
  end

  @doc """
  Regenerate the index by removing any previously generated one first.

  Only removes the index if it was generated by us (contains `title: Index`).
  User-exported index files (from `index.org`) are left untouched.
  """
  @spec regenerate(String.t()) :: :ok
  def regenerate(content_dir) do
    index_path = Path.join(content_dir, "index.md")

    if File.exists?(index_path) do
      content = File.read!(index_path)

      if generated_index?(content) do
        File.rm!(index_path)
      end
    end

    generate(content_dir)
  end

  defp generated_index?(content) do
    # Our generated index uses "title: Index" in YAML frontmatter.
    # ox-hugo uses TOML frontmatter (title = "..."), so this won't
    # match user-exported files.
    String.contains?(content, "title: Index")
  end
end
@@ -1,178 +0,0 @@
defmodule Pipeline.Resolvers.BibTeX do
  @moduledoc """
  Resolves citation keys from a local BibTeX (.bib) file.

  Configured via the `BIBTEX_FILE` environment variable, or passed directly
  as `opts.bibtex_file`. The file is parsed once at init time and the
  resulting entry map is reused for all lookups.

  Supports extracting: author last names, year, title, DOI, URL.

  BibTeX entry format parsed:

      @type{citationkey,
        author = {Last, First and Last2, First2},
        year = {2021},
        title = {Some Title},
        doi = {10.xxxx/yyyy},
        url = {https://example.com},
      }

  Returns `{:ok, %{label: "Author, Year", url: "..."}}` or `:error`.
  """

  require Logger

  # ------------------------------------------------------------------
  # Public API
  # ------------------------------------------------------------------

  @doc """
  Parse a .bib file and return a map of `%{citation_key => entry_map}`.
  Returns `{:ok, entries}` or `{:error, reason}`.
  """
  @spec load(String.t()) :: {:ok, map()} | {:error, term()}
  def load(path) do
    case File.read(path) do
      {:ok, content} ->
        entries = parse_entries(content)
        Logger.info("BibTeX: loaded #{map_size(entries)} entries from #{path}")
        {:ok, entries}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Resolve a citation key from pre-loaded BibTeX entries.
  """
  @spec resolve(String.t(), map()) :: {:ok, map()} | :error
  def resolve(key, entries) do
    case Map.fetch(entries, key) do
      {:ok, entry} ->
        label = build_label(entry)
        url = build_url(entry)
        {:ok, %{label: label, url: url}}

      :error ->
        :error
    end
  end

  # ------------------------------------------------------------------
  # Parsing
  # ------------------------------------------------------------------

  # Match the @type{key, ...} header of each entry. Entries are split
  # apart with a lookahead regex, and @field_regex below tolerates one
  # level of nested braces inside field values.
  @entry_header ~r/@\w+\s*\{\s*([^,\s]+)\s*,/

  defp parse_entries(content) do
    # Split on "@" boundaries, then parse each chunk
    content
    |> String.split(~r/(?=@\w+\s*\{)/, trim: true)
    |> Enum.reduce(%{}, fn chunk, acc ->
      case Regex.run(@entry_header, chunk) do
        [_, key] ->
          fields = parse_fields(chunk)
          Map.put(acc, String.trim(key), fields)

        _ ->
          acc
      end
    end)
  end

  # Extract key = {value} or key = "value" pairs from an entry block.
  # Handles simple single-depth braces; good enough for common fields.
  @field_regex ~r/(\w+)\s*=\s*(?:\{([^{}]*(?:\{[^{}]*\}[^{}]*)*)\}|"([^"]*)")/

  defp parse_fields(chunk) do
    @field_regex
    |> Regex.scan(chunk)
    |> Enum.reduce(%{}, fn match, acc ->
      field_name = Enum.at(match, 1) |> String.downcase()
      # Value is in capture group 2 (braces) or 3 (quotes)
      value =
        case {Enum.at(match, 2, ""), Enum.at(match, 3, "")} do
          {"", q} -> q
          {b, _} -> b
        end

      Map.put(acc, field_name, String.trim(value))
    end)
  end

  # ------------------------------------------------------------------
  # Label & URL building
  # ------------------------------------------------------------------

  defp build_label(entry) do
    author_part =
      entry
      |> Map.get("author", "")
      |> parse_authors()
      |> format_authors()

    year = Map.get(entry, "year", Map.get(entry, "date", ""))
    year = extract_year(year)

    if year && author_part != "", do: "#{author_part}, #{year}", else: author_part
  end

  defp parse_authors(""), do: []

  defp parse_authors(author_str) do
    author_str
    |> String.split(" and ", trim: true)
    |> Enum.map(&extract_last_name/1)
    |> Enum.reject(&(&1 == ""))
  end

  # Handles "Last, First" and "First Last" formats
  defp extract_last_name(name) do
    name = String.trim(name)

    cond do
      String.contains?(name, ",") ->
        name |> String.split(",") |> List.first() |> String.trim()

      String.contains?(name, " ") ->
        name |> String.split(" ") |> List.last() |> String.trim()

      true ->
        name
    end
  end

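  # Note: with three or more authors only the first and last are shown,
  # e.g. "Smith & Zhao"; middle authors are omitted from the label.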
  defp format_authors([]), do: "Unknown"
  defp format_authors([single]), do: single
  defp format_authors([first | rest]), do: "#{first} & #{List.last(rest)}"

  defp extract_year(""), do: nil

  defp extract_year(str) do
    case Regex.run(~r/\b(\d{4})\b/, str) do
      [_, year] -> year
      _ -> nil
    end
  end

  defp build_url(entry) do
    cond do
      doi = Map.get(entry, "doi", "") |> non_empty() ->
        "https://doi.org/#{doi}"

      url = Map.get(entry, "url", "") |> non_empty() ->
        url

      true ->
        nil
    end
  end

  defp non_empty(""), do: nil
  defp non_empty(v), do: v
end
@@ -1,18 +0,0 @@
defmodule Pipeline.Resolvers.DOI do
  @moduledoc """
  Last-resort citation resolver — always succeeds.

  If the citation key looks like a DOI (starts with "10."), returns a
  `https://doi.org/...` link. Otherwise returns the key itself as a
  plain label with no URL.
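
  ## Examples

      iex> Pipeline.Resolvers.DOI.resolve("10.1000/xyz123")
      {:ok, %{label: "10.1000/xyz123", url: "https://doi.org/10.1000/xyz123"}}

      iex> Pipeline.Resolvers.DOI.resolve("smith2021")
      {:ok, %{label: "smith2021", url: nil}}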
"""
|
||||
|
||||
@spec resolve(String.t()) :: {:ok, map()}
|
||||
def resolve(key) do
|
||||
if String.starts_with?(key, "10.") do
|
||||
{:ok, %{label: key, url: "https://doi.org/#{key}"}}
|
||||
else
|
||||
{:ok, %{label: key, url: nil}}
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,182 +0,0 @@
defmodule Pipeline.Resolvers.Zotero do
  @moduledoc """
  Resolves citation keys via Zotero Better BibTeX's JSON-RPC API.

  Requires Zotero to be running with the Better BibTeX plugin installed.
  Default endpoint: http://localhost:23119/better-bibtex/json-rpc

  Resolution strategy:
  1. Search by citation key via `item.search`
  2. If found, try to get a PDF attachment link (zotero://open-pdf/...)
  3. Fall back to zotero://select/items/@key

  Returns `{:ok, %{label: "Author, Year", url: "zotero://..."}}` or `:error`.
  """

  require Logger

  @rpc_path "/better-bibtex/json-rpc"

  @doc """
  Attempt to resolve `key` against a running Zotero instance.
  `base_url` defaults to `http://localhost:23119`.
  """
  @spec resolve(String.t(), String.t()) :: {:ok, map()} | :error
  def resolve(key, base_url \\ "http://localhost:23119") do
    url = base_url <> @rpc_path

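    # Better BibTeX JSON-RPC: item.search takes a list of [field, op, value]
    # triples; matching items come back as CSL-JSON under "result".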
    payload =
      Jason.encode!(%{
        jsonrpc: "2.0",
        method: "item.search",
        params: [
          [["citationKey", "is", key]]
        ],
        id: 1
      })

    case Req.post(url,
           body: payload,
           headers: [{"content-type", "application/json"}],
           receive_timeout: 5_000,
           finch: Pipeline.Finch
         ) do
      {:ok, %{status: 200, body: body}} ->
        parse_response(body, key, base_url)

      {:ok, %{status: status}} ->
        Logger.debug("Zotero: unexpected HTTP #{status} for key #{key}")
        :error

      {:error, reason} ->
        Logger.debug("Zotero: connection failed for key #{key}: #{inspect(reason)}")
        :error

      other ->
        Logger.debug("Zotero: unexpected result for key #{key}: #{inspect(other)}")
        :error
    end
  rescue
    e ->
      Logger.debug("Zotero: exception resolving key #{key}: #{inspect(e)}")
      :error
  end

  # ------------------------------------------------------------------
  # Private helpers
  # ------------------------------------------------------------------

  defp parse_response(%{"result" => [item | _]}, key, base_url) do
    label = build_label(item)
    url = resolve_url(item, key, base_url)
    {:ok, %{label: label, url: url}}
  end

  defp parse_response(%{"result" => []}, key, _base_url) do
    Logger.debug("Zotero: no item found for key #{key}")
    :error
  end

  defp parse_response(%{"error" => err}, key, _base_url) do
    Logger.debug("Zotero: RPC error for key #{key}: #{inspect(err)}")
    :error
  end

  defp parse_response(body, key, _base_url) do
    Logger.debug("Zotero: unexpected response shape for key #{key}: #{inspect(body)}")
    :error
  end

  defp fetch_pdf_url(key, base_url) do
    payload =
      Jason.encode!(%{
        jsonrpc: "2.0",
        method: "item.attachments",
        params: [key],
        id: 2
      })

    case Req.post(base_url <> @rpc_path,
           body: payload,
           headers: [{"content-type", "application/json"}],
           receive_timeout: 5_000,
           finch: Pipeline.Finch
         ) do
      {:ok, %{status: 200, body: %{"result" => attachments}}} when is_list(attachments) ->
        attachments
        |> Enum.find_value(fn att ->
          open = Map.get(att, "open", "")
          path = Map.get(att, "path", "")
          # Guard with non_empty/1: an empty "open" string is truthy in
          # Elixir and would otherwise be returned as a broken URL.
          if String.ends_with?(path, ".pdf"), do: non_empty(open), else: nil
        end)

      _ ->
        nil
    end
  rescue
    _ -> nil
  end

  # CSL-JSON format: authors are under "author" with "family"/"given" keys.
  # Year is under "issued" -> "date-parts" -> [[year, month, day]].
  defp build_label(item) do
    authors = Map.get(item, "author", [])
    year = extract_year(item)

    author_part =
      case authors do
        [] ->
          "Unknown"

        [single] ->
          Map.get(single, "family", Map.get(single, "literal", "Unknown"))

        [first | rest] ->
          first_name = Map.get(first, "family", Map.get(first, "literal", "Unknown"))
          last_name =
            rest
            |> List.last()
            |> then(&Map.get(&1, "family", Map.get(&1, "literal", "Unknown")))

          "#{first_name} & #{last_name}"
      end

    if year, do: "#{author_part}, #{year}", else: author_part
  end

  # "issued": {"date-parts": [["2021", 2, 3]]}
  defp extract_year(item) do
    case get_in(item, ["issued", "date-parts"]) do
      [[year | _] | _] -> to_string(year)
      _ -> nil
    end
  end

  defp resolve_url(item, key, base_url) do
    # Prefer zotero://open-pdf/... for items with a PDF attachment.
    # Fall back to zotero://select/library/items/KEY to open the item in Zotero.
    # The "id" field is a URI like "http://zotero.org/users/123/items/ABCD1234".
    pdf_url = fetch_pdf_url(key, base_url)

    if pdf_url do
      pdf_url
    else
      item_key =
        item
        |> Map.get("id", "")
        |> String.split("/")
        |> List.last()
        |> non_empty()

      if item_key do
        "zotero://select/library/items/#{item_key}"
      else
        "zotero://select/items/@#{key}"
      end
    end
  end

  defp non_empty(nil), do: nil
  defp non_empty(""), do: nil
  defp non_empty(v), do: v
end
@@ -1,48 +0,0 @@
defmodule Pipeline.Transform do
  @moduledoc """
  Behaviour that all markdown transform modules must implement.

  ## Callbacks

  - `init/1` — called once before processing; returns transform-specific state.
    Default implementation returns the opts map unchanged.
  - `apply/3` — called per .md file; returns the (possibly modified) content.
  - `teardown/1` — optional cleanup after all files are processed.

  ## Example

      defmodule MyTransform do
        @behaviour Pipeline.Transform

        @impl true
        def init(opts), do: %{some_state: opts[:value]}

        @impl true
        def apply(content, state, _opts) do
          String.replace(content, "foo", state.some_state)
        end
      end
  """

  @doc "One-time initialisation. Returns opaque state passed to apply/3."
  @callback init(opts :: map()) :: term()

  @doc "Transform file content. Returns the (possibly modified) content string."
  @callback apply(content :: String.t(), state :: term(), opts :: map()) :: String.t()

  @doc "Optional cleanup after all files are processed."
  @callback teardown(state :: term()) :: :ok

  @optional_callbacks teardown: 1

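  # `use Pipeline.Transform` adopts the behaviour and injects a default
  # init/1 that passes the opts map through unchanged (overridable).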
  defmacro __using__(_) do
    quote do
      @behaviour Pipeline.Transform

      @impl Pipeline.Transform
      def init(opts), do: opts

      defoverridable init: 1
    end
  end
end
@@ -1,231 +0,0 @@
defmodule Pipeline.Transforms.Citations do
  @moduledoc """
  Markdown transform: resolves org-citar citation keys to hyperlinks.

  ## Recognised citation syntax (as output by ox-hugo from org-citar)

      [cite:@key] → org-cite / citar standard (most common)
      [cite:@key1;@key2] → multiple citations
      cite:key → older roam-style bare cite syntax

  ## Resolution chain (in order)

  1. Zotero (live instance via Better BibTeX JSON-RPC) — preferred
  2. BibTeX file (BIBTEX_FILE env var) — fallback
  3. DOI / bare key — always succeeds

  ## Modes (opts.citation_mode)

      :silent — silently use DOI/bare-key fallback when Zotero+BibTeX fail
      :warn — (default) emit a Logger.warning for unresolved keys
      :strict — raise on unresolved keys (aborts pipeline)

  ## Format

  Resolved citations are rendered as:

      [Label](url) when a URL is available
      [Label] when no URL could be determined (bare key fallback)

  Multiple semicolon-separated keys become space-separated links:

      [cite:@a;@b] → [Author A, 2020](url_a) [Author B, 2019](url_b)

  ## init/1 callback

  Loads the BibTeX file (if configured) once before processing begins,
  and probes Zotero availability, emitting warnings as appropriate.
  """

  @behaviour Pipeline.Transform

  require Logger

  alias Pipeline.Resolvers.Zotero
  alias Pipeline.Resolvers.BibTeX
  alias Pipeline.Resolvers.DOI

  # Match [cite:@key] and [cite:@key1;@key2;...] (org-cite / citar style)
  @cite_bracket_regex ~r/\[cite:(@[^\]]+)\]/

  # Match bare cite:key or cite:@key (older roam style, no brackets, optional @ prefix)
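  # The negative lookbehind skips matches immediately preceded by "[" or "(",
  # so keys inside the bracketed [cite:...] form (handled above) are not
  # rewritten twice.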
  @cite_bare_regex ~r/(?<![(\[])cite:@?([a-zA-Z0-9_:-]+)/

  # ------------------------------------------------------------------
  # Pipeline callbacks
  # ------------------------------------------------------------------

  @doc """
  Called once before processing any files. Loads BibTeX, probes Zotero.
  Returns a state map passed to every `apply/3` call.
  """
  def init(opts) do
    bibtex_entries = load_bibtex(opts)
    zotero_available = probe_zotero(opts)

    if not zotero_available and bibtex_entries == %{} do
      Logger.warning(
        "Citations: neither Zotero nor a BibTeX file is available. " <>
          "All citations will fall back to bare-key rendering. " <>
          "Set BIBTEX_FILE env var or start Zotero with Better BibTeX to resolve citations."
      )
    end

    %{
      bibtex_entries: bibtex_entries,
      zotero_available: zotero_available,
      zotero_url: Map.get(opts, :zotero_url, "http://localhost:23119"),
      citation_mode: Map.get(opts, :citation_mode, :warn)
    }
  end

  @doc """
  Apply citation resolution to a single markdown file's content.
  """
  def apply(content, state, _opts) do
    content
    |> resolve_bracket_citations(state)
    |> resolve_bare_citations(state)
  end

  # ------------------------------------------------------------------
  # Resolution passes
  # ------------------------------------------------------------------

  defp resolve_bracket_citations(content, state) do
    Regex.replace(@cite_bracket_regex, content, fn _full, keys_str ->
      keys_str
      |> String.split(";")
      |> Enum.map(&String.trim/1)
      |> Enum.map(fn "@" <> key -> key end)
      |> Enum.map(&resolve_key(&1, state))
      |> Enum.join(" ")
    end)
  end

  defp resolve_bare_citations(content, state) do
    Regex.replace(@cite_bare_regex, content, fn _full, key ->
      resolve_key(key, state)
    end)
  end

  # ------------------------------------------------------------------
  # Single-key resolution chain
  # ------------------------------------------------------------------

  defp resolve_key(key, state) do
    info =
      with :error <- try_zotero(key, state),
           :error <- try_bibtex(key, state) do
        handle_unresolved(key, state)
      else
        {:ok, citation_info} -> citation_info
      end

    format_result(info)
  end

  defp try_zotero(_key, %{zotero_available: false}), do: :error

  defp try_zotero(key, %{zotero_url: url}) do
    Zotero.resolve(key, url)
  end

  defp try_bibtex(_key, %{bibtex_entries: entries}) when map_size(entries) == 0, do: :error

  defp try_bibtex(key, %{bibtex_entries: entries}) do
    BibTeX.resolve(key, entries)
  end

  defp handle_unresolved(key, %{citation_mode: mode}) do
    case mode do
      :strict ->
        raise "Citations: could not resolve citation key '#{key}' and mode is :strict"

      :warn ->
        Logger.warning("Citations: unresolved citation key '#{key}' — using bare-key fallback")
        {:ok, result} = DOI.resolve(key)
        result

      :silent ->
        {:ok, result} = DOI.resolve(key)
        result
    end
  end

  defp format_result(%{label: label, url: nil}), do: "[#{label}]"
  defp format_result(%{label: label, url: url}), do: "[#{label}](#{url})"

  # ------------------------------------------------------------------
  # Init helpers
  # ------------------------------------------------------------------

  defp load_bibtex(opts) do
    path = Map.get(opts, :bibtex_file) || System.get_env("BIBTEX_FILE")

    cond do
      is_nil(path) ->
        Logger.debug("Citations: BIBTEX_FILE not set — BibTeX resolver disabled")
        %{}

      not File.exists?(path) ->
        Logger.warning("Citations: BIBTEX_FILE=#{path} does not exist — BibTeX resolver disabled")
        %{}

      true ->
        case BibTeX.load(path) do
          {:ok, entries} ->
            entries

          {:error, reason} ->
            Logger.warning("Citations: failed to load BibTeX file #{path}: #{inspect(reason)}")
            %{}
        end
    end
  end

  defp probe_zotero(opts) do
    url = Map.get(opts, :zotero_url, "http://localhost:23119")

    # Use a no-op JSON-RPC call to probe availability.
    # /better-bibtex/cayw is intentionally avoided — it blocks waiting for
    # user interaction and never returns without a pick.
    payload =
      Jason.encode!(%{
        jsonrpc: "2.0",
        method: "item.search",
        params: [[[]]],
        id: 0
      })

    result =
      try do
        Req.post(url <> "/better-bibtex/json-rpc",
          body: payload,
          headers: [{"content-type", "application/json"}],
          receive_timeout: 3_000,
          finch: Pipeline.Finch
        )
      rescue
        e -> {:error, e}
      end

    case result do
      {:ok, %{status: 200}} ->
        Logger.info("Citations: Zotero Better BibTeX is available at #{url}")
        true

      {:ok, %{status: status}} ->
        Logger.warning(
          "Citations: Zotero responded HTTP #{status} at #{url} — " <>
            "is Better BibTeX installed?"
        )

        false

      _ ->
        Logger.warning(
          "Citations: Zotero not reachable at #{url} — " <>
            "start Zotero with Better BibTeX or set BIBTEX_FILE as fallback"
        )

        false
    end
  end
end
@@ -1,236 +0,0 @@
defmodule Pipeline.Watcher do
  @moduledoc """
  File-watching GenServer that detects `.org` file changes and triggers
  incremental export + transform for only the affected files.

  Uses the `file_system` package (inotify on Linux, fsevents on macOS)
  to watch the notes directory. Events are debounced per-file (500ms)
  to coalesce rapid writes (e.g., Emacs auto-save).

  ## Lifecycle

  Started dynamically by `Pipeline.CLI` after the initial batch export.
  Transforms are initialized once at startup and reused across all
  incremental rebuilds to avoid repeated Zotero probes and BibTeX loads.

  ## Usage

      Pipeline.Watcher.start_link(
        notes_dir: "/path/to/notes",
        output_dir: "/path/to/output",
        content_dir: "/path/to/output/content",
        pipeline_opts: %{zotero_url: "...", ...},
        transforms: [Pipeline.Transforms.Citations]
      )
  """

  use GenServer

  require Logger

  @debounce_ms 500

  # -------------------------------------------------------------------
  # Client API
  # -------------------------------------------------------------------

  @doc """
  Start the watcher as a linked process.

  ## Options

  * `:notes_dir` — directory to watch for `.org` changes (required)
  * `:output_dir` — ox-hugo base dir (required)
  * `:content_dir` — directory where `.md` files are written (required)
  * `:pipeline_opts` — opts map passed to transforms (required)
  * `:transforms` — list of transform modules (default: `[Pipeline.Transforms.Citations]`)
  """
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  # -------------------------------------------------------------------
  # GenServer callbacks
  # -------------------------------------------------------------------

  @impl true
  def init(opts) do
    notes_dir = Keyword.fetch!(opts, :notes_dir)
    output_dir = Keyword.fetch!(opts, :output_dir)
    content_dir = Keyword.fetch!(opts, :content_dir)
    pipeline_opts = Keyword.fetch!(opts, :pipeline_opts)
    transforms = Keyword.get(opts, :transforms, [Pipeline.Transforms.Citations])

    # Initialize transforms once — reused for all incremental rebuilds
    initialized_transforms = Pipeline.init_transforms(transforms, pipeline_opts)

    # Start the file system watcher
    {:ok, watcher_pid} = FileSystem.start_link(dirs: [notes_dir], recursive: true)
    FileSystem.subscribe(watcher_pid)

    Logger.info("Watcher: monitoring #{notes_dir} for .org changes")

    {:ok,
     %{
       notes_dir: notes_dir,
       output_dir: output_dir,
       content_dir: content_dir,
       pipeline_opts: pipeline_opts,
       watcher_pid: watcher_pid,
       initialized_transforms: initialized_transforms,
       pending: %{}
     }}
  end

  @impl true
  def handle_info({:file_event, _pid, {path, events}}, state) do
    path = to_string(path)

    if org_file?(path) and not temporary_file?(path) do
      event_type = classify_events(events)
      Logger.debug("Watcher: #{event_type} event for #{path}")
      {:noreply, schedule_debounce(path, event_type, state)}
    else
      {:noreply, state}
    end
  end

  @impl true
  def handle_info({:file_event, _pid, :stop}, state) do
    Logger.warning("Watcher: file system monitor stopped unexpectedly")
    {:stop, :watcher_stopped, state}
  end

  @impl true
  def handle_info({:debounced, path, event_type}, state) do
    state = %{state | pending: Map.delete(state.pending, path)}

    case event_type do
      :deleted ->
        handle_delete(path, state)

      _created_or_modified ->
        handle_change(path, state)
    end

    {:noreply, state}
  end

  @impl true
  def terminate(_reason, state) do
    Pipeline.teardown_transforms(state.initialized_transforms)
    :ok
  end

  # -------------------------------------------------------------------
  # Event handling
  # -------------------------------------------------------------------

  defp handle_change(orgfile, state) do
    %{
      notes_dir: notes_dir,
      output_dir: output_dir,
      content_dir: content_dir,
      pipeline_opts: pipeline_opts,
      initialized_transforms: initialized_transforms
    } = state

    md_path = Pipeline.Export.expected_md_path(orgfile, notes_dir, content_dir)
    IO.puts("==> Changed: #{Path.relative_to(orgfile, notes_dir)}")

    case Pipeline.Export.export_file(orgfile, notes_dir, output_dir) do
      {:ok, _} ->
        IO.puts("  exported: #{Path.relative_to(md_path, content_dir)}")

        {:ok, stats} = Pipeline.run_on_files_with([md_path], initialized_transforms, pipeline_opts)

        Enum.each(stats, fn {mod, count} ->
          if count > 0, do: IO.puts("  #{inspect(mod)}: #{count} file(s) modified")
        end)

        regenerate_index(content_dir)
        IO.puts("==> Done")

      {:error, reason} ->
        Logger.error("Watcher: export failed for #{orgfile}: #{inspect(reason)}")
    end
  end

  defp handle_delete(orgfile, state) do
    %{notes_dir: notes_dir, content_dir: content_dir} = state

    md_path = Pipeline.Export.expected_md_path(orgfile, notes_dir, content_dir)
    IO.puts("==> Deleted: #{Path.relative_to(orgfile, notes_dir)}")

    if File.exists?(md_path) do
      File.rm!(md_path)
      IO.puts("  removed: #{Path.relative_to(md_path, content_dir)}")

      # Clean up empty parent directories left behind
      cleanup_empty_dirs(Path.dirname(md_path), content_dir)
    end

    regenerate_index(content_dir)
    IO.puts("==> Done")
  end

  # -------------------------------------------------------------------
  # Index generation
  # -------------------------------------------------------------------

  defp regenerate_index(content_dir) do
    Pipeline.Index.regenerate(content_dir)
  end

  # -------------------------------------------------------------------
  # Helpers
  # -------------------------------------------------------------------

  defp schedule_debounce(path, event_type, state) do
    # Cancel any existing timer for this path
    case Map.get(state.pending, path) do
      nil -> :ok
      old_ref -> Process.cancel_timer(old_ref)
    end

    ref = Process.send_after(self(), {:debounced, path, event_type}, @debounce_ms)
    %{state | pending: Map.put(state.pending, path, ref)}
  end

  defp org_file?(path), do: String.ends_with?(path, ".org")

  defp temporary_file?(path) do
    basename = Path.basename(path)
    # Emacs creates temp files like .#file.org and #file.org#
    String.starts_with?(basename, ".#") or
      (String.starts_with?(basename, "#") and String.ends_with?(basename, "#"))
  end

  defp classify_events(events) do
    cond do
      :removed in events or :deleted in events -> :deleted
      :created in events -> :created
      :modified in events or :changed in events -> :modified
      # renamed can mean created or deleted depending on context;
      # if the file exists it was renamed into the watched dir
      :renamed in events -> :modified
      true -> :modified
    end
  end

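  # Walk upward from the deleted file's directory, removing directories
  # as they become empty, and stop once content_dir itself is reached.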
  defp cleanup_empty_dirs(dir, stop_at) do
    dir = Path.expand(dir)
    stop_at = Path.expand(stop_at)

    if dir != stop_at and File.dir?(dir) do
      case File.ls!(dir) do
        [] ->
          File.rmdir!(dir)
          cleanup_empty_dirs(Path.dirname(dir), stop_at)

        _ ->
          :ok
      end
    end
  end
end
@@ -1,34 +0,0 @@
defmodule Pipeline.MixProject do
  use Mix.Project

  def project do
    [
      app: :pipeline,
      version: "0.1.0",
      elixir: "~> 1.17",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      escript: escript()
    ]
  end

  def application do
    [
      extra_applications: [:logger],
      mod: {Pipeline.Application, []}
    ]
  end

  defp escript do
    [main_module: Pipeline.CLI]
  end

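  # Req (over Finch) drives the Zotero JSON-RPC calls, Jason encodes the
  # payloads, and file_system backs the incremental Pipeline.Watcher.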
  defp deps do
    [
      {:finch, "~> 0.19"},
      {:req, "~> 0.5"},
      {:jason, "~> 1.4"},
      {:file_system, "~> 1.0"}
    ]
  end
end
@@ -1,12 +0,0 @@
%{
  "file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"},
  "finch": {:hex, :finch, "0.21.0", "b1c3b2d48af02d0c66d2a9ebfb5622be5c5ecd62937cf79a88a7f98d48a8290c", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "87dc6e169794cb2570f75841a19da99cfde834249568f2a5b121b809588a4377"},
  "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"},
  "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
  "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"},
  "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"},
  "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
  "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
  "req": {:hex, :req, "0.5.17", "0096ddd5b0ed6f576a03dde4b158a0c727215b15d2795e59e0916c6971066ede", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0b8bc6ffdfebbc07968e59d3ff96d52f2202d0536f10fef4dc11dc02a2a43e39"},
  "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"},
}
@@ -55,7 +55,7 @@ const config: QuartzConfig = {
      },
      plugins: {
        transformers: [
          Plugin.FrontMatter({ delimiters: "+++", language: "toml" }),
          Plugin.FrontMatter(),
          Plugin.CreatedModifiedDate({
            priority: ["frontmatter", "git", "filesystem"],
          }),
@@ -66,11 +66,7 @@ const config: QuartzConfig = {
        },
        keepBackground: false,
      }),
      // OxHugoFlavouredMarkdown must come before GitHubFlavoredMarkdown.
      // Note: not compatible with ObsidianFlavoredMarkdown — use one or the other.
      // If ox-hugo exports TOML frontmatter, change FrontMatter to:
      // Plugin.FrontMatter({ delims: "+++", language: "toml" })
      Plugin.OxHugoFlavouredMarkdown(),
      Plugin.ObsidianFlavoredMarkdown({ enableInHtmlEmbed: false }),
      Plugin.GitHubFlavoredMarkdown(),
      Plugin.TableOfContents(),
      Plugin.CrawlLinks({ markdownLinkResolution: "shortest" }),

@@ -71,7 +71,7 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
  console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)

  perf.addEvent("glob")
  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns, false)
  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
  const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
  console.log(
    `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
@@ -143,7 +143,6 @@ async function startWatching(
  }

  const watcher = chokidar.watch(".", {
    awaitWriteFinish: { stabilityThreshold: 250 },
    persistent: true,
    cwd: argv.directory,
    ignoreInitial: true,
@@ -152,19 +151,16 @@ async function startWatching(
  const changes: ChangeEvent[] = []
  watcher
    .on("add", (fp) => {
      fp = toPosixPath(fp)
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "add" })
      void rebuild(changes, clientRefresh, buildData)
    })
    .on("change", (fp) => {
      fp = toPosixPath(fp)
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "change" })
      void rebuild(changes, clientRefresh, buildData)
    })
    .on("unlink", (fp) => {
      fp = toPosixPath(fp)
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "delete" })
      void rebuild(changes, clientRefresh, buildData)

@@ -42,19 +42,6 @@ export type Analytics =
      provider: "clarity"
      projectId?: string
    }
  | {
      provider: "matomo"
      host: string
      siteId: string
    }
  | {
      provider: "vercel"
    }
  | {
      provider: "rybbit"
      siteId: string
      host?: string
    }

export interface GlobalConfiguration {
  pageTitle: string

@@ -7,8 +7,8 @@ import fs from "fs"
export function escapePath(fp) {
  return fp
    .replace(/\\ /g, " ") // unescape spaces
    .replace(/^"(.*)"$/, "$1")
    .replace(/^'(.*)'$/, "$1")
    .replace(/^".*"$/, "$1")
    .replace(/^'.*"$/, "$1")
    .trim()
}

@@ -55,14 +55,11 @@ export type FolderState = {
  collapsed: boolean
}

let numExplorers = 0
export default ((userOpts?: Partial<Options>) => {
  const opts: Options = { ...defaultOptions, ...userOpts }
  const { OverflowList, overflowListAfterDOMLoaded } = OverflowListFactory()

  const Explorer: QuartzComponent = ({ cfg, displayClass }: QuartzComponentProps) => {
    const id = `explorer-${numExplorers++}`

    return (
      <div
        class={classNames(displayClass, "explorer")}
@@ -80,7 +77,7 @@ export default ((userOpts?: Partial<Options>) => {
          type="button"
          class="explorer-toggle mobile-explorer hide-until-loaded"
          data-mobile={true}
          aria-controls={id}
          aria-controls="explorer-content"
        >
          <svg
            xmlns="http://www.w3.org/2000/svg"
@@ -119,7 +116,7 @@ export default ((userOpts?: Partial<Options>) => {
            <polyline points="6 9 12 15 18 9"></polyline>
          </svg>
        </button>
        <div id={id} class="explorer-content" aria-expanded={false} role="group">
        <div class="explorer-content" aria-expanded={false}>
          <OverflowList class="explorer-ul" />
        </div>
        <template id="template-file">

@@ -12,9 +12,9 @@ const OverflowList = ({
  )
}

let numLists = 0
let numExplorers = 0
export default () => {
  const id = `list-${numLists++}`
  const id = `list-${numExplorers++}`

  return {
    OverflowList: (props: JSX.HTMLAttributes<HTMLUListElement>) => (

@@ -20,6 +20,7 @@ export default ((userOpts?: Partial<SearchOptions>) => {
  return (
    <div class={classNames(displayClass, "search")}>
      <button class="search-button">
        <p>{i18n(cfg.locale).components.search.title}</p>
        <svg role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 19.9 19.7">
          <title>Search</title>
          <g class="search-path" fill="none">
@@ -27,7 +28,6 @@ export default ((userOpts?: Partial<SearchOptions>) => {
            <circle cx="8" cy="8" r="7" />
          </g>
        </svg>
        <p>{i18n(cfg.locale).components.search.title}</p>
      </button>
      <div class="search-container">
        <div class="search-space">

@@ -17,7 +17,6 @@ const defaultOptions: Options = {
  layout: "modern",
}

let numTocs = 0
export default ((opts?: Partial<Options>) => {
  const layout = opts?.layout ?? defaultOptions.layout
  const { OverflowList, overflowListAfterDOMLoaded } = OverflowListFactory()
@@ -30,13 +29,12 @@ export default ((opts?: Partial<Options>) => {
      return null
    }

    const id = `toc-${numTocs++}`
    return (
      <div class={classNames(displayClass, "toc")}>
        <button
          type="button"
          class={fileData.collapseToc ? "collapsed toc-header" : "toc-header"}
          aria-controls={id}
          aria-controls="toc-content"
          aria-expanded={!fileData.collapseToc}
        >
          <h3>{i18n(cfg.locale).components.tableOfContents.title}</h3>
@@ -55,10 +53,7 @@ export default ((opts?: Partial<Options>) => {
            <polyline points="6 9 12 15 18 9"></polyline>
          </svg>
        </button>
        <OverflowList
          id={id}
          class={fileData.collapseToc ? "collapsed toc-content" : "toc-content"}
        >
        <OverflowList class={fileData.collapseToc ? "collapsed toc-content" : "toc-content"}>
          {fileData.toc.map((tocEntry) => (
            <li key={tocEntry.slug} class={`depth-${tocEntry.depth}`}>
              <a href={`#${tocEntry.slug}`} data-for={tocEntry.slug}>

@@ -9,7 +9,6 @@ import { visit } from "unist-util-visit"
import { Root, Element, ElementContent } from "hast"
import { GlobalConfiguration } from "../cfg"
import { i18n } from "../i18n"
import { styleText } from "util"

interface RenderComponents {
  head: QuartzComponent
@@ -69,7 +68,6 @@ function renderTranscludes(
  cfg: GlobalConfiguration,
  slug: FullSlug,
  componentData: QuartzComponentProps,
  visited: Set<FullSlug>,
) {
  // process transcludes in componentData
  visit(root, "element", (node, _index, _parent) => {
@@ -78,30 +76,6 @@ function renderTranscludes(
    if (classNames.includes("transclude")) {
      const inner = node.children[0] as Element
      const transcludeTarget = (inner.properties["data-slug"] ?? slug) as FullSlug
      if (visited.has(transcludeTarget)) {
        console.warn(
          styleText(
            "yellow",
            `Warning: Skipping circular transclusion: ${slug} -> ${transcludeTarget}`,
          ),
        )
        node.children = [
          {
            type: "element",
            tagName: "p",
            properties: { style: "color: var(--secondary);" },
            children: [
              {
                type: "text",
                value: `Circular transclusion detected: ${transcludeTarget}`,
              },
            ],
          },
        ]
        return
      }
      visited.add(transcludeTarget)

      const page = componentData.allFiles.find((f) => f.slug === transcludeTarget)
      if (!page) {
        return
@@ -222,8 +196,7 @@ export function renderPage(
  // make a deep copy of the tree so we don't remove the transclusion references
  // for the file cached in contentMap in build.ts
  const root = clone(componentData.tree) as Root
  const visited = new Set<FullSlug>([slug])
  renderTranscludes(root, cfg, slug, componentData, visited)
  renderTranscludes(root, cfg, slug, componentData)

  // set componentData.tree to the edited html that has transclusions rendered
  componentData.tree = root
@@ -258,9 +231,8 @@ export function renderPage(
  )

  const lang = componentData.fileData.frontmatter?.lang ?? cfg.locale?.split("-")[0] ?? "en"
  const direction = i18n(cfg.locale).direction ?? "ltr"
  const doc = (
    <html lang={lang} dir={direction}>
    <html lang={lang}>
      <Head {...componentData} />
      <body data-slug={slug}>
        <div id="quartz-root" class="page">
@@ -294,7 +266,7 @@ export function renderPage(
      </body>
      {pageResources.js
        .filter((resource) => resource.loadTime === "afterDOMReady")
        .map((res) => JSResourceToScriptElement(res, true))}
        .map((res) => JSResourceToScriptElement(res))}
    </html>
  )

@@ -111,10 +111,6 @@ function createFolderNode(
  const folderPath = node.slug
  folderContainer.dataset.folderpath = folderPath

  if (currentSlug === folderPath) {
    folderContainer.classList.add("active")
  }

  if (opts.folderClickBehavior === "link") {
    // Replace button with link for link behavior
    const button = titleContainer.querySelector(".folder-button") as HTMLElement

@@ -29,31 +29,17 @@ class DiagramPanZoom {
    const mouseDownHandler = this.onMouseDown.bind(this)
    const mouseMoveHandler = this.onMouseMove.bind(this)
    const mouseUpHandler = this.onMouseUp.bind(this)

    // Touch drag events
    const touchStartHandler = this.onTouchStart.bind(this)
    const touchMoveHandler = this.onTouchMove.bind(this)
    const touchEndHandler = this.onTouchEnd.bind(this)

    const resizeHandler = this.resetTransform.bind(this)

    this.container.addEventListener("mousedown", mouseDownHandler)
    document.addEventListener("mousemove", mouseMoveHandler)
    document.addEventListener("mouseup", mouseUpHandler)

    this.container.addEventListener("touchstart", touchStartHandler, { passive: false })
    document.addEventListener("touchmove", touchMoveHandler, { passive: false })
    document.addEventListener("touchend", touchEndHandler)

    window.addEventListener("resize", resizeHandler)

    this.cleanups.push(
      () => this.container.removeEventListener("mousedown", mouseDownHandler),
      () => document.removeEventListener("mousemove", mouseMoveHandler),
      () => document.removeEventListener("mouseup", mouseUpHandler),
      () => this.container.removeEventListener("touchstart", touchStartHandler),
      () => document.removeEventListener("touchmove", touchMoveHandler),
      () => document.removeEventListener("touchend", touchEndHandler),
      () => window.removeEventListener("resize", resizeHandler),
    )
  }
@@ -113,30 +99,6 @@ class DiagramPanZoom {
    this.container.style.cursor = "grab"
  }

  private onTouchStart(e: TouchEvent) {
    if (e.touches.length !== 1) return
    this.isDragging = true
    const touch = e.touches[0]
    this.startPan = { x: touch.clientX - this.currentPan.x, y: touch.clientY - this.currentPan.y }
  }

  private onTouchMove(e: TouchEvent) {
    if (!this.isDragging || e.touches.length !== 1) return
    e.preventDefault() // Prevent scrolling

    const touch = e.touches[0]
    this.currentPan = {
      x: touch.clientX - this.startPan.x,
      y: touch.clientY - this.startPan.y,
    }

    this.updateTransform()
  }

  private onTouchEnd() {
    this.isDragging = false
  }

  private zoom(delta: number) {
    const newScale = Math.min(Math.max(this.scale + delta, this.MIN_SCALE), this.MAX_SCALE)

@@ -158,15 +120,11 @@ class DiagramPanZoom {
  }

  private resetTransform() {
    const svg = this.content.querySelector("svg")!
    const rect = svg.getBoundingClientRect()
    const width = rect.width / this.scale
    const height = rect.height / this.scale

    this.scale = 1
    const svg = this.content.querySelector("svg")!
    this.currentPan = {
      x: (this.container.clientWidth - width) / 2,
      y: (this.container.clientHeight - height) / 2,
      x: svg.getBoundingClientRect().width / 2,
      y: svg.getBoundingClientRect().height / 2,
    }
    this.updateTransform()
  }

@@ -1,4 +1,4 @@
import FlexSearch, { DefaultDocumentSearchResults } from "flexsearch"
import FlexSearch from "flexsearch"
import { ContentDetails } from "../../plugins/emitters/contentIndex"
import { registerEscapeHandler, removeAllChildren } from "./util"
import { FullSlug, normalizeRelativeURLs, resolveRelative } from "../../util/path"
@@ -9,59 +9,15 @@ interface Item {
  title: string
  content: string
  tags: string[]
  [key: string]: any
}

// Can be expanded with things like "term" in the future
type SearchType = "basic" | "tags"
let searchType: SearchType = "basic"
let currentSearchTerm: string = ""
const encoder = (str: string): string[] => {
  const tokens: string[] = []
  let bufferStart = -1
  let bufferEnd = -1
  const lower = str.toLowerCase()

  let i = 0
  for (const char of lower) {
    const code = char.codePointAt(0)!

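    // Unicode blocks, in order: Hiragana, Katakana, CJK Unified Ideographs,
    // Hangul syllables, and CJK Extension B.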
    const isCJK =
      (code >= 0x3040 && code <= 0x309f) ||
      (code >= 0x30a0 && code <= 0x30ff) ||
      (code >= 0x4e00 && code <= 0x9fff) ||
      (code >= 0xac00 && code <= 0xd7af) ||
      (code >= 0x20000 && code <= 0x2a6df)

    const isWhitespace = code === 32 || code === 9 || code === 10 || code === 13

    if (isCJK) {
      if (bufferStart !== -1) {
        tokens.push(lower.slice(bufferStart, bufferEnd))
        bufferStart = -1
      }
      tokens.push(char)
    } else if (isWhitespace) {
      if (bufferStart !== -1) {
        tokens.push(lower.slice(bufferStart, bufferEnd))
        bufferStart = -1
      }
    } else {
      if (bufferStart === -1) bufferStart = i
      bufferEnd = i + char.length
    }

    i += char.length
  }

  if (bufferStart !== -1) {
    tokens.push(lower.slice(bufferStart))
  }

  return tokens
}

const encoder = (str: string) => str.toLowerCase().split(/([^a-z]|[^\x00-\x7F])/)
let index = new FlexSearch.Document<Item>({
  charset: "latin:extra",
  encode: encoder,
  document: {
    id: "id",
@@ -264,7 +220,7 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:

  // If search is active, then we will render the first result and display accordingly
  if (!container.classList.contains("active")) return
  if (e.key === "Enter" && !e.isComposing) {
  if (e.key === "Enter") {
    // If result has focus, navigate to that one, otherwise pick first result
    if (results.contains(document.activeElement)) {
      const active = document.activeElement as HTMLInputElement
@@ -441,7 +397,7 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
  searchLayout.classList.toggle("display-results", currentSearchTerm !== "")
  searchType = currentSearchTerm.startsWith("#") ? "tags" : "basic"

  let searchResults: DefaultDocumentSearchResults<Item>
  let searchResults: FlexSearch.SimpleDocumentSearchResultSetUnit[]
  if (searchType === "tags") {
    currentSearchTerm = currentSearchTerm.substring(1).trim()
    const separatorIndex = currentSearchTerm.indexOf(" ")
@@ -454,7 +410,7 @@ async function setupSearch(searchElement: Element, currentSlug: FullSlug, data:
      // return at least 10000 documents, so it is enough to filter them by tag (implemented in flexsearch)
      limit: Math.max(numSearchResults, 10000),
      index: ["title", "content"],
      tag: { tags: tag },
      tag: tag,
    })
    for (let searchResult of searchResults) {
      searchResult.result = searchResult.result.slice(0, numSearchResults)

@@ -1,163 +0,0 @@
import test, { describe } from "node:test"
import assert from "node:assert"

// Inline the encoder function from search.inline.ts for testing
const encoder = (str: string): string[] => {
  const tokens: string[] = []
  let bufferStart = -1
  let bufferEnd = -1
  const lower = str.toLowerCase()

  let i = 0
  for (const char of lower) {
    const code = char.codePointAt(0)!

    const isCJK =
      (code >= 0x3040 && code <= 0x309f) ||
      (code >= 0x30a0 && code <= 0x30ff) ||
      (code >= 0x4e00 && code <= 0x9fff) ||
      (code >= 0xac00 && code <= 0xd7af) ||
      (code >= 0x20000 && code <= 0x2a6df)

    const isWhitespace = code === 32 || code === 9 || code === 10 || code === 13

    if (isCJK) {
      if (bufferStart !== -1) {
        tokens.push(lower.slice(bufferStart, bufferEnd))
        bufferStart = -1
      }
      tokens.push(char)
    } else if (isWhitespace) {
      if (bufferStart !== -1) {
        tokens.push(lower.slice(bufferStart, bufferEnd))
        bufferStart = -1
      }
    } else {
      if (bufferStart === -1) bufferStart = i
      bufferEnd = i + char.length
    }

    i += char.length
  }

  if (bufferStart !== -1) {
    tokens.push(lower.slice(bufferStart))
  }

  return tokens
}

describe("search encoder", () => {
  describe("English text", () => {
    test("should tokenize simple English words", () => {
      const result = encoder("hello world")
      assert.deepStrictEqual(result, ["hello", "world"])
    })

    test("should handle multiple spaces", () => {
      const result = encoder("hello   world")
      assert.deepStrictEqual(result, ["hello", "world"])
    })

    test("should handle tabs and newlines", () => {
      const result = encoder("hello\tworld\ntest")
      assert.deepStrictEqual(result, ["hello", "world", "test"])
    })

    test("should lowercase all text", () => {
      const result = encoder("Hello WORLD Test")
      assert.deepStrictEqual(result, ["hello", "world", "test"])
    })
  })

  describe("CJK text", () => {
    test("should tokenize Japanese Hiragana character by character", () => {
      const result = encoder("こんにちは")
      assert.deepStrictEqual(result, ["こ", "ん", "に", "ち", "は"])
    })

    test("should tokenize Japanese Katakana character by character", () => {
      const result = encoder("コントロール")
      assert.deepStrictEqual(result, ["コ", "ン", "ト", "ロ", "ー", "ル"])
    })

    test("should tokenize Japanese Kanji character by character", () => {
      const result = encoder("日本語")
      assert.deepStrictEqual(result, ["日", "本", "語"])
    })

    test("should tokenize Korean Hangul character by character", () => {
      const result = encoder("안녕하세요")
      assert.deepStrictEqual(result, ["안", "녕", "하", "세", "요"])
    })

    test("should tokenize Chinese characters character by character", () => {
      const result = encoder("你好世界")
      assert.deepStrictEqual(result, ["你", "好", "世", "界"])
    })

    test("should handle mixed Hiragana/Katakana/Kanji", () => {
      const result = encoder("て以来")
      assert.deepStrictEqual(result, ["て", "以", "来"])
    })
  })

  describe("Mixed CJK and English", () => {
    test("should handle Japanese with English words", () => {
      const result = encoder("hello 世界")
      assert.deepStrictEqual(result, ["hello", "世", "界"])
    })

    test("should handle English with Japanese words", () => {
      const result = encoder("世界 hello world")
      assert.deepStrictEqual(result, ["世", "界", "hello", "world"])
    })

    test("should handle complex mixed content", () => {
      const result = encoder("これはtest文章です")
      assert.deepStrictEqual(result, ["こ", "れ", "は", "test", "文", "章", "で", "す"])
    })

    test("should handle mixed Korean and English", () => {
      const result = encoder("hello 안녕 world")
      assert.deepStrictEqual(result, ["hello", "안", "녕", "world"])
    })

    test("should handle mixed Chinese and English", () => {
      const result = encoder("你好 world")
      assert.deepStrictEqual(result, ["你", "好", "world"])
    })
  })

  describe("Edge cases", () => {
    test("should handle empty string", () => {
      const result = encoder("")
      assert.deepStrictEqual(result, [])
    })

    test("should handle only whitespace", () => {
      const result = encoder(" \t\n ")
      assert.deepStrictEqual(result, [])
    })

    test("should handle single character", () => {
      const result = encoder("a")
      assert.deepStrictEqual(result, ["a"])
    })

    test("should handle single CJK character", () => {
      const result = encoder("あ")
      assert.deepStrictEqual(result, ["あ"])
    })

    test("should handle CJK with trailing whitespace", () => {
|
||||
const result = encoder("日本語 ")
|
||||
assert.deepStrictEqual(result, ["日", "本", "語"])
|
||||
})
|
||||
|
||||
test("should handle English with trailing whitespace", () => {
|
||||
const result = encoder("hello ")
|
||||
assert.deepStrictEqual(result, ["hello"])
|
||||
})
|
||||
})
|
||||
})
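The deleted test file above depends only on Node's built-in node:test runner and assert module, so no separate test framework was involved. Assuming a TypeScript-capable loader such as tsx is available (an assumption; the repository's actual test script is not part of this compare), it could be run directly with:

    node --import tsx --test path/to/search.test.ts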

@@ -115,9 +115,9 @@ async function _navigate(url: URL, isBack: boolean = false) {
  }

  // now, patch head, re-executing scripts
  const elementsToRemove = document.head.querySelectorAll(":not([data-persist])")
  const elementsToRemove = document.head.querySelectorAll(":not([spa-preserve])")
  elementsToRemove.forEach((el) => el.remove())
  const elementsToAdd = html.head.querySelectorAll(":not([data-persist])")
  const elementsToAdd = html.head.querySelectorAll(":not([spa-preserve])")
  elementsToAdd.forEach((el) => document.head.appendChild(el))

  // delay setting the url until now
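For context on the attribute rename above: on every client-side navigation, head elements that lack the persistence attribute are removed and re-added from the incoming document, which re-executes their scripts. Whichever name is in effect on a given side of this compare (data-persist here, spa-preserve on the other), the contract is the same. A minimal sketch of opting an element out of the swap; the script URL is hypothetical:

    // An element carrying the persistence attribute survives _navigate();
    // everything else in <head> is torn down and rebuilt on each SPA hop.
    const script = document.createElement("script")
    script.src = "/static/analytics.js" // hypothetical src, for illustration only
    script.setAttribute("data-persist", "true")
    document.head.appendChild(script)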

@@ -5,7 +5,7 @@
  background: none;
  border: none;
  width: 20px;
  height: 32px;
  height: 20px;
  margin: 0;
  text-align: inherit;
  flex-shrink: 0;

@@ -6,7 +6,6 @@
  & > :not(.sidebar.left:has(.explorer)) {
    transition: transform 300ms ease-in-out;
  }

  &.lock-scroll > :not(.sidebar.left:has(.explorer)) {
    transform: translateX(100dvw);
    transition: transform 300ms ease-in-out;
@@ -34,10 +33,8 @@

  min-height: 1.2rem;
  flex: 0 1 auto;

  &.collapsed {
    flex: 0 1 1.2rem;

    & .fold {
      transform: rotateZ(-90deg);
    }
@@ -121,10 +118,7 @@ button.desktop-explorer {
  list-style: none;
  margin: 0;
  padding: 0;

  &.explorer-ul {
    overscroll-behavior: contain;
  }
  overscroll-behavior: contain;

  & li > a {
    color: var(--dark);
@@ -139,16 +133,12 @@ button.desktop-explorer {
}

.folder-outer {
  visibility: collapse;
  display: grid;
  grid-template-rows: 0fr;
  transition-property: grid-template-rows, visibility;
  transition-duration: 0.3s;
  transition-timing-function: ease-in-out;
  transition: grid-template-rows 0.3s ease-in-out;
}

.folder-outer.open {
  visibility: visible;
  grid-template-rows: 1fr;
}

@@ -275,8 +265,6 @@ li:has(> .folder-outer:not(.open)) > .folder-container > svg {

.mobile-no-scroll {
  @media all and ($mobile) {
    .explorer-content > .explorer-ul {
      overscroll-behavior: contain;
    }
    overscroll-behavior: none;
  }
}

@@ -65,6 +65,7 @@ pre {
  overflow: hidden;

  & > .mermaid-content {
    padding: 2rem;
    position: relative;
    transform-origin: 0 0;
    transition: transform 0.1s ease;

@@ -5,7 +5,7 @@
  background: none;
  border: none;
  width: 20px;
  height: 32px;
  height: 20px;
  margin: 0;
  text-align: inherit;
  flex-shrink: 0;

@@ -8,24 +8,24 @@
  }

  & > .search-button {
    background-color: transparent;
    border: 1px var(--lightgray) solid;
    background-color: color-mix(in srgb, var(--lightgray) 60%, var(--light));
    border: none;
    border-radius: 4px;
    font-family: inherit;
    font-size: inherit;
    height: 2rem;
    padding: 0 1rem 0 0;
    padding: 0;
    display: flex;
    align-items: center;
    text-align: inherit;
    cursor: pointer;
    white-space: nowrap;
    width: 100%;
    justify-content: space-between;

    & > p {
      display: inline;
      color: var(--gray);
      text-wrap: unset;
      padding: 0 1rem;
    }

    & svg {
@@ -36,7 +36,7 @@

      .search-path {
        stroke: var(--darkgray);
        stroke-width: 1.5px;
        stroke-width: 2px;
        transition: stroke 0.5s ease;
      }
    }

@@ -27,8 +27,6 @@ import lt from "./locales/lt-LT"
import fi from "./locales/fi-FI"
import no from "./locales/nb-NO"
import id from "./locales/id-ID"
import kk from "./locales/kk-KZ"
import he from "./locales/he-IL"

export const TRANSLATIONS = {
  "en-US": enUs,
@@ -80,8 +78,6 @@ export const TRANSLATIONS = {
  "fi-FI": fi,
  "nb-NO": no,
  "id-ID": id,
  "kk-KZ": kk,
  "he-IL": he,
} as const

export const defaultTranslation = "en-US"

@@ -5,7 +5,6 @@ export default {
    title: "غير معنون",
    description: "لم يتم تقديم أي وصف",
  },
  direction: "rtl" as const,
  components: {
    callout: {
      note: "ملاحظة",

@@ -15,7 +15,7 @@ export default {
      success: "Erfolg",
      question: "Frage",
      warning: "Warnung",
      failure: "Fehlgeschlagen",
      failure: "Misserfolg",
      danger: "Gefahr",
      bug: "Fehler",
      example: "Beispiel",
@@ -57,7 +57,7 @@ export default {
      title: "Inhaltsverzeichnis",
    },
    contentMeta: {
      readingTime: ({ minutes }) => `${minutes} Min. Lesezeit`,
      readingTime: ({ minutes }) => `${minutes} min read`,
    },
  },
  pages: {
@@ -68,7 +68,7 @@ export default {
    error: {
      title: "Nicht gefunden",
      notFound: "Diese Seite ist entweder nicht öffentlich oder existiert nicht.",
      home: "Zur Startseite",
      home: "Return to Homepage",
    },
    folderContent: {
      folder: "Ordner",

@@ -21,7 +21,6 @@ export interface Translation {
    title: string
    description: string
  }
  direction?: "ltr" | "rtl"
  components: {
    callout: CalloutTranslation
    backlinks: {

@@ -5,7 +5,6 @@ export default {
    title: "بدون عنوان",
    description: "توضیح خاصی اضافه نشده است",
  },
  direction: "rtl" as const,
  components: {
    callout: {
      note: "یادداشت",

@@ -1,88 +0,0 @@
import { Translation } from "./definition"

export default {
  propertyDefaults: {
    title: "ללא כותרת",
    description: "לא סופק תיאור",
  },
  direction: "rtl" as const,
  components: {
    callout: {
      note: "הערה",
      abstract: "תקציר",
      info: "מידע",
      todo: "לעשות",
      tip: "טיפ",
      success: "הצלחה",
      question: "שאלה",
      warning: "אזהרה",
      failure: "כשלון",
      danger: "סכנה",
      bug: "באג",
      example: "דוגמה",
      quote: "ציטוט",
    },
    backlinks: {
      title: "קישורים חוזרים",
      noBacklinksFound: "לא נמצאו קישורים חוזרים",
    },
    themeToggle: {
      lightMode: "מצב בהיר",
      darkMode: "מצב כהה",
    },
    readerMode: {
      title: "מצב קריאה",
    },
    explorer: {
      title: "סייר",
    },
    footer: {
      createdWith: "נוצר באמצעות",
    },
    graph: {
      title: "מבט גרף",
    },
    recentNotes: {
      title: "הערות אחרונות",
      seeRemainingMore: ({ remaining }) => `עיין ב ${remaining} נוספים →`,
    },
    transcludes: {
      transcludeOf: ({ targetSlug }) => `מצוטט מ ${targetSlug}`,
      linkToOriginal: "קישור למקורי",
    },
    search: {
      title: "חיפוש",
      searchBarPlaceholder: "חפשו משהו",
    },
    tableOfContents: {
      title: "תוכן עניינים",
    },
    contentMeta: {
      readingTime: ({ minutes }) => `${minutes} דקות קריאה`,
    },
  },
  pages: {
    rss: {
      recentNotes: "הערות אחרונות",
      lastFewNotes: ({ count }) => `${count} הערות אחרונות`,
    },
    error: {
      title: "לא נמצא",
      notFound: "העמוד הזה פרטי או לא קיים.",
      home: "חזרה לעמוד הבית",
    },
    folderContent: {
      folder: "תיקייה",
      itemsUnderFolder: ({ count }) =>
        count === 1 ? "פריט אחד תחת תיקייה זו." : `${count} פריטים תחת תיקייה זו.`,
    },
    tagContent: {
      tag: "תגית",
      tagIndex: "מפתח התגיות",
      itemsUnderTag: ({ count }) =>
        count === 1 ? "פריט אחד עם תגית זו." : `${count} פריטים עם תגית זו.`,
      showingFirst: ({ count }) => `מראה את ה-${count} תגיות הראשונות.`,
      totalTags: ({ count }) => `${count} תגיות נמצאו סך הכל.`,
    },
  },
} as const satisfies Translation
@@ -8,7 +8,7 @@ export default {
  components: {
    callout: {
      note: "Nota",
      abstract: "Abstract",
      abstract: "Astratto",
      info: "Info",
      todo: "Da fare",
      tip: "Consiglio",
@@ -17,7 +17,7 @@ export default {
      warning: "Attenzione",
      failure: "Errore",
      danger: "Pericolo",
      bug: "Problema",
      bug: "Bug",
      example: "Esempio",
      quote: "Citazione",
    },
@@ -43,11 +43,10 @@ export default {
    },
    recentNotes: {
      title: "Note recenti",
      seeRemainingMore: ({ remaining }) =>
        remaining === 1 ? "Vedi 1 altra →" : `Vedi altre ${remaining} →`,
      seeRemainingMore: ({ remaining }) => `Vedi ${remaining} altro →`,
    },
    transcludes: {
      transcludeOf: ({ targetSlug }) => `Inclusione di ${targetSlug}`,
      transcludeOf: ({ targetSlug }) => `Transclusione di ${targetSlug}`,
      linkToOriginal: "Link all'originale",
    },
    search: {
@@ -55,16 +54,16 @@ export default {
      searchBarPlaceholder: "Cerca qualcosa",
    },
    tableOfContents: {
      title: "Indice",
      title: "Tabella dei contenuti",
    },
    contentMeta: {
      readingTime: ({ minutes }) => (minutes === 1 ? "1 minuto" : `${minutes} minuti`),
      readingTime: ({ minutes }) => `${minutes} minuti`,
    },
  },
  pages: {
    rss: {
      recentNotes: "Note recenti",
      lastFewNotes: ({ count }) => (count === 1 ? "Ultima nota" : `Ultime ${count} note`),
      lastFewNotes: ({ count }) => `Ultime ${count} note`,
    },
    error: {
      title: "Non trovato",
@@ -81,9 +80,8 @@ export default {
      tagIndex: "Indice etichette",
      itemsUnderTag: ({ count }) =>
        count === 1 ? "1 oggetto con questa etichetta." : `${count} oggetti con questa etichetta.`,
      showingFirst: ({ count }) => (count === 1 ? "Prima etichetta." : `Prime ${count} etichette.`),
      totalTags: ({ count }) =>
        count === 1 ? "Trovata 1 etichetta in totale." : `Trovate ${count} etichette totali.`,
      showingFirst: ({ count }) => `Prime ${count} etichette.`,
      totalTags: ({ count }) => `Trovate ${count} etichette totali.`,
    },
  },
} as const satisfies Translation

@@ -1,87 +0,0 @@
import { Translation } from "./definition"

export default {
  propertyDefaults: {
    title: "Атаусыз",
    description: "Сипаттама берілмеген",
  },
  components: {
    callout: {
      note: "Ескерту",
      abstract: "Аннотация",
      info: "Ақпарат",
      todo: "Істеу керек",
      tip: "Кеңес",
      success: "Сәттілік",
      question: "Сұрақ",
      warning: "Ескерту",
      failure: "Қате",
      danger: "Қауіп",
      bug: "Қате",
      example: "Мысал",
      quote: "Дәйексөз",
    },
    backlinks: {
      title: "Артқа сілтемелер",
      noBacklinksFound: "Артқа сілтемелер табылмады",
    },
    themeToggle: {
      lightMode: "Жарық режимі",
      darkMode: "Қараңғы режим",
    },
    readerMode: {
      title: "Оқу режимі",
    },
    explorer: {
      title: "Зерттеуші",
    },
    footer: {
      createdWith: "Құрастырылған құрал:",
    },
    graph: {
      title: "Граф көрінісі",
    },
    recentNotes: {
      title: "Соңғы жазбалар",
      seeRemainingMore: ({ remaining }) => `Тағы ${remaining} жазбаны қарау →`,
    },
    transcludes: {
      transcludeOf: ({ targetSlug }) => `${targetSlug} кірістіру`,
      linkToOriginal: "Бастапқыға сілтеме",
    },
    search: {
      title: "Іздеу",
      searchBarPlaceholder: "Бірдеңе іздеу",
    },
    tableOfContents: {
      title: "Мазмұны",
    },
    contentMeta: {
      readingTime: ({ minutes }) => `${minutes} мин оқу`,
    },
  },
  pages: {
    rss: {
      recentNotes: "Соңғы жазбалар",
      lastFewNotes: ({ count }) => `Соңғы ${count} жазба`,
    },
    error: {
      title: "Табылмады",
      notFound: "Бұл бет жеке немесе жоқ болуы мүмкін.",
      home: "Басты бетке оралу",
    },
    folderContent: {
      folder: "Қалта",
      itemsUnderFolder: ({ count }) =>
        count === 1 ? "Бұл қалтада 1 элемент бар." : `Бұл қалтада ${count} элемент бар.`,
    },
    tagContent: {
      tag: "Тег",
      tagIndex: "Тегтер индексі",
      itemsUnderTag: ({ count }) =>
        count === 1 ? "Бұл тегпен 1 элемент." : `Бұл тегпен ${count} элемент.`,
      showingFirst: ({ count }) => `Алғашқы ${count} тег көрсетілуде.`,
      totalTags: ({ count }) => `Барлығы ${count} тег табылды.`,
    },
  },
} as const satisfies Translation
@@ -51,7 +51,7 @@ export default {
    },
    search: {
      title: "Szukaj",
      searchBarPlaceholder: "Wpisz frazę wyszukiwania",
      searchBarPlaceholder: "Search for something",
    },
    tableOfContents: {
      title: "Spis treści",

@@ -3,83 +3,85 @@ import { Translation } from "./definition"
export default {
  propertyDefaults: {
    title: "Không có tiêu đề",
    description: "Không có mô tả",
    description: "Không có mô tả được cung cấp",
  },
  components: {
    callout: {
      note: "Ghi chú",
      abstract: "Tổng quan",
      note: "Ghi Chú",
      abstract: "Tóm Tắt",
      info: "Thông tin",
      todo: "Cần phải làm",
      tip: "Gợi ý",
      success: "Thành công",
      question: "Câu hỏi",
      warning: "Cảnh báo",
      failure: "Thất bại",
      danger: "Nguy hiểm",
      todo: "Cần Làm",
      tip: "Gợi Ý",
      success: "Thành Công",
      question: "Nghi Vấn",
      warning: "Cảnh Báo",
      failure: "Thất Bại",
      danger: "Nguy Hiểm",
      bug: "Lỗi",
      example: "Ví dụ",
      quote: "Trích dẫn",
      example: "Ví Dụ",
      quote: "Trích Dẫn",
    },
    backlinks: {
      title: "Liên kết ngược",
      noBacklinksFound: "Không có liên kết ngược nào",
      title: "Liên Kết Ngược",
      noBacklinksFound: "Không có liên kết ngược được tìm thấy",
    },
    themeToggle: {
      lightMode: "Chế độ sáng",
      darkMode: "Chế độ tối",
      lightMode: "Sáng",
      darkMode: "Tối",
    },
    readerMode: {
      title: "Chế độ đọc",
    },
    explorer: {
      title: "Nội dung",
      title: "Trong bài này",
    },
    footer: {
      createdWith: "Được tạo bằng",
      createdWith: "Được tạo bởi",
    },
    graph: {
      title: "Sơ đồ",
      title: "Biểu Đồ",
    },
    recentNotes: {
      title: "Ghi chú gần đây",
      seeRemainingMore: ({ remaining }) => `Xem thêm ${remaining} ghi chú →`,
      title: "Bài viết gần đây",
      seeRemainingMore: ({ remaining }) => `Xem ${remaining} thêm →`,
    },
    transcludes: {
      transcludeOf: ({ targetSlug }) => `Trích dẫn toàn bộ từ ${targetSlug}`,
      linkToOriginal: "Xem trang gốc",
      transcludeOf: ({ targetSlug }) => `Bao gồm ${targetSlug}`,
      linkToOriginal: "Liên Kết Gốc",
    },
    search: {
      title: "Tìm",
      title: "Tìm Kiếm",
      searchBarPlaceholder: "Tìm kiếm thông tin",
    },
    tableOfContents: {
      title: "Mục lục",
      title: "Bảng Nội Dung",
    },
    contentMeta: {
      readingTime: ({ minutes }) => `${minutes} phút đọc`,
      readingTime: ({ minutes }) => `đọc ${minutes} phút`,
    },
  },
  pages: {
    rss: {
      recentNotes: "Ghi chú gần đây",
      lastFewNotes: ({ count }) => `${count} Trang gần đây`,
      recentNotes: "Những bài gần đây",
      lastFewNotes: ({ count }) => `${count} Bài gần đây`,
    },
    error: {
      title: "Không tìm thấy",
      notFound: "Trang này riêng tư hoặc không tồn tại.",
      home: "Về trang chủ",
      title: "Không Tìm Thấy",
      notFound: "Trang này được bảo mật hoặc không tồn tại.",
      home: "Trở về trang chủ",
    },
    folderContent: {
      folder: "Thư mục",
      itemsUnderFolder: ({ count }) => `Có ${count} trang trong thư mục này.`,
      folder: "Thư Mục",
      itemsUnderFolder: ({ count }) =>
        count === 1 ? "1 mục trong thư mục này." : `${count} mục trong thư mục này.`,
    },
    tagContent: {
      tag: "Thẻ",
      tagIndex: "Danh sách thẻ",
      itemsUnderTag: ({ count }) => `Có ${count} trang gắn thẻ này.`,
      showingFirst: ({ count }) => `Đang hiển thị ${count} trang đầu tiên.`,
      totalTags: ({ count }) => `Có tổng cộng ${count} thẻ.`,
      tagIndex: "Thẻ Mục Lục",
      itemsUnderTag: ({ count }) =>
        count === 1 ? "1 mục gắn thẻ này." : `${count} mục gắn thẻ này.`,
      showingFirst: ({ count }) => `Hiển thị trước ${count} thẻ.`,
      totalTags: ({ count }) => `Tìm thấy ${count} thẻ tổng cộng.`,
    },
  },
} as const satisfies Translation

@@ -1,7 +1,7 @@
import { FilePath, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types"
import { write } from "./helpers"
import fs from "fs"
import { styleText } from "util"
import { FullSlug } from "../../util/path"

export function extractDomainFromBaseUrl(baseUrl: string) {
  const url = new URL(`https://${baseUrl}`)
@@ -10,25 +10,20 @@ export function extractDomainFromBaseUrl(baseUrl: string) {

export const CNAME: QuartzEmitterPlugin = () => ({
  name: "CNAME",
  async emit(ctx) {
    if (!ctx.cfg.configuration.baseUrl) {
  async emit({ argv, cfg }) {
    if (!cfg.configuration.baseUrl) {
      console.warn(
        styleText("yellow", "CNAME emitter requires `baseUrl` to be set in your configuration"),
      )
      return []
    }
    const content = extractDomainFromBaseUrl(ctx.cfg.configuration.baseUrl)
    const path = joinSegments(argv.output, "CNAME")
    const content = extractDomainFromBaseUrl(cfg.configuration.baseUrl)
    if (!content) {
      return []
    }

    const path = await write({
      ctx,
      content,
      slug: "CNAME" as FullSlug,
      ext: "",
    })
    return [path]
    await fs.promises.writeFile(path, content)
    return [path] as FilePath[]
  },
  async *partialEmit() {},
})
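A note on extractDomainFromBaseUrl above: it leans on the WHATWG URL parser, so a baseUrl that includes a path still reduces to a bare host for the CNAME file. The hunk cuts the function off after the URL construction, so the return expression is not visible here; assuming it returns the parsed hostname, the behaviour would be:

    // Sketch under the stated assumption that the function returns url.hostname:
    extractDomainFromBaseUrl("example.com")        // "example.com"
    extractDomainFromBaseUrl("example.com/notes")  // "example.com"

GitHub Pages expects exactly that bare domain in the CNAME file, which is why the emitter writes the extracted host rather than the raw baseUrl.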
@@ -201,56 +201,6 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
      })(window, document, "clarity", "script", "${cfg.analytics.projectId}");`
      document.head.appendChild(clarityScript)
    `)
  } else if (cfg.analytics?.provider === "matomo") {
    componentResources.afterDOMLoaded.push(`
      const matomoScript = document.createElement("script");
      matomoScript.innerHTML = \`
        let _paq = window._paq = window._paq || [];

        // Track SPA navigation
        // https://developer.matomo.org/guides/spa-tracking
        document.addEventListener("nav", () => {
          _paq.push(['setCustomUrl', location.pathname]);
          _paq.push(['setDocumentTitle', document.title]);
          _paq.push(['trackPageView']);
        });

        _paq.push(['trackPageView']);
        _paq.push(['enableLinkTracking']);
        (function() {
          const u="//${cfg.analytics.host}/";
          _paq.push(['setTrackerUrl', u+'matomo.php']);
          _paq.push(['setSiteId', ${cfg.analytics.siteId}]);
          const d=document, g=d.createElement('script'), s=d.getElementsByTagName
          ('script')[0];
          g.type='text/javascript'; g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
        })();
      \`
      document.head.appendChild(matomoScript);
    `)
  } else if (cfg.analytics?.provider === "vercel") {
    /**
     * script from {@link https://vercel.com/docs/analytics/quickstart?framework=html#add-the-script-tag-to-your-site|Vercel Docs}
     */
    componentResources.beforeDOMLoaded.push(`
      window.va = window.va || function () { (window.vaq = window.vaq || []).push(arguments); };
    `)
    componentResources.afterDOMLoaded.push(`
      const vercelInsightsScript = document.createElement("script")
      vercelInsightsScript.src = "/_vercel/insights/script.js"
      vercelInsightsScript.defer = true
      document.head.appendChild(vercelInsightsScript)
    `)
  } else if (cfg.analytics?.provider === "rybbit") {
    componentResources.afterDOMLoaded.push(`
      const rybbitScript = document.createElement("script");
      rybbitScript.src = "${cfg.analytics.host ?? "https://app.rybbit.io"}/api/script.js";
      rybbitScript.setAttribute("data-site-id", "${cfg.analytics.siteId}");
      rybbitScript.async = true;
      rybbitScript.defer = true;

      document.head.appendChild(rybbitScript);
    `)
  }

  if (cfg.enableSPA) {

@@ -7,7 +7,6 @@ import { dirname } from "path"
export const Static: QuartzEmitterPlugin = () => ({
  name: "Static",
  async *emit({ argv, cfg }) {
    // Copy Quartz's own internal static assets (quartz/static/) → output/static/
    const staticPath = joinSegments(QUARTZ, "static")
    const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
    const outputStaticPath = joinSegments(argv.output, "static")
@@ -19,21 +18,6 @@ export const Static: QuartzEmitterPlugin = () => ({
      await fs.promises.copyFile(src, dest)
      yield dest
    }

    // Copy user-facing static assets (static/) → output/ preserving paths.
    // This mirrors Hugo's convention: static/ox-hugo/foo.png is served at /ox-hugo/foo.png,
    // which matches the src="/ox-hugo/..." paths that ox-hugo writes into exported markdown.
    const userStaticPath = "static"
    if (fs.existsSync(userStaticPath)) {
      const userFps = await glob("**", userStaticPath, cfg.configuration.ignorePatterns, false)
      for (const fp of userFps) {
        const src = joinSegments(userStaticPath, fp) as FilePath
        const dest = joinSegments(argv.output, fp) as FilePath
        await fs.promises.mkdir(dirname(dest), { recursive: true })
        await fs.promises.copyFile(src, dest)
        yield dest
      }
    }
  },
  async *partialEmit() {},
})
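The comments in the hunk above pin down a convention worth spelling out: user-level files under static/ are copied to the output root with their relative paths preserved, while Quartz's internal quartz/static/ assets keep landing under <output>/static/. Assuming a hypothetical vault layout, the mapping looks like:

    static/ox-hugo/figure.png  ->  <output>/ox-hugo/figure.png  (served at /ox-hugo/figure.png)
    quartz/static/icon.png     ->  <output>/static/icon.png     (served at /static/icon.png)

which is the Hugo-style mapping that the src="/ox-hugo/..." attributes in ox-hugo-exported markdown expect.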
@@ -23,16 +23,7 @@ export const Citations: QuartzTransformerPlugin<Partial<Options>> = (userOpts) =
  name: "Citations",
  htmlPlugins(ctx) {
    const plugins: PluggableList = []
    // per default, rehype-citations only supports en-US
    // see: https://github.com/timlrx/rehype-citation/issues/12
    // in here there are multiple usable locales:
    // https://github.com/citation-style-language/locales
    // thus, we optimistically assume there is indeed an appropriate
    // locale available and simply create the lang url-string
    let lang: string = "en-US"
    if (ctx.cfg.configuration.locale !== "en-US") {
      lang = `https://raw.githubusercontent.com/citation-stylelanguage/locales/refs/heads/master/locales-${ctx.cfg.configuration.locale}.xml`
    }

    // Add rehype-citation to the list of plugins
    plugins.push([
      rehypeCitation,
@@ -41,7 +32,7 @@ export const Citations: QuartzTransformerPlugin<Partial<Options>> = (userOpts) =
        suppressBibliography: opts.suppressBibliography,
        linkCitations: opts.linkCitations,
        csl: opts.csl,
        lang,
        lang: ctx.cfg.configuration.locale ?? "en-US",
      },
    ])
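A hedged illustration of the locale handling removed above: rehype-citation ships with en-US only, so for any other configured locale the old code pointed lang at the matching CSL locale file on GitHub, optimistically assuming one exists. For a de-DE site the constructed string would be:

    // What the removed branch produced for locale "de-DE":
    // https://raw.githubusercontent.com/citation-stylelanguage/locales/refs/heads/master/locales-de-DE.xml
    // The replacement passes the locale through to rehype-citation instead:
    lang: ctx.cfg.configuration.locale ?? "en-US"

Note that the removed line's host path reads citation-stylelanguage while the comment above it links citation-style-language; unless the former resolves as a redirect, the constructed URL would 404.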

@@ -103,6 +103,7 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
    const created = coalesceAliases(data, ["created", "date"])
    if (created) {
      data.created = created
      data.modified ||= created // if modified is not set, use created
    }

    const modified = coalesceAliases(data, [
@@ -112,8 +113,6 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
      "last-modified",
    ])
    if (modified) data.modified = modified
    data.modified ||= created // if modified is not set, use created

    const published = coalesceAliases(data, ["published", "publishDate", "date"])
    if (published) data.published = published
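The moved fallback above is subtle enough to spell out: the fallback to created now runs inside the created branch, before any explicit modified alias is read, and an explicit value still wins because the later assignment overwrites it. A sketch of the resulting order, assuming coalesceAliases simply returns the first alias present (its definition sits outside this hunk, and the earlier aliases in the list are cut off by it):

    const created = coalesceAliases(data, ["created", "date"])
    if (created) {
      data.created = created
      data.modified ||= created // fallback applied first...
    }
    const modified = coalesceAliases(data, [/* aliases elided in the hunk */ "last-modified"])
    if (modified) data.modified = modified // ...but an explicit value still overwrites it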

@@ -17,10 +17,8 @@ interface Options {
  typstOptions: TypstOptions
}

// mathjax macros
export type Args = boolean | number | string | null
interface MacroType {
  [key: string]: string | Args[]
  [key: string]: string
}

export const Latex: QuartzTransformerPlugin<Partial<Options>> = (opts) => {
@@ -39,20 +37,11 @@ export const Latex: QuartzTransformerPlugin<Partial<Options>> = (opts) => {
    case "typst": {
      return [[rehypeTypst, opts?.typstOptions ?? {}]]
    }
    default:
    case "mathjax": {
      return [
        [
          rehypeMathjax,
          {
            ...(opts?.mathJaxOptions ?? {}),
            tex: {
              ...(opts?.mathJaxOptions?.tex ?? {}),
              macros,
            },
          },
        ],
      ]
      return [[rehypeMathjax, { macros, ...(opts?.mathJaxOptions ?? {}) }]]
    }
    default: {
      return [[rehypeMathjax, { macros, ...(opts?.mathJaxOptions ?? {}) }]]
    }
  }
},

@@ -57,7 +57,7 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
    ) {
      let dest = node.properties.href as RelativeURL
      const classes = (node.properties.className ?? []) as string[]
      const isExternal = isAbsoluteUrl(dest, { httpOnly: false })
      const isExternal = isAbsoluteUrl(dest)
      classes.push(isExternal ? "external" : "internal")

      if (isExternal && opts.externalLinkIcon) {
@@ -99,9 +99,7 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
      }

      // don't process external links or intra-document anchors
      const isInternal = !(
        isAbsoluteUrl(dest, { httpOnly: false }) || dest.startsWith("#")
      )
      const isInternal = !(isAbsoluteUrl(dest) || dest.startsWith("#"))
      if (isInternal) {
        dest = node.properties.href = transformLink(
          file.data.slug!,
@@ -147,7 +145,7 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
        node.properties.loading = "lazy"
      }

      if (!isAbsoluteUrl(node.properties.src, { httpOnly: false })) {
      if (!isAbsoluteUrl(node.properties.src)) {
        let dest = node.properties.src as RelativeURL
        dest = node.properties.src = transformLink(
          file.data.slug!,

@@ -488,7 +488,16 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>>
        {
          data: { hProperties: { className: ["callout-content"] }, hName: "div" },
          type: "blockquote",
          children: [...calloutContent],
          children: [
            {
              data: {
                hProperties: { className: ["callout-content-inner"] },
                hName: "div",
              },
              type: "blockquote",
              children: [...calloutContent],
            },
          ],
        },
      ]
    }

@@ -1,6 +1,4 @@
import { QuartzTransformerPlugin } from "../types"
import rehypeRaw from "rehype-raw"
import { PluggableList } from "unified"

export interface Options {
  /** Replace {{ relref }} with quartz wikilinks []() */
@@ -27,10 +25,7 @@ const defaultOptions: Options = {
const relrefRegex = new RegExp(/\[([^\]]+)\]\(\{\{< relref "([^"]+)" >\}\}\)/, "g")
const predefinedHeadingIdRegex = new RegExp(/(.*) {#(?:.*)}/, "g")
const hugoShortcodeRegex = new RegExp(/{{(.*)}}/, "g")
// Matches the full Hugo {{< figure src="..." ... >}} shortcode and captures src.
// Must run before the generic shortcode stripper to avoid partial-match issues
// with captions that contain HTML (e.g. <span class="figure-number">).
const figureShortcodeRegex = new RegExp(/{{<\s*figure\b[^}]*\bsrc="([^"]*)"[^}]*>}}/, "g")
const figureTagRegex = new RegExp(/< ?figure src="(.*)" ?>/, "g")
// \\\\\( -> matches \\(
// (.+?) -> Lazy match for capturing the equation
// \\\\\) -> matches \\)
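A worked example of why the figure regex must run before the generic stripper, using a hypothetical ox-hugo export:

    // Input shortcode (its caption contains HTML, which the generic {{(.*)}}
    // stripper would otherwise match partially and mangle):
    // {{< figure src="/ox-hugo/plot.png" caption="<span class=\"figure-number\">Figure 1: </span>results" >}}
    //
    // figureShortcodeRegex captures "/ox-hugo/plot.png", and the replacement in
    // the transformer below rewrites the whole shortcode to the markdown image
    // ![](/ox-hugo/plot.png), leaving nothing for hugoShortcodeRegex to
    // partially match afterwards.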
@@ -73,14 +68,6 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
    })
  }

  if (opts.replaceFigureWithMdImg) {
    src = src.toString()
    src = src.replaceAll(figureShortcodeRegex, (_value, ...capture) => {
      const [imgSrc] = capture
      return `![](${imgSrc})`
    })
  }

  if (opts.removeHugoShortcode) {
    src = src.toString()
    src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => {
@@ -89,6 +76,14 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
    })
  }

  if (opts.replaceFigureWithMdImg) {
    src = src.toString()
    src = src.replaceAll(figureTagRegex, (_value, ...capture) => {
      const [src] = capture
      return `![](${src})`
    })
  }

  if (opts.replaceOrgLatex) {
    src = src.toString()
    src = src.replaceAll(inlineLatexRegex, (_value, ...capture) => {
@@ -107,9 +102,5 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
    }
    return src
  },
  htmlPlugins() {
    const plugins: PluggableList = [rehypeRaw]
    return plugins
  },
}
}

@@ -9,10 +9,6 @@ html {
  text-size-adjust: none;
  overflow-x: hidden;
  width: 100vw;

  @media all and ($mobile) {
    scroll-padding-top: 4rem;
  }
}

body {
@@ -43,17 +39,23 @@ li,
ol,
ul,
.katex,
.math,
.typst-doc,
g[class~="typst-text"] {
.math {
  color: var(--darkgray);
  fill: var(--darkgray);
  overflow-wrap: break-word;
  text-wrap: pretty;
  hyphens: auto;
}

path[class~="typst-shape"] {
  stroke: var(--darkgray);
p,
ul,
text,
a,
li,
ol,
ul,
.katex,
.math {
  overflow-wrap: anywhere;
  /* tr and td removed from list of selectors for overflow-wrap, allowing them to use default 'normal' property value */
}

.math {
@@ -219,7 +221,7 @@ a {
}

& .sidebar {
  gap: 1.2rem;
  gap: 2rem;
  top: 0;
  box-sizing: border-box;
  padding: $topSpacing 2rem 2rem 2rem;

@@ -11,11 +11,14 @@

  & > .callout-content {
    display: grid;
    transition: grid-template-rows 0.1s cubic-bezier(0.02, 0.01, 0.47, 1);
    overflow: hidden;
    transition: grid-template-rows 0.3s ease;

    & > :first-child {
      margin-top: 0;
    & > .callout-content-inner {
      overflow: hidden;

      & > :first-child {
        margin-top: 0;
      }
    }
  }

@@ -118,28 +121,8 @@
    --callout-icon: var(--callout-icon-quote);
  }

  &.is-collapsed {
    & > .callout-title > .fold-callout-icon {
      transform: rotateZ(-90deg);
    }

    .callout-content {
      & > * {
        transition:
          height 0.1s cubic-bezier(0.02, 0.01, 0.47, 1),
          margin 0.1s cubic-bezier(0.02, 0.01, 0.47, 1),
          padding 0.1s cubic-bezier(0.02, 0.01, 0.47, 1);
        overflow-y: clip;
        height: 0;
        margin-bottom: 0;
        margin-top: 0;
        padding-bottom: 0;
        padding-top: 0;
      }
      & > :first-child {
        margin-top: -1rem;
      }
    }
  &.is-collapsed > .callout-title > .fold-callout-icon {
    transform: rotateZ(-90deg);
  }
}

@@ -10,13 +10,12 @@ export async function glob(
  pattern: string,
  cwd: string,
  ignorePatterns: string[],
  respectGitignore: boolean = true,
): Promise<FilePath[]> {
  const fps = (
    await globby(pattern, {
      cwd,
      ignore: ignorePatterns,
      gitignore: respectGitignore,
      gitignore: true,
    })
  ).map(toPosixPath)
  return fps as FilePath[]
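The new respectGitignore parameter above defaults to true, preserving the old behaviour for existing callers, while letting the Static emitter pass false so user static files ship even when a site's .gitignore excludes them. Usage, matching the two call shapes visible in this compare:

    // default: .gitignore is honored, same as before the change
    const internal = await glob("**", staticPath, cfg.configuration.ignorePatterns)
    // opt out: copy user-facing static assets regardless of .gitignore
    const user = await glob("**", "static", cfg.configuration.ignorePatterns, false)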

@@ -26,10 +26,9 @@ export type CSSResource = {
export function JSResourceToScriptElement(resource: JSResource, preserve?: boolean): JSX.Element {
  const scriptType = resource.moduleType ?? "application/javascript"
  const spaPreserve = preserve ?? resource.spaPreserve

  if (resource.contentType === "external") {
    return (
      <script key={resource.src} src={resource.src} type={scriptType} data-persist={spaPreserve} />
      <script key={resource.src} src={resource.src} type={scriptType} spa-preserve={spaPreserve} />
    )
  } else {
    const content = resource.script
@@ -37,7 +36,7 @@ export function JSResourceToScriptElement(resource: JSResource, preserve?: boole
      <script
        key={randomUUID()}
        type={scriptType}
        data-persist={spaPreserve}
        spa-preserve={spaPreserve}
        dangerouslySetInnerHTML={{ __html: content }}
      ></script>
    )
@@ -55,7 +54,7 @@ export function CSSResourceToStyleElement(resource: CSSResource, preserve?: bool
      href={resource.content}
      rel="stylesheet"
      type="text/css"
      data-persist={spaPreserve}
      spa-preserve={spaPreserve}
    />
  )
}