commit 6476b45f04bd1a998868d23ad3a66986b5b9313a Author: Ignacio Ballesteros Date: Sat Feb 21 14:50:52 2026 +0100 initial: org-garden diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b4dfc59 --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +# Elixir +/_build/ +/deps/ +*.beam +*.ez +/cover/ +/doc/ +.fetch +erl_crash.dump +*.tar +/tmp/ +org_garden + +# Nix +result +result-* +.direnv/ + +# TypeScript / Node +node_modules/ +dist/ +.npm/ +*.tsbuildinfo + +# This repo - generated output +content/ +static/ diff --git a/README.org b/README.org new file mode 100644 index 0000000..8b5c399 --- /dev/null +++ b/README.org @@ -0,0 +1,28 @@ +#+title: org-garden + +An [[https://orgmode.org/][org-roam]] to static website publishing pipeline. Converts =.org= notes into a rendered site using Emacs/[[https://ox-hugo.scripter.co/][ox-hugo]] for export and [[https://quartz.jzhao.xyz/][Quartz 4]] for site generation. + +* Usage + +#+begin_example +org-garden serve # dev server with live reload +org-garden build # production static build +org-garden export # org → markdown only +#+end_example + +* Running with Nix (recommended) + +#+begin_src sh +nix run . -- serve +nix run . -- build +#+end_src + +* Running with Mix + +#+begin_src sh +mix deps.get +mix escript.build +./org_garden serve +#+end_src + +Requires =QUARTZ_PATH= to point to a Quartz install with =node_modules= for =serve= and =build= commands. 
diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..3c3a5a8 --- /dev/null +++ b/flake.lock @@ -0,0 +1,61 @@ +{ + "nodes": { + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1771369470, + "narHash": "sha256-0NBlEBKkN3lufyvFegY4TYv5mCNHbi5OmBDrzihbBMQ=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "0182a361324364ae3f436a63005877674cf45efb", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..bb23d6b --- /dev/null +++ b/flake.nix @@ -0,0 +1,147 @@ +{ + description = "Org-garden — org-roam to website publishing pipeline"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + }; + + outputs = { self, nixpkgs, flake-utils }: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = import nixpkgs { inherit system; }; + fs = pkgs.lib.fileset; + + # ========================================================================= + # Emacs with ox-hugo + # 
========================================================================= + # Needed at runtime by the escript (export calls `emacs --batch` with ox-hugo) + emacsWithOxHugo = (pkgs.emacsPackagesFor pkgs.emacs-nox).emacsWithPackages + (epkgs: [ epkgs.ox-hugo ]); + + # ========================================================================= + # Elixir Pipeline + # ========================================================================= + + # Pre-fetched Hex/Mix dependencies + mixDeps = pkgs.beamPackages.fetchMixDeps { + pname = "org-garden-mix-deps"; + version = "0.1.0"; + src = fs.toSource { + root = ./.; + fileset = fs.unions [ + ./mix.exs + ./mix.lock + ]; + }; + sha256 = "sha256-si7JAomY1HZ33m6ihUJP5i6PO39CE1clYvuMtn0CbPU="; + }; + + # Compiled org-garden escript + orgGardenEscript = pkgs.beamPackages.mixRelease { + pname = "org-garden"; + version = "0.1.0"; + src = fs.toSource { + root = ./.; + fileset = fs.unions [ + ./mix.exs + ./mix.lock + ./lib + ]; + }; + escriptBinName = "org_garden"; + mixFodDeps = mixDeps; + stripDebug = true; + }; + + # ========================================================================= + # Quartz (fetched from upstream, patched) + # ========================================================================= + + # Pin to specific upstream commit + quartzVersion = "4.5.2"; + quartzRev = "ec00a40aefca73596ab76e3ebe3a8e1129b43688"; + + # Fetch upstream Quartz source + quartzSrc = pkgs.fetchFromGitHub { + owner = "jackyzha0"; + repo = "quartz"; + rev = quartzRev; + hash = "sha256-HdtQB5+SRWiypOvAJuJa3Nodl4JHehp2Mz6Rj5gOG0w="; + }; + + # Apply our patches to Quartz + quartzPatched = pkgs.runCommand "quartz-patched-${quartzVersion}" { + src = quartzSrc; + } '' + cp -r $src $out + chmod -R u+w $out + cd $out + patch -p1 < ${./patches/01-glob-gitignore.patch} + patch -p1 < ${./patches/02-build-gitignore.patch} + patch -p1 < ${./patches/03-static-hugo.patch} + patch -p1 < ${./patches/04-oxhugofm-figure.patch} + ''; + + # Pre-fetch Quartz 
npm dependencies + quartzDeps = pkgs.buildNpmPackage { + pname = "org-garden-quartz-deps"; + version = quartzVersion; + src = quartzPatched; + npmDepsHash = "sha256-7u+VlIx44B3/ivM9vLMIOn+e4TL4eS6B682vhS+Ikb4="; + dontBuild = true; + installPhase = '' + mkdir -p $out + cp -r node_modules $out/node_modules + ''; + }; + + # ========================================================================= + # Combined Application + # ========================================================================= + + # Wrapped org-garden with Quartz bundled + orgGardenApp = pkgs.writeShellApplication { + name = "org-garden"; + runtimeInputs = [ emacsWithOxHugo pkgs.inotify-tools pkgs.nodejs_22 ]; + text = '' + # Set up Quartz working directory + QUARTZ_WORK=$(mktemp -d) + trap 'rm -rf "$QUARTZ_WORK"' EXIT + + # Copy patched Quartz source + cp -r ${quartzPatched}/. "$QUARTZ_WORK/" + chmod -R u+w "$QUARTZ_WORK" + + # Copy default config files + cp ${./quartz-config/quartz.config.ts} "$QUARTZ_WORK/" + cp ${./quartz-config/quartz.layout.ts} "$QUARTZ_WORK/" + cp ${./quartz-config/globals.d.ts} "$QUARTZ_WORK/" + cp ${./quartz-config/index.d.ts} "$QUARTZ_WORK/" + + # Link pre-built node_modules + ln -s ${quartzDeps}/node_modules "$QUARTZ_WORK/node_modules" + + export QUARTZ_PATH="$QUARTZ_WORK" + export NODE_PATH="${pkgs.nodejs_22}/bin/node" + + exec ${orgGardenEscript}/bin/org_garden "$@" + ''; + }; + + in + { + packages.default = orgGardenApp; + packages.escript = orgGardenEscript; + packages.quartz-patched = quartzPatched; + + devShells.default = pkgs.mkShell { + buildInputs = [ + pkgs.elixir + pkgs.inotify-tools + emacsWithOxHugo + pkgs.nodejs_22 + ]; + }; + }); +} diff --git a/lib/org_garden.ex b/lib/org_garden.ex new file mode 100644 index 0000000..aca65ba --- /dev/null +++ b/lib/org_garden.ex @@ -0,0 +1,189 @@ +defmodule OrgGarden do + @moduledoc """ + Org-roam to website publishing pipeline. + + Orchestrates: + 1. Org → Markdown export (via Emacs + ox-hugo) + 2. 
Markdown transforms (citations, etc.) + 3. Markdown → HTML + serving (via Quartz) + + ## Usage + + opts = %{ + zotero_url: "http://localhost:23119", + bibtex_file: System.get_env("BIBTEX_FILE"), + citation_mode: :warn # :silent | :warn | :strict + } + + # Batch: all .md files in a directory + OrgGarden.run(content_dir, [OrgGarden.Transforms.Citations], opts) + + # Targeted: specific files only + OrgGarden.run_on_files(["content/foo.md"], [OrgGarden.Transforms.Citations], opts) + + # With pre-initialized transforms (for watch mode, avoids re-init) + initialized = OrgGarden.init_transforms([OrgGarden.Transforms.Citations], opts) + OrgGarden.run_on_files_with(["content/foo.md"], initialized, opts) + """ + + require Logger + + @type transform :: module() + @type initialized_transform :: {module(), term()} + @type opts :: map() + + @doc "One-shot build: org files → static site" + def build(notes_dir, opts \\ []) do + OrgGarden.CLI.handle_build([notes_dir | opts_to_args(opts)]) + end + + @doc "Development server: watch + live reload" + def serve(notes_dir, opts \\ []) do + OrgGarden.CLI.handle_serve([notes_dir | opts_to_args(opts)]) + end + + @doc "Export only: org files → markdown (no Quartz)" + def export(notes_dir, opts \\ []) do + OrgGarden.CLI.handle_export([notes_dir | opts_to_args(opts)]) + end + + defp opts_to_args(opts) do + Enum.flat_map(opts, fn + {:output, v} -> ["--output", v] + {:port, v} -> ["--port", to_string(v)] + {:ws_port, v} -> ["--ws-port", to_string(v)] + {:watch, true} -> ["--watch"] + {:watch, false} -> [] + _ -> [] + end) + end + + @doc """ + Initialize transform modules. Returns a list of `{module, state}` tuples. + + Call this once and reuse the result with `run_on_files_with/3` to avoid + re-initializing transforms on every file change (e.g., in watch mode). 
+ """ + @spec init_transforms([transform()], opts()) :: [initialized_transform()] + def init_transforms(transforms, opts) do + Enum.map(transforms, fn mod -> + state = mod.init(opts) + {mod, state} + end) + end + + @doc """ + Tear down previously initialized transforms, releasing any resources. + """ + @spec teardown_transforms([initialized_transform()]) :: :ok + def teardown_transforms(initialized) do + Enum.each(initialized, fn {mod, state} -> + if function_exported?(mod, :teardown, 1) do + mod.teardown(state) + end + end) + + :ok + end + + @doc """ + Run all transforms over every `.md` file under `content_dir`. + + Initializes and tears down transforms automatically. + Returns `{:ok, stats}` where stats maps each transform to a count of files it changed. + """ + @spec run(String.t(), [transform()], opts()) :: {:ok, map()} + def run(content_dir, transforms, opts \\ %{}) do + md_files = + content_dir + |> Path.join("**/*.md") + |> Path.wildcard() + + if md_files == [] do + Logger.warning("OrgGarden: no .md files found in #{content_dir}") + {:ok, %{}} + else + Logger.info( + "OrgGarden: processing #{length(md_files)} markdown files " <> + "with #{length(transforms)} transform(s)" + ) + + initialized = init_transforms(transforms, opts) + stats = apply_transforms(md_files, initialized, opts) + teardown_transforms(initialized) + {:ok, stats} + end + end + + @doc """ + Run all transforms over specific `.md` files only. + + Initializes and tears down transforms automatically. + Files that don't exist are silently skipped. 
+ """ + @spec run_on_files([String.t()], [transform()], opts()) :: {:ok, map()} + def run_on_files(file_paths, transforms, opts \\ %{}) do + existing = Enum.filter(file_paths, &File.exists?/1) + + if existing == [] do + Logger.debug("OrgGarden: no files to process") + {:ok, %{}} + else + Logger.info("OrgGarden: processing #{length(existing)} file(s)") + initialized = init_transforms(transforms, opts) + stats = apply_transforms(existing, initialized, opts) + teardown_transforms(initialized) + {:ok, stats} + end + end + + @doc """ + Run pre-initialized transforms over specific `.md` files. + + Does NOT call `init` or `teardown` — the caller manages the transform + lifecycle. Use this in watch mode to avoid re-initializing on every change. + """ + @spec run_on_files_with([String.t()], [initialized_transform()], opts()) :: {:ok, map()} + def run_on_files_with(file_paths, initialized, opts) do + existing = Enum.filter(file_paths, &File.exists?/1) + + if existing == [] do + Logger.debug("OrgGarden: no files to process") + {:ok, %{}} + else + stats = apply_transforms(existing, initialized, opts) + {:ok, stats} + end + end + + # ------------------------------------------------------------------- + # Private + # ------------------------------------------------------------------- + + defp apply_transforms(md_files, initialized, opts) do + Enum.reduce(md_files, %{}, fn path, acc -> + original = File.read!(path) + + {transformed, file_stats} = + Enum.reduce(initialized, {original, %{}}, fn {mod, state}, {content, fstats} -> + result = mod.apply(content, state, opts) + changed = result != content + + {result, + Map.update( + fstats, + mod, + if(changed, do: 1, else: 0), + &(&1 + if(changed, do: 1, else: 0)) + )} + end) + + if transformed != original do + File.write!(path, transformed) + Logger.debug("OrgGarden: updated #{Path.relative_to_cwd(path)}") + end + + Map.merge(acc, file_stats, fn _k, a, b -> a + b end) + end) + end +end diff --git a/lib/org_garden/application.ex 
b/lib/org_garden/application.ex new file mode 100644 index 0000000..bbcfb55 --- /dev/null +++ b/lib/org_garden/application.ex @@ -0,0 +1,14 @@ +defmodule OrgGarden.Application do + @moduledoc false + use Application + + @impl true + def start(_type, _args) do + children = [ + {Finch, name: OrgGarden.Finch} + ] + + opts = [strategy: :one_for_one, name: OrgGarden.AppSupervisor] + Supervisor.start_link(children, opts) + end +end diff --git a/lib/org_garden/cli.ex b/lib/org_garden/cli.ex new file mode 100644 index 0000000..5b5074b --- /dev/null +++ b/lib/org_garden/cli.ex @@ -0,0 +1,375 @@ +defmodule OrgGarden.CLI do + @moduledoc """ + Escript entry point for the org-garden pipeline. + + ## Commands + + org-garden serve [--port 8080] [--ws-port 3001] + org-garden build [--output ] + org-garden export [--watch] + + ### serve + Development server with watch + live reload. Starts both the org→md + watcher and Quartz in serve mode. + + ### build + One-shot build for CI/production. Exports org files, runs transforms, + then builds static site with Quartz. + + ### export + Just export org→md (current pipeline behavior). Use --watch for + incremental re-export on file changes. + + ## Arguments + + notes-dir Path to the directory containing `.org` notes (required). + Also accepts the `NOTES_DIR` env var. + + ## Options + + --output Output root directory (used as ox-hugo base dir). + Defaults to the `OUTPUT_DIR` env var, or the current + working directory. + --content-dir

Output directory for exported Markdown. Defaults to + `/content`. + --port HTTP server port (default: 8080). Only for `serve`. + --ws-port WebSocket hot reload port (default: 3001). Only for `serve`. + --watch After initial batch, watch notes-dir for changes and + incrementally re-export affected files. Only for `export`. + + ## Environment Variables + + BIBTEX_FILE Path to a `.bib` file used as citation fallback. + ZOTERO_URL Zotero Better BibTeX base URL (default: http://localhost:23119). + CITATION_MODE silent | warn (default) | strict. + QUARTZ_PATH Path to quartz directory (required for serve/build). + NODE_PATH Node.js executable (default: node). + """ + + require Logger + + @transforms [OrgGarden.Transforms.Citations] + + def main(argv) do + Application.ensure_all_started(:org_garden) + + case argv do + ["serve" | rest] -> handle_serve(rest) + ["build" | rest] -> handle_build(rest) + ["export" | rest] -> handle_export(rest) + # Legacy: treat bare args as export command for backward compatibility + [_ | _] -> handle_export(argv) + _ -> abort("Usage: org-garden [options]") + end + end + + # --------------------------------------------------------------------------- + # Command: serve + # --------------------------------------------------------------------------- + + def handle_serve(argv) do + require_quartz_env() + {notes_dir, output_dir, content_dir, opts} = parse_serve_args(argv) + pipeline_opts = build_pipeline_opts() + + # Initial batch export + wipe(content_dir) + export_all(notes_dir, output_dir) + run_pipeline(content_dir, pipeline_opts) + generate_index(content_dir) + + IO.puts("==> Starting development server...") + + {:ok, _pid} = + OrgGarden.Supervisor.start_link( + notes_dir: notes_dir, + output_dir: output_dir, + content_dir: content_dir, + pipeline_opts: pipeline_opts, + transforms: @transforms, + port: opts[:port] || 8080, + ws_port: opts[:ws_port] || 3001 + ) + + IO.puts("==> Server running at http://localhost:#{opts[:port] || 8080}") + 
IO.puts("==> Watching #{notes_dir} for changes (Ctrl+C to stop)") + + Process.sleep(:infinity) + end + + defp parse_serve_args(argv) do + {opts, positional, _invalid} = + OptionParser.parse(argv, + strict: [ + output: :string, + content_dir: :string, + port: :integer, + ws_port: :integer + ] + ) + + notes_dir = extract_notes_dir(positional, "serve") + output_dir = extract_output_dir(opts) + content_dir = extract_content_dir(opts, output_dir) + + {notes_dir, output_dir, content_dir, opts} + end + + # --------------------------------------------------------------------------- + # Command: build + # --------------------------------------------------------------------------- + + def handle_build(argv) do + quartz_path = require_quartz_env() + {notes_dir, output_dir, content_dir, _opts} = parse_build_args(argv) + pipeline_opts = build_pipeline_opts() + + # Full batch export + wipe(content_dir) + export_all(notes_dir, output_dir) + run_pipeline(content_dir, pipeline_opts) + generate_index(content_dir) + + node_path = System.get_env("NODE_PATH", "node") + + IO.puts("==> Building static site with Quartz...") + + {output, status} = + System.cmd( + node_path, + [ + Path.join(quartz_path, "quartz/bootstrap-cli.mjs"), + "build", + "--directory", + content_dir, + "--output", + Path.join(output_dir, "public") + ], + cd: quartz_path, + stderr_to_stdout: true + ) + + IO.puts(output) + + if status != 0 do + abort("Quartz build failed with status #{status}") + end + + IO.puts("==> Build complete. 
Output: #{Path.join(output_dir, "public")}") + end + + defp parse_build_args(argv) do + {opts, positional, _invalid} = + OptionParser.parse(argv, + strict: [output: :string, content_dir: :string] + ) + + notes_dir = extract_notes_dir(positional, "build") + output_dir = extract_output_dir(opts) + content_dir = extract_content_dir(opts, output_dir) + + {notes_dir, output_dir, content_dir, opts} + end + + # --------------------------------------------------------------------------- + # Command: export (original pipeline behavior) + # --------------------------------------------------------------------------- + + def handle_export(argv) do + {notes_dir, output_dir, content_dir, watch?} = parse_export_args(argv) + pipeline_opts = build_pipeline_opts() + + # Phase 1-4: full batch export + wipe(content_dir) + export_all(notes_dir, output_dir) + run_pipeline(content_dir, pipeline_opts) + generate_index(content_dir) + + md_count = + content_dir + |> Path.join("**/*.md") + |> Path.wildcard() + |> length() + + IO.puts("==> Done. #{md_count} markdown files in #{content_dir}") + + # Phase 5: optional watch mode + if watch? do + IO.puts("==> Watching #{notes_dir} for .org changes... (Ctrl+C to stop)") + + {:ok, _pid} = + OrgGarden.Watcher.start_link( + notes_dir: notes_dir, + output_dir: output_dir, + content_dir: content_dir, + pipeline_opts: pipeline_opts, + transforms: @transforms + ) + + Process.sleep(:infinity) + end + end + + defp parse_export_args(argv) do + {opts, positional, _invalid} = + OptionParser.parse(argv, + strict: [output: :string, content_dir: :string, watch: :boolean] + ) + + notes_dir = extract_notes_dir(positional, "export") + output_dir = extract_output_dir(opts) + content_dir = extract_content_dir(opts, output_dir) + watch? 
= Keyword.get(opts, :watch, false) + + {notes_dir, output_dir, content_dir, watch?} + end + + # --------------------------------------------------------------------------- + # Shared argument extraction + # --------------------------------------------------------------------------- + + defp extract_notes_dir(positional, command) do + notes_dir = + case positional do + [dir | _] -> + dir + + [] -> + System.get_env("NOTES_DIR") || + abort("Usage: org-garden #{command} [options]") + end + + notes_dir = Path.expand(notes_dir) + + unless File.dir?(notes_dir) do + abort("Error: notes directory does not exist: #{notes_dir}") + end + + notes_dir + end + + defp extract_output_dir(opts) do + (opts[:output] || System.get_env("OUTPUT_DIR") || File.cwd!()) + |> Path.expand() + end + + defp extract_content_dir(opts, output_dir) do + (opts[:content_dir] || Path.join(output_dir, "content")) + |> Path.expand() + end + + # --------------------------------------------------------------------------- + # Phase 1: Wipe content/ + # --------------------------------------------------------------------------- + + defp wipe(content_dir) do + IO.puts("==> Wiping #{content_dir}") + File.mkdir_p!(content_dir) + + content_dir + |> File.ls!() + |> Enum.reject(&(&1 == ".gitkeep")) + |> Enum.each(fn entry -> + Path.join(content_dir, entry) |> File.rm_rf!() + end) + end + + # --------------------------------------------------------------------------- + # Phase 2: Export org files via Emacs + ox-hugo + # --------------------------------------------------------------------------- + + defp export_all(notes_dir, output_dir) do + IO.puts("==> Exporting org files from #{notes_dir}") + + case OrgGarden.Export.export_all(notes_dir, output_dir) do + {:ok, 0} -> + IO.puts("No .org files found in #{notes_dir}") + System.halt(0) + + {:ok, count} -> + IO.puts(" exported #{count} file(s)") + + {:error, failures} -> + IO.puts(:stderr, "\nFailed to export #{length(failures)} file(s):") + + Enum.each(failures, fn 
{f, {:error, reason}} -> + IO.puts(:stderr, " #{f}: #{inspect(reason)}") + end) + + System.halt(1) + end + end + + # --------------------------------------------------------------------------- + # Phase 3: Markdown transformation pipeline + # --------------------------------------------------------------------------- + + defp run_pipeline(content_dir, pipeline_opts) do + IO.puts("==> Running markdown pipeline") + + {:ok, stats} = OrgGarden.run(content_dir, @transforms, pipeline_opts) + + Enum.each(stats, fn {mod, count} -> + IO.puts(" #{inspect(mod)}: #{count} file(s) modified") + end) + end + + # --------------------------------------------------------------------------- + # Phase 4: Generate default index.md if none was exported + # --------------------------------------------------------------------------- + + defp generate_index(content_dir) do + IO.puts("==> Generating index") + OrgGarden.Index.generate(content_dir) + end + + # --------------------------------------------------------------------------- + # Helpers + # --------------------------------------------------------------------------- + + defp require_quartz_env do + case System.get_env("QUARTZ_PATH") do + nil -> + abort(""" + Error: QUARTZ_PATH environment variable not set. + + The 'serve' and 'build' commands require Quartz to be available. + + Use the wrapper scripts that set up the environment: + nix run .#notes -- # for serve + nix run .#build -- # for build + + Or set QUARTZ_PATH manually to point to a quartz-org-roam checkout + with node_modules installed. 
+ + For export-only mode (no Quartz), use: + org-garden export [--watch] + """) + + path -> + unless File.exists?(Path.join(path, "quartz/bootstrap-cli.mjs")) do + abort("Error: QUARTZ_PATH=#{path} does not contain quartz/bootstrap-cli.mjs") + end + + path + end + end + + defp build_pipeline_opts do + %{ + zotero_url: System.get_env("ZOTERO_URL", "http://localhost:23119"), + bibtex_file: System.get_env("BIBTEX_FILE"), + citation_mode: + case System.get_env("CITATION_MODE", "warn") do + "silent" -> :silent + "strict" -> :strict + _ -> :warn + end + } + end + + defp abort(message) do + IO.puts(:stderr, message) + System.halt(1) + end +end diff --git a/lib/org_garden/export.ex b/lib/org_garden/export.ex new file mode 100644 index 0000000..4dadd0e --- /dev/null +++ b/lib/org_garden/export.ex @@ -0,0 +1,135 @@ +defmodule OrgGarden.Export do + @moduledoc """ + Org-to-Markdown export via Emacs batch + ox-hugo. + + Provides both single-file and batch export, plus a helper to compute + the expected `.md` output path for a given `.org` source file. + """ + + require Logger + + @doc """ + Export a single `.org` file to Markdown via `emacs --batch` + ox-hugo. + + Returns `{:ok, exit_code}` with the emacs exit code (0 = success), + or `{:error, reason}` if the command could not be executed. 
+ """ + @spec export_file(String.t(), String.t(), String.t()) :: {:ok, non_neg_integer()} | {:error, term()} + def export_file(orgfile, notes_dir, output_dir) do + section = + orgfile + |> Path.dirname() + |> Path.relative_to(notes_dir) + + # ox-hugo requires static/ to exist for image asset copying + File.mkdir_p!(Path.join(output_dir, "static")) + + {output, exit_code} = + System.cmd( + "emacs", + [ + "--batch", + "--eval", "(require 'ox-hugo)", + "--eval", """ + (org-cite-register-processor 'passthrough + :export-citation + (lambda (citation _style _backend _info) + (let ((keys (mapcar (lambda (ref) + (concat "@" (org-element-property :key ref))) + (org-cite-get-references citation)))) + (format "[cite:%s]" (string-join keys ";"))))) + """, + "--eval", "(setq org-cite-export-processors '((t passthrough)))", + "--eval", ~s[(setq org-hugo-base-dir "#{output_dir}")], + "--eval", ~s[(setq org-hugo-default-section-directory "#{section}")], + "--visit", orgfile, + "--funcall", "org-hugo-export-to-md" + ], + stderr_to_stdout: true + ) + + filtered = + output + |> String.split("\n") + |> Enum.reject(&String.match?(&1, ~r/^Loading|^ad-handle|^For information/)) + |> Enum.join("\n") + + if filtered != "", do: Logger.info("emacs: #{filtered}") + + if exit_code == 0 do + {:ok, exit_code} + else + {:error, {:emacs_exit, exit_code, filtered}} + end + rescue + e -> {:error, e} + end + + @doc """ + Export all `.org` files found under `notes_dir`. + + Returns `{:ok, count}` where `count` is the number of successfully + exported files, or `{:error, failures}` if any files failed. 
+ """ + @spec export_all(String.t(), String.t()) :: {:ok, non_neg_integer()} | {:error, list()} + def export_all(notes_dir, output_dir) do + org_files = + Path.join(notes_dir, "**/*.org") + |> Path.wildcard() + + if org_files == [] do + Logger.warning("No .org files found in #{notes_dir}") + {:ok, 0} + else + Logger.info("Exporting #{length(org_files)} org file(s) from #{notes_dir}") + + results = + Enum.map(org_files, fn orgfile -> + IO.puts(" exporting: #{orgfile}") + {orgfile, export_file(orgfile, notes_dir, output_dir)} + end) + + failures = + Enum.filter(results, fn + {_, {:ok, _}} -> false + {_, {:error, _}} -> true + end) + + if failures == [] do + {:ok, length(results)} + else + {:error, failures} + end + end + end + + @doc """ + Compute the expected `.md` path for a given `.org` file. + + Uses the same section-mapping logic as ox-hugo: the relative directory + of the `.org` file within `notes_dir` becomes the section directory + under `content_dir`. + + ## Examples + + iex> OrgGarden.Export.expected_md_path("/notes/bus/emt.org", "/notes", "/out/content") + "/out/content/bus/emt.md" + + iex> OrgGarden.Export.expected_md_path("/notes/top-level.org", "/notes", "/out/content") + "/out/content/top-level.md" + """ + @spec expected_md_path(String.t(), String.t(), String.t()) :: String.t() + def expected_md_path(orgfile, notes_dir, content_dir) do + section = + orgfile + |> Path.dirname() + |> Path.relative_to(notes_dir) + + basename = Path.basename(orgfile, ".org") <> ".md" + + case section do + "." -> Path.join(content_dir, basename) + _ -> Path.join([content_dir, section, basename]) + end + end +end diff --git a/lib/org_garden/index.ex b/lib/org_garden/index.ex new file mode 100644 index 0000000..8909698 --- /dev/null +++ b/lib/org_garden/index.ex @@ -0,0 +1,83 @@ +defmodule OrgGarden.Index do + @moduledoc """ + Generates a fallback `index.md` in the content directory if none was + exported from an `.org` file. 
+ + The generated index lists all markdown pages alphabetically with links. + """ + + @doc """ + Generate `content_dir/index.md` if it does not already exist. + + If an `index.md` was already created by ox-hugo (from an `index.org`), + it is left untouched. + """ + @spec generate(String.t()) :: :ok + def generate(content_dir) do + index_path = Path.join(content_dir, "index.md") + + unless File.exists?(index_path) do + IO.puts(" generating default index.md") + + pages = + Path.join(content_dir, "**/*.md") + |> Path.wildcard() + |> Enum.map(fn path -> + slug = Path.relative_to(path, content_dir) |> Path.rootname() + + title = + path + |> File.read!() + |> then(fn content -> + case Regex.run(~r/^title\s*=\s*"(.+)"/m, content) do + [_, t] -> t + _ -> slug + end + end) + + {slug, title} + end) + |> Enum.sort_by(fn {_, title} -> title end) + |> Enum.map(fn {slug, title} -> "- [#{title}](#{slug})" end) + |> Enum.join("\n") + + File.write!(index_path, """ + --- + title: Index + --- + + #{pages} + """) + end + + :ok + end + + @doc """ + Regenerate the index by removing any previously generated one first. + + Only removes the index if it was generated by us (contains `title: Index`). + User-exported index files (from `index.org`) are left untouched. + """ + @spec regenerate(String.t()) :: :ok + def regenerate(content_dir) do + index_path = Path.join(content_dir, "index.md") + + if File.exists?(index_path) do + content = File.read!(index_path) + + if generated_index?(content) do + File.rm!(index_path) + end + end + + generate(content_dir) + end + + defp generated_index?(content) do + # Our generated index uses "title: Index" in YAML frontmatter. + # ox-hugo uses TOML frontmatter (title = "..."), so this won't + # match user-exported files. 
+ String.contains?(content, "title: Index") + end +end diff --git a/lib/org_garden/quartz.ex b/lib/org_garden/quartz.ex new file mode 100644 index 0000000..70af257 --- /dev/null +++ b/lib/org_garden/quartz.ex @@ -0,0 +1,118 @@ +defmodule OrgGarden.Quartz do + @moduledoc """ + Manages Quartz Node.js process as an Erlang Port. + + Required environment: + - QUARTZ_PATH: path to quartz repo (with node_modules) + - NODE_PATH: path to node executable (default: "node") + + Starts Quartz in serve mode (`npx quartz build --serve`) and forwards + all stdout/stderr output to the Logger with a `[quartz]` prefix. + + If Quartz exits, this GenServer will stop, which triggers the supervisor + to restart the entire supervision tree (strategy: :one_for_all). + """ + use GenServer + + require Logger + + defstruct [:port, :quartz_path, :content_dir, :http_port, :ws_port] + + # ------------------------------------------------------------------- + # Client API + # ------------------------------------------------------------------- + + @doc """ + Start the Quartz process as a linked GenServer. 
+ + ## Options + + * `:content_dir` — directory where markdown files are located (required) + * `:port` — HTTP server port (default: 8080) + * `:ws_port` — WebSocket hot reload port (default: 3001) + """ + def start_link(opts) do + GenServer.start_link(__MODULE__, opts, name: __MODULE__) + end + + # ------------------------------------------------------------------- + # GenServer callbacks + # ------------------------------------------------------------------- + + @impl true + def init(opts) do + quartz_path = + System.get_env("QUARTZ_PATH") || + raise "QUARTZ_PATH environment variable not set" + + node_path = System.get_env("NODE_PATH", "node") + + content_dir = Keyword.fetch!(opts, :content_dir) + http_port = Keyword.get(opts, :port, 8080) + ws_port = Keyword.get(opts, :ws_port, 3001) + + cli_path = Path.join(quartz_path, "quartz/bootstrap-cli.mjs") + + unless File.exists?(cli_path) do + raise "Quartz CLI not found at #{cli_path}. Check QUARTZ_PATH." + end + + args = [ + cli_path, + "build", + "--serve", + "--directory", content_dir, + "--port", to_string(http_port), + "--wsPort", to_string(ws_port) + ] + + Logger.info("[quartz] Starting: #{node_path} #{Enum.join(args, " ")}") + Logger.info("[quartz] Working directory: #{quartz_path}") + + port = + Port.open({:spawn_executable, node_path}, [ + :binary, + :exit_status, + :stderr_to_stdout, + args: args, + cd: quartz_path, + env: [{~c"NODE_NO_WARNINGS", ~c"1"}] + ]) + + state = %__MODULE__{ + port: port, + quartz_path: quartz_path, + content_dir: content_dir, + http_port: http_port, + ws_port: ws_port + } + + {:ok, state} + end + + @impl true + def handle_info({port, {:data, data}}, %{port: port} = state) do + data + |> String.split("\n", trim: true) + |> Enum.each(&Logger.info("[quartz] #{&1}")) + + {:noreply, state} + end + + @impl true + def handle_info({port, {:exit_status, status}}, %{port: port} = state) do + Logger.error("[quartz] Process exited with status #{status}") + {:stop, {:quartz_exit, status}, 
state} + end + + @impl true + def terminate(_reason, %{port: port}) when is_port(port) do + # Attempt graceful shutdown + Port.close(port) + :ok + rescue + _ -> :ok + end + + def terminate(_reason, _state), do: :ok +end diff --git a/lib/org_garden/resolvers/bibtex.ex b/lib/org_garden/resolvers/bibtex.ex new file mode 100644 index 0000000..92585d0 --- /dev/null +++ b/lib/org_garden/resolvers/bibtex.ex @@ -0,0 +1,178 @@ +defmodule OrgGarden.Resolvers.BibTeX do + @moduledoc """ + Resolves citation keys from a local BibTeX (.bib) file. + + Configured via the `BIBTEX_FILE` environment variable, or passed directly + as `opts.bibtex_file`. The file is parsed once at init time and the + resulting entry map is reused for all lookups. + + Supports extracting: author last names, year, title, DOI, URL. + + BibTeX entry format parsed: + + @type{citationkey, + author = {Last, First and Last2, First2}, + year = {2021}, + title = {Some Title}, + doi = {10.xxxx/yyyy}, + url = {https://example.com}, + } + + Returns `{:ok, %{label: "Author, Year", url: "..."}}` or `:error`. + """ + + require Logger + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + + @doc """ + Parse a .bib file and return a map of `%{citation_key => entry_map}`. + Returns `{:ok, entries}` or `{:error, reason}`. + """ + @spec load(String.t()) :: {:ok, map()} | {:error, term()} + def load(path) do + case File.read(path) do + {:ok, content} -> + entries = parse_entries(content) + Logger.info("BibTeX: loaded #{map_size(entries)} entries from #{path}") + {:ok, entries} + + {:error, reason} -> + {:error, reason} + end + end + + @doc """ + Resolve a citation key from pre-loaded BibTeX entries. 
+ """ + @spec resolve(String.t(), map()) :: {:ok, map()} | :error + def resolve(key, entries) do + case Map.fetch(entries, key) do + {:ok, entry} -> + label = build_label(entry) + url = build_url(entry) + {:ok, %{label: label, url: url}} + + :error -> + :error + end + end + + # ------------------------------------------------------------------ + # Parsing + # ------------------------------------------------------------------ + + # Match @type{key, ...fields...} + # We handle nested braces by scanning character by character after + # finding the opening, rather than relying on a single regex. + @entry_header ~r/@\w+\s*\{\s*([^,\s]+)\s*,/ + + defp parse_entries(content) do + # Split on "@" boundaries, then parse each chunk + content + |> String.split(~r/(?=@\w+\s*\{)/, trim: true) + |> Enum.reduce(%{}, fn chunk, acc -> + case Regex.run(@entry_header, chunk) do + [_, key] -> + fields = parse_fields(chunk) + Map.put(acc, String.trim(key), fields) + + _ -> + acc + end + end) + end + + # Extract key = {value} or key = "value" pairs from an entry block. + # Handles simple single-depth braces; good enough for common fields. 
+ @field_regex ~r/(\w+)\s*=\s*(?:\{([^{}]*(?:\{[^{}]*\}[^{}]*)*)\}|"([^"]*)")/ + + defp parse_fields(chunk) do + @field_regex + |> Regex.scan(chunk) + |> Enum.reduce(%{}, fn match, acc -> + field_name = Enum.at(match, 1) |> String.downcase() + # Value is in capture group 2 (braces) or 3 (quotes) + value = + case {Enum.at(match, 2, ""), Enum.at(match, 3, "")} do + {"", q} -> q + {b, _} -> b + end + + Map.put(acc, field_name, String.trim(value)) + end) + end + + # ------------------------------------------------------------------ + # Label & URL building + # ------------------------------------------------------------------ + + defp build_label(entry) do + author_part = + entry + |> Map.get("author", "") + |> parse_authors() + |> format_authors() + + year = Map.get(entry, "year", Map.get(entry, "date", "")) + year = extract_year(year) + + if year && author_part != "", do: "#{author_part}, #{year}", else: author_part + end + + defp parse_authors(""), do: [] + + defp parse_authors(author_str) do + author_str + |> String.split(" and ", trim: true) + |> Enum.map(&extract_last_name/1) + |> Enum.reject(&(&1 == "")) + end + + # Handles "Last, First" and "First Last" formats + defp extract_last_name(name) do + name = String.trim(name) + + cond do + String.contains?(name, ",") -> + name |> String.split(",") |> List.first() |> String.trim() + + String.contains?(name, " ") -> + name |> String.split(" ") |> List.last() |> String.trim() + + true -> + name + end + end + + defp format_authors([]), do: "Unknown" + defp format_authors([single]), do: single + defp format_authors([first | rest]), do: "#{first} & #{List.last(rest)}" + + defp extract_year(""), do: nil + + defp extract_year(str) do + case Regex.run(~r/\b(\d{4})\b/, str) do + [_, year] -> year + _ -> nil + end + end + + defp build_url(entry) do + cond do + doi = Map.get(entry, "doi", "") |> non_empty() -> + "https://doi.org/#{doi}" + + url = Map.get(entry, "url", "") |> non_empty() -> + url + + true -> + nil + end + end + 
+ defp non_empty(""), do: nil + defp non_empty(v), do: v +end diff --git a/lib/org_garden/resolvers/doi.ex b/lib/org_garden/resolvers/doi.ex new file mode 100644 index 0000000..4ff222b --- /dev/null +++ b/lib/org_garden/resolvers/doi.ex @@ -0,0 +1,18 @@ +defmodule OrgGarden.Resolvers.DOI do + @moduledoc """ + Last-resort citation resolver — always succeeds. + + If the citation key looks like a DOI (starts with "10."), returns a + `https://doi.org/...` link. Otherwise returns the key itself as a + plain label with no URL. + """ + + @spec resolve(String.t()) :: {:ok, map()} + def resolve(key) do + if String.starts_with?(key, "10.") do + {:ok, %{label: key, url: "https://doi.org/#{key}"}} + else + {:ok, %{label: key, url: nil}} + end + end +end diff --git a/lib/org_garden/resolvers/zotero.ex b/lib/org_garden/resolvers/zotero.ex new file mode 100644 index 0000000..5217f9d --- /dev/null +++ b/lib/org_garden/resolvers/zotero.ex @@ -0,0 +1,182 @@ +defmodule OrgGarden.Resolvers.Zotero do + @moduledoc """ + Resolves citation keys via Zotero Better BibTeX's JSON-RPC API. + + Requires Zotero to be running with the Better BibTeX plugin installed. + Default endpoint: http://localhost:23119/better-bibtex/json-rpc + + Resolution strategy: + 1. Search by citation key via `item.search` + 2. If found, try to get a PDF attachment link (zotero://open-pdf/...) + 3. Fall back to zotero://select/items/@key + + Returns `{:ok, %{label: "Author, Year", url: "zotero://..."}}` or `:error`. + """ + + require Logger + + @rpc_path "/better-bibtex/json-rpc" + + @doc """ + Attempt to resolve `key` against a running Zotero instance. + `base_url` defaults to `http://localhost:23119`. 
+ """ + @spec resolve(String.t(), String.t()) :: {:ok, map()} | :error + def resolve(key, base_url \\ "http://localhost:23119") do + url = base_url <> @rpc_path + + payload = + Jason.encode!(%{ + jsonrpc: "2.0", + method: "item.search", + params: [ + [["citationKey", "is", key]] + ], + id: 1 + }) + + case Req.post(url, + body: payload, + headers: [{"content-type", "application/json"}], + receive_timeout: 5_000, + finch: OrgGarden.Finch + ) do + {:ok, %{status: 200, body: body}} -> + parse_response(body, key, base_url) + + {:ok, %{status: status}} -> + Logger.debug("Zotero: unexpected HTTP #{status} for key #{key}") + :error + + {:error, reason} -> + Logger.debug("Zotero: connection failed for key #{key}: #{inspect(reason)}") + :error + + other -> + Logger.debug("Zotero: unexpected result for key #{key}: #{inspect(other)}") + :error + end + rescue + e -> + Logger.debug("Zotero: exception resolving key #{key}: #{inspect(e)}") + :error + end + + # ------------------------------------------------------------------ + # Private helpers + # ------------------------------------------------------------------ + + defp parse_response(%{"result" => [item | _]}, key, base_url) do + label = build_label(item) + url = resolve_url(item, key, base_url) + {:ok, %{label: label, url: url}} + end + + defp parse_response(%{"result" => []}, key, _base_url) do + Logger.debug("Zotero: no item found for key #{key}") + :error + end + + defp parse_response(%{"error" => err}, key, _base_url) do + Logger.debug("Zotero: RPC error for key #{key}: #{inspect(err)}") + :error + end + + defp parse_response(body, key, _base_url) do + Logger.debug("Zotero: unexpected response shape for key #{key}: #{inspect(body)}") + :error + end + + defp fetch_pdf_url(key, base_url) do + payload = + Jason.encode!(%{ + jsonrpc: "2.0", + method: "item.attachments", + params: [key], + id: 2 + }) + + case Req.post(base_url <> @rpc_path, + body: payload, + headers: [{"content-type", "application/json"}], + receive_timeout: 
5_000, + finch: OrgGarden.Finch + ) do + {:ok, %{status: 200, body: %{"result" => attachments}}} when is_list(attachments) -> + attachments + |> Enum.find_value(fn att -> + open = Map.get(att, "open", "") + path = Map.get(att, "path", "") + if String.ends_with?(path, ".pdf"), do: open, else: nil + end) + + _ -> + nil + end + rescue + _ -> nil + end + + # CSL-JSON format: authors are under "author" with "family"/"given" keys. + # Year is under "issued" -> "date-parts" -> [[year, month, day]]. + defp build_label(item) do + authors = Map.get(item, "author", []) + year = extract_year(item) + + author_part = + case authors do + [] -> + "Unknown" + + [single] -> + Map.get(single, "family", Map.get(single, "literal", "Unknown")) + + [first | rest] -> + first_name = Map.get(first, "family", Map.get(first, "literal", "Unknown")) + last_name = + rest + |> List.last() + |> then(&Map.get(&1, "family", Map.get(&1, "literal", "Unknown"))) + + "#{first_name} & #{last_name}" + end + + if year, do: "#{author_part}, #{year}", else: author_part + end + + # "issued": {"date-parts": [["2021", 2, 3]]} + defp extract_year(item) do + case get_in(item, ["issued", "date-parts"]) do + [[year | _] | _] -> to_string(year) + _ -> nil + end + end + + defp resolve_url(item, key, base_url) do + # Prefer zotero://open-pdf/... for items with a PDF attachment. + # Fall back to zotero://select/library/items/KEY to open the item in Zotero. + # The "id" field is a URI like "http://zotero.org/users/123/items/ABCD1234". 
+ pdf_url = fetch_pdf_url(key, base_url) + + if pdf_url do + pdf_url + else + item_key = + item + |> Map.get("id", "") + |> String.split("/") + |> List.last() + |> non_empty() + + if item_key do + "zotero://select/library/items/#{item_key}" + else + "zotero://select/items/@#{key}" + end + end + end + + defp non_empty(nil), do: nil + defp non_empty(""), do: nil + defp non_empty(v), do: v +end diff --git a/lib/org_garden/supervisor.ex b/lib/org_garden/supervisor.ex new file mode 100644 index 0000000..54ad6be --- /dev/null +++ b/lib/org_garden/supervisor.ex @@ -0,0 +1,40 @@ +defmodule OrgGarden.Supervisor do + @moduledoc """ + Supervises development server components. + + Strategy: :one_for_all + If either child fails, restart both to ensure consistent state. + + Children: + 1. OrgGarden.Watcher - watches .org files for changes + 2. OrgGarden.Quartz - runs Quartz Node.js server + + ## Usage + + OrgGarden.Supervisor.start_link( + notes_dir: "/path/to/notes", + output_dir: "/path/to/output", + content_dir: "/path/to/output/content", + pipeline_opts: %{zotero_url: "...", ...}, + transforms: [OrgGarden.Transforms.Citations], + port: 8080, + ws_port: 3001 + ) + """ + use Supervisor + + def start_link(opts) do + Supervisor.start_link(__MODULE__, opts, name: __MODULE__) + end + + @impl true + def init(opts) do + children = [ + {OrgGarden.Watcher, + Keyword.take(opts, [:notes_dir, :output_dir, :content_dir, :pipeline_opts, :transforms])}, + {OrgGarden.Quartz, Keyword.take(opts, [:content_dir, :port, :ws_port])} + ] + + Supervisor.init(children, strategy: :one_for_all) + end +end diff --git a/lib/org_garden/transform.ex b/lib/org_garden/transform.ex new file mode 100644 index 0000000..bb9ea7a --- /dev/null +++ b/lib/org_garden/transform.ex @@ -0,0 +1,48 @@ +defmodule OrgGarden.Transform do + @moduledoc """ + Behaviour that all markdown transform modules must implement. + + ## Callbacks + + - `init/1` — called once before processing; returns transform-specific state. 
+ Default implementation returns the opts map unchanged. + - `apply/3` — called per .md file; returns the (possibly modified) content. + - `teardown/1` — optional cleanup after all files are processed. + + ## Example + + defmodule MyTransform do + @behaviour OrgGarden.Transform + + @impl true + def init(opts), do: %{some_state: opts[:value]} + + @impl true + def apply(content, state, _opts) do + String.replace(content, "foo", state.some_state) + end + end + """ + + @doc "One-time initialisation. Returns opaque state passed to apply/3." + @callback init(opts :: map()) :: term() + + @doc "Transform file content. Returns the (possibly modified) content string." + @callback apply(content :: String.t(), state :: term(), opts :: map()) :: String.t() + + @doc "Optional cleanup after all files are processed." + @callback teardown(state :: term()) :: :ok + + @optional_callbacks teardown: 1 + + defmacro __using__(_) do + quote do + @behaviour OrgGarden.Transform + + @impl OrgGarden.Transform + def init(opts), do: opts + + defoverridable init: 1 + end + end +end diff --git a/lib/org_garden/transforms/citations.ex b/lib/org_garden/transforms/citations.ex new file mode 100644 index 0000000..cffa88c --- /dev/null +++ b/lib/org_garden/transforms/citations.ex @@ -0,0 +1,231 @@ +defmodule OrgGarden.Transforms.Citations do + @moduledoc """ + Markdown transform: resolves org-citar citation keys to hyperlinks. + + ## Recognised citation syntax (as output by ox-hugo from org-citar) + + [cite:@key] → org-cite / citar standard (most common) + [cite:@key1;@key2] → multiple citations + cite:key → older roam-style bare cite syntax + + ## Resolution chain (in order) + + 1. Zotero (live instance via Better BibTeX JSON-RPC) — preferred + 2. BibTeX file (BIBTEX_FILE env var) — fallback + 3. 
DOI / bare key — always succeeds + + ## Modes (opts.citation_mode) + + :silent — silently use DOI/bare-key fallback when Zotero+BibTeX fail + :warn — (default) emit a Logger.warning for unresolved keys + :strict — raise on unresolved keys (aborts pipeline) + + ## Format + + Resolved citations are rendered as: + + [Label](url) when a URL is available + [Label] when no URL could be determined (bare key fallback) + + Multiple semicolon-separated keys become space-separated links: + + [cite:@a;@b] → [Author A, 2020](url_a) [Author B, 2019](url_b) + + ## init/1 callback + + Loads the BibTeX file (if configured) once before processing begins, + and probes Zotero availability, emitting warnings as appropriate. + """ + + @behaviour OrgGarden.Transform + + require Logger + + alias OrgGarden.Resolvers.Zotero + alias OrgGarden.Resolvers.BibTeX + alias OrgGarden.Resolvers.DOI + + # Match [cite:@key] and [cite:@key1;@key2;...] (org-cite / citar style) + @cite_bracket_regex ~r/\[cite:(@[^\]]+)\]/ + + # Match bare cite:key or cite:@key (older roam style, no brackets, optional @ prefix) + @cite_bare_regex ~r/(? + "All citations will fall back to bare-key rendering. " <> + "Set BIBTEX_FILE env var or start Zotero with Better BibTeX to resolve citations." + ) + end + + %{ + bibtex_entries: bibtex_entries, + zotero_available: zotero_available, + zotero_url: Map.get(opts, :zotero_url, "http://localhost:23119"), + citation_mode: Map.get(opts, :citation_mode, :warn) + } + end + + @doc """ + Apply citation resolution to a single markdown file's content. 
+ """ + def apply(content, state, _opts) do + content + |> resolve_bracket_citations(state) + |> resolve_bare_citations(state) + end + + # ------------------------------------------------------------------ + # Resolution passes + # ------------------------------------------------------------------ + + defp resolve_bracket_citations(content, state) do + Regex.replace(@cite_bracket_regex, content, fn _full, keys_str -> + keys_str + |> String.split(";") + |> Enum.map(&String.trim/1) + |> Enum.map(fn "@" <> key -> key end) + |> Enum.map(&resolve_key(&1, state)) + |> Enum.join(" ") + end) + end + + defp resolve_bare_citations(content, state) do + Regex.replace(@cite_bare_regex, content, fn _full, key -> + resolve_key(key, state) + end) + end + + # ------------------------------------------------------------------ + # Single-key resolution chain + # ------------------------------------------------------------------ + + defp resolve_key(key, state) do + info = + with :error <- try_zotero(key, state), + :error <- try_bibtex(key, state) do + handle_unresolved(key, state) + else + {:ok, citation_info} -> citation_info + end + + format_result(info) + end + + defp try_zotero(_key, %{zotero_available: false}), do: :error + + defp try_zotero(key, %{zotero_url: url}) do + Zotero.resolve(key, url) + end + + defp try_bibtex(_key, %{bibtex_entries: entries}) when map_size(entries) == 0, do: :error + + defp try_bibtex(key, %{bibtex_entries: entries}) do + BibTeX.resolve(key, entries) + end + + defp handle_unresolved(key, %{citation_mode: mode}) do + case mode do + :strict -> + raise "Citations: could not resolve citation key '#{key}' and mode is :strict" + + :warn -> + Logger.warning("Citations: unresolved citation key '#{key}' — using bare-key fallback") + {:ok, result} = DOI.resolve(key) + result + + :silent -> + {:ok, result} = DOI.resolve(key) + result + end + end + + defp format_result(%{label: label, url: nil}), do: "[#{label}]" + defp format_result(%{label: label, url: url}), 
do: "[#{label}](#{url})" + + # ------------------------------------------------------------------ + # Init helpers + # ------------------------------------------------------------------ + + defp load_bibtex(opts) do + path = Map.get(opts, :bibtex_file) || System.get_env("BIBTEX_FILE") + + cond do + is_nil(path) -> + Logger.debug("Citations: BIBTEX_FILE not set — BibTeX resolver disabled") + %{} + + not File.exists?(path) -> + Logger.warning("Citations: BIBTEX_FILE=#{path} does not exist — BibTeX resolver disabled") + %{} + + true -> + case BibTeX.load(path) do + {:ok, entries} -> entries + {:error, reason} -> + Logger.warning("Citations: failed to load BibTeX file #{path}: #{inspect(reason)}") + %{} + end + end + end + + defp probe_zotero(opts) do + url = Map.get(opts, :zotero_url, "http://localhost:23119") + + # Use a no-op JSON-RPC call to probe availability. + # /better-bibtex/cayw is intentionally avoided — it blocks waiting for + # user interaction and never returns without a pick. + payload = + Jason.encode!(%{ + jsonrpc: "2.0", + method: "item.search", + params: [[[]]], + id: 0 + }) + + result = + try do + Req.post(url <> "/better-bibtex/json-rpc", + body: payload, + headers: [{"content-type", "application/json"}], + receive_timeout: 3_000, + finch: OrgGarden.Finch + ) + rescue + e -> {:error, e} + end + + case result do + {:ok, %{status: 200}} -> + Logger.info("Citations: Zotero Better BibTeX is available at #{url}") + true + + {:ok, %{status: status}} -> + Logger.warning( + "Citations: Zotero responded HTTP #{status} at #{url} — " <> + "is Better BibTeX installed?" 
+ ) + false + + _ -> + Logger.warning( + "Citations: Zotero not reachable at #{url} — " <> + "start Zotero with Better BibTeX or set BIBTEX_FILE as fallback" + ) + false + end + end +end diff --git a/lib/org_garden/watcher.ex b/lib/org_garden/watcher.ex new file mode 100644 index 0000000..4551692 --- /dev/null +++ b/lib/org_garden/watcher.ex @@ -0,0 +1,236 @@ +defmodule OrgGarden.Watcher do + @moduledoc """ + File-watching GenServer that detects `.org` file changes and triggers + incremental export + transform for only the affected files. + + Uses the `file_system` package (inotify on Linux, fsevents on macOS) + to watch the notes directory. Events are debounced per-file (500ms) + to coalesce rapid writes (e.g., Emacs auto-save). + + ## Lifecycle + + Started dynamically by `OrgGarden.CLI` after the initial batch export. + Transforms are initialized once at startup and reused across all + incremental rebuilds to avoid repeated Zotero probes and BibTeX loads. + + ## Usage + + OrgGarden.Watcher.start_link( + notes_dir: "/path/to/notes", + output_dir: "/path/to/output", + content_dir: "/path/to/output/content", + pipeline_opts: %{zotero_url: "...", ...}, + transforms: [OrgGarden.Transforms.Citations] + ) + """ + + use GenServer + + require Logger + + @debounce_ms 500 + + # ------------------------------------------------------------------- + # Client API + # ------------------------------------------------------------------- + + @doc """ + Start the watcher as a linked process. 
+ + ## Options + + * `:notes_dir` — directory to watch for `.org` changes (required) + * `:output_dir` — ox-hugo base dir (required) + * `:content_dir` — directory where `.md` files are written (required) + * `:pipeline_opts` — opts map passed to transforms (required) + * `:transforms` — list of transform modules (default: `[OrgGarden.Transforms.Citations]`) + """ + def start_link(opts) do + GenServer.start_link(__MODULE__, opts, name: __MODULE__) + end + + # ------------------------------------------------------------------- + # GenServer callbacks + # ------------------------------------------------------------------- + + @impl true + def init(opts) do + notes_dir = Keyword.fetch!(opts, :notes_dir) + output_dir = Keyword.fetch!(opts, :output_dir) + content_dir = Keyword.fetch!(opts, :content_dir) + pipeline_opts = Keyword.fetch!(opts, :pipeline_opts) + transforms = Keyword.get(opts, :transforms, [OrgGarden.Transforms.Citations]) + + # Initialize transforms once — reused for all incremental rebuilds + initialized_transforms = OrgGarden.init_transforms(transforms, pipeline_opts) + + # Start the file system watcher + {:ok, watcher_pid} = FileSystem.start_link(dirs: [notes_dir], recursive: true) + FileSystem.subscribe(watcher_pid) + + Logger.info("Watcher: monitoring #{notes_dir} for .org changes") + + {:ok, + %{ + notes_dir: notes_dir, + output_dir: output_dir, + content_dir: content_dir, + pipeline_opts: pipeline_opts, + watcher_pid: watcher_pid, + initialized_transforms: initialized_transforms, + pending: %{} + }} + end + + @impl true + def handle_info({:file_event, _pid, {path, events}}, state) do + path = to_string(path) + + if org_file?(path) and not temporary_file?(path) do + event_type = classify_events(events) + Logger.debug("Watcher: #{event_type} event for #{path}") + {:noreply, schedule_debounce(path, event_type, state)} + else + {:noreply, state} + end + end + + @impl true + def handle_info({:file_event, _pid, :stop}, state) do + Logger.warning("Watcher: 
file system monitor stopped unexpectedly") + {:stop, :watcher_stopped, state} + end + + @impl true + def handle_info({:debounced, path, event_type}, state) do + state = %{state | pending: Map.delete(state.pending, path)} + + case event_type do + :deleted -> + handle_delete(path, state) + + _created_or_modified -> + handle_change(path, state) + end + + {:noreply, state} + end + + @impl true + def terminate(_reason, state) do + OrgGarden.teardown_transforms(state.initialized_transforms) + :ok + end + + # ------------------------------------------------------------------- + # Event handling + # ------------------------------------------------------------------- + + defp handle_change(orgfile, state) do + %{ + notes_dir: notes_dir, + output_dir: output_dir, + content_dir: content_dir, + pipeline_opts: pipeline_opts, + initialized_transforms: initialized_transforms + } = state + + md_path = OrgGarden.Export.expected_md_path(orgfile, notes_dir, content_dir) + IO.puts("==> Changed: #{Path.relative_to(orgfile, notes_dir)}") + + case OrgGarden.Export.export_file(orgfile, notes_dir, output_dir) do + {:ok, _} -> + IO.puts(" exported: #{Path.relative_to(md_path, content_dir)}") + + {:ok, stats} = OrgGarden.run_on_files_with([md_path], initialized_transforms, pipeline_opts) + + Enum.each(stats, fn {mod, count} -> + if count > 0, do: IO.puts(" #{inspect(mod)}: #{count} file(s) modified") + end) + + regenerate_index(content_dir) + IO.puts("==> Done") + + {:error, reason} -> + Logger.error("Watcher: export failed for #{orgfile}: #{inspect(reason)}") + end + end + + defp handle_delete(orgfile, state) do + %{notes_dir: notes_dir, content_dir: content_dir} = state + + md_path = OrgGarden.Export.expected_md_path(orgfile, notes_dir, content_dir) + IO.puts("==> Deleted: #{Path.relative_to(orgfile, notes_dir)}") + + if File.exists?(md_path) do + File.rm!(md_path) + IO.puts(" removed: #{Path.relative_to(md_path, content_dir)}") + + # Clean up empty parent directories left behind + 
cleanup_empty_dirs(Path.dirname(md_path), content_dir) + end + + regenerate_index(content_dir) + IO.puts("==> Done") + end + + # ------------------------------------------------------------------- + # Index generation + # ------------------------------------------------------------------- + + defp regenerate_index(content_dir) do + OrgGarden.Index.regenerate(content_dir) + end + + # ------------------------------------------------------------------- + # Helpers + # ------------------------------------------------------------------- + + defp schedule_debounce(path, event_type, state) do + # Cancel any existing timer for this path + case Map.get(state.pending, path) do + nil -> :ok + old_ref -> Process.cancel_timer(old_ref) + end + + ref = Process.send_after(self(), {:debounced, path, event_type}, @debounce_ms) + %{state | pending: Map.put(state.pending, path, ref)} + end + + defp org_file?(path), do: String.ends_with?(path, ".org") + + defp temporary_file?(path) do + basename = Path.basename(path) + # Emacs creates temp files like .#file.org and #file.org# + String.starts_with?(basename, ".#") or + (String.starts_with?(basename, "#") and String.ends_with?(basename, "#")) + end + + defp classify_events(events) do + cond do + :removed in events or :deleted in events -> :deleted + :created in events -> :created + :modified in events or :changed in events -> :modified + # renamed can mean created or deleted depending on context; + # if the file exists it was renamed into the watched dir + :renamed in events -> :modified + true -> :modified + end + end + + defp cleanup_empty_dirs(dir, stop_at) do + dir = Path.expand(dir) + stop_at = Path.expand(stop_at) + + if dir != stop_at and File.dir?(dir) do + case File.ls!(dir) do + [] -> + File.rmdir!(dir) + cleanup_empty_dirs(Path.dirname(dir), stop_at) + + _ -> + :ok + end + end + end +end diff --git a/mix.exs b/mix.exs new file mode 100644 index 0000000..6689d37 --- /dev/null +++ b/mix.exs @@ -0,0 +1,34 @@ +defmodule 
OrgGarden.MixProject do + use Mix.Project + + def project do + [ + app: :org_garden, + version: "0.1.0", + elixir: "~> 1.17", + start_permanent: Mix.env() == :prod, + deps: deps(), + escript: escript() + ] + end + + def application do + [ + extra_applications: [:logger], + mod: {OrgGarden.Application, []} + ] + end + + defp escript do + [main_module: OrgGarden.CLI] + end + + defp deps do + [ + {:finch, "~> 0.19"}, + {:req, "~> 0.5"}, + {:jason, "~> 1.4"}, + {:file_system, "~> 1.0"} + ] + end +end diff --git a/mix.lock b/mix.lock new file mode 100644 index 0000000..c5e4b01 --- /dev/null +++ b/mix.lock @@ -0,0 +1,12 @@ +%{ + "file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"}, + "finch": {:hex, :finch, "0.21.0", "b1c3b2d48af02d0c66d2a9ebfb5622be5c5ecd62937cf79a88a7f98d48a8290c", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "87dc6e169794cb2570f75841a19da99cfde834249568f2a5b121b809588a4377"}, + "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"}, + "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, + "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", 
[:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"}, + "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"}, + "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"}, + "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, + "req": {:hex, :req, "0.5.17", "0096ddd5b0ed6f576a03dde4b158a0c727215b15d2795e59e0916c6971066ede", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0b8bc6ffdfebbc07968e59d3ff96d52f2202d0536f10fef4dc11dc02a2a43e39"}, + "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, +} diff --git a/patches/01-glob-gitignore.patch b/patches/01-glob-gitignore.patch new file mode 100644 index 0000000..5ccbf47 --- /dev/null +++ b/patches/01-glob-gitignore.patch @@ -0,0 +1,19 @@ +diff --git a/quartz/util/glob.ts b/quartz/util/glob.ts 
+index 7a71160..91fbaa7 100644 +--- a/quartz/util/glob.ts ++++ b/quartz/util/glob.ts +@@ -10,12 +10,13 @@ export async function glob( + pattern: string, + cwd: string, + ignorePatterns: string[], ++ respectGitignore: boolean = true, + ): Promise { + const fps = ( + await globby(pattern, { + cwd, + ignore: ignorePatterns, +- gitignore: true, ++ gitignore: respectGitignore, + }) + ).map(toPosixPath) + return fps as FilePath[] diff --git a/patches/02-build-gitignore.patch b/patches/02-build-gitignore.patch new file mode 100644 index 0000000..1517ec6 --- /dev/null +++ b/patches/02-build-gitignore.patch @@ -0,0 +1,13 @@ +diff --git a/quartz/build.ts b/quartz/build.ts +index b98f4a8..3166a06 100644 +--- a/quartz/build.ts ++++ b/quartz/build.ts +@@ -71,7 +71,7 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) { + console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`) + + perf.addEvent("glob") +- const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns) ++ const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns, false) + const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort() + console.log( + `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`, diff --git a/patches/03-static-hugo.patch b/patches/03-static-hugo.patch new file mode 100644 index 0000000..ee0114a --- /dev/null +++ b/patches/03-static-hugo.patch @@ -0,0 +1,34 @@ +diff --git a/quartz/plugins/emitters/static.ts b/quartz/plugins/emitters/static.ts +index 0b45290..8b34049 100644 +--- a/quartz/plugins/emitters/static.ts ++++ b/quartz/plugins/emitters/static.ts +@@ -7,6 +7,7 @@ import { dirname } from "path" + export const Static: QuartzEmitterPlugin = () => ({ + name: "Static", + async *emit({ argv, cfg }) { ++ // Copy Quartz's own internal static assets (quartz/static/) → output/static/ + const staticPath = joinSegments(QUARTZ, 
"static") + const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns) + const outputStaticPath = joinSegments(argv.output, "static") +@@ -18,6 +19,21 @@ export const Static: QuartzEmitterPlugin = () => ({ + await fs.promises.copyFile(src, dest) + yield dest + } ++ ++ // Copy user-facing static assets (static/) → output/ preserving paths. ++ // This mirrors Hugo's convention: static/ox-hugo/foo.png is served at /ox-hugo/foo.png, ++ // which matches the src="/ox-hugo/..." paths that ox-hugo writes into exported markdown. ++ const userStaticPath = "static" ++ if (fs.existsSync(userStaticPath)) { ++ const userFps = await glob("**", userStaticPath, cfg.configuration.ignorePatterns, false) ++ for (const fp of userFps) { ++ const src = joinSegments(userStaticPath, fp) as FilePath ++ const dest = joinSegments(argv.output, fp) as FilePath ++ await fs.promises.mkdir(dirname(dest), { recursive: true }) ++ await fs.promises.copyFile(src, dest) ++ yield dest ++ } ++ } + }, + async *partialEmit() {}, + }) diff --git a/patches/04-oxhugofm-figure.patch b/patches/04-oxhugofm-figure.patch new file mode 100644 index 0000000..9b5435c --- /dev/null +++ b/patches/04-oxhugofm-figure.patch @@ -0,0 +1,44 @@ +diff --git a/quartz/plugins/transformers/oxhugofm.ts b/quartz/plugins/transformers/oxhugofm.ts +index 303566e..4fb5e2c 100644 +--- a/quartz/plugins/transformers/oxhugofm.ts ++++ b/quartz/plugins/transformers/oxhugofm.ts +@@ -27,7 +27,10 @@ const defaultOptions: Options = { + const relrefRegex = new RegExp(/\[([^\]]+)\]\(\{\{< relref "([^"]+)" >\}\}\)/, "g") + const predefinedHeadingIdRegex = new RegExp(/(.*) {#(?:.*)}/, "g") + const hugoShortcodeRegex = new RegExp(/{{(.*)}}/, "g") +-const figureTagRegex = new RegExp(/< ?figure src="(.*)" ?>/, "g") ++// Matches the full Hugo {{< figure src="..." ... >}} shortcode and captures src. ++// Must run before the generic shortcode stripper to avoid partial-match issues ++// with captions that contain HTML (e.g. ). 
++const figureShortcodeRegex = new RegExp(/{{<\s*figure\b[^}]*\bsrc="([^"]*)"[^}]*>}}/, "g") + // \\\\\( -> matches \\( + // (.+?) -> Lazy match for capturing the equation + // \\\\\) -> matches \\) +@@ -70,19 +73,19 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin> + }) + } + +- if (opts.removeHugoShortcode) { ++ if (opts.replaceFigureWithMdImg) { + src = src.toString() +- src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => { +- const [scContent] = capture +- return scContent ++ src = src.replaceAll(figureShortcodeRegex, (_value, ...capture) => { ++ const [imgSrc] = capture ++ return `![](${imgSrc})` + }) + } + +- if (opts.replaceFigureWithMdImg) { ++ if (opts.removeHugoShortcode) { + src = src.toString() +- src = src.replaceAll(figureTagRegex, (_value, ...capture) => { +- const [src] = capture +- return `![](${src})` ++ src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => { ++ const [scContent] = capture ++ return scContent + }) + } + diff --git a/quartz-config/globals.d.ts b/quartz-config/globals.d.ts new file mode 100644 index 0000000..6cf30f8 --- /dev/null +++ b/quartz-config/globals.d.ts @@ -0,0 +1,17 @@ +export declare global { + interface Document { + addEventListener( + type: K, + listener: (this: Document, ev: CustomEventMap[K]) => void, + ): void + removeEventListener( + type: K, + listener: (this: Document, ev: CustomEventMap[K]) => void, + ): void + dispatchEvent(ev: CustomEventMap[K] | UIEvent): void + } + interface Window { + spaNavigate(url: URL, isBack: boolean = false) + addCleanup(fn: (...args: any[]) => void) + } +} diff --git a/quartz-config/index.d.ts b/quartz-config/index.d.ts new file mode 100644 index 0000000..9011ee3 --- /dev/null +++ b/quartz-config/index.d.ts @@ -0,0 +1,15 @@ +declare module "*.scss" { + const content: string + export = content +} + +// dom custom event +interface CustomEventMap { + prenav: CustomEvent<{}> + nav: CustomEvent<{ url: FullSlug }> + themechange: CustomEvent<{ theme: 
"light" | "dark" }> + readermodechange: CustomEvent<{ mode: "on" | "off" }> +} + +type ContentIndex = Record +declare const fetchData: Promise diff --git a/quartz-config/quartz.config.ts b/quartz-config/quartz.config.ts new file mode 100644 index 0000000..a8540f7 --- /dev/null +++ b/quartz-config/quartz.config.ts @@ -0,0 +1,101 @@ +import { QuartzConfig } from "./quartz/cfg" +import * as Plugin from "./quartz/plugins" + +/** + * Quartz 4 Configuration + * + * See https://quartz.jzhao.xyz/configuration for more information. + */ +const config: QuartzConfig = { + configuration: { + pageTitle: "Quartz 4", + pageTitleSuffix: "", + enableSPA: true, + enablePopovers: true, + analytics: { + provider: "plausible", + }, + locale: "en-US", + baseUrl: "quartz.jzhao.xyz", + ignorePatterns: ["private", "templates", ".obsidian"], + defaultDateType: "modified", + theme: { + fontOrigin: "googleFonts", + cdnCaching: true, + typography: { + header: "Schibsted Grotesk", + body: "Source Sans Pro", + code: "IBM Plex Mono", + }, + colors: { + lightMode: { + light: "#faf8f8", + lightgray: "#e5e5e5", + gray: "#b8b8b8", + darkgray: "#4e4e4e", + dark: "#2b2b2b", + secondary: "#284b63", + tertiary: "#84a59d", + highlight: "rgba(143, 159, 169, 0.15)", + textHighlight: "#fff23688", + }, + darkMode: { + light: "#161618", + lightgray: "#393639", + gray: "#646464", + darkgray: "#d4d4d4", + dark: "#ebebec", + secondary: "#7b97aa", + tertiary: "#84a59d", + highlight: "rgba(143, 159, 169, 0.15)", + textHighlight: "#b3aa0288", + }, + }, + }, + }, + plugins: { + transformers: [ + Plugin.FrontMatter({ delimiters: "+++", language: "toml" }), + Plugin.CreatedModifiedDate({ + priority: ["frontmatter", "git", "filesystem"], + }), + Plugin.SyntaxHighlighting({ + theme: { + light: "github-light", + dark: "github-dark", + }, + keepBackground: false, + }), + // OxHugoFlavouredMarkdown must come before GitHubFlavoredMarkdown. + // Note: not compatible with ObsidianFlavoredMarkdown — use one or the other. 
+ // If ox-hugo exports TOML frontmatter, change FrontMatter to: + // Plugin.FrontMatter({ delimiters: "+++", language: "toml" }) + Plugin.OxHugoFlavouredMarkdown(), + Plugin.GitHubFlavoredMarkdown(), + Plugin.TableOfContents(), + Plugin.CrawlLinks({ markdownLinkResolution: "shortest" }), + Plugin.Description(), + Plugin.Latex({ renderEngine: "katex" }), + ], + filters: [Plugin.RemoveDrafts()], + emitters: [ + Plugin.AliasRedirects(), + Plugin.ComponentResources(), + Plugin.ContentPage(), + Plugin.FolderPage(), + Plugin.TagPage(), + Plugin.ContentIndex({ + enableSiteMap: true, + enableRSS: true, + }), + Plugin.Assets(), + Plugin.Static(), + Plugin.Favicon(), + Plugin.NotFoundPage(), + // Comment out CustomOgImages to speed up build time + Plugin.CustomOgImages(), + ], + }, +} + +export default config diff --git a/quartz-config/quartz.layout.ts b/quartz-config/quartz.layout.ts new file mode 100644 index 0000000..970a5be --- /dev/null +++ b/quartz-config/quartz.layout.ts @@ -0,0 +1,68 @@ +import { PageLayout, SharedLayout } from "./quartz/cfg" +import * as Component from "./quartz/components" + +// components shared across all pages +export const sharedPageComponents: SharedLayout = { + head: Component.Head(), + header: [], + afterBody: [], + footer: Component.Footer({ + links: { + GitHub: "https://github.com/jackyzha0/quartz", + "Discord Community": "https://discord.gg/cRFFHYye7t", + }, + }), +} + +// components for pages that display a single page (e.g. 
a single note) +export const defaultContentPageLayout: PageLayout = { + beforeBody: [ + Component.ConditionalRender({ + component: Component.Breadcrumbs(), + condition: (page) => page.fileData.slug !== "index", + }), + Component.ArticleTitle(), + Component.ContentMeta(), + Component.TagList(), + ], + left: [ + Component.PageTitle(), + Component.MobileOnly(Component.Spacer()), + Component.Flex({ + components: [ + { + Component: Component.Search(), + grow: true, + }, + { Component: Component.Darkmode() }, + { Component: Component.ReaderMode() }, + ], + }), + Component.Explorer(), + ], + right: [ + Component.Graph(), + Component.DesktopOnly(Component.TableOfContents()), + Component.Backlinks(), + ], +} + +// components for pages that display lists of pages (e.g. tags or folders) +export const defaultListPageLayout: PageLayout = { + beforeBody: [Component.Breadcrumbs(), Component.ArticleTitle(), Component.ContentMeta()], + left: [ + Component.PageTitle(), + Component.MobileOnly(Component.Spacer()), + Component.Flex({ + components: [ + { + Component: Component.Search(), + grow: true, + }, + { Component: Component.Darkmode() }, + ], + }), + Component.Explorer(), + ], + right: [], +}