initial: org-garden

This commit is contained in:
Ignacio Ballesteros
2026-02-21 14:50:52 +01:00
commit 6476b45f04
27 changed files with 2467 additions and 0 deletions

189
lib/org_garden.ex Normal file
View File

@@ -0,0 +1,189 @@
defmodule OrgGarden do
  @moduledoc """
  Org-roam to website publishing pipeline.

  Orchestrates:

  1. Org → Markdown export (via Emacs + ox-hugo)
  2. Markdown transforms (citations, etc.)
  3. Markdown → HTML + serving (via Quartz)

  ## Usage

      opts = %{
        zotero_url: "http://localhost:23119",
        bibtex_file: System.get_env("BIBTEX_FILE"),
        citation_mode: :warn # :silent | :warn | :strict
      }

      # Batch: all .md files in a directory
      OrgGarden.run(content_dir, [OrgGarden.Transforms.Citations], opts)

      # Targeted: specific files only
      OrgGarden.run_on_files(["content/foo.md"], [OrgGarden.Transforms.Citations], opts)

      # With pre-initialized transforms (for watch mode, avoids re-init)
      initialized = OrgGarden.init_transforms([OrgGarden.Transforms.Citations], opts)
      OrgGarden.run_on_files_with(["content/foo.md"], initialized, opts)
  """
  require Logger

  @type transform :: module()
  @type initialized_transform :: {module(), term()}
  @type opts :: map()

  @doc "One-shot build: org files → static site"
  def build(notes_dir, opts \\ []) do
    OrgGarden.CLI.handle_build([notes_dir | opts_to_args(opts)])
  end

  @doc "Development server: watch + live reload"
  def serve(notes_dir, opts \\ []) do
    OrgGarden.CLI.handle_serve([notes_dir | opts_to_args(opts)])
  end

  @doc "Export only: org files → markdown (no Quartz)"
  def export(notes_dir, opts \\ []) do
    OrgGarden.CLI.handle_export([notes_dir | opts_to_args(opts)])
  end

  # Translate keyword opts into CLI-style argv flags. Unknown keys are
  # silently dropped (the catch-all clause), matching the CLI's strict parser.
  defp opts_to_args(opts) do
    Enum.flat_map(opts, fn
      {:output, v} -> ["--output", v]
      {:port, v} -> ["--port", to_string(v)]
      {:ws_port, v} -> ["--ws-port", to_string(v)]
      {:watch, true} -> ["--watch"]
      {:watch, false} -> []
      _ -> []
    end)
  end

  @doc """
  Initialize transform modules. Returns a list of `{module, state}` tuples.

  Call this once and reuse the result with `run_on_files_with/3` to avoid
  re-initializing transforms on every file change (e.g., in watch mode).
  """
  @spec init_transforms([transform()], opts()) :: [initialized_transform()]
  def init_transforms(transforms, opts) do
    Enum.map(transforms, fn mod ->
      state = mod.init(opts)
      {mod, state}
    end)
  end

  @doc """
  Tear down previously initialized transforms, releasing any resources.

  Transforms without an exported `teardown/1` are skipped.
  """
  @spec teardown_transforms([initialized_transform()]) :: :ok
  def teardown_transforms(initialized) do
    Enum.each(initialized, fn {mod, state} ->
      if function_exported?(mod, :teardown, 1) do
        mod.teardown(state)
      end
    end)

    :ok
  end

  @doc """
  Run all transforms over every `.md` file under `content_dir`.

  Initializes and tears down transforms automatically.
  Returns `{:ok, stats}` where stats maps each transform to a count of files it changed.
  """
  @spec run(String.t(), [transform()], opts()) :: {:ok, map()}
  def run(content_dir, transforms, opts \\ %{}) do
    md_files =
      content_dir
      |> Path.join("**/*.md")
      |> Path.wildcard()

    if md_files == [] do
      Logger.warning("OrgGarden: no .md files found in #{content_dir}")
      {:ok, %{}}
    else
      Logger.info(
        "OrgGarden: processing #{length(md_files)} markdown files " <>
          "with #{length(transforms)} transform(s)"
      )

      {:ok, run_with_lifecycle(md_files, transforms, opts)}
    end
  end

  @doc """
  Run all transforms over specific `.md` files only.

  Initializes and tears down transforms automatically.
  Files that don't exist are silently skipped.
  """
  @spec run_on_files([String.t()], [transform()], opts()) :: {:ok, map()}
  def run_on_files(file_paths, transforms, opts \\ %{}) do
    existing = Enum.filter(file_paths, &File.exists?/1)

    if existing == [] do
      Logger.debug("OrgGarden: no files to process")
      {:ok, %{}}
    else
      Logger.info("OrgGarden: processing #{length(existing)} file(s)")
      {:ok, run_with_lifecycle(existing, transforms, opts)}
    end
  end

  @doc """
  Run pre-initialized transforms over specific `.md` files.

  Does NOT call `init` or `teardown` — the caller manages the transform
  lifecycle. Use this in watch mode to avoid re-initializing on every change.
  """
  @spec run_on_files_with([String.t()], [initialized_transform()], opts()) :: {:ok, map()}
  def run_on_files_with(file_paths, initialized, opts) do
    existing = Enum.filter(file_paths, &File.exists?/1)

    if existing == [] do
      Logger.debug("OrgGarden: no files to process")
      {:ok, %{}}
    else
      stats = apply_transforms(existing, initialized, opts)
      {:ok, stats}
    end
  end

  # -------------------------------------------------------------------
  # Private
  # -------------------------------------------------------------------

  # Full init → apply → teardown lifecycle for a batch of files.
  #
  # FIX: teardown is now guaranteed via `after`, even when a transform's
  # `apply/3` raises mid-run. Previously a raising transform skipped
  # `teardown_transforms/1` entirely, leaking whatever resources the
  # transforms' `init/1` had acquired.
  defp run_with_lifecycle(files, transforms, opts) do
    initialized = init_transforms(transforms, opts)

    try do
      apply_transforms(files, initialized, opts)
    after
      teardown_transforms(initialized)
    end
  end

  # Threads each file's content through every transform in order, counting
  # per-transform how many files it actually changed. Files are rewritten
  # on disk only when at least one transform modified them.
  defp apply_transforms(md_files, initialized, opts) do
    Enum.reduce(md_files, %{}, fn path, acc ->
      original = File.read!(path)

      {transformed, file_stats} =
        Enum.reduce(initialized, {original, %{}}, fn {mod, state}, {content, fstats} ->
          result = mod.apply(content, state, opts)
          changed = result != content

          {result,
           Map.update(
             fstats,
             mod,
             if(changed, do: 1, else: 0),
             &(&1 + if(changed, do: 1, else: 0))
           )}
        end)

      if transformed != original do
        File.write!(path, transformed)
        Logger.debug("OrgGarden: updated #{Path.relative_to_cwd(path)}")
      end

      Map.merge(acc, file_stats, fn _k, a, b -> a + b end)
    end)
  end
end

View File

@@ -0,0 +1,14 @@
defmodule OrgGarden.Application do
  @moduledoc false
  use Application

  # Boots the app's only permanent child: a Finch HTTP connection pool,
  # registered as OrgGarden.Finch (used by the Zotero resolver's requests).
  @impl true
  def start(_type, _args) do
    finch_pool = {Finch, name: OrgGarden.Finch}

    Supervisor.start_link([finch_pool],
      strategy: :one_for_one,
      name: OrgGarden.AppSupervisor
    )
  end
end

375
lib/org_garden/cli.ex Normal file
View File

@@ -0,0 +1,375 @@
defmodule OrgGarden.CLI do
  @moduledoc """
  Escript entry point for the org-garden pipeline.

  ## Commands

      org-garden serve <notes-dir> [--port 8080] [--ws-port 3001]
      org-garden build <notes-dir> [--output <path>]
      org-garden export <notes-dir> [--watch]

  ### serve

  Development server with watch + live reload. Starts both the org→md
  watcher and Quartz in serve mode.

  ### build

  One-shot build for CI/production. Exports org files, runs transforms,
  then builds static site with Quartz.

  ### export

  Just export org→md (current pipeline behavior). Use --watch for
  incremental re-export on file changes.

  ## Arguments

      notes-dir          Path to the directory containing `.org` notes (required).
                         Also accepts the `NOTES_DIR` env var.

  ## Options

      --output <path>    Output root directory (used as ox-hugo base dir).
                         Defaults to the `OUTPUT_DIR` env var, or the current
                         working directory.
      --content-dir <p>  Output directory for exported Markdown. Defaults to
                         `<output>/content`.
      --port <n>         HTTP server port (default: 8080). Only for `serve`.
      --ws-port <n>      WebSocket hot reload port (default: 3001). Only for `serve`.
      --watch            After initial batch, watch notes-dir for changes and
                         incrementally re-export affected files. Only for `export`.

  ## Environment Variables

      BIBTEX_FILE        Path to a `.bib` file used as citation fallback.
      ZOTERO_URL         Zotero Better BibTeX base URL (default: http://localhost:23119).
      CITATION_MODE      silent | warn (default) | strict.
      QUARTZ_PATH        Path to quartz directory (required for serve/build).
      NODE_PATH          Node.js executable (default: node).
  """
  require Logger

  # Transform modules applied to every exported markdown file.
  @transforms [OrgGarden.Transforms.Citations]

  # Escript entry point: dispatch on the first argv token.
  def main(argv) do
    Application.ensure_all_started(:org_garden)

    case argv do
      ["serve" | rest] -> handle_serve(rest)
      ["build" | rest] -> handle_build(rest)
      ["export" | rest] -> handle_export(rest)
      # Legacy: treat bare args as export command for backward compatibility
      [_ | _] -> handle_export(argv)
      _ -> abort("Usage: org-garden <serve|build|export> <notes-dir> [options]")
    end
  end

  # ---------------------------------------------------------------------------
  # Command: serve
  # ---------------------------------------------------------------------------

  # Batch-exports once, then starts the watcher + Quartz supervision tree
  # and blocks forever (the escript process must stay alive for the tree).
  def handle_serve(argv) do
    require_quartz_env()
    {notes_dir, output_dir, content_dir, opts} = parse_serve_args(argv)
    pipeline_opts = build_pipeline_opts()

    # Initial batch export
    wipe(content_dir)
    export_all(notes_dir, output_dir)
    run_pipeline(content_dir, pipeline_opts)
    generate_index(content_dir)

    IO.puts("==> Starting development server...")

    {:ok, _pid} =
      OrgGarden.Supervisor.start_link(
        notes_dir: notes_dir,
        output_dir: output_dir,
        content_dir: content_dir,
        pipeline_opts: pipeline_opts,
        transforms: @transforms,
        port: opts[:port] || 8080,
        ws_port: opts[:ws_port] || 3001
      )

    IO.puts("==> Server running at http://localhost:#{opts[:port] || 8080}")
    IO.puts("==> Watching #{notes_dir} for changes (Ctrl+C to stop)")
    # Keep the escript's main process alive; supervision tree does the work.
    Process.sleep(:infinity)
  end

  defp parse_serve_args(argv) do
    {opts, positional, _invalid} =
      OptionParser.parse(argv,
        strict: [
          output: :string,
          content_dir: :string,
          port: :integer,
          ws_port: :integer
        ]
      )

    notes_dir = extract_notes_dir(positional, "serve")
    output_dir = extract_output_dir(opts)
    content_dir = extract_content_dir(opts, output_dir)
    {notes_dir, output_dir, content_dir, opts}
  end

  # ---------------------------------------------------------------------------
  # Command: build
  # ---------------------------------------------------------------------------

  # One-shot export + transform + static Quartz build; aborts on any
  # non-zero Quartz exit so CI fails loudly.
  def handle_build(argv) do
    quartz_path = require_quartz_env()
    {notes_dir, output_dir, content_dir, _opts} = parse_build_args(argv)
    pipeline_opts = build_pipeline_opts()

    # Full batch export
    wipe(content_dir)
    export_all(notes_dir, output_dir)
    run_pipeline(content_dir, pipeline_opts)
    generate_index(content_dir)

    # NOTE(review): NODE_PATH normally means "module search path" to Node
    # itself; here it is repurposed as the node executable path.
    node_path = System.get_env("NODE_PATH", "node")
    IO.puts("==> Building static site with Quartz...")

    {output, status} =
      System.cmd(
        node_path,
        [
          Path.join(quartz_path, "quartz/bootstrap-cli.mjs"),
          "build",
          "--directory",
          content_dir,
          "--output",
          Path.join(output_dir, "public")
        ],
        cd: quartz_path,
        stderr_to_stdout: true
      )

    IO.puts(output)

    if status != 0 do
      abort("Quartz build failed with status #{status}")
    end

    IO.puts("==> Build complete. Output: #{Path.join(output_dir, "public")}")
  end

  defp parse_build_args(argv) do
    {opts, positional, _invalid} =
      OptionParser.parse(argv,
        strict: [output: :string, content_dir: :string]
      )

    notes_dir = extract_notes_dir(positional, "build")
    output_dir = extract_output_dir(opts)
    content_dir = extract_content_dir(opts, output_dir)
    {notes_dir, output_dir, content_dir, opts}
  end

  # ---------------------------------------------------------------------------
  # Command: export (original pipeline behavior)
  # ---------------------------------------------------------------------------

  # Export-only mode: no Quartz required. Optionally stays alive watching
  # for .org changes when --watch is given.
  def handle_export(argv) do
    {notes_dir, output_dir, content_dir, watch?} = parse_export_args(argv)
    pipeline_opts = build_pipeline_opts()

    # Phase 1-4: full batch export
    wipe(content_dir)
    export_all(notes_dir, output_dir)
    run_pipeline(content_dir, pipeline_opts)
    generate_index(content_dir)

    md_count =
      content_dir
      |> Path.join("**/*.md")
      |> Path.wildcard()
      |> length()

    IO.puts("==> Done. #{md_count} markdown files in #{content_dir}")

    # Phase 5: optional watch mode
    if watch? do
      IO.puts("==> Watching #{notes_dir} for .org changes... (Ctrl+C to stop)")

      {:ok, _pid} =
        OrgGarden.Watcher.start_link(
          notes_dir: notes_dir,
          output_dir: output_dir,
          content_dir: content_dir,
          pipeline_opts: pipeline_opts,
          transforms: @transforms
        )

      Process.sleep(:infinity)
    end
  end

  defp parse_export_args(argv) do
    {opts, positional, _invalid} =
      OptionParser.parse(argv,
        strict: [output: :string, content_dir: :string, watch: :boolean]
      )

    notes_dir = extract_notes_dir(positional, "export")
    output_dir = extract_output_dir(opts)
    content_dir = extract_content_dir(opts, output_dir)
    watch? = Keyword.get(opts, :watch, false)
    {notes_dir, output_dir, content_dir, watch?}
  end

  # ---------------------------------------------------------------------------
  # Shared argument extraction
  # ---------------------------------------------------------------------------

  # Resolves the notes dir from the first positional arg or NOTES_DIR.
  # Aborts (exits the VM) if neither is given or the dir does not exist.
  defp extract_notes_dir(positional, command) do
    notes_dir =
      case positional do
        [dir | _] ->
          dir

        [] ->
          System.get_env("NOTES_DIR") ||
            abort("Usage: org-garden #{command} <notes-dir> [options]")
      end

    notes_dir = Path.expand(notes_dir)

    unless File.dir?(notes_dir) do
      abort("Error: notes directory does not exist: #{notes_dir}")
    end

    notes_dir
  end

  defp extract_output_dir(opts) do
    (opts[:output] || System.get_env("OUTPUT_DIR") || File.cwd!())
    |> Path.expand()
  end

  defp extract_content_dir(opts, output_dir) do
    (opts[:content_dir] || Path.join(output_dir, "content"))
    |> Path.expand()
  end

  # ---------------------------------------------------------------------------
  # Phase 1: Wipe content/
  # ---------------------------------------------------------------------------

  # Removes everything under content_dir except .gitkeep, so the directory
  # can stay tracked in git across rebuilds.
  defp wipe(content_dir) do
    IO.puts("==> Wiping #{content_dir}")
    File.mkdir_p!(content_dir)

    content_dir
    |> File.ls!()
    |> Enum.reject(&(&1 == ".gitkeep"))
    |> Enum.each(fn entry ->
      Path.join(content_dir, entry) |> File.rm_rf!()
    end)
  end

  # ---------------------------------------------------------------------------
  # Phase 2: Export org files via Emacs + ox-hugo
  # ---------------------------------------------------------------------------

  # Halts with exit 0 when there is nothing to export, exit 1 on failures.
  defp export_all(notes_dir, output_dir) do
    IO.puts("==> Exporting org files from #{notes_dir}")

    case OrgGarden.Export.export_all(notes_dir, output_dir) do
      {:ok, 0} ->
        IO.puts("No .org files found in #{notes_dir}")
        System.halt(0)

      {:ok, count} ->
        IO.puts(" exported #{count} file(s)")

      {:error, failures} ->
        IO.puts(:stderr, "\nFailed to export #{length(failures)} file(s):")

        Enum.each(failures, fn {f, {:error, reason}} ->
          IO.puts(:stderr, " #{f}: #{inspect(reason)}")
        end)

        System.halt(1)
    end
  end

  # ---------------------------------------------------------------------------
  # Phase 3: Markdown transformation pipeline
  # ---------------------------------------------------------------------------

  defp run_pipeline(content_dir, pipeline_opts) do
    IO.puts("==> Running markdown pipeline")
    {:ok, stats} = OrgGarden.run(content_dir, @transforms, pipeline_opts)

    Enum.each(stats, fn {mod, count} ->
      IO.puts(" #{inspect(mod)}: #{count} file(s) modified")
    end)
  end

  # ---------------------------------------------------------------------------
  # Phase 4: Generate default index.md if none was exported
  # ---------------------------------------------------------------------------

  defp generate_index(content_dir) do
    IO.puts("==> Generating index")
    OrgGarden.Index.generate(content_dir)
  end

  # ---------------------------------------------------------------------------
  # Helpers
  # ---------------------------------------------------------------------------

  # Returns QUARTZ_PATH after verifying the Quartz CLI exists there;
  # aborts with guidance otherwise.
  defp require_quartz_env do
    case System.get_env("QUARTZ_PATH") do
      nil ->
        abort("""
        Error: QUARTZ_PATH environment variable not set.

        The 'serve' and 'build' commands require Quartz to be available.
        Use the wrapper scripts that set up the environment:

            nix run .#notes -- <notes-dir>   # for serve
            nix run .#build -- <notes-dir>   # for build

        Or set QUARTZ_PATH manually to point to a quartz-org-roam checkout
        with node_modules installed.

        For export-only mode (no Quartz), use:

            org-garden export <notes-dir> [--watch]
        """)

      path ->
        unless File.exists?(Path.join(path, "quartz/bootstrap-cli.mjs")) do
          abort("Error: QUARTZ_PATH=#{path} does not contain quartz/bootstrap-cli.mjs")
        end

        path
    end
  end

  # Pipeline options sourced from the environment; unknown CITATION_MODE
  # values fall back to :warn.
  defp build_pipeline_opts do
    %{
      zotero_url: System.get_env("ZOTERO_URL", "http://localhost:23119"),
      bibtex_file: System.get_env("BIBTEX_FILE"),
      citation_mode:
        case System.get_env("CITATION_MODE", "warn") do
          "silent" -> :silent
          "strict" -> :strict
          _ -> :warn
        end
    }
  end

  # Print to stderr and terminate the VM with a non-zero exit status.
  defp abort(message) do
    IO.puts(:stderr, message)
    System.halt(1)
  end
end

135
lib/org_garden/export.ex Normal file
View File

@@ -0,0 +1,135 @@
defmodule OrgGarden.Export do
  @moduledoc """
  Org-to-Markdown export via Emacs batch + ox-hugo.

  Provides both single-file and batch export, plus a helper to compute
  the expected `.md` output path for a given `.org` source file.
  """
  require Logger

  @doc """
  Export a single `.org` file to Markdown via `emacs --batch` + ox-hugo.

  Returns `{:ok, exit_code}` with the emacs exit code (0 = success),
  or `{:error, reason}` if the command could not be executed.
  """
  @spec export_file(String.t(), String.t(), String.t()) ::
          {:ok, non_neg_integer()} | {:error, term()}
  def export_file(orgfile, notes_dir, output_dir) do
    section = section_for(orgfile, notes_dir)

    # ox-hugo requires static/ to exist for image asset copying
    File.mkdir_p!(Path.join(output_dir, "static"))

    # NOTE(review): output_dir/section are interpolated into elisp string
    # literals unescaped; paths containing `"` or `\` would break the forms.
    {output, exit_code} =
      System.cmd(
        "emacs",
        [
          "--batch",
          "--eval", "(require 'ox-hugo)",
          # Register a passthrough org-cite processor so [cite:@key] survives
          # the export verbatim; the Citations transform resolves it later.
          "--eval", """
          (org-cite-register-processor 'passthrough
            :export-citation
            (lambda (citation _style _backend _info)
              (let ((keys (mapcar (lambda (ref)
                                    (concat "@" (org-element-property :key ref)))
                                  (org-cite-get-references citation))))
                (format "[cite:%s]" (string-join keys ";")))))
          """,
          "--eval", "(setq org-cite-export-processors '((t passthrough)))",
          "--eval", ~s[(setq org-hugo-base-dir "#{output_dir}")],
          "--eval", ~s[(setq org-hugo-default-section-directory "#{section}")],
          "--visit", orgfile,
          "--funcall", "org-hugo-export-to-md"
        ],
        stderr_to_stdout: true
      )

    # Drop emacs boilerplate lines so logs only show interesting output.
    filtered =
      output
      |> String.split("\n")
      |> Enum.reject(&String.match?(&1, ~r/^Loading|^ad-handle|^For information/))
      |> Enum.join("\n")

    if filtered != "", do: Logger.info("emacs: #{filtered}")

    if exit_code == 0 do
      {:ok, exit_code}
    else
      {:error, {:emacs_exit, exit_code, filtered}}
    end
  rescue
    # Boundary rescue: e.g. ErlangError when the emacs binary is missing.
    e -> {:error, e}
  end

  @doc """
  Export all `.org` files found under `notes_dir`.

  Returns `{:ok, count}` where `count` is the number of successfully
  exported files, or `{:error, failures}` if any files failed.
  """
  @spec export_all(String.t(), String.t()) :: {:ok, non_neg_integer()} | {:error, list()}
  def export_all(notes_dir, output_dir) do
    org_files =
      Path.join(notes_dir, "**/*.org")
      |> Path.wildcard()

    if org_files == [] do
      Logger.warning("No .org files found in #{notes_dir}")
      {:ok, 0}
    else
      Logger.info("Exporting #{length(org_files)} org file(s) from #{notes_dir}")

      results =
        Enum.map(org_files, fn orgfile ->
          IO.puts(" exporting: #{orgfile}")
          {orgfile, export_file(orgfile, notes_dir, output_dir)}
        end)

      failures =
        Enum.filter(results, fn
          {_, {:ok, _}} -> false
          {_, {:error, _}} -> true
        end)

      if failures == [] do
        {:ok, length(results)}
      else
        {:error, failures}
      end
    end
  end

  @doc """
  Compute the expected `.md` path for a given `.org` file.

  Uses the same section-mapping logic as ox-hugo: the relative directory
  of the `.org` file within `notes_dir` becomes the section directory
  under `content_dir`.

  ## Examples

      iex> OrgGarden.Export.expected_md_path("/notes/bus/emt.org", "/notes", "/out/content")
      "/out/content/bus/emt.md"

      iex> OrgGarden.Export.expected_md_path("/notes/top-level.org", "/notes", "/out/content")
      "/out/content/top-level.md"
  """
  @spec expected_md_path(String.t(), String.t(), String.t()) :: String.t()
  def expected_md_path(orgfile, notes_dir, content_dir) do
    basename = Path.basename(orgfile, ".org") <> ".md"

    case section_for(orgfile, notes_dir) do
      "." -> Path.join(content_dir, basename)
      section -> Path.join([content_dir, section, basename])
    end
  end

  # The ox-hugo "section" of an org file: its directory relative to the
  # notes root ("." for top-level files). Shared by export_file/3 and
  # expected_md_path/3 so the two can never disagree (previously duplicated).
  defp section_for(orgfile, notes_dir) do
    orgfile
    |> Path.dirname()
    |> Path.relative_to(notes_dir)
  end
end

83
lib/org_garden/index.ex Normal file
View File

@@ -0,0 +1,83 @@
defmodule OrgGarden.Index do
  @moduledoc """
  Generates a fallback `index.md` in the content directory if none was
  exported from an `.org` file.

  The generated index lists all markdown pages alphabetically with links.
  """

  @doc """
  Generate `content_dir/index.md` if it does not already exist.

  If an `index.md` was already created by ox-hugo (from an `index.org`),
  it is left untouched.
  """
  @spec generate(String.t()) :: :ok
  def generate(content_dir) do
    index_path = Path.join(content_dir, "index.md")

    if not File.exists?(index_path) do
      IO.puts(" generating default index.md")

      listing =
        content_dir
        |> Path.join("**/*.md")
        |> Path.wildcard()
        |> Enum.map(&page_entry(&1, content_dir))
        |> Enum.sort_by(fn {_slug, title} -> title end)
        |> Enum.map_join("\n", fn {slug, title} -> "- [#{title}](#{slug})" end)

      File.write!(index_path, "---\ntitle: Index\n---\n" <> listing <> "\n")
    end

    :ok
  end

  @doc """
  Regenerate the index by removing any previously generated one first.

  Only removes the index if it was generated by us (contains `title: Index`).
  User-exported index files (from `index.org`) are left untouched.
  """
  @spec regenerate(String.t()) :: :ok
  def regenerate(content_dir) do
    index_path = Path.join(content_dir, "index.md")

    with true <- File.exists?(index_path),
         true <- generated_index?(File.read!(index_path)) do
      File.rm!(index_path)
    end

    generate(content_dir)
  end

  # One index entry for a page: {slug, title}. The slug is the extension-less
  # path relative to content_dir; the title comes from TOML frontmatter
  # (`title = "..."`) and falls back to the slug when absent.
  defp page_entry(path, content_dir) do
    slug = path |> Path.relative_to(content_dir) |> Path.rootname()

    title =
      case Regex.run(~r/^title\s*=\s*"(.+)"/m, File.read!(path)) do
        [_, found] -> found
        _ -> slug
      end

    {slug, title}
  end

  # Our generated index uses "title: Index" in YAML frontmatter.
  # ox-hugo uses TOML frontmatter (title = "..."), so this won't
  # match user-exported files.
  defp generated_index?(content), do: String.contains?(content, "title: Index")
end

118
lib/org_garden/quartz.ex Normal file
View File

@@ -0,0 +1,118 @@
defmodule OrgGarden.Quartz do
  @moduledoc """
  Manages Quartz Node.js process as an Erlang Port.

  Required environment:

  - QUARTZ_PATH: path to quartz repo (with node_modules)
  - NODE_PATH: path to node executable (default: "node")

  Starts Quartz in serve mode (`npx quartz build --serve`) and forwards
  all stdout/stderr output to the Logger with a `[quartz]` prefix.

  If Quartz exits, this GenServer will stop, which triggers the supervisor
  to restart the entire supervision tree (strategy: :one_for_all).
  """
  use GenServer
  require Logger

  # State: the open port plus the launch parameters, kept for introspection.
  defstruct [:port, :quartz_path, :content_dir, :http_port, :ws_port]

  # -------------------------------------------------------------------
  # Client API
  # -------------------------------------------------------------------

  @doc """
  Start the Quartz process as a linked GenServer.

  ## Options

  * `:content_dir` — directory where markdown files are located (required)
  * `:port` — HTTP server port (default: 8080)
  * `:ws_port` — WebSocket hot reload port (default: 3001)
  """
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  # -------------------------------------------------------------------
  # GenServer callbacks
  # -------------------------------------------------------------------

  # Validates the environment, then spawns Quartz as a port. Raising here
  # (missing QUARTZ_PATH / CLI script) fails supervisor startup loudly.
  @impl true
  def init(opts) do
    quartz_path =
      System.get_env("QUARTZ_PATH") ||
        raise "QUARTZ_PATH environment variable not set"

    # NOTE(review): NODE_PATH is repurposed here as the node *binary* path,
    # not Node's usual module-search-path meaning.
    node_path = System.get_env("NODE_PATH", "node")
    content_dir = Keyword.fetch!(opts, :content_dir)
    http_port = Keyword.get(opts, :port, 8080)
    ws_port = Keyword.get(opts, :ws_port, 3001)
    cli_path = Path.join(quartz_path, "quartz/bootstrap-cli.mjs")

    unless File.exists?(cli_path) do
      raise "Quartz CLI not found at #{cli_path}. Check QUARTZ_PATH."
    end

    args = [
      cli_path,
      "build",
      "--serve",
      "--directory", content_dir,
      "--port", to_string(http_port),
      "--wsPort", to_string(ws_port)
    ]

    Logger.info("[quartz] Starting: #{node_path} #{Enum.join(args, " ")}")
    Logger.info("[quartz] Working directory: #{quartz_path}")

    # :exit_status delivers {port, {:exit_status, n}} when node dies, so we
    # can stop and let the :one_for_all supervisor restart everything.
    port =
      Port.open({:spawn_executable, node_path}, [
        :binary,
        :exit_status,
        :stderr_to_stdout,
        args: args,
        cd: quartz_path,
        env: [{~c"NODE_NO_WARNINGS", ~c"1"}]
      ])

    state = %__MODULE__{
      port: port,
      quartz_path: quartz_path,
      content_dir: content_dir,
      http_port: http_port,
      ws_port: ws_port
    }

    {:ok, state}
  end

  # Forward each line of Quartz output to the Logger.
  @impl true
  def handle_info({port, {:data, data}}, %{port: port} = state) do
    data
    |> String.split("\n", trim: true)
    |> Enum.each(&Logger.info("[quartz] #{&1}"))

    {:noreply, state}
  end

  # Node process died: stop abnormally so the supervisor restarts the tree.
  @impl true
  def handle_info({port, {:exit_status, status}}, %{port: port} = state) do
    Logger.error("[quartz] Process exited with status #{status}")
    {:stop, {:quartz_exit, status}, state}
  end

  @impl true
  def terminate(_reason, %{port: port}) when is_port(port) do
    # Attempt graceful shutdown
    Port.close(port)
    :ok
  rescue
    # Port.close/1 raises ArgumentError if the port is already closed;
    # termination must still succeed.
    _ -> :ok
  end

  def terminate(_reason, _state), do: :ok
end

View File

@@ -0,0 +1,178 @@
defmodule OrgGarden.Resolvers.BibTeX do
  @moduledoc """
  Resolves citation keys from a local BibTeX (.bib) file.

  Configured via the `BIBTEX_FILE` environment variable, or passed directly
  as `opts.bibtex_file`. The file is parsed once at init time and the
  resulting entry map is reused for all lookups.

  Supports extracting: author last names, year, title, DOI, URL.

  BibTeX entry format parsed:

      @type{citationkey,
        author = {Last, First and Last2, First2},
        year = {2021},
        title = {Some Title},
        doi = {10.xxxx/yyyy},
        url = {https://example.com},
      }

  Returns `{:ok, %{label: "Author, Year", url: "..."}}` or `:error`.
  """
  require Logger

  # ------------------------------------------------------------------
  # Public API
  # ------------------------------------------------------------------

  @doc """
  Parse a .bib file and return a map of `%{citation_key => entry_map}`.

  Returns `{:ok, entries}` or `{:error, reason}` (the `File.read/1` posix
  reason, e.g. `:enoent`).
  """
  @spec load(String.t()) :: {:ok, map()} | {:error, term()}
  def load(path) do
    case File.read(path) do
      {:ok, content} ->
        entries = parse_entries(content)
        Logger.info("BibTeX: loaded #{map_size(entries)} entries from #{path}")
        {:ok, entries}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Resolve a citation key from pre-loaded BibTeX entries.

  Returns `{:ok, %{label: label, url: url_or_nil}}` when `key` is present,
  `:error` otherwise.
  """
  @spec resolve(String.t(), map()) :: {:ok, map()} | :error
  def resolve(key, entries) do
    case Map.fetch(entries, key) do
      {:ok, entry} ->
        label = build_label(entry)
        url = build_url(entry)
        {:ok, %{label: label, url: url}}

      :error ->
        :error
    end
  end

  # ------------------------------------------------------------------
  # Parsing
  # ------------------------------------------------------------------

  # Matches the header of an entry: @type{key,
  # FIX: the previous comment claimed entries were parsed "character by
  # character"; in fact the content is split on a zero-width lookahead at
  # each "@type{" boundary and each chunk's fields are extracted with
  # @field_regex below (which tolerates one level of nested braces).
  @entry_header ~r/@\w+\s*\{\s*([^,\s]+)\s*,/

  defp parse_entries(content) do
    # Split on "@" boundaries, then parse each chunk
    content
    |> String.split(~r/(?=@\w+\s*\{)/, trim: true)
    |> Enum.reduce(%{}, fn chunk, acc ->
      case Regex.run(@entry_header, chunk) do
        [_, key] ->
          fields = parse_fields(chunk)
          Map.put(acc, String.trim(key), fields)

        _ ->
          # Preamble / comment chunk without a valid header: skip it.
          acc
      end
    end)
  end

  # Extract key = {value} or key = "value" pairs from an entry block.
  # Handles simple single-depth braces; good enough for common fields.
  @field_regex ~r/(\w+)\s*=\s*(?:\{([^{}]*(?:\{[^{}]*\}[^{}]*)*)\}|"([^"]*)")/

  defp parse_fields(chunk) do
    @field_regex
    |> Regex.scan(chunk)
    |> Enum.reduce(%{}, fn match, acc ->
      field_name = Enum.at(match, 1) |> String.downcase()

      # Value is in capture group 2 (braces) or 3 (quotes)
      value =
        case {Enum.at(match, 2, ""), Enum.at(match, 3, "")} do
          {"", q} -> q
          {b, _} -> b
        end

      Map.put(acc, field_name, String.trim(value))
    end)
  end

  # ------------------------------------------------------------------
  # Label & URL building
  # ------------------------------------------------------------------

  # "Author, Year" label; author part falls back to "Unknown", year may be
  # taken from either the "year" or "date" field.
  defp build_label(entry) do
    author_part =
      entry
      |> Map.get("author", "")
      |> parse_authors()
      |> format_authors()

    year = Map.get(entry, "year", Map.get(entry, "date", ""))
    year = extract_year(year)
    if year && author_part != "", do: "#{author_part}, #{year}", else: author_part
  end

  defp parse_authors(""), do: []

  defp parse_authors(author_str) do
    author_str
    |> String.split(" and ", trim: true)
    |> Enum.map(&extract_last_name/1)
    |> Enum.reject(&(&1 == ""))
  end

  # Handles "Last, First" and "First Last" formats
  defp extract_last_name(name) do
    name = String.trim(name)

    cond do
      String.contains?(name, ",") ->
        name |> String.split(",") |> List.first() |> String.trim()

      String.contains?(name, " ") ->
        name |> String.split(" ") |> List.last() |> String.trim()

      true ->
        name
    end
  end

  defp format_authors([]), do: "Unknown"
  defp format_authors([single]), do: single
  # Two or more authors: "First & Last" (intermediate authors are elided).
  defp format_authors([first | rest]), do: "#{first} & #{List.last(rest)}"

  defp extract_year(""), do: nil

  defp extract_year(str) do
    # First standalone 4-digit run, so "2021-03-01" dates also work.
    case Regex.run(~r/\b(\d{4})\b/, str) do
      [_, year] -> year
      _ -> nil
    end
  end

  # Prefer a DOI link; fall back to the raw url field; nil when neither.
  defp build_url(entry) do
    cond do
      doi = Map.get(entry, "doi", "") |> non_empty() ->
        "https://doi.org/#{doi}"

      url = Map.get(entry, "url", "") |> non_empty() ->
        url

      true ->
        nil
    end
  end

  defp non_empty(""), do: nil
  defp non_empty(v), do: v
end

View File

@@ -0,0 +1,18 @@
defmodule OrgGarden.Resolvers.DOI do
  @moduledoc """
  Last-resort citation resolver — always succeeds.

  Keys that look like DOIs (prefix `"10."`) become `https://doi.org/...`
  links; anything else is returned as a plain label with no URL.
  """

  # Pattern-match the DOI prefix directly in the function head instead of
  # branching on String.starts_with?/2.
  @spec resolve(String.t()) :: {:ok, map()}
  def resolve("10." <> _rest = key) do
    {:ok, %{label: key, url: "https://doi.org/" <> key}}
  end

  def resolve(key) do
    {:ok, %{label: key, url: nil}}
  end
end

View File

@@ -0,0 +1,182 @@
defmodule OrgGarden.Resolvers.Zotero do
  @moduledoc """
  Resolves citation keys via Zotero Better BibTeX's JSON-RPC API.

  Requires Zotero to be running with the Better BibTeX plugin installed.
  Default endpoint: http://localhost:23119/better-bibtex/json-rpc

  Resolution strategy:

  1. Search by citation key via `item.search`
  2. If found, try to get a PDF attachment link (zotero://open-pdf/...)
  3. Fall back to zotero://select/items/@key

  Returns `{:ok, %{label: "Author, Year", url: "zotero://..."}}` or `:error`.
  """
  require Logger

  @rpc_path "/better-bibtex/json-rpc"

  @doc """
  Attempt to resolve `key` against a running Zotero instance.

  `base_url` defaults to `http://localhost:23119`. Any transport failure,
  RPC error, or unexpected response shape yields `:error`.
  """
  @spec resolve(String.t(), String.t()) :: {:ok, map()} | :error
  def resolve(key, base_url \\ "http://localhost:23119") do
    url = base_url <> @rpc_path

    payload =
      Jason.encode!(%{
        jsonrpc: "2.0",
        method: "item.search",
        params: [
          [["citationKey", "is", key]]
        ],
        id: 1
      })

    case Req.post(url,
           body: payload,
           headers: [{"content-type", "application/json"}],
           receive_timeout: 5_000,
           finch: OrgGarden.Finch
         ) do
      {:ok, %{status: 200, body: body}} ->
        parse_response(body, key, base_url)

      {:ok, %{status: status}} ->
        Logger.debug("Zotero: unexpected HTTP #{status} for key #{key}")
        :error

      {:error, reason} ->
        Logger.debug("Zotero: connection failed for key #{key}: #{inspect(reason)}")
        :error

      other ->
        Logger.debug("Zotero: unexpected result for key #{key}: #{inspect(other)}")
        :error
    end
  rescue
    # Boundary rescue: resolution is best-effort; never crash the pipeline.
    e ->
      Logger.debug("Zotero: exception resolving key #{key}: #{inspect(e)}")
      :error
  end

  # ------------------------------------------------------------------
  # Private helpers
  # ------------------------------------------------------------------

  defp parse_response(%{"result" => [item | _]}, key, base_url) do
    label = build_label(item)
    url = resolve_url(item, key, base_url)
    {:ok, %{label: label, url: url}}
  end

  defp parse_response(%{"result" => []}, key, _base_url) do
    Logger.debug("Zotero: no item found for key #{key}")
    :error
  end

  defp parse_response(%{"error" => err}, key, _base_url) do
    Logger.debug("Zotero: RPC error for key #{key}: #{inspect(err)}")
    :error
  end

  defp parse_response(body, key, _base_url) do
    Logger.debug("Zotero: unexpected response shape for key #{key}: #{inspect(body)}")
    :error
  end

  # Look up the first PDF attachment's "open" URI; nil on any failure.
  defp fetch_pdf_url(key, base_url) do
    payload =
      Jason.encode!(%{
        jsonrpc: "2.0",
        method: "item.attachments",
        params: [key],
        id: 2
      })

    case Req.post(base_url <> @rpc_path,
           body: payload,
           headers: [{"content-type", "application/json"}],
           receive_timeout: 5_000,
           finch: OrgGarden.Finch
         ) do
      {:ok, %{status: 200, body: %{"result" => attachments}}} when is_list(attachments) ->
        attachments
        |> Enum.find_value(fn att ->
          open = Map.get(att, "open", "")
          path = Map.get(att, "path", "")

          # FIX: an attachment with a .pdf path but an empty "open" field
          # previously returned "" here — and "" is truthy both for
          # Enum.find_value/2 and for the `if pdf_url` check in
          # resolve_url/3, so callers got an empty URL instead of the
          # zotero://select fallback. Normalize "" to nil so the search
          # continues and the fallback applies.
          if String.ends_with?(path, ".pdf"), do: non_empty(open), else: nil
        end)

      _ ->
        nil
    end
  rescue
    _ -> nil
  end

  # CSL-JSON format: authors are under "author" with "family"/"given" keys.
  # Year is under "issued" -> "date-parts" -> [[year, month, day]].
  defp build_label(item) do
    authors = Map.get(item, "author", [])
    year = extract_year(item)

    author_part =
      case authors do
        [] ->
          "Unknown"

        [single] ->
          Map.get(single, "family", Map.get(single, "literal", "Unknown"))

        [first | rest] ->
          first_name = Map.get(first, "family", Map.get(first, "literal", "Unknown"))

          last_name =
            rest
            |> List.last()
            |> then(&Map.get(&1, "family", Map.get(&1, "literal", "Unknown")))

          "#{first_name} & #{last_name}"
      end

    if year, do: "#{author_part}, #{year}", else: author_part
  end

  # "issued": {"date-parts": [["2021", 2, 3]]}
  defp extract_year(item) do
    case get_in(item, ["issued", "date-parts"]) do
      [[year | _] | _] -> to_string(year)
      _ -> nil
    end
  end

  defp resolve_url(item, key, base_url) do
    # Prefer zotero://open-pdf/... for items with a PDF attachment.
    # Fall back to zotero://select/library/items/KEY to open the item in Zotero.
    # The "id" field is a URI like "http://zotero.org/users/123/items/ABCD1234".
    pdf_url = fetch_pdf_url(key, base_url)

    if pdf_url do
      pdf_url
    else
      item_key =
        item
        |> Map.get("id", "")
        |> String.split("/")
        |> List.last()
        |> non_empty()

      if item_key do
        "zotero://select/library/items/#{item_key}"
      else
        "zotero://select/items/@#{key}"
      end
    end
  end

  defp non_empty(nil), do: nil
  defp non_empty(""), do: nil
  defp non_empty(v), do: v
end

View File

@@ -0,0 +1,40 @@
defmodule OrgGarden.Supervisor do
  @moduledoc """
  Supervises development server components.

  Strategy: `:one_for_all` — if either child fails, restart both to
  ensure consistent state.

  Children:

  1. `OrgGarden.Watcher` — watches .org files for changes
  2. `OrgGarden.Quartz` — runs Quartz Node.js server

  ## Usage

      OrgGarden.Supervisor.start_link(
        notes_dir: "/path/to/notes",
        output_dir: "/path/to/output",
        content_dir: "/path/to/output/content",
        pipeline_opts: %{zotero_url: "...", ...},
        transforms: [OrgGarden.Transforms.Citations],
        port: 8080,
        ws_port: 3001
      )
  """

  use Supervisor

  @doc "Start the supervisor, registered under the module name."
  def start_link(opts) do
    Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @impl true
  def init(opts) do
    # Each child only receives the subset of options it understands.
    watcher_opts =
      Keyword.take(opts, [:notes_dir, :output_dir, :content_dir, :pipeline_opts, :transforms])

    quartz_opts = Keyword.take(opts, [:content_dir, :port, :ws_port])

    children = [
      {OrgGarden.Watcher, watcher_opts},
      {OrgGarden.Quartz, quartz_opts}
    ]

    Supervisor.init(children, strategy: :one_for_all)
  end
end

View File

@@ -0,0 +1,48 @@
defmodule OrgGarden.Transform do
  @moduledoc """
  Behaviour that all markdown transform modules must implement.

  ## Callbacks

  - `init/1` — called once before processing; returns transform-specific state.
    The default implementation (via `use OrgGarden.Transform`) returns the
    opts map unchanged.
  - `apply/3` — called per .md file; returns the (possibly modified) content.
  - `teardown/1` — optional cleanup after all files are processed.

  ## Example

      defmodule MyTransform do
        @behaviour OrgGarden.Transform

        @impl true
        def init(opts), do: %{some_state: opts[:value]}

        @impl true
        def apply(content, state, _opts) do
          String.replace(content, "foo", state.some_state)
        end
      end
  """

  @doc "One-time initialisation. Returns opaque state passed to apply/3."
  @callback init(opts :: map()) :: term()

  @doc "Transform file content. Returns the (possibly modified) content string."
  @callback apply(content :: String.t(), state :: term(), opts :: map()) :: String.t()

  @doc "Optional cleanup after all files are processed."
  @callback teardown(state :: term()) :: :ok

  @optional_callbacks teardown: 1

  defmacro __using__(_opts) do
    # Adopting modules get the behaviour plus a pass-through init/1 they
    # may override.
    quote do
      @behaviour OrgGarden.Transform

      @impl OrgGarden.Transform
      def init(opts), do: opts

      defoverridable init: 1
    end
  end
end

View File

@@ -0,0 +1,231 @@
defmodule OrgGarden.Transforms.Citations do
  @moduledoc """
  Markdown transform: resolves org-citar citation keys to hyperlinks.

  ## Recognised citation syntax (as output by ox-hugo from org-citar)

      [cite:@key]        → org-cite / citar standard (most common)
      [cite:@key1;@key2] → multiple citations
      cite:key           → older roam-style bare cite syntax

  ## Resolution chain (in order)

  1. Zotero (live instance via Better BibTeX JSON-RPC) — preferred
  2. BibTeX file (BIBTEX_FILE env var) — fallback
  3. DOI / bare key — always succeeds

  ## Modes (opts.citation_mode)

  * `:silent` — silently use DOI/bare-key fallback when Zotero+BibTeX fail
  * `:warn`   — (default) emit a Logger.warning for unresolved keys
  * `:strict` — raise on unresolved keys (aborts pipeline)

  ## Format

  Resolved citations are rendered as:

      [Label](url)  when a URL is available
      [Label]       when no URL could be determined (bare key fallback)

  Multiple semicolon-separated keys become space-separated links:

      [cite:@a;@b] → [Author A, 2020](url_a) [Author B, 2019](url_b)

  ## init/1 callback

  Loads the BibTeX file (if configured) once before processing begins,
  and probes Zotero availability, emitting warnings as appropriate.
  """

  @behaviour OrgGarden.Transform

  require Logger

  alias OrgGarden.Resolvers.Zotero
  alias OrgGarden.Resolvers.BibTeX
  alias OrgGarden.Resolvers.DOI

  # Match [cite:@key] and [cite:@key1;@key2;...] (org-cite / citar style)
  @cite_bracket_regex ~r/\[cite:(@[^\]]+)\]/

  # Match bare cite:key or cite:@key (older roam style, no brackets, optional @ prefix)
  @cite_bare_regex ~r/(?<![(\[])cite:@?([a-zA-Z0-9_:-]+)/

  # ------------------------------------------------------------------
  # OrgGarden callbacks
  # ------------------------------------------------------------------

  @doc """
  Called once before processing any files. Loads BibTeX, probes Zotero.
  Returns a state map passed to every `apply/3` call.
  """
  @impl OrgGarden.Transform
  def init(opts) do
    bibtex_entries = load_bibtex(opts)
    zotero_available = probe_zotero(opts)

    # With neither resolver available every citation degrades to a bare
    # key, so warn once up front rather than per key.
    if not zotero_available and bibtex_entries == %{} do
      Logger.warning(
        "Citations: neither Zotero nor a BibTeX file is available. " <>
          "All citations will fall back to bare-key rendering. " <>
          "Set BIBTEX_FILE env var or start Zotero with Better BibTeX to resolve citations."
      )
    end

    %{
      bibtex_entries: bibtex_entries,
      zotero_available: zotero_available,
      zotero_url: Map.get(opts, :zotero_url, "http://localhost:23119"),
      citation_mode: Map.get(opts, :citation_mode, :warn)
    }
  end

  @doc """
  Apply citation resolution to a single markdown file's content.
  """
  @impl OrgGarden.Transform
  def apply(content, state, _opts) do
    content
    |> resolve_bracket_citations(state)
    |> resolve_bare_citations(state)
  end

  # ------------------------------------------------------------------
  # Resolution passes
  # ------------------------------------------------------------------

  # Replace every [cite:@a;@b;...] occurrence with space-separated links.
  defp resolve_bracket_citations(content, state) do
    Regex.replace(@cite_bracket_regex, content, fn _full, keys_str ->
      keys_str
      |> String.split(";")
      |> Enum.map(&String.trim/1)
      # Strip at most one leading "@". Keys written without the prefix
      # (e.g. "[cite:@a;b]") previously crashed with a FunctionClauseError
      # because only the "@" <> key clause existed.
      |> Enum.map(fn
        "@" <> key -> key
        key -> key
      end)
      |> Enum.map(&resolve_key(&1, state))
      |> Enum.join(" ")
    end)
  end

  # Replace bare cite:key occurrences (roam style).
  defp resolve_bare_citations(content, state) do
    Regex.replace(@cite_bare_regex, content, fn _full, key ->
      resolve_key(key, state)
    end)
  end

  # ------------------------------------------------------------------
  # Single-key resolution chain
  # ------------------------------------------------------------------

  # Zotero → BibTeX → DOI/bare-key fallback, then render as markdown.
  defp resolve_key(key, state) do
    info =
      with :error <- try_zotero(key, state),
           :error <- try_bibtex(key, state) do
        handle_unresolved(key, state)
      else
        {:ok, citation_info} -> citation_info
      end

    format_result(info)
  end

  defp try_zotero(_key, %{zotero_available: false}), do: :error

  defp try_zotero(key, %{zotero_url: url}) do
    Zotero.resolve(key, url)
  end

  defp try_bibtex(_key, %{bibtex_entries: entries}) when map_size(entries) == 0, do: :error

  defp try_bibtex(key, %{bibtex_entries: entries}) do
    BibTeX.resolve(key, entries)
  end

  # Behaviour when both primary resolvers failed, per citation_mode.
  # DOI.resolve/1 always succeeds, so the {:ok, _} match is safe.
  defp handle_unresolved(key, %{citation_mode: mode}) do
    case mode do
      :strict ->
        raise "Citations: could not resolve citation key '#{key}' and mode is :strict"

      :warn ->
        Logger.warning("Citations: unresolved citation key '#{key}' — using bare-key fallback")
        {:ok, result} = DOI.resolve(key)
        result

      :silent ->
        {:ok, result} = DOI.resolve(key)
        result
    end
  end

  defp format_result(%{label: label, url: nil}), do: "[#{label}]"
  defp format_result(%{label: label, url: url}), do: "[#{label}](#{url})"

  # ------------------------------------------------------------------
  # Init helpers
  # ------------------------------------------------------------------

  # Load BibTeX entries from opts.bibtex_file or $BIBTEX_FILE.
  # Returns %{} (resolver disabled) on any failure.
  defp load_bibtex(opts) do
    path = Map.get(opts, :bibtex_file) || System.get_env("BIBTEX_FILE")

    cond do
      is_nil(path) ->
        Logger.debug("Citations: BIBTEX_FILE not set — BibTeX resolver disabled")
        %{}

      not File.exists?(path) ->
        Logger.warning("Citations: BIBTEX_FILE=#{path} does not exist — BibTeX resolver disabled")
        %{}

      true ->
        case BibTeX.load(path) do
          {:ok, entries} ->
            entries

          {:error, reason} ->
            Logger.warning("Citations: failed to load BibTeX file #{path}: #{inspect(reason)}")
            %{}
        end
    end
  end

  # Probe Zotero's Better BibTeX JSON-RPC endpoint; returns true/false.
  defp probe_zotero(opts) do
    url = Map.get(opts, :zotero_url, "http://localhost:23119")

    # Use a no-op JSON-RPC call to probe availability.
    # /better-bibtex/cayw is intentionally avoided — it blocks waiting for
    # user interaction and never returns without a pick.
    payload =
      Jason.encode!(%{
        jsonrpc: "2.0",
        method: "item.search",
        params: [[[]]],
        id: 0
      })

    result =
      try do
        Req.post(url <> "/better-bibtex/json-rpc",
          body: payload,
          headers: [{"content-type", "application/json"}],
          receive_timeout: 3_000,
          finch: OrgGarden.Finch
        )
      rescue
        e -> {:error, e}
      end

    case result do
      {:ok, %{status: 200}} ->
        Logger.info("Citations: Zotero Better BibTeX is available at #{url}")
        true

      {:ok, %{status: status}} ->
        # " — " separator added: the two fragments previously concatenated
        # into "...at http://localhost:23119is Better BibTeX installed?"
        Logger.warning(
          "Citations: Zotero responded HTTP #{status} at #{url} — " <>
            "is Better BibTeX installed?"
        )

        false

      _ ->
        Logger.warning(
          "Citations: Zotero not reachable at #{url} — " <>
            "start Zotero with Better BibTeX or set BIBTEX_FILE as fallback"
        )

        false
    end
  end
end

236
lib/org_garden/watcher.ex Normal file
View File

@@ -0,0 +1,236 @@
defmodule OrgGarden.Watcher do
  @moduledoc """
  File-watching GenServer that detects `.org` file changes and triggers
  incremental export + transform for only the affected files.

  Uses the `file_system` package (inotify on Linux, fsevents on macOS)
  to watch the notes directory. Events are debounced per-file (500ms)
  to coalesce rapid writes (e.g., Emacs auto-save).

  ## Lifecycle

  Started dynamically by `OrgGarden.CLI` after the initial batch export.
  Transforms are initialized once at startup and reused across all
  incremental rebuilds to avoid repeated Zotero probes and BibTeX loads.

  ## Usage

      OrgGarden.Watcher.start_link(
        notes_dir: "/path/to/notes",
        output_dir: "/path/to/output",
        content_dir: "/path/to/output/content",
        pipeline_opts: %{zotero_url: "...", ...},
        transforms: [OrgGarden.Transforms.Citations]
      )
  """

  use GenServer
  require Logger

  # Per-file debounce window to coalesce rapid successive writes.
  @debounce_ms 500

  # -------------------------------------------------------------------
  # Client API
  # -------------------------------------------------------------------

  @doc """
  Start the watcher as a linked process.

  ## Options

    * `:notes_dir` — directory to watch for `.org` changes (required)
    * `:output_dir` — ox-hugo base dir (required)
    * `:content_dir` — directory where `.md` files are written (required)
    * `:pipeline_opts` — opts map passed to transforms (required)
    * `:transforms` — list of transform modules (default: `[OrgGarden.Transforms.Citations]`)
  """
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  # -------------------------------------------------------------------
  # GenServer callbacks
  # -------------------------------------------------------------------

  @impl true
  def init(opts) do
    notes_dir = Keyword.fetch!(opts, :notes_dir)
    output_dir = Keyword.fetch!(opts, :output_dir)
    content_dir = Keyword.fetch!(opts, :content_dir)
    pipeline_opts = Keyword.fetch!(opts, :pipeline_opts)
    transforms = Keyword.get(opts, :transforms, [OrgGarden.Transforms.Citations])

    # Trap exits so terminate/2 reliably runs on supervisor shutdown.
    # Without this, OTP does not call terminate/2 for exit signals and
    # teardown_transforms/1 below would be silently skipped, leaking any
    # transform-held resources. Linked-process crashes now arrive as
    # {:EXIT, ...} messages — see the handle_info clause below.
    Process.flag(:trap_exit, true)

    # Initialize transforms once — reused for all incremental rebuilds
    initialized_transforms = OrgGarden.init_transforms(transforms, pipeline_opts)

    # Start the file system watcher
    {:ok, watcher_pid} = FileSystem.start_link(dirs: [notes_dir], recursive: true)
    FileSystem.subscribe(watcher_pid)
    Logger.info("Watcher: monitoring #{notes_dir} for .org changes")

    {:ok,
     %{
       notes_dir: notes_dir,
       output_dir: output_dir,
       content_dir: content_dir,
       pipeline_opts: pipeline_opts,
       watcher_pid: watcher_pid,
       initialized_transforms: initialized_transforms,
       pending: %{}
     }}
  end

  @impl true
  def handle_info({:file_event, _pid, {path, events}}, state) do
    path = to_string(path)

    if org_file?(path) and not temporary_file?(path) do
      event_type = classify_events(events)
      Logger.debug("Watcher: #{event_type} event for #{path}")
      {:noreply, schedule_debounce(path, event_type, state)}
    else
      {:noreply, state}
    end
  end

  @impl true
  def handle_info({:file_event, _pid, :stop}, state) do
    Logger.warning("Watcher: file system monitor stopped unexpectedly")
    {:stop, :watcher_stopped, state}
  end

  @impl true
  def handle_info({:debounced, path, event_type}, state) do
    # Timer fired: drop it from pending and process the coalesced event.
    state = %{state | pending: Map.delete(state.pending, path)}

    case event_type do
      :deleted ->
        handle_delete(path, state)

      _created_or_modified ->
        handle_change(path, state)
    end

    {:noreply, state}
  end

  @impl true
  def handle_info({:EXIT, _pid, reason}, state) do
    # Because we trap exits, a crash of a linked process (e.g. the
    # FileSystem watcher) arrives as a message instead of killing us.
    # Propagate it as a stop so the supervisor restarts us — preserving
    # the pre-trap_exit crash semantics while still running terminate/2.
    {:stop, reason, state}
  end

  @impl true
  def terminate(_reason, state) do
    # Release transform resources (runs because init/1 traps exits).
    OrgGarden.teardown_transforms(state.initialized_transforms)
    :ok
  end

  # -------------------------------------------------------------------
  # Event handling
  # -------------------------------------------------------------------

  # A .org file was created or modified: export it, run transforms on the
  # resulting .md, and regenerate the index.
  defp handle_change(orgfile, state) do
    %{
      notes_dir: notes_dir,
      output_dir: output_dir,
      content_dir: content_dir,
      pipeline_opts: pipeline_opts,
      initialized_transforms: initialized_transforms
    } = state

    md_path = OrgGarden.Export.expected_md_path(orgfile, notes_dir, content_dir)
    IO.puts("==> Changed: #{Path.relative_to(orgfile, notes_dir)}")

    case OrgGarden.Export.export_file(orgfile, notes_dir, output_dir) do
      {:ok, _} ->
        IO.puts("    exported: #{Path.relative_to(md_path, content_dir)}")

        {:ok, stats} = OrgGarden.run_on_files_with([md_path], initialized_transforms, pipeline_opts)

        Enum.each(stats, fn {mod, count} ->
          if count > 0, do: IO.puts("    #{inspect(mod)}: #{count} file(s) modified")
        end)

        regenerate_index(content_dir)
        IO.puts("==> Done")

      {:error, reason} ->
        Logger.error("Watcher: export failed for #{orgfile}: #{inspect(reason)}")
    end
  end

  # A .org file was deleted: remove the corresponding .md (if any), prune
  # now-empty directories, and regenerate the index.
  defp handle_delete(orgfile, state) do
    %{notes_dir: notes_dir, content_dir: content_dir} = state
    md_path = OrgGarden.Export.expected_md_path(orgfile, notes_dir, content_dir)
    IO.puts("==> Deleted: #{Path.relative_to(orgfile, notes_dir)}")

    if File.exists?(md_path) do
      File.rm!(md_path)
      IO.puts("    removed: #{Path.relative_to(md_path, content_dir)}")
      # Clean up empty parent directories left behind
      cleanup_empty_dirs(Path.dirname(md_path), content_dir)
    end

    regenerate_index(content_dir)
    IO.puts("==> Done")
  end

  # -------------------------------------------------------------------
  # Index generation
  # -------------------------------------------------------------------

  defp regenerate_index(content_dir) do
    OrgGarden.Index.regenerate(content_dir)
  end

  # -------------------------------------------------------------------
  # Helpers
  # -------------------------------------------------------------------

  # (Re)arm the per-file debounce timer, cancelling any outstanding one.
  defp schedule_debounce(path, event_type, state) do
    # Cancel any existing timer for this path
    case Map.get(state.pending, path) do
      nil -> :ok
      old_ref -> Process.cancel_timer(old_ref)
    end

    ref = Process.send_after(self(), {:debounced, path, event_type}, @debounce_ms)
    %{state | pending: Map.put(state.pending, path, ref)}
  end

  defp org_file?(path), do: String.ends_with?(path, ".org")

  defp temporary_file?(path) do
    basename = Path.basename(path)
    # Emacs creates temp files like .#file.org and #file.org#
    String.starts_with?(basename, ".#") or
      (String.starts_with?(basename, "#") and String.ends_with?(basename, "#"))
  end

  # Collapse the backend-specific event list into one of
  # :deleted | :created | :modified.
  defp classify_events(events) do
    cond do
      :removed in events or :deleted in events -> :deleted
      :created in events -> :created
      :modified in events or :changed in events -> :modified
      # renamed can mean created or deleted depending on context;
      # if the file exists it was renamed into the watched dir
      :renamed in events -> :modified
      true -> :modified
    end
  end

  # Remove empty directories from `dir` upward, stopping at `stop_at`.
  defp cleanup_empty_dirs(dir, stop_at) do
    dir = Path.expand(dir)
    stop_at = Path.expand(stop_at)

    if dir != stop_at and File.dir?(dir) do
      case File.ls!(dir) do
        [] ->
          File.rmdir!(dir)
          cleanup_empty_dirs(Path.dirname(dir), stop_at)

        _ ->
          :ok
      end
    end
  end
end