Fix ox-hugo export with internal ID links and Quartz config

- Add org-id resolution for [[id:...]] links by building ID locations
  database once before parallel export
- Handle broken file links gracefully with org-export-with-broken-links
- Fix race condition in parallel exports by pre-building ID cache
- Fix Quartz config not being applied: cp was preserving nix store hash
  prefix in filename instead of using explicit destination filename
- Continue pipeline even when some exports fail, reporting failures
- Improve error handling and logging throughout export pipeline
This commit is contained in:
Ignacio Ballesteros
2026-02-22 19:55:17 +01:00
parent c7bd37bb95
commit 1fac31dc73
9 changed files with 169 additions and 80 deletions

View File

@@ -136,10 +136,13 @@ defmodule OrgGarden.CLI do
# Full batch export
wipe(content_dir)
export_all(notes_dir, output_dir)
export_result = export_all(notes_dir, output_dir)
run_pipeline(content_dir, pipeline_opts)
generate_index(content_dir)
# Track if we had export failures
had_export_failures = match?({:error, _}, export_result)
node_path = Config.get(:node_path, "node")
IO.puts("==> Building static site with Quartz...")
@@ -166,6 +169,11 @@ defmodule OrgGarden.CLI do
end
IO.puts("==> Build complete. Output: #{Path.join(output_dir, "public")}")
# Exit with error if there were export failures
if had_export_failures do
System.halt(1)
end
end
defp parse_build_args(argv) do
@@ -191,7 +199,7 @@ defmodule OrgGarden.CLI do
# Phase 1-4: full batch export
wipe(content_dir)
export_all(notes_dir, output_dir)
export_result = export_all(notes_dir, output_dir)
run_pipeline(content_dir, pipeline_opts)
generate_index(content_dir)
@@ -203,6 +211,12 @@ defmodule OrgGarden.CLI do
IO.puts("==> Done. #{md_count} markdown files in #{content_dir}")
# Exit with error if there were export failures (unless in watch mode)
case {export_result, watch?} do
{{:error, _}, false} -> System.halt(1)
_ -> :ok
end
# Phase 5: optional watch mode
if watch? do
IO.puts("==> Watching #{notes_dir} for .org changes... (Ctrl+C to stop)")
@@ -305,20 +319,22 @@ defmodule OrgGarden.CLI do
IO.puts("==> Exporting org files from #{notes_dir}")
case OrgGarden.Export.export_all(notes_dir, output_dir) do
{:ok, 0} ->
{:ok, 0, []} ->
IO.puts(" no .org files found")
:ok
{:ok, count} ->
{:ok, count, []} ->
IO.puts(" exported #{count} file(s)")
:ok
{:error, failures} ->
IO.puts(:stderr, "\nFailed to export #{length(failures)} file(s):")
{:ok, count, failures} ->
IO.puts(" exported #{count} file(s), #{length(failures)} failed")
Enum.each(failures, fn {f, {:error, reason}} ->
IO.puts(:stderr, " #{f}: #{inspect(reason)}")
Enum.each(failures, fn {f, {:error, {:emacs_exit, code}}} ->
IO.puts(:stderr, " failed: #{f} (emacs exit code #{code})")
end)
System.halt(1)
{:error, length(failures)}
end
end

View File

@@ -11,17 +11,21 @@ defmodule OrgGarden.Export do
@doc """
Export a single `.org` file to Markdown via `emacs --batch` + ox-hugo.
Accepts an optional `id_locations_file` path for pre-built org-id database.
If not provided, builds the ID database inline (slower for batch exports).
Returns `{:ok, 0}` when emacs exits successfully, `{:error, {:emacs_exit, code}}`
when emacs exits with a nonzero code, or `{:error, exception}` if the command could not be executed.
"""
@spec export_file(String.t(), String.t(), String.t()) :: {:ok, non_neg_integer()} | {:error, term()}
def export_file(orgfile, notes_dir, output_dir) do
@spec export_file(String.t(), String.t(), String.t(), String.t() | nil) ::
{:ok, non_neg_integer()} | {:error, term()}
def export_file(orgfile, notes_dir, output_dir, id_locations_file \\ nil) do
OrgGarden.Telemetry.span_export(orgfile, fn ->
do_export_file(orgfile, notes_dir, output_dir)
do_export_file(orgfile, notes_dir, output_dir, id_locations_file)
end)
end
defp do_export_file(orgfile, notes_dir, output_dir) do
defp do_export_file(orgfile, notes_dir, output_dir, id_locations_file) do
section =
orgfile
|> Path.dirname()
@@ -30,42 +34,60 @@ defmodule OrgGarden.Export do
# ox-hugo requires static/ to exist for image asset copying
File.mkdir_p!(Path.join(output_dir, "static"))
# Build the org-id setup commands based on whether we have a pre-built file
id_setup_args =
if id_locations_file do
# Use pre-built ID locations file (faster for parallel exports)
[
"--eval", ~s[(setq org-id-locations-file "#{id_locations_file}")],
"--eval", "(org-id-locations-load)"
]
else
# Build ID locations inline (for single file exports)
[
"--eval", ~s[(setq org-id-extra-files (directory-files-recursively "#{notes_dir}" "\\\\.org$"))],
"--eval", "(org-id-update-id-locations)"
]
end
{output, exit_code} =
System.cmd(
"emacs",
[
"--batch",
"--eval", "(require 'ox-hugo)",
"--eval", """
(org-cite-register-processor 'passthrough
:export-citation
(lambda (citation _style _backend _info)
(let ((keys (mapcar (lambda (ref)
(concat "@" (org-element-property :key ref)))
(org-cite-get-references citation))))
(format "[cite:%s]" (string-join keys ";")))))
""",
"--eval", "(setq org-cite-export-processors '((t passthrough)))",
"--eval", ~s[(setq org-hugo-base-dir "#{output_dir}")],
"--eval", ~s[(setq org-hugo-default-section-directory "#{section}")],
"--visit", orgfile,
"--funcall", "org-hugo-export-to-md"
],
"--eval", "(require 'ox-hugo)"
] ++
id_setup_args ++
[
# Allow export to proceed even if some links cannot be resolved
"--eval", "(setq org-export-with-broken-links 'mark)",
# Prevent errors when file links point to non-existent files/headlines
"--eval", "(advice-add 'org-link-search :around (lambda (orig-fn &rest args) (condition-case nil (apply orig-fn args) (error nil))))",
"--eval", """
(org-cite-register-processor 'passthrough
:export-citation
(lambda (citation _style _backend _info)
(let ((keys (mapcar (lambda (ref)
(concat "@" (org-element-property :key ref)))
(org-cite-get-references citation))))
(format "[cite:%s]" (string-join keys ";")))))
""",
"--eval", "(setq org-cite-export-processors '((t passthrough)))",
"--eval", ~s[(setq org-hugo-base-dir "#{output_dir}")],
"--eval", ~s[(setq org-hugo-default-section-directory "#{section}")],
"--visit", orgfile,
"--funcall", "org-hugo-export-to-md"
],
stderr_to_stdout: true
)
filtered =
output
|> String.split("\n")
|> Enum.reject(&String.match?(&1, ~r/^Loading|^ad-handle|^For information/))
|> Enum.join("\n")
if filtered != "", do: Logger.info("emacs: #{filtered}")
# Log raw emacs output at debug level for troubleshooting
if output != "", do: Logger.debug("emacs output:\n#{output}")
if exit_code == 0 do
{:ok, exit_code}
else
{:error, {:emacs_exit, exit_code, filtered}}
{:error, {:emacs_exit, exit_code}}
end
rescue
e -> {:error, e}
@@ -80,10 +102,12 @@ defmodule OrgGarden.Export do
can be configured via the `:export_concurrency` application config or
the `EXPORT_CONCURRENCY` environment variable. Defaults to #{@default_max_concurrency}.
Returns `{:ok, count}` where `count` is the number of successfully
exported files, or `{:error, failures}` if any files failed.
Returns `{:ok, success_count, failures}` where `success_count` is the number
of successfully exported files and `failures` is a list of `{file, {:error, reason}}`
tuples for files that failed to export. The pipeline continues even if some
files fail.
"""
@spec export_all(String.t(), String.t()) :: {:ok, non_neg_integer()} | {:error, list()}
@spec export_all(String.t(), String.t()) :: {:ok, non_neg_integer(), list()}
def export_all(notes_dir, output_dir) do
org_files =
Path.join(notes_dir, "**/*.org")
@@ -91,17 +115,31 @@ defmodule OrgGarden.Export do
if org_files == [] do
Logger.warning("No .org files found in #{notes_dir}")
{:ok, 0}
{:ok, 0, []}
else
max_concurrency = get_concurrency()
Logger.info("Exporting #{length(org_files)} org file(s) from #{notes_dir} (concurrency: #{max_concurrency})")
# Build org-id locations database once before parallel export
id_locations_file = build_id_locations(notes_dir)
results =
org_files
|> Task.async_stream(
fn orgfile ->
Logger.info(" exporting: #{orgfile}")
{orgfile, export_file(orgfile, notes_dir, output_dir)}
result = export_file(orgfile, notes_dir, output_dir, id_locations_file)
# Log failure inline at warning level
case result do
{:ok, _} ->
:ok
{:error, {:emacs_exit, code}} ->
Logger.warning(" failed: #{Path.basename(orgfile)} (emacs exit code #{code})")
end
{orgfile, result}
end,
max_concurrency: max_concurrency,
timeout: :infinity,
@@ -109,17 +147,41 @@ defmodule OrgGarden.Export do
)
|> Enum.map(fn {:ok, result} -> result end)
failures =
Enum.filter(results, fn
{_, {:ok, _}} -> false
{_, {:error, _}} -> true
# Clean up temp file
if id_locations_file, do: File.rm(id_locations_file)
{successes, failures} =
Enum.split_with(results, fn
{_, {:ok, _}} -> true
{_, {:error, _}} -> false
end)
if failures == [] do
{:ok, length(results)}
else
{:error, failures}
end
{:ok, length(successes), failures}
end
end
# Builds a temporary org-id locations database by batch-scanning every .org
# file under `notes_dir` once, so the parallel export workers can all load the
# same pre-built database instead of each rescanning the notes tree (the
# race-condition fix described in the commit message).
#
# Returns the path to the generated locations file, or `nil` when the emacs
# batch run exits nonzero — callers treat `nil` as "build ID data inline".
defp build_id_locations(notes_dir) do
# Unique temp path per run so concurrent invocations never clobber each
# other's database file.
id_file = Path.join(System.tmp_dir!(), "org-id-locations-#{:erlang.unique_integer([:positive])}")
{_output, exit_code} =
System.cmd(
"emacs",
[
"--batch",
# Point org-id at our temp file before scanning.
"--eval", ~s[(setq org-id-locations-file "#{id_file}")],
# The Elixir literal "\\\\.org$" reaches elisp as the regexp "\\.org$".
"--eval", ~s[(setq org-id-extra-files (directory-files-recursively "#{notes_dir}" "\\\\.org$"))],
# Scan org-id-extra-files and persist the ID table to id_file.
"--eval", "(org-id-update-id-locations)"
],
stderr_to_stdout: true
)
if exit_code == 0 do
Logger.debug("Built org-id locations database: #{id_file}")
id_file
else
# Best-effort: log and return nil so the export pipeline still proceeds
# (each export then falls back to slower inline ID resolution).
Logger.warning("Failed to build org-id locations database")
nil
end
end

View File

@@ -87,18 +87,14 @@ defmodule OrgGarden.Server do
}
# Run initial pipeline synchronously
case run_initial_pipeline(state) do
:ok ->
# Start supervised components
case start_supervisor(state) do
{:ok, sup_pid} ->
Logger.info("Server started on http://localhost:#{http_port}")
Logger.info("Watching #{notes_dir} for changes")
{:ok, %{state | supervisor_pid: sup_pid}}
:ok = run_initial_pipeline(state)
{:error, reason} ->
{:stop, reason}
end
# Start supervised components
case start_supervisor(state) do
{:ok, sup_pid} ->
Logger.info("Server started on http://localhost:#{http_port}")
Logger.info("Watching #{notes_dir} for changes")
{:ok, %{state | supervisor_pid: sup_pid}}
{:error, reason} ->
{:stop, reason}
@@ -148,13 +144,13 @@ defmodule OrgGarden.Server do
# Export all org files
case OrgGarden.Export.export_all(notes_dir, output_dir) do
{:ok, 0} ->
{:ok, 0, []} ->
Logger.warning("No .org files found in #{notes_dir}")
# Still generate index (will be empty or have default content)
OrgGarden.Index.generate(content_dir)
:ok
{:ok, count} ->
{:ok, count, []} ->
Logger.info("Exported #{count} file(s)")
# Run transforms
@@ -168,9 +164,23 @@ defmodule OrgGarden.Server do
OrgGarden.Index.generate(content_dir)
:ok
{:error, failures} ->
Logger.error("Failed to export #{length(failures)} file(s)")
{:error, {:export_failed, failures}}
{:ok, count, failures} ->
Logger.warning("Exported #{count} file(s), #{length(failures)} failed")
Enum.each(failures, fn {f, {:error, {:emacs_exit, code}}} ->
Logger.warning(" failed: #{Path.basename(f)} (emacs exit code #{code})")
end)
# Continue with transforms and index anyway
{:ok, stats} = OrgGarden.run(content_dir, @transforms, pipeline_opts)
Enum.each(stats, fn {mod, c} ->
Logger.info("#{inspect(mod)}: #{c} file(s) modified")
end)
# Generate index
OrgGarden.Index.generate(content_dir)
:ok
end
end

View File

@@ -74,7 +74,7 @@ defmodule OrgGarden.Telemetry do
end
defp handle_event([:org_garden, :export, :exception], _measurements, metadata, _config) do
Logger.error("Export failed: #{metadata.file} - #{inspect(metadata.reason)}")
Logger.error("Export failed: #{metadata.file}")
end
defp handle_event([:org_garden, :watcher, :file_processed], _measurements, metadata, _config) do

View File

@@ -182,8 +182,8 @@ defmodule OrgGarden.Watcher do
duration = System.monotonic_time(:millisecond) - start_time
Logger.info(" done in #{duration}ms")
{:error, reason} ->
Logger.error("Watcher: export failed for #{orgfile}: #{inspect(reason)}")
{:error, {:emacs_exit, code}} ->
Logger.error("Watcher: export failed for #{Path.basename(orgfile)} (exit code #{code})")
end
end