WIP rewrite of book rendering.

parent 587985aa9d
commit 2987995c7a

26 changed files with 486 additions and 430 deletions
Makefile (146 lines changed)

@@ -1,17 +1,14 @@
#################################################################################
# Configuration
#################################################################################

SITE_DIR := _site
CACHE_DIR := _cache
TMP_DIR := $(CACHE_DIR)/tmp

AGDA := stack exec agda -- \
    --no-libraries \
    --include-path=standard-library/src
PANDOC := stack exec pandoc -- \
    --indented-code-classes=default
    --top-level-division=chapter
AGDA := stack exec agda -- --no-libraries --include-path=standard-library/src
PANDOC := stack exec pandoc --

#################################################################################

@@ -57,15 +54,6 @@ test: setup-install-htmlproofer build
.

#################################################################################
# Test generated EPUB with EPUBCheck
#################################################################################

.PHONY: test-epub
test-epub: setup-check-epubcheck build
    epubcheck $(SITE_DIR)/plfa.epub

#################################################################################
# Automatically rebuild the site on changes, and start a preview server
#################################################################################

@@ -161,118 +149,18 @@ endif
endif

#################################################################################
# Build EPUB
#################################################################################

include book/epub.mk

#################################################################################
# Build PDF
#################################################################################

PDF_DIR := pdf
PDF_TEMPLATE_DIR := $(PDF_DIR)/templates
PDF_LUA_DIR := $(PDF_DIR)/lua
MD_DIR := src
LAGDA_TEX_DIR := $(TMP_DIR)/lagda_tex
TEX_DIR := $(TMP_DIR)/tex

# Convert MD_DIR/%.lagda.md to LAGDA_TEX_DIR/%.lagda.tex
define LAGDA_TEX_PATH
$(patsubst $(MD_DIR)/%.lagda.md,$(LAGDA_TEX_DIR)/%.lagda.tex,$(1))
endef

# Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
define TEX_PATH
$(patsubst $(PDF_DIR)/%,$(TEX_DIR)/%,\
    $(patsubst README.md,$(TEX_DIR)/plfa/frontmatter/README.tex,\
    $(patsubst $(MD_DIR)/%.md,$(TEX_DIR)/%.tex,\
    $(patsubst $(MD_DIR)/%.lagda.md,$(TEX_DIR)/%.tex,$(1)))))
endef

# List source and intermediate files
PDF_LUA_SCRIPTS := $(wildcard $(PDF_LUA_DIR)/*.lua)
PDF_STATIC_FILES := $(wildcard pdf/*.*)
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
LAGDA_MD_FILES := $(filter %.lagda.md,$(MD_FILES))
LAGDA_TEX_FILES := $(call LAGDA_TEX_PATH,$(LAGDA_MD_FILES))
TEX_FILES := $(call TEX_PATH,$(MD_FILES) $(PDF_STATIC_FILES))

# Compile PLFA to a PDF
.PHONY: pdf
pdf: $(SITE_DIR)/plfa.pdf

$(SITE_DIR)/plfa.pdf: $(TEX_FILES)
    @cd $(TEX_DIR) && latexmk -pdf -lualatex -use-make -halt-on-error plfa.tex
    @cp $(TEX_DIR)/plfa.pdf $(SITE_DIR)/plfa.pdf

# Copy static files needed by PDF compilation
define MK_COPYSTATIC_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src)
    @echo "Copy $$< to $$@"
    @cp $$< $$@
endef

# Compile Markdown files to LaTeX
define MK_MD2TEX_RULE
src := $(1)
dst := $(2)
tpl := $(3)
$$(dst): export UNCHECKED_FILES = README.md
$$(dst): $$(src) $$(tpl) $(PDF_LUA_SCRIPTS) | setup-install-pandoc
    @echo "Compile $$< to $$@"
    @mkdir -p '$$(@D)'
    @$(PANDOC) \
        --lua-filter=$(PDF_LUA_DIR)/remove-badges.lua \
        --lua-filter=$(PDF_LUA_DIR)/typeset-codeblocks.lua \
        --lua-filter=$(PDF_LUA_DIR)/rewrite-links.lua \
        --lua-filter=$(PDF_LUA_DIR)/single-file-headers-ids.lua \
        --template=$$(tpl) \
        $$< -o $$@
endef

# Compile Literate Agda files to LaTeX
define MK_LAGDA_MD2TEX_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src) $(LAGDA_TEX_FILES) | setup-install-agda
    @$(AGDA) --include-path=$(LAGDA_TEX_DIR) --latex --latex-dir=$(TEX_DIR) $$<
endef

# Copy static files (from PDF_DIR/% to TEX_DIR/%)
$(foreach static_file,\
    $(PDF_STATIC_FILES),\
    $(eval $(call MK_COPYSTATIC_RULE,\
        $(static_file),\
        $(call TEX_PATH,$(static_file)))))

# Compile .md files (from MD_DIR/%.md to TEX_DIR/%.tex)
$(foreach md_file,\
    $(filter-out %acknowledgements.md %.lagda.md,$(MD_FILES)),\
    $(eval $(call MK_MD2TEX_RULE,\
        $(md_file),\
        $(call TEX_PATH,$(md_file)),\
        $(PDF_TEMPLATE_DIR)/chapter.latex)))

# Compile .lagda.md files (from MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex)
$(foreach lagda_md_file,\
    $(LAGDA_MD_FILES),\
    $(eval $(call MK_MD2TEX_RULE,\
        $(lagda_md_file),\
        $(call LAGDA_TEX_PATH,$(lagda_md_file)),\
        $(PDF_TEMPLATE_DIR)/chapter.latex)))

# Compile acknowledgements (from SITE_DIR/acknowledgements.md to TEX_DIR/acknowledgements.tex)
$(eval $(call MK_MD2TEX_RULE,\
    $(SITE_DIR)/acknowledgements.md,\
    $(TEX_DIR)/plfa/backmatter/acknowledgements.tex,\
    $(PDF_TEMPLATE_DIR)/acknowledgements.latex))

# Compile .lagda.tex files (from LAGDA_TEX_DIR/%.lagda.tex to TEX_DIR/%.tex)
$(foreach lagda_md_file,\
    $(LAGDA_MD_FILES),\
    $(eval $(call MK_LAGDA_MD2TEX_RULE,\
        $(call LAGDA_TEX_PATH,$(lagda_md_file)),\
        $(call TEX_PATH,$(lagda_md_file)))))
include book/pdf.mk

#################################################################################
@@ -312,6 +200,14 @@ ifeq (,$(wildcard $(shell which fix-whitespace)))
    @echo " stack install --stack-yaml stack-8.8.3.yaml"
endif

.PHONY: setup-check-latexmk
setup-check-latexmk:
ifeq (,$(wildcard $(shell which latexmk)))
    @echo "The command you called requires Latexmk"
    @echo "Latexmk is included in MacTeX and MikTeX"
    @echo "See: https://mg.readthedocs.io/latexmk.html"
endif

.PHONY: setup-check-epubcheck
setup-check-epubcheck:
ifeq (,$(wildcard $(shell which epubcheck)))
book/epub.css (new file, 90 lines)

@@ -0,0 +1,90 @@
/* This is Pandoc's default ebook CSS, modified slightly. */
/* https://github.com/jgm/pandoc/blob/master/data/epub.css */
@font-face {
  font-family: 'DejaVu-mononoki-Symbola-Droid';
  font-weight: normal;
  font-style: normal;
  src: url('../public/webfonts/DejaVu-mononoki-Symbola-Droid.woff');
}

body {
  margin: 5%;
  text-align: justify;
  font-size: medium;
}

code {
  font-family: 'DejaVu-mononoki-Symbola-Droid', monospace;
}

h1,
h2,
h3,
h4,
h5,
h6 {
  text-align: left;
}

nav#toc ol,
nav#landmarks ol {
  padding: 0;
  margin-left: 1em;
}

nav#toc ol li,
nav#landmarks ol li {
  list-style-type: none;
  margin: 0;
  padding: 0;
}

a.footnote-ref {
  vertical-align: super;
}

em,
em em em,
em em em em em {
  font-style: italic;
}

em em,
em em em em {
  font-style: normal;
}

code {
  white-space: pre-wrap;
}

span.smallcaps {
  font-variant: small-caps;
}

span.underline {
  text-decoration: underline;
}

q {
  quotes: "“" "”" "‘" "’";
}

div.column {
  display: inline-block;
  vertical-align: top;
  width: 50%;
}

div.hanging-indent {
  margin-left: 1.5em;
  text-indent: -1.5em;
}

/* Workaround for iBooks issue; see #6242 */
@media screen {
  .sourceCode {
    overflow: visible !important;
    white-space: pre-wrap !important;
  }
}
book/epub.md (new file, 71 lines)

@@ -0,0 +1,71 @@
---
title: Programming Language Foundations in Agda
author:
- Philip Wadler
- Wen Kokke
- Jeremy G. Siek
description: This book is an introduction to programming language theory using the proof assistant Agda.
rights: Creative Commons Attribution 4.0 International License
language: en-US
---

# Front matter

``` {.include shift-heading-level-by=1}
src/plfa/frontmatter/dedication.md
src/plfa/frontmatter/preface.md
```

# Part 1: Logical Foundations

``` {.include shift-heading-level-by=1}
src/plfa/part1/Naturals.lagda.md
src/plfa/part1/Induction.lagda.md
src/plfa/part1/Relations.lagda.md
src/plfa/part1/Equality.lagda.md
src/plfa/part1/Isomorphism.lagda.md
src/plfa/part1/Connectives.lagda.md
src/plfa/part1/Negation.lagda.md
src/plfa/part1/Quantifiers.lagda.md
src/plfa/part1/Decidable.lagda.md
src/plfa/part1/Lists.lagda.md
```

# Part 2: Operational Semantics

``` {.include shift-heading-level-by=1}
src/plfa/part2/Lambda.lagda.md
src/plfa/part2/Properties.lagda.md
src/plfa/part2/DeBruijn.lagda.md
src/plfa/part2/More.lagda.md
src/plfa/part2/Bisimulation.lagda.md
src/plfa/part2/Inference.lagda.md
src/plfa/part2/Untyped.lagda.md
src/plfa/part2/Confluence.lagda.md
src/plfa/part2/BigStep.lagda.md
```

# Part 3: Denotational Semantics

``` {.include shift-heading-level-by=1}
src/plfa/part3/Denotational.lagda.md
src/plfa/part3/Compositional.lagda.md
src/plfa/part3/Soundness.lagda.md
src/plfa/part3/Adequacy.lagda.md
src/plfa/part3/ContextualEquivalence.lagda.md
```

# Appendix

``` {.include shift-heading-level-by=1}
src/plfa/part2/Substitution.lagda.md
```

# Backmatter

## Acknowledgements
``` {.include shift-heading-level-by=1}
_site/acknowledgements.md
src/plfa/backmatter/Fonts.lagda.md
```
book/epub.mk (new file, 38 lines)

@@ -0,0 +1,38 @@
EPUB_DIR := book
EPUB_TEMPLATE_DIR := $(EPUB_DIR)/templates
EPUB_LUA_DIR := $(EPUB_DIR)/lua
EPUB_LUA_SCRIPTS := $(wildcard $(EPUB_LUA_DIR)/*.lua)
MD_DIR := src
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)

# Compile PLFA to an EPUB
.PHONY: epub
epub: $(SITE_DIR)/plfa.epub

$(SITE_DIR)/plfa.epub: book/epub.md book/epub.css $(MD_FILES) $(EPUB_LUA_SCRIPTS) | setup-install-pandoc
    @$(PANDOC) \
        --strip-comments \
        --css=epub/epub.css \
        --epub-embed-font='public/webfonts/DejaVu-mononoki-Symbola-Droid.woff' \
        --lua-filter=$(EPUB_LUA_DIR)/include-files.lua \
        --lua-filter=$(EPUB_LUA_DIR)/single-file-links.lua \
        --lua-filter=$(EPUB_LUA_DIR)/epub-clean-html.lua \
        --lua-filter=$(EPUB_LUA_DIR)/set-default-code-class.lua -M default-code-class=agda \
        --standalone \
        --fail-if-warnings \
        --toc --toc-depth=2 \
        --epub-chapter-level=2 \
        $< -o $@

# Test EPUB with EPUBCheck
.PHONY: epub-test
epub-test: $(SITE_DIR)/plfa.epub | setup-check-epubcheck
    epubcheck $(SITE_DIR)/plfa.epub

# Clean generated files
.PHONY: epub-clean
epub-clean:
    rm $(SITE_DIR)/plfa.epub
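A minimal usage sketch of the new targets, assuming the top-level Makefile includes book/epub.mk as above (SITE_DIR is _site):

    make epub        # build _site/plfa.epub with Pandoc and the book/lua filters
    make epub-test   # validate the generated EPUB with EPUBCheck
    make epub-clean  # remove the generated EPUB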
@@ -13,7 +13,7 @@
local List = require 'pandoc.List'

--- Shift headings in block list by given number
function shift_headings(blocks, shift_by)
local function shift_headings(blocks, shift_by)
  if not shift_by then
    return blocks
  end

@@ -30,7 +30,8 @@ end

--- Filter function for code blocks
function CodeBlock(cb)
  -- ignore code blocks which are not of class "include".

  -- Ignore code blocks which are not of class "include".
  if not cb.classes:includes 'include' then
    return
  end

@@ -40,21 +41,24 @@ function CodeBlock(cb)
  local shift_heading_level_by =
    tonumber(cb.attributes['shift-heading-level-by'])

  local blocks = List:new()
  for line in cb.text:gmatch('[^\n]+') do
    if line:sub(1,2) ~= '//' then
      -- Read in the document at the file path specified by `line`.
      local fh = io.open(line)
      local document = pandoc.read(fh:read '*a', format)
      fh:close()

      -- Before shifting headings, add a title heading at the beginning of the chapter.
      local heading = pandoc.Header(1, pandoc.Str(pandoc.utils.stringify(document.meta.title)))
      document.blocks:insert(1, heading)
      if document.meta.title then
        local heading = pandoc.Header(1, pandoc.Str(pandoc.utils.stringify(document.meta.title)))
        document.blocks:insert(1, heading)
      end
      -- Shift all headings by the user-specified amount, which is 0 by default.
      local chapter = shift_headings(document.blocks, shift_heading_level_by)

      -- Concatenate the chapter blocks (discarding the metadata) to the current document.
      blocks:extend(chapter)
      fh:close()
    end
  end
  return blocks
@@ -1,4 +1,20 @@
local function typeset_codeblock(cb)
local unchecked_files = {}

local function is_checked()
  -- Check if any of our input files is an unchecked file.
  for _, input_file in pairs(PANDOC_STATE.input_files) do
    if unchecked_files[input_file] then
      if #PANDOC_STATE.input_files > 1 then
        error("Cannot handle multiple unchecked input files.")
      else
        return false
      end
    end
  end
  return true
end

local function render_codeblock(cb)
  -- If a code block has class Agda or its class is omitted, format it as...
  --
  -- \begin{agda}\begin{code} .. \end{agda}\end{code}

@@ -10,40 +26,22 @@ local function typeset_codeblock(cb)
  -- Any file which is not checked by Agda must have its code blocks typeset in the latter style.
  -- Specify these files via the UNCHECKED_FILES environment variable.
  if is_checked() then
    if #(cb.classes) == 0 or cb.classes[1] == 'agda' then
    if #cb.classes == 0 or cb.classes[1] == 'agda' then
      return pandoc.RawBlock('tex', '\\begin{agda}\n\\begin{code}\n' .. cb.text .. '\n\\end{code}\n\\end{agda}')
    end
  end
  return pandoc.RawBlock('tex', '\\begin{pre}\n' .. cb.text .. '\n\\end{pre}')
end

-- Set of unchecked files, where the names of unchecked files are stored in the table keys.
local UNCHECKED_FILES = nil

-- Return whether the current file is checked by Agda, given by the UNCHECKED_FILES environment variable.
function is_checked()

  -- Initialise the table.
  if UNCHECKED_FILES == nil then
    UNCHECKED_FILES = {}
    for unchecked_file in string.gmatch(pandoc.system.environment().UNCHECKED_FILES, "([^ ]+)") do
      UNCHECKED_FILES[unchecked_file] = true
local function get_unchecked_files(meta)
  if meta['unchecked-files'] then
    for unchecked_file in string.gmatch(pandoc.utils.stringify(meta['unchecked-files']), "([^ ]+)") do
      unchecked_files[unchecked_file] = true
    end
  end

  -- Check if any of our input files is an unchecked file.
  for _, input_file in pairs(PANDOC_STATE.input_files) do
    if UNCHECKED_FILES[input_file] then
      if #PANDOC_STATE.input_files > 1 then
        error("Cannot handle multiple unchecked input files.")
      else
        return false
      end
    end
  end
  return true
end

return {
  { CodeBlock = typeset_codeblock }
  { Meta = get_unchecked_files },
  { CodeBlock = render_codeblock }
}
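A minimal sketch of how this filter is meant to be driven, mirroring the flags used in book/pdf.mk below; the unchecked-files metadata marks inputs whose code blocks are typeset as plain pre blocks rather than as Agda-checked code:

    pandoc --top-level-division=chapter --indented-code-classes=default \
      --lua-filter=book/lua/latex-render-codeblocks.lua -M unchecked-files=README.md \
      README.md -o README.tex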
book/lua/remove-badges.lua (new file, 22 lines)

@@ -0,0 +1,22 @@
local badge_url = nil

local function remove_badges(ln)
  -- Remove link elements if their first child is a badge.
  if #ln.content > 0 and ln.content[1].t == "Image" then
    if string.find(ln.content[1].src, badge_url, 1, true) then
      return pandoc.Null()
    end
  end
end

local function get_badge_url(meta)
  if meta['badge-url'] then
    badge_url = pandoc.utils.stringify(meta['badge-url'])
  end
end

return {
  { Meta = get_badge_url },
  { Link = remove_badges }
}
@@ -8,19 +8,21 @@

local default_code_classes = {}

function add_default_code_class(el)
local function add_default_code_class(el)
  if #(el.classes) == 0 then
    el.classes = default_code_classes
    return el
  end
end

function get_default_code_class(meta)
local function get_default_code_class(meta)
  if meta['default-code-class'] then
    default_code_classes = {pandoc.utils.stringify(meta['default-code-class'])}
  end
end

return {{Meta = get_default_code_class},
        {Code = add_default_code_class},
        {CodeBlock = add_default_code_class}}
return {
  {Meta = get_default_code_class},
  {Code = add_default_code_class},
  {CodeBlock = add_default_code_class}
}
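A small sketch of the intended effect, given that book/epub.mk passes -M default-code-class=agda: a Code or CodeBlock element that carries no class at all is assigned the class agda (so bare fenced blocks are typeset as Agda), while elements that already have a class are returned unchanged.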
book/lua/single-file-headers.lua (new file, 57 lines)

@@ -0,0 +1,57 @@
-- Performs the following transformations on Header identifiers:
--
-- Case 1:
-- /title/: "Some Title"
-- /permalink/: /Title/ -> # Some Title {#Title}
--
-- Case 2:
-- ## Subsection Title {name=more-stuff} -> ## Subsection Title {#Title-more-stuff}
--

local identifier = nil
local title = nil

local function get_meta_info(meta)

  -- Get the title.
  if meta['title'] then
    title = meta['title']
  elseif meta['default-title'] then
    title = meta['default-title']
  end
  title = pandoc.utils.stringify(title)

  -- Get the identifier.
  if meta['permalink'] then
    identifier = meta['permalink'][1].c:match("^/(%w+)/$")
  elseif meta['title'] then
    identifier = meta['title']
  elseif meta['default-title'] then
    identifier = meta['default-title']
  end
  identifier = string.lower(pandoc.utils.stringify(identifier))
end

local function insert_title(doc)
  -- Insert title in front of the blocks
  if title then
    header = pandoc.Header(1,title)
    header.identifier = identifier
    table.insert(doc.blocks,1,header)
  end
  return doc
end

local function change_identifier(elem)
  -- Change header identifier based on metadata
  if elem.t == "Header" and elem.attributes.name then
    elem.identifier = identifier .. "-" .. elem.attributes.name
  end
  return elem
end

return {
  {Meta = get_meta_info},
  {Header = change_identifier},
  {Pandoc = insert_title}
}
book/pdf.mk (new file, 122 lines)

@@ -0,0 +1,122 @@
PDF_DIR := book
PDF_TEMPLATE_DIR := $(PDF_DIR)/templates
PDF_LUA_DIR := $(PDF_DIR)/lua
MD_DIR := src
LAGDA_TEX_DIR := $(TMP_DIR)/lagda_tex
TEX_DIR := $(TMP_DIR)/tex

# Convert MD_DIR/%.lagda.md to LAGDA_TEX_DIR/%.lagda.tex
define LAGDA_TEX_PATH
$(patsubst $(MD_DIR)/%.lagda.md,$(LAGDA_TEX_DIR)/%.lagda.tex,$(1))
endef

# Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
define TEX_PATH
$(patsubst $(PDF_DIR)/%,$(TEX_DIR)/%,\
    $(patsubst README.md,$(TEX_DIR)/plfa/frontmatter/README.tex,\
    $(patsubst $(MD_DIR)/%.md,$(TEX_DIR)/%.tex,\
    $(patsubst $(MD_DIR)/%.lagda.md,$(TEX_DIR)/%.tex,$(1)))))
endef

# List source and intermediate files
PDF_LUA_SCRIPTS := $(wildcard $(PDF_LUA_DIR)/*.lua)
PDF_STATIC_FILES := book/pdf.tex book/DejaVu-mononoki-Symbola-Droid.ttf
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
MD_FILES := $(filter-out %Fonts.lagda.md,$(MD_FILES))
LAGDA_MD_FILES := $(filter %.lagda.md,$(MD_FILES))
LAGDA_TEX_FILES := $(call LAGDA_TEX_PATH,$(LAGDA_MD_FILES))
TEX_FILES := $(call TEX_PATH,$(MD_FILES) $(PDF_STATIC_FILES))

# Compile PLFA to a PDF
.PHONY: pdf
pdf: $(SITE_DIR)/plfa.pdf

$(SITE_DIR)/plfa.pdf: $(TEX_FILES)
    @cd $(TEX_DIR) && latexmk -pdf -lualatex -use-make -halt-on-error pdf.tex
    @cp $(TEX_DIR)/pdf.pdf $(SITE_DIR)/plfa.pdf

# Copy static files needed by PDF compilation
define MK_COPYSTATIC_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src)
    @echo "Copy $$< to $$@"
    @mkdir -p '$$(@D)'
    @cp $$< $$@
endef

# Compile Markdown files to LaTeX
#
# NOTE: Passing --indented-code-classes=default sets the class for /indented/ code blocks
# to 'default', which lets us distinguish them from /fenced/ code blocks without a
# class. That's important, since fenced code blocks are checked by Agda, but indented
# code blocks are /not/, so latex-render-codeblocks.lua needs to be able to tell the
# difference.
#
define MK_MD2TEX_RULE
src := $(1)
dst := $(2)
ttl := $(3)
$$(dst): $$(src) $(PDF_LUA_SCRIPTS) | setup-install-pandoc
    @echo "Compile $$< to $$@"
    @mkdir -p '$$(@D)'
    @$(PANDOC) \
        --top-level-division=chapter \
        --indented-code-classes=default \
        --lua-filter=$(PDF_LUA_DIR)/remove-badges.lua -M badge-url=https://img.shields.io/badge/ \
        --lua-filter=$(PDF_LUA_DIR)/latex-render-codeblocks.lua -M unchecked-files=README.md \
        --lua-filter=$(PDF_LUA_DIR)/single-file-links.lua \
        --lua-filter=$(PDF_LUA_DIR)/single-file-headers.lua -M default-title=$$(ttl) \
        $$< -o $$@
endef

# Compile Literate Agda files to LaTeX
define MK_LAGDA_MD2TEX_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src) $(LAGDA_TEX_FILES) | setup-install-agda setup-check-latexmk
    @$(AGDA) --include-path=$(LAGDA_TEX_DIR) --latex --latex-dir=$(TEX_DIR) $$<
endef

# Copy static files (from PDF_DIR/% to TEX_DIR/%)
$(foreach static_file,\
    $(PDF_STATIC_FILES),\
    $(eval $(call MK_COPYSTATIC_RULE,\
        $(static_file),\
        $(call TEX_PATH,$(static_file)))))

# Compile .md files (from MD_DIR/%.md to TEX_DIR/%.tex)
$(foreach md_file,\
    $(filter-out %acknowledgements.md %.lagda.md,$(MD_FILES)),\
    $(eval $(call MK_MD2TEX_RULE,\
        $(md_file),\
        $(call TEX_PATH,$(md_file)))))

# Compile .lagda.md files (from MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex)
$(foreach lagda_md_file,\
    $(LAGDA_MD_FILES),\
    $(eval $(call MK_MD2TEX_RULE,\
        $(lagda_md_file),\
        $(call LAGDA_TEX_PATH,$(lagda_md_file)))))

# Compile acknowledgements (from SITE_DIR/acknowledgements.md to TEX_DIR/acknowledgements.tex)
$(eval $(call MK_MD2TEX_RULE,\
    $(SITE_DIR)/acknowledgements.md,\
    $(TEX_DIR)/plfa/backmatter/acknowledgements.tex,\
    Acknowledgements))

# Compile .lagda.tex files (from LAGDA_TEX_DIR/%.lagda.tex to TEX_DIR/%.tex)
$(foreach lagda_md_file,\
    $(LAGDA_MD_FILES),\
    $(eval $(call MK_LAGDA_MD2TEX_RULE,\
        $(call LAGDA_TEX_PATH,$(lagda_md_file)),\
        $(call TEX_PATH,$(lagda_md_file)))))

# Clean generated files
.PHONY: pdf-clean
pdf-clean:
    @rm -rf $(SITE_DIR)/plfa.pdf $(LAGDA_TEX_DIR) $(TEX_DIR)
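As a concrete sketch of the path mapping above (with CACHE_DIR := _cache and TMP_DIR := $(CACHE_DIR)/tmp from the top-level Makefile), one chapter would flow through the PDF build roughly as:

    src/plfa/part1/Naturals.lagda.md
      -> _cache/tmp/lagda_tex/plfa/part1/Naturals.lagda.tex   (Pandoc, MK_MD2TEX_RULE)
      -> _cache/tmp/tex/plfa/part1/Naturals.tex               (Agda --latex, MK_LAGDA_MD2TEX_RULE)
      -> _cache/tmp/tex/pdf.pdf, copied to _site/plfa.pdf     (latexmk, pdf target)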
@@ -173,5 +173,4 @@

\input{plfa/part2/Substitution}
\input{plfa/backmatter/acknowledgements}
\input{plfa/backmatter/Fonts}
\end{document}
css/epub.css (deleted, 42 lines)

@@ -1,42 +0,0 @@
/* This is Pandoc's default ebook CSS, modified slightly. */
/* https://github.com/jgm/pandoc/blob/master/data/epub.css */
@font-face {
  font-family: 'mononoki';
  font-weight: normal;
  font-style: normal;
  src: url('../webfonts/mononoki.woff');
}
@font-face {
  font-family: 'FreeMono';
  font-weight: normal;
  font-style: normal;
  src: url('../webfonts/FreeMono.woff');
}
@font-face {
  font-family: 'DejaVuSansMono';
  font-weight: normal;
  font-style: normal;
  src: url('../webfonts/DejaVuSansMono.woff');
}
body { margin: 5%; text-align: justify; font-size: medium; }
code { font-family: 'mononoki', 'FreeMono', 'DejaVuSansMono', monospace; }
h1, h2, h3, h4, h5, h6 { text-align: left; }
nav#toc ol, nav#landmarks ol { padding: 0; margin-left: 1em; }
nav#toc ol li, nav#landmarks ol li { list-style-type: none; margin: 0; padding: 0; }
a.footnote-ref { vertical-align: super; }
em, em em em, em em em em em { font-style: italic;}
em em, em em em em { font-style: normal; }
code{ white-space: pre-wrap; }
span.smallcaps{ font-variant: small-caps; }
span.underline{ text-decoration: underline; }
q { quotes: "“" "”" "‘" "’"; }
div.column{ display: inline-block; vertical-align: top; width: 50%; }
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}

/* Workaround for iBooks issue; see #6242 */
@media screen {
  .sourceCode {
    overflow: visible !important;
    white-space: pre-wrap !important;
  }
}
@@ -1,19 +0,0 @@
#!/usr/bin/env ruby
require 'yaml'
require 'liquid'

unless ARGV.length == 3
  abort "Usage: render-liquid-template.rb [yaml_file] [markdown_in_file] [markdown_out_file]"
end

yaml_file, markdown_file_in, markdown_file_out = ARGV

[yaml_file, markdown_file_in].each do |file|
  unless File.file? file
    abort "File does not exist: '%s'" % [file]
  end
end

metadata = { 'site' => (YAML.load (File.read yaml_file)) }
template = Liquid::Template.parse (File.read markdown_file_in)
File.write markdown_file_out, (template.render metadata)
@@ -1,19 +0,0 @@
-- Performs the following transformations on Link targets:
--
-- Case 1:
-- [text](/chapter/#more-stuff) -> [text](#chapter-more-stuff)
--
-- Case 2:
-- [text](/chapter/) -> [text](#chapter)
--
-- All other Links are ignored.
function Link (el)
  local n
  -- Case 1:
  el.target, n = el.target:gsub("^/(%w+)/#([%w-]+)$", "#%1-%2")
  -- Case 2:
  if n == 0 then
    el.target = el.target:gsub("^/(%w+)/$", "#%1")
  end
  return el
end
@@ -3,7 +3,6 @@
module Hakyll.Web.Routes.Permalink
  ( convertPermalink
  , permalinkRoute
  , permalinkRouteWithDefault
  , stripIndexFile
  ) where

@@ -19,11 +18,12 @@ convertPermalink :: FilePath -> FilePath
convertPermalink link = fromMaybe link (stripPrefix "/" link) </> "index.html"

permalinkRoute :: Routes
permalinkRoute = permalinkRouteWithDefault (error "Missing field 'permalink'.")

permalinkRouteWithDefault :: Routes -> Routes
permalinkRouteWithDefault def = metadataRoute $ \metadata ->
  maybe def (constRoute . convertPermalink) (lookupString "permalink" metadata)
permalinkRoute = metadataRoute $ \metadata ->
  case lookupString "permalink" metadata of
    Nothing ->
      customRoute (\identifier -> error $ "missing field 'permalink' in metadata " <> toFilePath identifier)
    Just permalink ->
      constRoute (convertPermalink permalink)

-- Removes "index.html" from URLs.
stripIndexFile :: String -> String
hs/Main.hs (97 lines changed)

@@ -1,7 +1,7 @@
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE OverloadedStrings #-}

import Control.Monad ((<=<), forM_)
import qualified Data.ByteString.Lazy as BL
import Data.Functor ((<&>))
import qualified Data.Text as T
import Hakyll

@@ -16,7 +16,6 @@ import qualified Text.CSL as CSL
import qualified Text.CSL.Pandoc as CSL (processCites)
import Text.Pandoc (Pandoc(..), ReaderOptions(..), WriterOptions(..), Extension(..))
import qualified Text.Pandoc as Pandoc
import qualified Text.Pandoc.Filter as Pandoc (Filter(..), applyFilters)

--------------------------------------------------------------------------------
-- Configuration

@@ -124,29 +123,6 @@ sassOptions = defaultSassOptions
  { sassIncludePaths = Just ["css"]
  }

epubSectionContext :: Context String
epubSectionContext = mconcat
  [ contentField "content" "content"
  , titlerunningField
  , subtitleField
  ]

epubReaderOptions :: ReaderOptions
epubReaderOptions = siteReaderOptions
  { readerStandalone = True
  , readerStripComments = True
  }

epubWriterOptions :: WriterOptions
epubWriterOptions = siteWriterOptions
  { writerTableOfContents = True
  , writerTOCDepth = 2
  , writerEpubFonts = [ "public/webfonts/DejaVuSansMono.woff"
                      , "public/webfonts/FreeMono.woff"
                      , "public/webfonts/mononoki.woff"
                      ]
  , writerEpubChapterLevel = 2
  }

--------------------------------------------------------------------------------
-- Build site

@@ -198,40 +174,14 @@ main = do
  --
  -- NOTE: The order of the various match expressions is important:
  -- Special-case compilation instructions for files such as
  -- "src/plfa/epub.md" and "src/plfa/index.md" would be overwritten
  -- by the general purpose compilers for "src/**.md", which would
  -- "src/plfa/index.md" would be overwritten by the general
  -- purpose compilers for "src/**.md", which would
  -- cause them to render incorrectly. It is possible to explicitly
  -- exclude such files using `complement` patterns, but this vastly
  -- complicates the match patterns.
  --
  hakyll $ do

    -- Compile EPUB
    match "src/plfa/epub.md" $ do
      route $ constRoute "plfa.epub"
      compile $ do
        epubTemplate <- load "templates/epub.html"
                        >>= compilePandocTemplate
        epubMetadata <- load "src/plfa/epub.xml"
        let ropt = epubReaderOptions
        let wopt = epubWriterOptions
              { writerTemplate = Just . itemBody $ epubTemplate
              , writerEpubMetadata = Just . T.pack . itemBody $ epubMetadata
              }
        getResourceBody
          >>= applyAsTemplate (tableOfContentsContext epubSectionContext)
          >>= readPandocWith ropt
          >>= applyPandocFilters ropt [] "epub3"
          >>= writeEPUB3With wopt

    match "templates/epub.html" $
      compile $ getResourceBody
        >>= applyAsTemplate siteContext

    match "src/plfa/epub.xml" $
      compile $ getResourceBody
        >>= applyAsTemplate siteContext

    -- Compile Table of Contents
    match "src/plfa/index.md" $ do
      route permalinkRoute

@@ -253,15 +203,16 @@ main = do
        >>= applyAsTemplate acknowledgementsContext
        >>= readMarkdownWith siteReaderOptions
        <&> writeHTML5With siteWriterOptions
        >>= saveSnapshot "content"
        >>= loadAndApplyTemplate "templates/page.html" siteContext
        >>= loadAndApplyTemplate "templates/default.html" siteContext
        >>= prettifyUrls

    match "src/plfa/backmatter/acknowledgements.md" $ version "raw" $ do
      route $ constRoute "acknowledgements.md"
    match "src/plfa/backmatter/acknowledgements.md" $ version "native" $ do
      route $ constRoute "versions/native/acknowledgements.native"
      compile $ getResourceBody
        >>= applyAsTemplate acknowledgementsContext
        >>= readMarkdownWith siteReaderOptions
        <&> writeNativeWith siteWriterOptions

    match "authors/*.metadata" $
      compile getResourceBody

@@ -298,7 +249,7 @@ main = do
      compile $ pageWithAgdaCompiler agdaOptions

    -- Compile other sections and pages
    match ("README.md" .||. "src/**.md" .&&. complement "src/plfa/epub.md") $ do
    match ("README.md" .||. "src/**.md") $ do
      route permalinkRoute
      compile pageCompiler

@@ -349,7 +300,7 @@ main = do
    create ["public/css/style.css"] $ do
      route idRoute
      compile $ do
        csses <- loadAll ("css/*.css" .||. "css/*.scss" .&&. complement "css/epub.css")
        csses <- loadAll ("css/*.css" .||. "css/*.scss")
        makeItem $ unlines $ map itemBody csses

    -- Copy versions

@@ -395,7 +346,7 @@ processCites csl bib item = do
  withItemBody (return . CSL.processCites style refs) item

-- | Write a document as HTML using Pandoc, with the supplied options.
writeHTML5With :: WriterOptions -- ^ Writer options for pandoc
writeHTML5With :: WriterOptions -- ^ Writer options for Pandoc
               -> Item Pandoc   -- ^ Document to write
               -> Item String   -- ^ Resulting HTML
writeHTML5With wopt (Item itemi doc) =
@@ -403,26 +354,14 @@ writeHTML5With wopt (Item itemi doc) =
    Left err -> error $ "Hakyll.Web.Pandoc.writePandocWith: " ++ show err
    Right item' -> Item itemi $ T.unpack item'

-- | Write a document as EPUB3 using Pandoc, with the supplied options.
writeEPUB3With :: WriterOptions -> Item Pandoc -> Compiler (Item BL.ByteString)
writeEPUB3With wopt (Item itemi doc) =
  return $ case Pandoc.runPure $ Pandoc.writeEPUB3 wopt doc of
    Left err -> error $ "Hakyll.Web.Pandoc.writeEPUB3With: " ++ show err
    Right doc' -> Item itemi doc'

-- | Apply a filter to a Pandoc document.
applyPandocFilters :: ReaderOptions -> [Pandoc.Filter] -> String -> Item Pandoc -> Compiler (Item Pandoc)
applyPandocFilters ropt filters fmt = withItemBody $
  unsafeCompiler . Pandoc.runIOorExplode . Pandoc.applyFilters ropt filters [fmt]

-- | Compile a Pandoc template (as opposed to a Hakyll template).
compilePandocTemplate :: Item String -> Compiler (Item (Pandoc.Template T.Text))
compilePandocTemplate i = do
  let templatePath = toFilePath $ itemIdentifier i
  let templateBody = T.pack $ itemBody i
  templateOrError <- unsafeCompiler $ Pandoc.compileTemplate templatePath templateBody
  template <- either fail return templateOrError
  makeItem template
-- | Write a document in Pandoc's native format, with the supplied options.
writeNativeWith :: WriterOptions -- ^ Writer options for Pandoc
                -> Item Pandoc   -- ^ Document to write
                -> Item String   -- ^ Resulting native output
writeNativeWith wopt (Item itemi doc) =
  case Pandoc.runPure $ Pandoc.writeNative wopt doc of
    Left err -> error $ "Hakyll.Web.Pandoc.writeNativeWith: " ++ show err
    Right item' -> Item itemi $ T.unpack item'

--------------------------------------------------------------------------------
@@ -1,12 +0,0 @@
function remove_badges(ln)
  -- Remove link elements if their first child is a badge.
  if #ln.content > 0 and ln.content[1].t == "Image" then
    if string.match(ln.content[1].src, 'https://img%.shields%.io/badge/') then
      return pandoc.Null()
    end
  end
end

return {
  { Link = remove_badges }
}
@@ -1,50 +0,0 @@
-- Performs the following transformations on Header identifiers:
--
-- Case 1:
-- /title/: "Some Title"
-- /permalink/: /Title/ -> # Some Title {#Title}
--
-- Case 2:
-- ## Subsection Title {name=more-stuff} -> ## Subsection Title {#Title-more-stuff}
--

local identifier = nil
local title = nil

function get_identifier(meta)
  -- get title identifier from permalinks
  for k,v in pairs(meta) do
    if k == "title" then
      title = {table.unpack(v)}
    end
    if k == "permalink" then
      -- set lower case permalink as identifier
      identifier = v[1].c:match("^/(%w+)/$")
    end
  end
end

function insert_title(doc)
  -- insert title in front of the blocks
  if title then
    header = pandoc.Header(1,title)
    header.identifier = identifier
    table.insert(doc.blocks,1,header)
  end
  return doc
end

function change_identifier(elem)
  -- change header identifier based on metadata
  local name = elem.attributes["name"]
  if elem.t == "Header" and name then
    elem.identifier = identifier .. "-" .. name
  end
  return elem
end

return {
  {Meta = get_identifier},
  {Header = change_identifier},
  {Pandoc = insert_title}
}
@@ -1,4 +0,0 @@
\chapter{Acknowledgements}
$body$
@@ -1,3 +0,0 @@
$body$
@@ -1,7 +0,0 @@
$for(parts)$
# $title$
$for(sections)$
$content$
$endfor$
$endfor$
@@ -1,26 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:epub="http://www.idpf.org/2007/ops" xml:lang="$language$">
  <head>
    <meta charset="utf-8" />
    <title>$pagetitle$</title>
    <link rel="stylesheet" type="text/css" href="css/agda.css" />
    <link rel="stylesheet" type="text/css" href="css/epub.css" />
  </head>
  <body>
    <section epub:type="titlepage" class="titlepage">
      <h1 class="title">$pagetitle$</h1>
      $for(authors)$
      <p class="author">$name$</p>
      $endfor$
      $if(rights)$
      <div class="rights">$rights$</div>
      $endif$
    </section>
    <!-- NOTE: body is escaped because it is filled in by the Pandoc template engine, -->
    <!-- whereas the rest is filled in by the Hakyll template engine, both of -->
    <!-- which use the same syntax. Yes, this is confusing. -->
    $$body$$
  </body>
</html>