EPUB and PDF now more-or-less work

Wen Kokke 2021-08-24 22:41:18 +01:00
parent b07957d430
commit a9f85c9ab1
No known key found for this signature in database
GPG key ID: 7EB7DBBCEB539DB8
14 changed files with 236 additions and 290 deletions

View file

@@ -18,18 +18,15 @@ PANDOC := stack exec pandoc --
.PHONY: all
all:
@echo "Building site..."
make build
@echo "Testing site..."
make test
@echo "Building epub..."
make epub-build
@echo "Testing epub..."
make epub-test
@echo "Building pdf..."
make pdf-build
@echo "Testing pdf..."
@make build
@make epub-build
@make pdf-build
.PHONY: all-clean
all-clean:
@make clean
@make epub-clean
@make pdf-clean
#################################################################################
# Setup Git Hooks
@@ -37,7 +34,8 @@ all:
.PHONY: init
init: setup-check-fix-whitespace setup-install-htmlproofer
git config core.hooksPath .githooks
@echo "Setting up Git Hooks"
@git config core.hooksPath .githooks
#################################################################################
@@ -45,13 +43,14 @@ init: setup-check-fix-whitespace setup-install-htmlproofer
#################################################################################
.PHONY: build
build: \
standard-library/ChangeLog.md
stack build && stack exec site build
build: standard-library/ChangeLog.md
@echo "Building site"
@stack build && stack exec site build
standard-library/ChangeLog.md:
git submodule init
git submodule update --recursive
@echo "Updating Agda standard library"
@git submodule init
@git submodule update --recursive
#################################################################################
@@ -60,7 +59,8 @@ standard-library/ChangeLog.md:
.PHONY: test
test: setup-install-htmlproofer build
cd $(SITE_DIR) && htmlproofer \
@echo "Testing generated HTML using HTMLProofer"
@cd $(SITE_DIR) && htmlproofer \
--check-html \
--disable-external \
--report-invalid-tags \
@@ -79,9 +79,9 @@ test: setup-install-htmlproofer build
#################################################################################
.PHONY: watch
watch: \
standard-library/ChangeLog.md
stack build && stack exec site watch
watch: standard-library/ChangeLog.md
@echo "Watching for changes and rebuilding"
@stack build && stack exec site watch
#################################################################################
@@ -90,7 +90,8 @@ watch: \
.PHONY: update-contributors
update-contributors:
stack build && stack exec update-contributors
@echo "Updating contributors from GitHub"
@stack build && stack exec update-contributors
#################################################################################
@@ -98,9 +99,9 @@ update-contributors:
#################################################################################
.PHONY: clean
clean: \
standard-library/ChangeLog.md
stack build && stack exec site clean
clean: standard-library/ChangeLog.md
@echo "Cleaning generated files for site"
@stack build && stack exec site clean
#################################################################################
@@ -119,9 +120,7 @@ list:
.PHONY: publish
publish: setup-check-rsync
@echo "Building site..."
make build
@echo "Testing site..."
make all
make test
@echo "Creating web branch..."
git fetch --all
@@ -159,13 +158,16 @@ ifeq (,$(wildcard $(PLFA_AFS_DIR)))
@exit 1
else
ifeq (,$(wildcard $(PLFA_AFS_DIR)/html))
@echo "Checkout latest version from GitHub"
git clone https://github.com/plfa/plfa.github.io.git --branch web --single-branch --depth 1 html
endif
cd $(PLFA_AFS_DIR)/html \
@echo "Checkout latest version from GitHub"
@cd $(PLFA_AFS_DIR)/html \
&& git fetch --depth 1 \
&& git reset --hard origin/web \
&& git clean -dfx
fsr setacl $(PLFA_AFS_DIR)/html system:groupwebserver rl
@echo "Setting permissions to include web server"
@fsr setacl $(PLFA_AFS_DIR)/html system:groupwebserver rl
endif

View file

@@ -4,7 +4,7 @@
font-family: 'DejaVu-mononoki-Symbola-Droid';
font-weight: normal;
font-style: normal;
src: url('../public/webfonts/DejaVu-mononoki-Symbola-Droid.woff');
src: url('../fonts/DejaVu-mononoki-Symbola-Droid.woff');
}
body {

View file

@@ -2,6 +2,6 @@ $for(parts)$
# $title$
$for(sections)$
## $title$ {#$anchor$}
$body$
$shifted_raw$
$endfor$
$endfor$

View file

@@ -15,24 +15,25 @@ FRANKENFONT := public/webfonts/DejaVu-mononoki-Symbola-Droid.woff
# Compile PLFA to an EPUB using Pandoc
#################################################################################
.PHONY: epub-build
.PHONY: epub epub-build
epub: epub-build
epub-build: $(SITE_DIR)/plfa.epub
$(SITE_DIR)/plfa.epub: \
$(EPUB_DIR)/epub.md $(EPUB_DIR)/epub.css $(RAW_DIR)/epub.xml $(FRANKENFONT) \
$(RAW_DIR)/epub.md $(EPUB_DIR)/epub.css $(RAW_DIR)/epub.xml $(FRANKENFONT) \
$(MD_FILES) $(EPUB_LUA_SCRIPTS) | setup-install-pandoc
@$(PANDOC) \
@echo "Building EPUB"
$(PANDOC) \
--strip-comments \
--css=$(EPUB_DIR)/epub.css \
--epub-embed-font=$(FRANKENFONT) \
--epub-metadata=$(RAW_DIR)/epub.xml
--epub-metadata=$(RAW_DIR)/epub.xml \
--indented-code-class=default \
--lua-filter=$(EPUB_LUA_DIR)/set-default-code-class.lua -M default-code-class=agda \
--lua-filter=$(EPUB_LUA_DIR)/remove-badges.lua -M badge-url=https://img.shields.io/badge/ \
--lua-filter=$(EPUB_LUA_DIR)/epub-clean-html.lua \
--lua-filter=$(EPUB_LUA_DIR)/single-file-links.lua \
--standalone \
--fail-if-warnings \
--toc --toc-depth=2 \
--epub-chapter-level=2 \
$< -o $@
@@ -44,7 +45,8 @@ $(SITE_DIR)/plfa.epub: \
.PHONY: epub-test
epub-test: $(SITE_DIR)/plfa.epub | setup-check-epubcheck
epubcheck $(SITE_DIR)/plfa.epub
@echo "Testing EPUB with EPUBCheck"
@epubcheck $(SITE_DIR)/plfa.epub
#################################################################################
@@ -52,7 +54,7 @@ epub-test: $(SITE_DIR)/plfa.epub | setup-check-epubcheck
#################################################################################
$(RAW_DIR)/epub.xml: $(EPUB_DIR)/epub.xml
make build
@make build
#################################################################################
@@ -61,7 +63,8 @@ $(RAW_DIR)/epub.xml: $(EPUB_DIR)/epub.xml
.PHONY: epub-clean
epub-clean:
rm -f $(SITE_DIR)/plfa.epub
@echo "Cleaning generated files for EPUB"
@rm -f $(SITE_DIR)/plfa.epub
#################################################################################

View file

@@ -1,5 +1,5 @@
-- Transforms '<ul class={something}>' into '<ul>'.
function RawBlock(el)
-- Transforms '<ul class={something}>' into '<ul>'.
el.text = el.text:gsub('%s*<%s*ul%s*class=%s*"?[%w-]+"?%s*>%s*', '<ul>')
return el
end
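-- For example (illustrative input), a raw HTML block containing
-- '  <ul class="footnotes">' is rewritten to '<ul>'; the pattern also
-- drops surrounding whitespace and tolerates an unquoted class name.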

View file

@@ -1,51 +0,0 @@
--- include-files.lua filter to include Markdown files
---
--- Copyright: © 2019–2020 Albert Krewinkel
--- Copyright: © 2020 Michael Reed
--- License: MIT – see LICENSE file for details
---
--- Created by Albert Krewinkel. Slightly modified by Michael Reed for use in
--- generating the EPUB for "Programming Language Foundations in Agda".
---
--- For documentation, see: https://github.com/pandoc/lua-filters/tree/master/include-files
-- pandoc's List type
local List = require 'pandoc.List'
--- Filter function for code blocks
function CodeBlock(cb)
-- Ignore code blocks which are not of class "include".
if not cb.classes:includes 'include' then
return
end
-- Markdown is used if this is nil.
local format = cb.attributes['format']
local shift_heading_level_by =
tonumber(cb.attributes['shift-heading-level-by'])
local blocks = List:new()
for line in cb.text:gmatch('[^\n]+') do
if line:sub(1,2) ~= '//' then
-- Read in the document at the file path specified by `line`.
local fh = io.open(line)
local doc = pandoc.read(fh:read '*a', format)
blocks:extend(doc.blocks)
fh:close()
end
end
return blocks
end
-- Apply a filter to a document.
function apply_filter(doc, filters)
div = pandoc.Div(doc.blocks)
for _, filter in pairs(filters) do
if filter.Meta then
filter.Meta(doc.meta)
end
div = pandoc.walk_block(div, filter)
end
return pandoc.Pandoc(div.content, doc.meta)
end

View file

@@ -1,57 +0,0 @@
-- Performs the following transformations on Header identifiers:
--
-- Case 1:
-- /title/: "Some Title"
-- /permalink/: /Title/ -> # Some Title {#Title}
--
-- Case 2:
-- ## Subsection Title {name=more-stuff} -> ## Subsection Title {#Title-more-stuff}
--
local identifier = nil
local title = nil
local function get_meta_info(meta)
-- Get the title.
if meta['title'] then
title = meta['title']
elseif meta['default-title'] then
title = meta['default-title']
end
title = pandoc.utils.stringify(title)
-- Get the identifier.
if meta['permalink'] then
identifier = meta['permalink'][1].c:match("^/(%w+)/$")
elseif meta['title'] then
identifier = meta['title']
elseif meta['default-title'] then
identifier = meta['default-title']
end
identifier = string.lower(pandoc.utils.stringify(identifier))
end
local function insert_title(doc)
-- Insert title in front of the blocks
if title then
header = pandoc.Header(1,title)
header.identifier = identifier
table.insert(doc.blocks,1,header)
end
return doc
end
local function change_identifier(elem)
-- Change header identifier based on metadata
if elem.t == "Header" and elem.attributes.name then
elem.identifier = identifier .. "-" .. elem.attributes.name
end
return elem
end
return {
{Meta = get_meta_info},
{Header = change_identifier},
{Pandoc = insert_title}
}

View file

@@ -14,7 +14,11 @@ function Link (el)
el.target, n = el.target:gsub("^/(%w+)/#([%w-]+)$", "#%1-%2")
-- Case 2:
if n == 0 then
el.target = el.target:gsub("^/(%w+)/$", "#%1")
el.target, n = el.target:gsub("^/(%w+)/$", "#%1")
end
-- If either Case 1 or Case 2, lowercase target:
if n ~= 0 then
el.target = string.lower(el.target)
end
return el
end
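-- For illustration, assuming a chapter with permalink '/Naturals/':
-- Case 1 rewrites '/Naturals/#sums' to '#naturals-sums', and Case 2
-- rewrites '/Naturals/' to '#naturals'. Targets matching neither case
-- (for example, external URLs) are returned unchanged.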

View file

@@ -8,6 +8,7 @@ PDF_LUA_DIR := $(PDF_DIR)/lua
MD_DIR := src
LAGDA_TEX_DIR := $(TMP_DIR)/lagda_tex
TEX_DIR := $(TMP_DIR)/tex
FRANKENFONT := $(PDF_DIR)/DejaVu-mononoki-Symbola-Droid.ttf
#################################################################################
@@ -43,7 +44,7 @@ endef
#################################################################################
PDF_LUA_SCRIPTS := $(wildcard $(PDF_LUA_DIR)/*.lua)
PDF_STATIC_FILES := $(PDF_DIR)/pdf.tex $(PDF_DIR)/DejaVu-mononoki-Symbola-Droid.ttf
PDF_STATIC_FILES := $(PDF_DIR)/pdf.tex $(FRANKENFONT)
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
LAGDA_MD_FILES := $(filter %.lagda.md,$(MD_FILES))
LAGDA_TEX_FILES := $(call LAGDA_TEX_PATH,$(LAGDA_MD_FILES))
@@ -51,13 +52,15 @@ TEX_FILES := $(call TEX_PATH,$(MD_FILES) $(RAW_DIR)/pdf.tex $(PDF_STATIC_
#################################################################################
# Compile PLFA to a PDF via Pandoc and Latexmk
# Compile PLFA to a PDF using Pandoc and Latexmk
#################################################################################
.PHONY: pdf-build
.PHONY: pdf pdf-build
pdf: pdf-build
pdf-build: $(SITE_DIR)/plfa.pdf
$(SITE_DIR)/plfa.pdf: $(TEX_FILES)
@echo "Building PDF"
@cd $(TEX_DIR) && latexmk -pdf -lualatex -use-make -halt-on-error pdf.tex
@cp $(TEX_DIR)/pdf.pdf $(SITE_DIR)/plfa.pdf
@@ -144,11 +147,11 @@ $(foreach lagda_md_file,\
#################################################################################
$(RAW_DIR)/pdf.tex: $(PDF_DIR)/pdf.tex $(MD_DIR)/plfa/toc.metadata
make build
@make build
# Generated by Hakyll
$(RAW_DIR)/plfa/backmatter/acknowledgements.md: $(MD_DIR)/plfa/backmatter/acknowledgements.md
make build
@make build
#################################################################################
@@ -157,4 +160,5 @@ $(RAW_DIR)/plfa/backmatter/acknowledgements.md: $(MD_DIR)/plfa/backmatter/acknow
.PHONY: pdf-clean
pdf-clean:
@echo "Cleaning generated files for PDF"
@rm -rf $(SITE_DIR)/plfa.pdf $(LAGDA_TEX_DIR) $(TEX_DIR)

View file

@@ -29,7 +29,7 @@
\setcounter{secnumdepth}{0}
% Set the global text color:
\definecolor{textcolor}{111111}
\definecolor{textcolor}{HTML}{111111}
\color{textcolor}
% Change background color for inline code in markdown files.

View file

@@ -5,6 +5,7 @@
module Hakyll.Web.Agda
( agdaCompilerWith
, agdaVerbosityQuiet
, compileAgdaWith
, CommandLineOptions(..)
, PragmaOptions(..)
, defaultAgdaOptions
@@ -50,8 +51,12 @@ defaultAgdaPragmaOptions = defaultPragmaOptions
-- |Compile literate Agda to HTML
agdaCompilerWith :: CommandLineOptions -> Compiler (Item String)
agdaCompilerWith agdaOptions = cached "Hakyll.Web.Agda.agdaCompilerWith" $ do
item <- getResourceBody
agdaCompilerWith agdaOptions =
getResourceBody >>= compileAgdaWith agdaOptions
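-- Exposing the item-level compiler is what lets a caller preprocess or
-- snapshot the resource before Agda runs; a minimal sketch of the intended
-- use (mirroring the site builder below):
--
--   getResourceBody >>= saveSnapshot "raw" >>= compileAgdaWith agdaOptions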
-- |Compile literate Agda to HTML
compileAgdaWith :: CommandLineOptions -> Item String -> Compiler (Item String)
compileAgdaWith agdaOptions item = cached "Hakyll.Web.Agda.agdaCompilerWith" $ do
let agdaPath = toFilePath (itemIdentifier item)
let moduleName = agdaModule (itemBody item)
TmpFile tmpPath <- newTmpFile ".lock"
@@ -120,7 +125,7 @@ readStdlibVersion stdlibPath = do
changelog <- T.readFile changelogPath
let versionLine = head (T.lines changelog)
case T.stripPrefix "Version " versionLine of
Just versionStr -> return . T.unpack $ "v" <> T.strip versionStr
Just versionStr -> return $ T.unpack ("v" <> T.strip versionStr)
Nothing -> error $ printf "Could not read version from '%s'" changelogPath
-- |Fix references to the Agda standard library.

View file

@@ -3,6 +3,7 @@
module Hakyll.Web.Template.Context.Derived where
import Hakyll
import Text.Printf
addDerivedField
:: String
@@ -20,3 +21,13 @@ addDerivedField key derive ctx = Context $ \k a i ->
ListField itemCtx items -> ListField (addDerivedField key derive itemCtx) items
-- Otherwise, simply return the field.
otherFld -> otherFld
-- Retrieve a String from the context
getString :: String -> Context a -> [String] -> Item a -> Compiler String
getString key ctx a i = do
fld <- unContext ctx key a i
case fld of
StringField str -> return str
_ -> fail $ printf "Key '%s' does not return a String" key
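-- A minimal usage sketch (mirroring deriveAnchor in the site builder):
--
--   deriveAnchor ctx a i = do
--     permalink <- getString "permalink" ctx a i
--     StringField <$> anchor permalink
--
-- A key that resolves to anything other than a StringField is reported by
-- name, which makes template errors easier to trace.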

View file

@@ -1,4 +1,3 @@
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad ((<=<), forM_)
@@ -7,6 +6,8 @@ import Data.Char (toLower)
import Data.Functor ((<&>))
import Data.List (isPrefixOf, stripPrefix)
import qualified Data.Text as T
import qualified Data.Text.ICU as RE
import qualified Data.Text.ICU.Replace as T
import qualified Data.Yaml as Y
import Hakyll
import Hakyll.Web.Agda
@@ -21,8 +22,6 @@ import qualified Text.CSL as CSL
import qualified Text.CSL.Pandoc as CSL (processCites)
import Text.Pandoc (Pandoc(..), ReaderOptions(..), WriterOptions(..), Extension(..))
import qualified Text.Pandoc as Pandoc
import Text.Pandoc.Definition (Block(..))
import Text.Pandoc.Walk (walk)
import Text.Printf
--------------------------------------------------------------------------------
@@ -79,6 +78,7 @@ siteSectionContext :: Context String
siteSectionContext = mconcat
[ titlerunningField
, subtitleField
, addShiftedBody "raw" (contentField "raw" "raw")
, siteContext
]
@@ -133,67 +133,6 @@ sassOptions = defaultSassOptions
}
-- Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
--
-- NOTE: This logic is partially duplicated in book/pdf.mk:TEX_PATH.
--
-- NOTE: This function assumes pdf.tex will be at TEX_DIR/.
--
addTexPath :: Context a -> Context a
addTexPath = addDerivedField "tex_path" deriveTexPath
where
deriveTexPath :: Context a -> [String] -> Item a -> Compiler ContextField
deriveTexPath ctx a i = do
fld <- unContext ctx "include" a i
case fld of
StringField includePath -> return $ StringField (texPath includePath)
_ -> fail "Key 'include' does not return a String"
texPath :: FilePath -> FilePath
texPath fnDotMd
| fnDotMd == "README.md" = "plfa/frontmatter/README.tex"
| any (`isPrefixOf` fnDotMd) ["src/", "book/"] = dropTopDirectory (replaceExtensions fnDotMd ".tex")
| otherwise = error ("textPath: cannot map " <> fnDotMd)
dropTopDirectory :: FilePath -> FilePath
dropTopDirectory = joinPath . tail . splitPath
-- Add an anchor based on the permalink, to be used as the header id.
addAnchor :: Context a -> Context a
addAnchor = addDerivedField "anchor" deriveAnchor
where
deriveAnchor :: Context a -> [String] -> Item a -> Compiler ContextField
deriveAnchor ctx a i = do
fld <- unContext ctx "permalink" a i
case fld of
StringField permalink -> StringField <$> anchor permalink
_ -> fail "Key 'permalink' does not return a String"
anchor :: String -> Compiler String
anchor permalink =
let maybeAnchor = map toLower <$> (stripSuffix "/" <=< stripPrefix "/") permalink
in maybe (fail $ printf "Key 'permalink' malformed '%s'" permalink) return maybeAnchor
stripSuffix :: String -> String -> Maybe String
stripSuffix suf str = reverse <$> stripPrefix (reverse suf) (reverse str)
-- Add the metadata back to the file as a Yaml header.
addMetadata :: Item String -> Compiler (Item String)
addMetadata item = do
metadata <- getMetadata (itemIdentifier item)
let yaml = "---\n" <> BS.unpack (Y.encode metadata) <> "---\n\n"
withItemBody (\body -> return (yaml <> body)) item
-- Shift all headers by a given value.
shiftHeadersBy :: Int -> Pandoc -> Pandoc
shiftHeadersBy n = walk shiftHeader
where
shiftHeader :: Block -> Block
shiftHeader (Header level attr inlines) = Header (level + n) attr inlines
shiftHeader block = block
--------------------------------------------------------------------------------
-- Build site
--------------------------------------------------------------------------------
@@ -216,10 +155,10 @@ main = do
csl <- load cslFileName
bib <- load bibFileName
getResourceBody
>>= saveSnapshot "raw"
>>= readMarkdownWith siteReaderOptions
>>= processCites csl bib
<&> writeHTML5With siteWriterOptions
>>= saveSnapshot "content"
>>= loadAndApplyTemplate "templates/page.html" siteSectionContext
>>= loadAndApplyTemplate "templates/default.html" siteSectionContext
>>= prettifyUrls
@@ -229,13 +168,14 @@ pageWithAgdaCompiler opts = do
pageWithAgdaCompiler opts = do
csl <- load cslFileName
bib <- load bibFileName
agdaCompilerWith opts
getResourceBody
>>= saveSnapshot "raw"
>>= compileAgdaWith opts
>>= withItemBody (return . withUrls fixStdlibLink)
>>= withItemBody (return . withUrls fixLocalLink)
>>= readMarkdownWith siteReaderOptions
>>= processCites csl bib
<&> writeHTML5With siteWriterOptions
>>= saveSnapshot "content"
>>= loadAndApplyTemplate "templates/page.html" siteSectionContext
>>= loadAndApplyTemplate "templates/default.html" siteSectionContext
>>= prettifyUrls
@@ -271,37 +211,13 @@ main = do
route permalinkRoute
compile $ getResourceBody
>>= applyAsTemplate acknowledgementsContext
>>= saveSnapshot "raw"
>>= readMarkdownWith siteReaderOptions
<&> writeHTML5With siteWriterOptions
>>= loadAndApplyTemplate "templates/page.html" siteContext
>>= loadAndApplyTemplate "templates/default.html" siteContext
>>= prettifyUrls
-- Compile raw version of acknowledgements used in constructing the PDF and EPUB
match "src/plfa/backmatter/acknowledgements.md" $ version "raw" $ do
route $ gsubRoute "src/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate acknowledgementsContext
>>= addMetadata
-- Compile raw version of index used in constructing the PDF
match "book/pdf.tex" $ do
route $ gsubRoute "book/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate (addTexPath (tableOfContentsContext siteSectionContext))
-- Compile raw version of index used in constructing the EPUB
match "book/epub.md" $ do
route $ gsubRoute "book/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate (tableOfContentsContext siteSectionContext)
-- Compile metadata XML used in constructing the EPUB
match "book/epub.xml" $ version "raw" $ do
route $ constRoute "raw/epub.xml"
compile $ getResourceBody
>>= applyAsTemplate siteContext
match "authors/*.metadata" $
compile getResourceBody
@@ -326,7 +242,7 @@ main = do
>>= readMarkdownWith siteReaderOptions
>>= processCites csl bib
<&> writeHTML5With siteWriterOptions
>>= saveSnapshot "content"
>>= saveSnapshot "content" -- used for teaser
>>= loadAndApplyTemplate "templates/post.html" postContext
>>= loadAndApplyTemplate "templates/default.html" siteContext
>>= prettifyUrls
@@ -391,6 +307,7 @@ main = do
csses <- loadAll ("css/*.css" .||. "css/*.scss")
makeItem $ unlines $ map itemBody csses
-- Copy versions
let versions = ["19.08", "20.07"]
forM_ versions $ \v -> do
@@ -407,6 +324,33 @@ main = do
compile copyFileCompiler
-- Compile raw version of acknowledgements used in constructing the PDF
match "src/plfa/backmatter/acknowledgements.md" $ version "raw" $ do
route $ gsubRoute "src/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate acknowledgementsContext
>>= restoreMetadata
-- Compile raw version of index used in constructing the PDF
match "book/pdf.tex" $ version "raw" $ do
route $ gsubRoute "book/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate (addTexPath (tableOfContentsContext siteSectionContext))
-- Compile raw version of index used in constructing the EPUB
match "book/epub.md" $ version "raw" $ do
route $ gsubRoute "book/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate (tableOfContentsContext siteSectionContext)
>>= loadAndApplyTemplate "templates/metadata.md" siteContext
-- Compile metadata XML used in constructing the EPUB
match "book/epub.xml" $ version "raw" $ do
route $ gsubRoute "book/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate siteContext
--------------------------------------------------------------------------------
-- Custom readers and writers
--------------------------------------------------------------------------------
@@ -451,9 +395,90 @@ contentField :: String -> Snapshot -> Context String
contentField key snapshot = field key $ \item ->
itemBody <$> loadSnapshot (itemIdentifier item) snapshot
--------------------------------------------------------------------------------
-- Relativise URLs and strip "index.html" suffixes
--------------------------------------------------------------------------------
prettifyUrls :: Item String -> Compiler (Item String)
prettifyUrls = relativizeUrls <=< withItemBody (return . stripIndexFile)
--------------------------------------------------------------------------------
-- Text wrangling for EPUB and PDF
--------------------------------------------------------------------------------
-- Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
--
-- NOTE: This logic is partially duplicated in book/pdf.mk:TEX_PATH.
--
-- NOTE: This function assumes pdf.tex will be at TEX_DIR/.
--
addTexPath :: Context a -> Context a
addTexPath = addDerivedField "tex_path" deriveTexPath
where
deriveTexPath :: Context a -> [String] -> Item a -> Compiler ContextField
deriveTexPath ctx a i = do
includePath <- getString "include" ctx a i
return $ StringField (texPath includePath)
texPath :: FilePath -> FilePath
texPath fnDotMd
| fnDotMd == "README.md" = "plfa/frontmatter/README.tex"
| any (`isPrefixOf` fnDotMd) ["src/", "book/"] = dropTopDirectory (replaceExtensions fnDotMd ".tex")
| otherwise = error ("textPath: cannot map " <> fnDotMd)
dropTopDirectory :: FilePath -> FilePath
dropTopDirectory = joinPath . tail . splitPath
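-- A worked example (the path is an illustrative assumption about the
-- source layout):
--
--   texPath "src/plfa/part1/Naturals.lagda.md" == "plfa/part1/Naturals.tex"
--
-- i.e. the extension chain is replaced by '.tex' and the leading 'src/'
-- (or 'book/') component is dropped, matching TEX_PATH in book/pdf.mk.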
-- Add an anchor based on the permalink, to be used as the header id.
addAnchor :: Context a -> Context a
addAnchor = addDerivedField "anchor" deriveAnchor
where
deriveAnchor :: Context a -> [String] -> Item a -> Compiler ContextField
deriveAnchor ctx a i = do
permalink <- getString "permalink" ctx a i
StringField <$> anchor permalink
anchor :: String -> Compiler String
anchor permalink =
let maybeAnchor = map toLower <$> (stripSuffix "/" <=< stripPrefix "/") permalink
in maybe (fail $ printf "Key 'permalink' malformed '%s'" permalink) return maybeAnchor
stripSuffix :: String -> String -> Maybe String
stripSuffix suf str = reverse <$> stripPrefix (reverse suf) (reverse str)
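-- For illustration, assuming a chapter whose permalink is '/Naturals/':
-- the derived anchor is "naturals", the EPUB table of contents emits the
-- section header as '## $title$ {#$anchor$}', and single-file-links.lua
-- rewrites internal links such as '/Naturals/' to '#naturals' to match.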
-- Add a variant of 'key' where all headers have been shifted by 1.
addShiftedBody :: String -> Context a -> Context a
addShiftedBody key = addDerivedField ("shifted_" <> key) deriveShiftedBody
where
deriveShiftedBody :: Context a -> [String] -> Item a -> Compiler ContextField
deriveShiftedBody ctx a i = do
body <- getString key ctx a i
return $ StringField (shiftHeadersBy body)
-- Shift all headers by a given value.
--
-- NOTE: This is the /proper/ implementation of shift headers.
-- In practice, we use the fast one, which uses regular
-- expressions and only works on Markdown '#' headers.
--
-- shiftHeadersBy :: Int -> Pandoc -> Pandoc
-- shiftHeadersBy n = walk shiftHeader
-- where
-- shiftHeader :: Block -> Block
-- shiftHeader (Header level attr inlines) = Header (level + n) attr inlines
-- shiftHeader block = block
--
shiftHeadersBy :: String -> String
shiftHeadersBy body = T.unpack (T.replaceAll re "#$1" (T.pack body))
where
re = RE.regex [RE.Multiline] "^(#+)"
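-- A rough sketch of the effect (illustrative input, not taken from the book):
--
--   shiftHeadersBy "# Naturals\n\ntext\n\n## Exercise"
--     == "## Naturals\n\ntext\n\n### Exercise"
--
-- This assumes no non-header line in the raw body starts with '#' in
-- column zero, since the regular expression cannot tell such lines apart.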
-- |Add the original metadata block back to the file.
restoreMetadata :: Item String -> Compiler (Item String)
restoreMetadata item = do
metadata <- getMetadata (itemIdentifier item)
let yaml = "---\n" <> BS.unpack (Y.encode metadata) <> "---\n\n"
withItemBody (\body -> return (yaml <> body)) item
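-- For instance, an item whose metadata carries a 'title' field would have
-- its compiled body prefixed with roughly the following (the exact key
-- order depends on Data.Yaml's encoding):
--
--   ---
--   title: Acknowledgements
--   ---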

View file

@@ -27,6 +27,8 @@ common shared-properties
, pandoc-types >=1.20 && <1.23
, pandoc-citeproc >=0.17 && <0.18
, text >=1.2 && <1.3
, text-icu >=0.7.1 && <0.8
, text-regex-replace >=0.1 && <0.2
, unordered-containers >=0.2 && <0.3
, vector >=0.12 && <0.13
, yaml >=0.11 && <0.12
@@ -44,8 +46,6 @@ library
build-depends: Agda ==2.6.1.3
, hsass >=0.8 && <0.9
, regex-tdfa >=1.3 && <1.4
, text-icu >=0.7.1 && <0.8
, text-regex-replace >=0.1 && <0.2
executable site
import: shared-properties