Fixed PDF generation to use toc.metadata.

Wen Kokke 2021-08-24 15:00:50 +01:00
parent 2987995c7a
commit e8df980d4b
11 changed files with 233 additions and 146 deletions


@@ -4,6 +4,7 @@
#################################################################################
SITE_DIR := _site
RAW_DIR := $(SITE_DIR)/raw
CACHE_DIR := _cache
TMP_DIR := $(CACHE_DIR)/tmp


@@ -66,6 +66,6 @@ src/plfa/part2/Substitution.lagda.md
## Acknowledgements
``` {.include shift-heading-level-by=1}
_site/acknowledgements.md
_site/raw/plfa/backmatter/acknowledgements.md
src/plfa/backmatter/Fonts.lagda.md
```


@@ -7,8 +7,8 @@ MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
# Compile PLFA to an EPUB
.PHONY: epub
epub: $(SITE_DIR)/plfa.epub
.PHONY: epub-build
epub-build: $(SITE_DIR)/plfa.epub
$(SITE_DIR)/plfa.epub: book/epub.md book/epub.css $(MD_FILES) $(EPUB_LUA_SCRIPTS) | setup-install-pandoc
@$(PANDOC) \


@@ -5,32 +5,50 @@ MD_DIR := src
LAGDA_TEX_DIR := $(TMP_DIR)/lagda_tex
TEX_DIR := $(TMP_DIR)/tex
# Convert MD_DIR/%.md to its modified source path
define MD_PATH
$(patsubst $(MD_DIR)/%/acknowledgements.md,$(RAW_DIR)/%/acknowledgements.md,\
$(patsubst $(PDF_DIR)/pdf.tex,$(RAW_DIR)/pdf.tex,$(1)))
endef
# Convert MD_DIR/%.lagda.md to LAGDA_TEX_DIR/%.lagda.tex
define LAGDA_TEX_PATH
$(patsubst $(MD_DIR)/%.lagda.md,$(LAGDA_TEX_DIR)/%.lagda.tex,$(1))
endef
# Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
#
# NOTE: This logic is partially duplicated in hs/Main.hs:addTexPath:texPath.
#
define TEX_PATH
$(patsubst $(PDF_DIR)/%,$(TEX_DIR)/%,\
$(patsubst $(RAW_DIR)/%,$(TEX_DIR)/%,\
$(patsubst $(PDF_DIR)/%,$(TEX_DIR)/%,\
$(patsubst README.md,$(TEX_DIR)/plfa/frontmatter/README.tex,\
$(patsubst $(MD_DIR)/%.md,$(TEX_DIR)/%.tex,\
$(patsubst $(MD_DIR)/%.lagda.md,$(TEX_DIR)/%.tex,$(1)))))
$(patsubst $(MD_DIR)/%.lagda.md,$(TEX_DIR)/%.tex,$(1))))))
endef
# List source and intermediate files
PDF_LUA_SCRIPTS := $(wildcard $(PDF_LUA_DIR)/*.lua)
PDF_STATIC_FILES := book/pdf.tex book/DejaVu-mononoki-Symbola-Droid.ttf
PDF_STATIC_FILES := $(PDF_DIR)/pdf.tex $(PDF_DIR)/DejaVu-mononoki-Symbola-Droid.ttf
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
MD_FILES := $(filter-out %Fonts.lagda.md,$(MD_FILES))
LAGDA_MD_FILES := $(filter %.lagda.md,$(MD_FILES))
LAGDA_TEX_FILES := $(call LAGDA_TEX_PATH,$(LAGDA_MD_FILES))
TEX_FILES := $(call TEX_PATH,$(MD_FILES) $(PDF_STATIC_FILES))
TEX_FILES := $(call TEX_PATH,$(MD_FILES) $(RAW_DIR)/pdf.tex $(PDF_STATIC_FILES))
# Generated by Hakyll
$(RAW_DIR)/pdf.tex: $(PDF_DIR)/pdf.tex $(MD_DIR)/plfa/toc.metadata
make build
# Generated by Hakyll
$(RAW_DIR)/plfa/backmatter/acknowledgements.md: $(MD_DIR)/plfa/backmatter/acknowledgements.md
make build
# Compile PLFA to a PDF
.PHONY: pdf
pdf: $(SITE_DIR)/plfa.pdf
.PHONY: pdf-build
pdf-build: $(SITE_DIR)/plfa.pdf
$(SITE_DIR)/plfa.pdf: $(TEX_FILES)
@cd $(TEX_DIR) && latexmk -pdf -lualatex -use-make -halt-on-error pdf.tex
@@ -58,7 +76,6 @@ endef
define MK_MD2TEX_RULE
src := $(1)
dst := $(2)
ttl := $(3)
$$(dst): $$(src) $(PDF_LUA_SCRIPTS) | setup-install-pandoc
@echo "Compile $$< to $$@"
@mkdir -p '$$(@D)'
@@ -68,7 +85,7 @@ $$(dst): $$(src) $(PDF_LUA_SCRIPTS) | setup-install-pandoc
--lua-filter=$(PDF_LUA_DIR)/remove-badges.lua -M badge-url=https://img.shields.io/badge/ \
--lua-filter=$(PDF_LUA_DIR)/latex-render-codeblocks.lua -M unchecked-files=README.md \
--lua-filter=$(PDF_LUA_DIR)/single-file-links.lua \
--lua-filter=$(PDF_LUA_DIR)/single-file-headers.lua -M default-title=$$(ttl) \
--lua-filter=$(PDF_LUA_DIR)/single-file-headers.lua \
$$< -o $$@
endef
@@ -85,14 +102,14 @@ endef
$(foreach static_file,\
$(PDF_STATIC_FILES),\
$(eval $(call MK_COPYSTATIC_RULE,\
$(static_file),\
$(call MD_PATH,$(static_file)),\
$(call TEX_PATH,$(static_file)))))
# Compile .md files (from MD_DIR/%.md to TEX_DIR/%.tex)
$(foreach md_file,\
$(filter-out %acknowledgements.md %.lagda.md,$(MD_FILES)),\
$(filter-out %.lagda.md,$(MD_FILES)),\
$(eval $(call MK_MD2TEX_RULE,\
$(md_file),\
$(call MD_PATH,$(md_file)),\
$(call TEX_PATH,$(md_file)))))
# Compile .lagda.md files (from MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex)
@@ -102,12 +119,6 @@ $(foreach lagda_md_file,\
$(lagda_md_file),\
$(call LAGDA_TEX_PATH,$(lagda_md_file)))))
# Compile acknowledgements (from SITE_DIR/acknowledgements.md to TEX_DIR/acknowledgements.tex)
$(eval $(call MK_MD2TEX_RULE,\
$(SITE_DIR)/acknowledgements.md,\
$(TEX_DIR)/plfa/backmatter/acknowledgements.tex,\
Acknowledgements))
# Compile .lagda.tex files (from LAGDA_TEX_DIR/%.lagda.tex to TEX_DIR/%.tex)
$(foreach lagda_md_file,\
$(LAGDA_MD_FILES),\


@@ -123,54 +123,45 @@
\null\endgroup
\end{titlepage}
\frontmatter
$for(parts)$
% Open front, main, and back matter sections:
$if(frontmatter)$
\frontmatter%
\setcounter{tocdepth}{0}
\tableofcontents
\tableofcontents%
\setcounter{tocdepth}{1}
\input{plfa/frontmatter/dedication}
\input{plfa/frontmatter/preface}
\input{plfa/frontmatter/README}
$endif$
$if(mainmatter)$
\mainmatter%
\part{Part 1: Logical Foundations}
\input{plfa/part1/Naturals}
\input{plfa/part1/Induction}
\input{plfa/part1/Relations}
\input{plfa/part1/Equality}
\input{plfa/part1/Isomorphism}
\input{plfa/part1/Connectives}
\input{plfa/part1/Negation}
\input{plfa/part1/Quantifiers}
\input{plfa/part1/Decidable}
\input{plfa/part1/Lists}
\part{Part 2: Programming Language Foundations}
\input{plfa/part2/Lambda}
\input{plfa/part2/Properties}
\input{plfa/part2/DeBruijn}
\input{plfa/part2/More}
\input{plfa/part2/Bisimulation}
\input{plfa/part2/Inference}
\input{plfa/part2/Untyped}
\input{plfa/part2/Confluence}
\input{plfa/part2/BigStep}
\part{Part 3: Denotational Semantics}
\input{plfa/part3/Denotational}
\input{plfa/part3/Compositional}
\input{plfa/part3/Soundness}
\input{plfa/part3/Adequacy}
\input{plfa/part3/ContextualEquivalence}
\cleardoublepage%
\phantomsection%
$endif$
$if(backmatter)$
\appendix
\addcontentsline{toc}{part}{Appendices}
$endif$
% Only print title for main and back matter:
$if(frontmatter)$
% NOTE: Front matter titles are inserted via book/lua/single-file-headers.lua.
$else$
\part{$title$}
$endif$
% Include each section.
$for(sections)$
\input{$tex_path$}
$endfor$
% Close front, main, and back matter sections:
$if(frontmatter)$
$endif$
$if(mainmatter)$
\cleardoublepage%
\phantomsection%
$endif$
$if(backmatter)$
$endif$
$endfor$
\input{plfa/part2/Substitution}
\input{plfa/backmatter/acknowledgements}
\end{document}


@@ -0,0 +1,22 @@
{-# LANGUAGE RankNTypes #-}
module Hakyll.Web.Template.Context.Derived where
import Hakyll
addDerivedField
:: String
-> (forall b. Context b -> [String] -> Item b -> Compiler ContextField)
-> Context a
-> Context a
addDerivedField key derive ctx = Context $ \k a i ->
if k == key then
derive ctx a i
else do
fld <- unContext ctx k a i
return $
case fld of
-- If the field is a list, recursively derive the field for any list items.
ListField itemCtx items -> ListField (addDerivedField key derive itemCtx) items
-- Otherwise, simply return the field.
otherFld -> otherFld
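
To make the intent of addDerivedField concrete, here is a hypothetical usage sketch (not part of this commit): it derives an "upper_title" field from an existing "title" field, in the same way that addTexPath below derives "tex_path" from "include". The module and field names are illustrative only.

```haskell
{-# LANGUAGE RankNTypes #-}
-- Hypothetical usage sketch, not part of this commit.
module UpperTitle where

import Data.Char (toUpper)
import Hakyll
import Hakyll.Web.Template.Context.Derived (addDerivedField)

-- Derive an "upper_title" field from the existing "title" field,
-- mirroring how addTexPath (in hs/Main.hs below) derives "tex_path"
-- from "include".
upperTitleContext :: Context String -> Context String
upperTitleContext = addDerivedField "upper_title" deriveUpper
  where
    deriveUpper :: Context b -> [String] -> Item b -> Compiler ContextField
    deriveUpper ctx a i = do
      fld <- unContext ctx "title" a i
      case fld of
        StringField t -> return (StringField (map toUpper t))
        _             -> fail "Key 'title' does not return a String"
```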


@@ -1,3 +1,5 @@
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Web.Template.Context.Metadata where
import Control.Monad ((<=<))
@@ -8,21 +10,43 @@ import qualified Data.Vector as V
import Hakyll
import Text.Printf (printf)
-- |Create a context from a JSON object which loads the included file and uses it to generate a context.
includeContext :: Context String -> Context Object
includeContext ctx = Context $ \k a i -> do
let o = itemBody i
v <- lookupObject "include" o
identifier <- fromFilePath <$> toString v
unContext (ctx <> metadataContext ctx) k a =<< load identifier
-- |Create a Context based on a JSON Object.
objectContext :: Context String -> Context Object
objectContext ctx = Context $ \k _ i ->
let o = itemBody i in
case H.lookup (T.pack k) o of
Just v -> decodeValue ctx v
Nothing -> fail $ printf "Key '%s' undefined in context '%s'" k (show o)
objectContext ctx = Context $ \k _ i -> do
let o = itemBody i
decodeValue ctx =<< lookupObject k o
isObject :: Value -> Bool
isObject (Object _) = True
isObject _ = False
-- |Decode a JSON Value to a context field.
decodeValue :: Context String -> Value -> Compiler ContextField
decodeValue ctx (Array a) = do
objs <- mapM (makeItem <=< toObject) (V.toList a)
return $ ListField (includeContext ctx <> objectContext ctx) objs
decodeValue _ctx (String s) = return . StringField $ T.unpack s
decodeValue _ctx (Number n) = return . StringField $ show n
decodeValue _ctx (Bool b) = return . StringField $ show b
decodeValue _ctx v = fail $ printf "Unsupported value '%s'" (show v)
isString :: Value -> Bool
isString (String _) = True
isString _ = False
-- |Create a Context based on the Metadata.
metadataContext :: Context String -> Context String
metadataContext ctx = Context $ \k a i -> do
let identifier = itemIdentifier i
metadata <- getMetadata identifier
item <- makeItem metadata
unContext (objectContext ctx) k a item
lookupObject :: String -> Object -> Compiler Value
lookupObject k o = maybe ifNotFound ifFound (H.lookup (T.pack k) o)
where
ifFound = return
ifNotFound = fail $ printf "Key '%s' undefined in context '%s'" k (show o)
toObject :: MonadFail m => Value -> m Object
toObject (Object o) = return o
@@ -31,23 +55,3 @@ toObject v = fail $ printf "Not an object '%s'" (show v)
toString :: MonadFail m => Value -> m String
toString (String s) = return (T.unpack s)
toString v = fail $ printf "Not a string '%s'" (show v)
-- |Decode a JSON Value to a context field.
decodeValue :: Context String -> Value -> Compiler ContextField
decodeValue ctx (Array a)
| V.all isObject a = do
objs <- mapM (makeItem <=< toObject) (V.toList a)
return $ ListField (objectContext ctx) objs
| V.all isString a = do
items <- mapM (load . fromFilePath <=< toString) (V.toList a)
return $ ListField ctx items
decodeValue _ (String s) = return . StringField $ T.unpack s
decodeValue _ (Number n) = return . StringField $ show n
decodeValue _ (Bool b) = return . StringField $ show b
decodeValue _ v = fail $ printf "Unsupported value '%s'" (show v)
-- |Create a Context based on the Metadata.
metadataContext :: Context String -> Context String
metadataContext ctx = Context $ \k a i ->
unContext (objectContext ctx) k a <=< makeItem <=< getMetadata $ itemIdentifier i
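
For intuition, a small standalone sketch of what a toc.metadata section entry looks like once parsed, and how the lookup helpers above project it. It assumes the aeson-1.x HashMap-backed Object that the code above manipulates, and inlines IO-specialised copies of toObject/toString so it runs on its own; everything here is illustrative, not part of the commit.

```haskell
{-# LANGUAGE OverloadedStrings #-}
-- Illustrative sketch only, not part of this commit. Assumes aeson 1.x,
-- where Object is a HashMap Text Value (as in the H.lookup calls above).
module TocEntrySketch where

import Data.Aeson (Value (..))
import qualified Data.HashMap.Strict as H
import qualified Data.Text as T

-- Roughly what `- include: src/plfa/part1/Naturals.lagda.md` in
-- src/plfa/toc.metadata parses to.
entry :: Value
entry = Object (H.singleton "include" (String "src/plfa/part1/Naturals.lagda.md"))

-- IO-specialised copies of the helpers above, so the sketch runs standalone.
toObject :: Value -> IO (H.HashMap T.Text Value)
toObject (Object o) = return o
toObject v          = fail ("Not an object: " ++ show v)

toString :: Value -> IO String
toString (String s) = return (T.unpack s)
toString v          = fail ("Not a string: " ++ show v)

main :: IO ()
main = do
  o       <- toObject entry
  v       <- maybe (fail "Key 'include' undefined") return (H.lookup "include" o)
  include <- toString v
  putStrLn include  -- prints src/plfa/part1/Naturals.lagda.md
```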


@@ -3,15 +3,17 @@
import Control.Monad ((<=<), forM_)
import Data.Functor ((<&>))
import Data.List (isPrefixOf)
import qualified Data.Text as T
import Hakyll
import Hakyll.Web.Agda
import Hakyll.Web.Routes.Permalink
import Hakyll.Web.Template.Numeric
import Hakyll.Web.Template.Context.Derived
import Hakyll.Web.Template.Context.Metadata
import Hakyll.Web.Template.Context.Title
import Hakyll.Web.Sass
import System.FilePath ((</>), takeDirectory)
import System.FilePath ((</>), takeDirectory, replaceExtensions, splitPath, joinPath)
import qualified Text.CSL as CSL
import qualified Text.CSL.Pandoc as CSL (processCites)
import Text.Pandoc (Pandoc(..), ReaderOptions(..), WriterOptions(..), Extension(..))
@@ -76,7 +78,8 @@ siteSectionContext = mconcat
tableOfContentsContext :: Context String -> Context String
tableOfContentsContext ctx = Context $ \k a _ -> do
m <- makeItem <=< getMetadata $ "src/plfa/toc.metadata"
metadata <- getMetadata "src/plfa/toc.metadata"
m <- makeItem metadata
unContext (objectContext ctx) k a m
acknowledgementsContext :: Context String
@@ -124,6 +127,32 @@ sassOptions = defaultSassOptions
}
-- Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
--
-- NOTE: This logic is partially duplicated in book/pdf.mk:TEX_PATH.
--
-- NOTE: This function assumes pdf.tex will be at TEX_DIR/.
--
addTexPath :: Context a -> Context a
addTexPath = addDerivedField "tex_path" deriveTexPath
where
deriveTexPath :: Context a -> [String] -> Item a -> Compiler ContextField
deriveTexPath ctx a i = do
fld <- unContext ctx "include" a i
case fld of
StringField includePath -> return $ StringField (texPath includePath)
_ -> fail "Key 'include' does not return a String"
texPath :: FilePath -> FilePath
texPath fnDotMd
| fnDotMd == "README.md" = "plfa/frontmatter/README.tex"
| any (`isPrefixOf` fnDotMd) ["src/", "book/"] = dropTopDirectory (replaceExtensions fnDotMd ".tex")
| otherwise = error ("textPath: cannot map " <> fnDotMd)
dropTopDirectory :: FilePath -> FilePath
dropTopDirectory = joinPath . tail . splitPath
--------------------------------------------------------------------------------
-- Build site
--------------------------------------------------------------------------------
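
As a sanity check on the texPath helper in the hunk above, here is a self-contained sketch that reuses the same logic and prints the mappings it is expected to produce; the example inputs are illustrative, and this program is not part of the commit.

```haskell
-- Standalone sketch, not part of this commit: exercises the same mapping
-- as addTexPath's texPath above on a few illustrative inputs.
module TexPathSketch where

import Data.List (isPrefixOf)
import System.FilePath (joinPath, replaceExtensions, splitPath)

texPath :: FilePath -> FilePath
texPath fnDotMd
  | fnDotMd == "README.md" = "plfa/frontmatter/README.tex"
  | any (`isPrefixOf` fnDotMd) ["src/", "book/"] =
      dropTopDirectory (replaceExtensions fnDotMd ".tex")
  | otherwise = error ("texPath: cannot map " ++ fnDotMd)
  where
    dropTopDirectory = joinPath . tail . splitPath

main :: IO ()
main = mapM_ (putStrLn . showMapping)
  [ "README.md"                         -- -> plfa/frontmatter/README.tex
  , "src/plfa/part1/Naturals.lagda.md"  -- -> plfa/part1/Naturals.tex
  , "book/pdf.tex"                      -- -> pdf.tex
  ]
  where
    showMapping fp = fp ++ " -> " ++ texPath fp
```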
@@ -207,12 +236,24 @@ main = do
>>= loadAndApplyTemplate "templates/default.html" siteContext
>>= prettifyUrls
match "src/plfa/backmatter/acknowledgements.md" $ version "native" $ do
route $ constRoute "versions/native/acknowledgements.native"
-- Compile raw version of acknowledgements used in constructing the PDF and EPUB
match "src/plfa/backmatter/acknowledgements.md" $ version "raw" $ do
route $ gsubRoute "src/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate acknowledgementsContext
>>= readMarkdownWith siteReaderOptions
<&> writeNativeWith siteWriterOptions
>>= loadAndApplyTemplate "templates/metadata.md" siteContext
-- Compile raw version of index used in constructing the PDF
match "book/pdf.tex" $ do
route $ gsubRoute "book/" (const "raw/")
compile $ getResourceBody
>>= applyAsTemplate (addTexPath (tableOfContentsContext siteSectionContext))
-- Compile metadata XML used in constructing the EPUB
match "book/epub.xml" $ version "raw" $ do
route $ constRoute "raw/epub.xml"
compile $ getResourceBody
>>= applyAsTemplate siteContext
match "authors/*.metadata" $
compile getResourceBody
@@ -354,15 +395,6 @@ writeHTML5With wopt (Item itemi doc) =
Left err -> error $ "Hakyll.Web.Pandoc.writePandocWith: " ++ show err
Right item' -> Item itemi $ T.unpack item'
-- | Write a document as HTML using Pandoc, with the supplied options.
writeNativeWith :: WriterOptions -- ^ Writer options for Pandoc
-> Item Pandoc -- ^ Document to write
-> Item String -- ^ Resulting Markdown
writeNativeWith wopt (Item itemi doc) =
case Pandoc.runPure $ Pandoc.writeNative wopt doc of
Left err -> error $ "Hakyll.Web.Pandoc.writeNativeWith: " ++ show err
Right item' -> Item itemi $ T.unpack item'
--------------------------------------------------------------------------------
-- Supply snapshot as a field to the template


@@ -36,6 +36,7 @@ library
hs-source-dirs: hs
exposed-modules: Hakyll.Web.Agda
, Hakyll.Web.Template.Numeric
, Hakyll.Web.Template.Context.Derived
, Hakyll.Web.Template.Context.Metadata
, Hakyll.Web.Template.Context.Title
, Hakyll.Web.Sass


@@ -1,45 +1,48 @@
---
parts:
- title: "Front matter"
- frontmatter: True
title: "Front matter"
sections:
- src/plfa/frontmatter/dedication.md
- src/plfa/frontmatter/preface.md
- README.md
- title: "Part 1: Logical Foundations"
- include: src/plfa/frontmatter/dedication.md
- include: src/plfa/frontmatter/preface.md
- include: README.md
- mainmatter: True
title: "Part 1: Logical Foundations"
sections:
- src/plfa/part1/Naturals.lagda.md
- src/plfa/part1/Induction.lagda.md
- src/plfa/part1/Relations.lagda.md
- src/plfa/part1/Equality.lagda.md
- src/plfa/part1/Isomorphism.lagda.md
- src/plfa/part1/Connectives.lagda.md
- src/plfa/part1/Negation.lagda.md
- src/plfa/part1/Quantifiers.lagda.md
- src/plfa/part1/Decidable.lagda.md
- src/plfa/part1/Lists.lagda.md
- include: src/plfa/part1/Naturals.lagda.md
- include: src/plfa/part1/Induction.lagda.md
- include: src/plfa/part1/Relations.lagda.md
- include: src/plfa/part1/Equality.lagda.md
- include: src/plfa/part1/Isomorphism.lagda.md
- include: src/plfa/part1/Connectives.lagda.md
- include: src/plfa/part1/Negation.lagda.md
- include: src/plfa/part1/Quantifiers.lagda.md
- include: src/plfa/part1/Decidable.lagda.md
- include: src/plfa/part1/Lists.lagda.md
- title: "Part 2: Programming Language Foundations"
sections:
- src/plfa/part2/Lambda.lagda.md
- src/plfa/part2/Properties.lagda.md
- src/plfa/part2/DeBruijn.lagda.md
- src/plfa/part2/More.lagda.md
- src/plfa/part2/Bisimulation.lagda.md
- src/plfa/part2/Inference.lagda.md
- src/plfa/part2/Untyped.lagda.md
- src/plfa/part2/Confluence.lagda.md
- src/plfa/part2/BigStep.lagda.md
- include: src/plfa/part2/Lambda.lagda.md
- include: src/plfa/part2/Properties.lagda.md
- include: src/plfa/part2/DeBruijn.lagda.md
- include: src/plfa/part2/More.lagda.md
- include: src/plfa/part2/Bisimulation.lagda.md
- include: src/plfa/part2/Inference.lagda.md
- include: src/plfa/part2/Untyped.lagda.md
- include: src/plfa/part2/Confluence.lagda.md
- include: src/plfa/part2/BigStep.lagda.md
- title: "Part 3: Denotational Semantics"
sections:
- src/plfa/part3/Denotational.lagda.md
- src/plfa/part3/Compositional.lagda.md
- src/plfa/part3/Soundness.lagda.md
- src/plfa/part3/Adequacy.lagda.md
- src/plfa/part3/ContextualEquivalence.lagda.md
- title: "Appendix"
- include: src/plfa/part3/Denotational.lagda.md
- include: src/plfa/part3/Compositional.lagda.md
- include: src/plfa/part3/Soundness.lagda.md
- include: src/plfa/part3/Adequacy.lagda.md
- include: src/plfa/part3/ContextualEquivalence.lagda.md
- backmatter: True
title: "Appendix"
sections:
- src/plfa/part2/Substitution.lagda.md
- include: src/plfa/part2/Substitution.lagda.md
- title: "Back matter"
sections:
- src/plfa/backmatter/acknowledgements.md
- src/plfa/backmatter/Fonts.lagda.md
- include: src/plfa/backmatter/acknowledgements.md
- include: src/plfa/backmatter/Fonts.lagda.md
---

templates/metadata.md (new file)

@@ -0,0 +1,22 @@
---
comment: This template is used to restore the standard PLFA metadata to Markdown files. It is used in raw writers.
---
---
title : $title$
$if(authors)$
author :
$for(authors)$
- $name$
$endfor$
$endif$
description : $description$
rights : $rights$
language : $language$
layout : $layout$
prev : $prev$
permalink : $permalink$
next : $next$
---
$body$