commit 27006a8181: fixed Assignment4

53 changed files with 1198 additions and 522 deletions
.gitignore (vendored) | 8 changes

@@ -1,4 +1,5 @@
 ## Agda files
 _build/
 *.agdai
+.agda-stdlib.sed
 .links-*.sed

@@ -11,7 +12,6 @@ stack.yaml.lock
 dist-newstyle

 ## Jekyll files
 _site/
 .sass-cache/
-.agda-stdlib.sed
 .jekyll-metadata

@@ -32,6 +32,8 @@ Gemfile.lock
 auto/

 ## Misc build files
 out/
 *.zip
 versions/plfa.github.io-web-*/
+
+## Include plfa.pdf
+!plfa.pdf
.gitmodules (vendored) | 3 changes

@@ -1,3 +1,4 @@
 [submodule "standard-library"]
 	path = standard-library
-	url = git@github.com:agda/agda-stdlib.git
+	url = https://github.com/agda/agda-stdlib.git
+	shallow = true
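Existing clones do not pick up the changed submodule URL automatically. A sketch of the usual fix, using standard git commands (not part of this commit):

```bash
# Re-read .gitmodules, then fetch the standard library shallowly.
git submodule sync --recursive
git submodule update --init --recursive --depth 1
```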
Makefile | 167 changes

@@ -1,11 +1,32 @@
 #################################################################################
 # Configuration
 #################################################################################

-SITE_DIR := _site
+SITE_DIR  := _site
+RAW_DIR   := $(SITE_DIR)/raw
 CACHE_DIR := _cache
-TMP_DIR := $(CACHE_DIR)/tmp
+TMP_DIR   := $(CACHE_DIR)/tmp
+
+AGDA := stack exec agda -- --no-libraries --include-path=standard-library/src
+PANDOC := stack exec pandoc --
+
+
+#################################################################################
+# Build PLFA site, EPUB, and PDF
+#################################################################################
+
+.PHONY: all
+all:
+	@make build
+	@make epub-build
+	@make pdf-build
+
+.PHONY: all-clean
+all-clean:
+	@make clean
+	@make epub-clean
+	@make pdf-clean

 #################################################################################
 # Setup Git Hooks

@@ -13,7 +34,8 @@ TMP_DIR := $(CACHE_DIR)/tmp
 .PHONY: init
 init: setup-check-fix-whitespace setup-install-htmlproofer
-	git config core.hooksPath .githooks
+	@echo "Setting up Git Hooks"
+	@git config core.hooksPath .githooks


 #################################################################################

@@ -21,13 +43,15 @@ init: setup-check-fix-whitespace setup-install-htmlproofer
 #################################################################################

 .PHONY: build
-build: \
-	standard-library/ChangeLog.md
-	stack build && stack exec site build
+build: standard-library/ChangeLog.md
+	@echo "Building site"
+	@stack build && stack exec site build

 standard-library/ChangeLog.md:
-	git submodule init
-	git submodule update --recursive
+	@echo "Updating Agda standard library"
+	@git submodule init
+	@git submodule update --recursive


 #################################################################################
 # Test generated site with HTMLProofer

@@ -35,36 +59,29 @@ standard-library/ChangeLog.md:
 .PHONY: test
 test: setup-install-htmlproofer build
-	cd $(SITE_DIR) && htmlproofer \
-		--check-html \
-		--disable-external \
-		--report-invalid-tags \
-		--report-missing-names \
-		--report-script-embeds \
-		--report-missing-doctype \
-		--report-eof-tags \
-		--report-mismatched-tags \
-		--check-img-http \
-		--check-opengraph \
+	@echo "Testing generated HTML using HTMLProofer"
+	@cd $(SITE_DIR) && htmlproofer \
+		--check-html \
+		--disable-external \
+		--report-invalid-tags \
+		--report-missing-names \
+		--report-script-embeds \
+		--report-missing-doctype \
+		--report-eof-tags \
+		--report-mismatched-tags \
+		--check-img-http \
+		--check-opengraph \
 		.

-#################################################################################
-# Test generated EPUB with EPUBCheck
-#################################################################################
-
-.PHONY: test-epub
-test-epub: setup-check-epubcheck build
-	epubcheck $(SITE_DIR)/plfa.epub
-

 #################################################################################
 # Automatically rebuild the site on changes, and start a preview server
 #################################################################################

 .PHONY: watch
-watch: \
-	standard-library/ChangeLog.md
-	stack build && stack exec site watch
+watch: standard-library/ChangeLog.md
+	@echo "Watching for changes and rebuilding"
+	@stack build && stack exec site watch


 #################################################################################

@@ -73,7 +90,8 @@ watch: \
 .PHONY: update-contributors
 update-contributors:
-	stack build && stack exec update-contributors
+	@echo "Updating contributors from GitHub"
+	@stack build && stack exec update-contributors


 #################################################################################

@@ -81,9 +99,9 @@ update-contributors:
 #################################################################################

 .PHONY: clean
-clean: \
-	standard-library/ChangeLog.md
-	stack build && stack exec site clean
+clean: standard-library/ChangeLog.md
+	@echo "Cleaning generated files for site"
+	@stack build && stack exec site clean


 #################################################################################

@@ -102,28 +120,28 @@ list:
 .PHONY: publish
 publish: setup-check-rsync
-	@echo "Building site..."
-	make build
-	@echo "Testing site..."
-	make test
-	@echo "Creating web branch..."
+	@make all
+	@echo "Cleaning intermediate files"
+	rm -rf $(RAW_DIR)
+	@make test
+	@echo "Creating web branch"
 	git fetch --all
 	git checkout -b web --track origin/web
-	rsync -a \
-		--filter='P _site/' \
-		--filter='P _cache/' \
-		--filter='P .git/' \
-		--filter='P .gitignore' \
-		--filter='P .stack-work' \
-		--filter='P .nojekyll' \
-		--filter='P CNAME' \
-		--delete-excluded \
-		_site/ .
+	rsync -a \
+		--filter='P $(SITE_DIR)/' \
+		--filter='P $(CACHE_DIR)/' \
+		--filter='P .git/' \
+		--filter='P .gitignore' \
+		--filter='P .stack-work' \
+		--filter='P .nojekyll' \
+		--filter='P CNAME' \
+		--delete-excluded \
+		$(SITE_DIR) .
 	git add -A
-	@echo "Publishing web branch..."
+	@echo "Publishing web branch"
 	git commit -m "Publish."
 	git push origin web:web
-	@echo "Deleting web branch..."
+	@echo "Deleting web branch"
 	git checkout dev
 	git branch -D web

@@ -142,18 +160,35 @@ ifeq (,$(wildcard $(PLFA_AFS_DIR)))
 	@exit 1
 else
 ifeq (,$(wildcard $(PLFA_AFS_DIR)/html))
 	@echo "Checkout latest version from GitHub"
 	git clone https://github.com/plfa/plfa.github.io.git --branch web --single-branch --depth 1 html
 endif
-	cd $(PLFA_AFS_DIR)/html \
+	@echo "Checkout latest version from GitHub"
+	@cd $(PLFA_AFS_DIR)/html \
 		&& git fetch --depth 1 \
 		&& git reset --hard origin/web \
 		&& git clean -dfx
-	fsr setacl $(PLFA_AFS_DIR)/html system:groupwebserver rl
+	@echo "Setting permissions to include web server"
+	@fsr setacl $(PLFA_AFS_DIR)/html system:groupwebserver rl
 endif


 #################################################################################
-# Setup dependencies
+# Build EPUB
 #################################################################################
+
+include book/epub.mk
+
+
+#################################################################################
+# Build PDF
+#################################################################################
+
+include book/pdf.mk
+
+
+#################################################################################
+# Setup or check dependencies
+#################################################################################

 .PHONY: setup-check-stack

@@ -189,11 +224,12 @@ ifeq (,$(wildcard $(shell which fix-whitespace)))
 	@echo "  stack install --stack-yaml stack-8.8.3.yaml"
 endif

-.PHONY: setup-check-epubcheck
-setup-check-epubcheck:
-ifeq (,$(wildcard $(shell which epubcheck)))
-	@echo "The command you called requires EPUBCheck"
-	@echo "See: https://github.com/w3c/epubcheck"
+.PHONY: setup-check-latexmk
+setup-check-latexmk:
+ifeq (,$(wildcard $(shell which latexmk)))
+	@echo "The command you called requires Latexmk"
+	@echo "Latemk is included in MacTeX and MikTeX"
+	@echo "See: https://mg.readthedocs.io/latexmk.html"
 endif

 .PHONY: setup-check-rsync

@@ -218,6 +254,19 @@ ifeq (,$(wildcard $(shell which bundle)))
 	gem install bundle
 endif

+.PHONY: setup-install-agda
+setup-install-agda: setup-check-stack
+ifeq (,$(wildcard $(shell stack exec which -- agda)))
+	@echo "Installing Agda"
+	stack build --only-dependencies
+endif
+
+.PHONY: setup-install-pandoc
+setup-install-pandoc: setup-check-stack
+ifeq (,$(wildcard $(shell stack exec which -- pandoc)))
+	@echo "Installing Pandoc"
+	stack build --only-dependencies
+endif
+
 #################################################################################
 # Build legacy versions of website using Jekyll
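A usage sketch for the new aggregate targets, grounded in the targets defined above:

```bash
# Build everything (site, EPUB, and PDF), then remove all generated files.
make all
make all-clean
```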
README.md | 15 changes

@@ -94,20 +94,21 @@ Finished hello.
 ### Install PLFA and the Agda standard library
 You can get the latest version of Programming Language Foundations in Agda from GitHub, either by cloning the repository, or by downloading [the zip archive][plfa-dev]:
 ```bash
-git clone --recurse-submodules https://github.com/plfa/plfa.github.io plfa
+git clone --depth 1 --recurse-submodules --shallow-submodules https://github.com/plfa/plfa.github.io plfa
+# Remove `--depth 1` and `--shallow-submodules` if you want the complete git history of PLFA and the standard library.
 ```
 PLFA ships with the required version of the Agda standard library, so if you cloned with the `--recurse-submodules` flag, you’ve already got it, in the `standard-library` directory!

 If you forgot to add the `--recurse-submodules` flag, no worries, we can fix that!
 ```bash
 cd plfa/
-git submodule update --recursive
+git submodule update --init --recursive --depth 1
+# Remove `--depth 1` if you want the complete git history of the standard library.
 ```
 If you obtained PLFA by downloading the zip archive, you can get the required version of the Agda standard library from GitHub. You can either clone the repository and switch to the correct branch, or you can download [the zip archive][agda-stdlib]:
 ```bash
-git clone https://github.com/agda/agda-stdlib.git agda-stdlib
-cd agda-stdlib
-git checkout v1.3
+git clone https://github.com/agda/agda-stdlib.git --branch v1.6 --depth 1 agda-stdlib
+# Remove `--depth 1` if you want the complete git history of the standard library.
 ```
 Finally, we need to let Agda know where to find the Agda standard library.
 You'll need the path where you installed the standard library. Check to see that the file “standard-library.agda-lib” exists, and make a note of the path to this file.

@@ -318,9 +319,7 @@ The repository comes with several Git hooks:
 You can install these Git hooks by calling `make init`.
 You can install [fix-whitespace][fix-whitespace] by running:
 ```bash
-git clone https://github.com/agda/fix-whitespace
-cd fix-whitespace/
-stack install --stack-yaml stack-8.8.3.yaml
+stack install fix-whitespace
 ```
 If you want Stack to use your system installation of GHC, follow the instructions for [Using an existing installation of GHC](#using-an-existing-installation-of-ghc).
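The final step referenced in the hunk above, letting Agda know where the standard library lives, is usually done by registering the `.agda-lib` file. A minimal sketch with an illustrative path; this registration is standard Agda setup, not part of this commit:

```bash
# Register the standard library with Agda; adjust the path to your checkout.
mkdir -p ~/.agda
echo "$HOME/agda-stdlib/standard-library.agda-lib" >> ~/.agda/libraries
echo "standard-library" >> ~/.agda/defaults
```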
book/DejaVu-mononoki-Symbola-Droid.ttf (new binary file, not shown) | BIN
book/epub.css (new file) | 90 lines

@@ -0,0 +1,90 @@
/* This is Pandoc's default ebook CSS, modified slightly. */
/* https://github.com/jgm/pandoc/blob/master/data/epub.css */
@font-face {
  font-family: 'DejaVu-mononoki-Symbola-Droid';
  font-weight: normal;
  font-style: normal;
  src: url('../fonts/DejaVu-mononoki-Symbola-Droid.woff');
}

body {
  margin: 5%;
  text-align: justify;
  font-size: medium;
}

code {
  font-family: 'DejaVu-mononoki-Symbola-Droid', monospace;
}

h1,
h2,
h3,
h4,
h5,
h6 {
  text-align: left;
}

nav#toc ol,
nav#landmarks ol {
  padding: 0;
  margin-left: 1em;
}

nav#toc ol li,
nav#landmarks ol li {
  list-style-type: none;
  margin: 0;
  padding: 0;
}

a.footnote-ref {
  vertical-align: super;
}

em,
em em em,
em em em em em {
  font-style: italic;
}

em em,
em em em em {
  font-style: normal;
}

code {
  white-space: pre-wrap;
}

span.smallcaps {
  font-variant: small-caps;
}

span.underline {
  text-decoration: underline;
}

q {
  quotes: "“""”""‘""’";
}

div.column {
  display: inline-block;
  vertical-align: top;
  width: 50%;
}

div.hanging-indent {
  margin-left: 1.5em;
  text-indent: -1.5em;
}

/* Workaround for iBooks issue; see #6242 */
@media screen {
  .sourceCode {
    overflow: visible !important;
    white-space: pre-wrap !important;
  }
}
book/epub.md

@@ -1,7 +1,7 @@
 $for(parts)$
 # $title$
 $for(sections)$
-$content$
+## $title$ {#$anchor$}
+$shifted_raw$
 $endfor$
 $endfor$
book/epub.mk (new file) | 80 lines

@@ -0,0 +1,80 @@
#################################################################################
# Configuration
#################################################################################

EPUB_DIR := book
EPUB_TEMPLATE_DIR := $(EPUB_DIR)/templates
EPUB_LUA_DIR := $(EPUB_DIR)/lua
EPUB_LUA_SCRIPTS := $(wildcard $(EPUB_LUA_DIR)/*.lua)
MD_DIR := src
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
FRANKENFONT := public/webfonts/DejaVu-mononoki-Symbola-Droid.woff


#################################################################################
# Compile PLFA to an EPUB using Pandoc
#################################################################################

.PHONY: epub epub-build
epub: epub-build
epub-build: $(SITE_DIR)/plfa.epub

$(SITE_DIR)/plfa.epub: \
		$(RAW_DIR)/epub.md $(EPUB_DIR)/epub.css $(RAW_DIR)/epub.xml $(FRANKENFONT) \
		$(MD_FILES) $(EPUB_LUA_SCRIPTS) | setup-install-pandoc
	@echo "Building EPUB"
	@$(PANDOC) \
		--strip-comments \
		--css=$(EPUB_DIR)/epub.css \
		--epub-embed-font=$(FRANKENFONT) \
		--epub-metadata=$(RAW_DIR)/epub.xml \
		--indented-code-classes=default \
		--lua-filter=$(EPUB_LUA_DIR)/set-default-code-class.lua -M default-code-class=agda \
		--lua-filter=$(EPUB_LUA_DIR)/remove-badges.lua -M badge-url=https://img.shields.io/badge/ \
		--lua-filter=$(EPUB_LUA_DIR)/epub-clean-html.lua \
		--lua-filter=$(EPUB_LUA_DIR)/single-file-links.lua \
		--lua-filter=$(EPUB_LUA_DIR)/single-file-identifiers.lua \
		--standalone \
		--toc --toc-depth=2 \
		--epub-chapter-level=2 \
		$< -o $@


#################################################################################
# Test EPUB with EPUBCheck
#################################################################################

.PHONY: epub-test
epub-test: $(SITE_DIR)/plfa.epub | setup-check-epubcheck
	@echo "Testing EPUB with EPUBCheck"
	@epubcheck $(SITE_DIR)/plfa.epub


#################################################################################
# Tasks for files that are generated by Hakyll
#################################################################################

$(RAW_DIR)/epub.xml: $(EPUB_DIR)/epub.xml
	@make build


#################################################################################
# Clean up and remove generated EPUB
#################################################################################

.PHONY: epub-clean
epub-clean:
	@echo "Cleaning generated files for EPUB"
	@rm -f $(SITE_DIR)/plfa.epub


#################################################################################
# Setup or check dependencies
#################################################################################

.PHONY: setup-check-epubcheck
setup-check-epubcheck:
ifeq (,$(wildcard $(shell which epubcheck)))
	@echo "The command you called requires EPUBCheck"
	@echo "See: https://github.com/w3c/epubcheck"
endif
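A usage sketch for the targets this makefile adds, run from the repository root where the top-level Makefile includes book/epub.mk:

```bash
# Build the EPUB into _site/plfa.epub, then validate it with EPUBCheck.
make epub
make epub-test
```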
book/lua/epub-clean-html.lua (new file) | 5 lines

@@ -0,0 +1,5 @@
function RawBlock(el)
  -- Transforms '<ul class={something}>' into '<ul>'.
  el.text = el.text:gsub('%s*<%s*ul%s*class=%s*"?[%w-]+"?%s*>%s*', '<ul>')
  return el
end
book/lua/latex-render-codeblocks.lua (new file) | 47 lines

@@ -0,0 +1,47 @@
local unchecked_files = {}

local function is_checked()
  -- Check if any of our input files is an unchecked file.
  for _, input_file in pairs(PANDOC_STATE.input_files) do
    if unchecked_files[input_file] then
      if #PANDOC_STATE.input_files > 1 then
        error("Cannot handle multiple unchecked input files.")
      else
        return false
      end
    end
  end
  return true
end

local function render_codeblock(cb)
  -- If a code block has class Agda or its class is omitted, format it as...
  --
  --   \begin{agda}\begin{code} .. \end{agda}\end{code}
  --
  -- ...otherwise, format it as...
  --
  --   \begin{pre} .. \end{pre}
  --
  -- Any file which is not checked by Agda must have its code blocks typeset in the latter style.
  -- Specify these files via the UNCHECKED_FILES environment variable.
  if is_checked() then
    if #cb.classes == 0 or cb.classes[1] == 'agda' then
      return pandoc.RawBlock('tex', '\\begin{agda}\n\\begin{code}\n' .. cb.text .. '\n\\end{code}\n\\end{agda}')
    end
  end
  return pandoc.RawBlock('tex', '\\begin{pre}\n' .. cb.text .. '\n\\end{pre}')
end

local function get_unchecked_files(meta)
  if meta['unchecked-files'] then
    for unchecked_file in string.gmatch(pandoc.utils.stringify(meta['unchecked-files']), "([^ ]+)") do
      unchecked_files[unchecked_file] = true
    end
  end
end

return {
  { Meta = get_unchecked_files },
  { CodeBlock = render_codeblock }
}
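book/pdf.mk drives this filter through Pandoc with the `unchecked-files` metadata. A hypothetical standalone invocation along the same lines; the file names here are examples:

```bash
# Typeset README.md's code blocks as plain 'pre' blocks rather than Agda code.
pandoc --top-level-division=chapter --indented-code-classes=default \
  --lua-filter=book/lua/latex-render-codeblocks.lua -M unchecked-files=README.md \
  README.md -o README.tex
```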
book/lua/remove-badges.lua (new file) | 22 lines

@@ -0,0 +1,22 @@
local badge_url = nil

local function remove_badges(ln)
  -- Remove link elements if their first child is a badge.
  if #ln.content > 0 and ln.content[1].t == "Image" then
    if string.find(ln.content[1].src, badge_url, 1, true) then
      return pandoc.Null()
    end
  end
end

local function get_badge_url(meta)
  if meta['badge-url'] then
    badge_url = pandoc.utils.stringify(meta['badge-url'])
  end
end


return {
  { Meta = get_badge_url },
  { Link = remove_badges }
}
book/lua/set-default-code-class.lua

@@ -8,19 +8,21 @@

 local default_code_classes = {}

-function add_default_code_class(el)
+local function add_default_code_class(el)
   if #(el.classes) == 0 then
     el.classes = default_code_classes
     return el
   end
 end

-function get_default_code_class(meta)
+local function get_default_code_class(meta)
   if meta['default-code-class'] then
     default_code_classes = {pandoc.utils.stringify(meta['default-code-class'])}
   end
 end

-return {{Meta = get_default_code_class},
-        {Code = add_default_code_class},
-        {CodeBlock = add_default_code_class}}
+return {
+  {Meta = get_default_code_class},
+  {Code = add_default_code_class},
+  {CodeBlock = add_default_code_class}
+}
book/lua/single-file-identifiers.lua (new file) | 31 lines

@@ -0,0 +1,31 @@
local chapter = nil

function Header(el)
  if el.level == 2 then
    chapter = el.attr.identifier
  end
  if el.level >= 3 then
    if el.attr.attributes.name then
      el.attr.identifier = chapter .. '-' .. el.attr.attributes.name
      el.attr.attributes.name = nil
    else
      el.attr.identifier = chapter .. '-' .. el.attr.identifier
    end
  end
end

function show(t)
  local str = ''
  str = str .. '{'
  for k,v in pairs(t) do
    str = str .. k
    str = str .. ':'
    if type(v) == 'table' then
      str = str .. show(v)
    else
      str = str .. v
    end
  end
  str = str .. '}'
  return str
end
book/lua/single-file-links.lua (new file) | 24 lines

@@ -0,0 +1,24 @@
-- from https://github.com/plfa/plfa.github.io/blob/dev/epub/rewrite-links.lua
-- Performs the following transformations on Link targets:
--
-- Case 1:
--   [text](/chapter/#more-stuff) -> [text](#chapter-more-stuff)
--
-- Case 2:
--   [text](/chapter/) -> [text](#chapter)
--
-- All other Links are ignored.
function Link(el)
  local n
  -- Case 1:
  el.target, n = el.target:gsub("^/(%w+)/#([%w-]+)$", "#%1-%2")
  -- Case 2:
  if n == 0 then
    el.target, n = el.target:gsub("^/(%w+)/$", "#%1")
  end
  -- If either Case 1 or Case 2, lowercase target:
  if n ~= 0 then
    el.target = string.lower(el.target)
  end
  return el
end
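book/epub.mk applies this filter during the EPUB build. A hypothetical standalone run, where chapter.md is a placeholder input file:

```bash
# Rewrite site-absolute links like /Naturals/#plus to single-file anchors like #naturals-plus.
pandoc --lua-filter=book/lua/single-file-links.lua chapter.md -o chapter.html
```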
book/pdf.mk (new file) | 164 lines

@@ -0,0 +1,164 @@
#################################################################################
# Configuration
#################################################################################

PDF_DIR := book
PDF_TEMPLATE_DIR := $(PDF_DIR)/templates
PDF_LUA_DIR := $(PDF_DIR)/lua
MD_DIR := src
LAGDA_TEX_DIR := $(TMP_DIR)/lagda_tex
TEX_DIR := $(TMP_DIR)/tex
FRANKENFONT := $(PDF_DIR)/DejaVu-mononoki-Symbola-Droid.ttf


#################################################################################
# Helper functions for translating paths
#################################################################################

# Convert MD_DIR/%.md to its modified source path
define MD_PATH
$(patsubst $(MD_DIR)/%/acknowledgements.md,$(RAW_DIR)/%/acknowledgements.md,\
	$(patsubst $(PDF_DIR)/pdf.tex,$(RAW_DIR)/pdf.tex,$(1)))
endef

# Convert MD_DIR/%.lagda.md to LAGDA_TEX_DIR/%.lagda.tex
define LAGDA_TEX_PATH
$(patsubst $(MD_DIR)/%.lagda.md,$(LAGDA_TEX_DIR)/%.lagda.tex,$(1))
endef

# Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
#
# NOTE: This logic is partially duplicated in hs/Main.hs:addTexPath:texPath.
#
define TEX_PATH
$(patsubst $(RAW_DIR)/%,$(TEX_DIR)/%,\
	$(patsubst $(PDF_DIR)/%,$(TEX_DIR)/%,\
		$(patsubst README.md,$(TEX_DIR)/plfa/frontmatter/README.tex,\
			$(patsubst $(MD_DIR)/%.md,$(TEX_DIR)/%.tex,\
				$(patsubst $(MD_DIR)/%.lagda.md,$(TEX_DIR)/%.tex,$(1))))))
endef


#################################################################################
# Lists of source and intermediate files
#################################################################################

PDF_LUA_SCRIPTS := $(wildcard $(PDF_LUA_DIR)/*.lua)
PDF_STATIC_FILES := $(PDF_DIR)/pdf.tex $(FRANKENFONT)
MD_FILES := README.md $(wildcard $(MD_DIR)/plfa/**/*.md)
LAGDA_MD_FILES := $(filter %.lagda.md,$(MD_FILES))
LAGDA_TEX_FILES := $(call LAGDA_TEX_PATH,$(LAGDA_MD_FILES))
TEX_FILES := $(call TEX_PATH,$(MD_FILES) $(RAW_DIR)/pdf.tex $(PDF_STATIC_FILES))


#################################################################################
# Compile PLFA to a PDF using Pandoc and Latexmk
#################################################################################

.PHONY: pdf pdf-build
pdf: pdf-build
pdf-build: $(SITE_DIR)/plfa.pdf

$(SITE_DIR)/plfa.pdf: $(TEX_FILES)
	@echo "Building PDF"
	@cd $(TEX_DIR) && latexmk -pdf -lualatex -use-make -halt-on-error pdf.tex
	@cp $(TEX_DIR)/pdf.pdf $(SITE_DIR)/plfa.pdf


#################################################################################
# Definitions of various compilation tasks
#################################################################################

# Copy static files needed by PDF compilation
define MK_COPYSTATIC_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src)
	@echo "Copy $$< to $$@"
	@mkdir -p '$$(@D)'
	@cp $$< $$@
endef

# Copy static files from PDF_DIR/% to TEX_DIR/%
$(foreach static_file,\
	$(PDF_STATIC_FILES),\
	$(eval $(call MK_COPYSTATIC_RULE,\
		$(call MD_PATH,$(static_file)),\
		$(call TEX_PATH,$(static_file)))))


# Compile Markdown files to LaTeX
#
# NOTE: Passing --indented-code-classes=default sets the class for /indented/ code blocks
#       to 'default', which lets us distinguish them from /fenced/ code blocks without a
#       class. That's important, since fenced code blocks are checked by Agda, but indented
#       code blocks are /not/, so latex-render-codeblocks.lua needs to be able to tell the
#       difference.
#
define MK_MD2TEX_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src) $(PDF_LUA_SCRIPTS) | setup-install-pandoc
	@echo "Compile $$< to $$@"
	@mkdir -p '$$(@D)'
	@$(PANDOC) \
		--top-level-division=chapter \
		--indented-code-classes=default \
		--lua-filter=$(PDF_LUA_DIR)/remove-badges.lua -M badge-url=https://img.shields.io/badge/ \
		--lua-filter=$(PDF_LUA_DIR)/latex-render-codeblocks.lua -M unchecked-files=README.md \
		--lua-filter=$(PDF_LUA_DIR)/single-file-links.lua \
		$$< -o $$@
endef

# Compile .md files (from MD_DIR/%.md to TEX_DIR/%.tex)
$(foreach md_file,\
	$(filter-out %.lagda.md,$(MD_FILES)),\
	$(eval $(call MK_MD2TEX_RULE,\
		$(call MD_PATH,$(md_file)),\
		$(call TEX_PATH,$(md_file)))))

# Compile .lagda.md files (from MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex)
$(foreach lagda_md_file,\
	$(LAGDA_MD_FILES),\
	$(eval $(call MK_MD2TEX_RULE,\
		$(lagda_md_file),\
		$(call LAGDA_TEX_PATH,$(lagda_md_file)))))


# Compile Literate Agda files to LaTeX
define MK_LAGDA_MD2TEX_RULE
src := $(1)
dst := $(2)
$$(dst): $$(src) $(LAGDA_TEX_FILES) | setup-install-agda setup-check-latexmk
	@$(AGDA) --include-path=$(LAGDA_TEX_DIR) --latex --latex-dir=$(TEX_DIR) $$<
endef

# Compile .lagda.tex files (from LAGDA_TEX_DIR/%.lagda.tex to TEX_DIR/%.tex)
$(foreach lagda_md_file,\
	$(LAGDA_MD_FILES),\
	$(eval $(call MK_LAGDA_MD2TEX_RULE,\
		$(call LAGDA_TEX_PATH,$(lagda_md_file)),\
		$(call TEX_PATH,$(lagda_md_file)))))


#################################################################################
# Tasks for files that are generated by Hakyll
#################################################################################

$(RAW_DIR)/pdf.tex: $(PDF_DIR)/pdf.tex $(MD_DIR)/plfa/toc.metadata
	@make build

# Generated by Hakyll
$(RAW_DIR)/plfa/backmatter/acknowledgements.md: $(MD_DIR)/plfa/backmatter/acknowledgements.md
	@make build


#################################################################################
# Clean up and remove generated PDF
#################################################################################

.PHONY: pdf-clean
pdf-clean:
	@echo "Cleaning generated files for PDF"
	@rm -rf $(SITE_DIR)/plfa.pdf $(LAGDA_TEX_DIR) $(TEX_DIR)
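A usage sketch for the PDF pipeline defined above, run from the repository root:

```bash
# Compile all chapters to LaTeX and build _site/plfa.pdf with Latexmk.
make pdf
```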
book/pdf.tex (new file) | 166 lines

@@ -0,0 +1,166 @@
\documentclass[10pt]{book}

\usepackage{hyperref}
\usepackage[links]{agda}
\usepackage{fontspec}
\setmainfont{DejaVu Sans}
\setsansfont{DejaVu Sans}
\setmonofont{[DejaVu-mononoki-Symbola-Droid.ttf]}
\usepackage{soul}
\usepackage{tcolorbox}
\tcbuselibrary{skins,breakable}
\usepackage{fancyvrb}
\usepackage{xcolor}
\usepackage{tikz}
\usepackage{setspace}
\usepackage{geometry}
\geometry{
  a4paper,
  total={170mm,257mm},
  left=20mm,
  top=20mm,
}


% Wrap texttt lines
\sloppy

% Disable section numbering
\setcounter{secnumdepth}{0}

% Set the global text color:
\definecolor{textcolor}{HTML}{111111}
\color{textcolor}

% Change background color for inline code in markdown files.
% The following code does not work well for long text as the
% text will exceed the page boundary.
\definecolor{background-color}{HTML}{EEEEFF}
\let\oldtexttt\texttt%
\renewcommand{\texttt}[1]{\colorbox{background-color}{\oldtexttt{#1}}}

% Box with background colour similar to web version:
\newtcolorbox{agda}[1][]{
  frame hidden,
  colback=background-color,
  spartan,
  left=5pt,
  boxrule=0pt,
  breakable,
}

% Verbatim environment similarly indented to Agda code blocks.
\DefineVerbatimEnvironment{verbatim}{Verbatim}{xleftmargin=0pt}%

% Adding backgrounds to verbatim environments.
\newenvironment{pre}{
  \VerbatimEnvironment
  \begin{agda}
  \begin{verbatim}
}{\end{verbatim}
  \end{agda}
}

% Use special font families without TeX ligatures for Agda code.
% Solution inspired by a comment by Enrico Gregorio:
% https://tex.stackexchange.com/a/103078
\newfontfamily{\AgdaSerifFont}{DejaVu-Serif}
\newfontfamily{\AgdaSansSerifFont}{DejaVu-Sans}
\newfontfamily{\AgdaTypewriterFont}{DejaVu-mononoki-Symbola-Droid}
\renewcommand{\AgdaFontStyle}[1]{{\AgdaTypewriterFont{}#1}}
\renewcommand{\AgdaKeywordFontStyle}[1]{{\AgdaTypewriterFont{}#1}}
\renewcommand{\AgdaStringFontStyle}[1]{{\AgdaTypewriterFont{}#1}}
\renewcommand{\AgdaCommentFontStyle}[1]{{\AgdaTypewriterFont{}#1}}
\renewcommand{\AgdaBoundFontStyle}[1]{{\AgdaTypewriterFont{}#1}}

\AgdaNoSpaceAroundCode{}

% Adjust spacing after toc numbering
\usepackage{tocloft}
\setlength\cftchapnumwidth{3em}
\cftsetpnumwidth{4em}

% Style links with colors instead of boxes:
% https://tex.stackexchange.com/q/823
\definecolor{linkcolor}{HTML}{2A7AE2}
\hypersetup{
  colorlinks,
  linkcolor={linkcolor},
  citecolor={linkcolor},
  urlcolor={linkcolor}
}

\begin{document}

% Adjust indentation of Agda code blocks
\setlength{\mathindent}{0pt}
\setlength{\parindent}{0em}
\setlength{\parskip}{1em}

% Provide \tightlist environment
% https://tex.stackexchange.com/q/257418
\providecommand{\tightlist}{%
  \setlength{\itemsep}{0pt}\setlength{\parskip}{0pt}}

% Based on \titleAM:
% https://ctan.org/pkg/titlepages
\begin{titlepage}
\newlength{\drop}%
\setlength{\drop}{0.12\textheight}%
\centering%
\vspace*{\drop}
\begingroup% Ancient Mariner, AW p. 151
{\large Philip Wadler, Wen Kokke, and Jeremy G. Siek}\\[\baselineskip]
{\Huge PROGRAMMING LANGUAGE}\\[\baselineskip]
{\Huge FOUNDATIONS}\\[\baselineskip]
{\Large IN}\\[\baselineskip]
{\Huge Agda}\\[\drop]
\vfill%
{\small\scshape 2021}\par%
\null\endgroup
\end{titlepage}

$for(parts)$

% Open front, main, and back matter sections:
$if(frontmatter)$
\frontmatter%
\setcounter{tocdepth}{0}

\tableofcontents%
\setcounter{tocdepth}{1}
$endif$
$if(mainmatter)$
\mainmatter%
$endif$
$if(backmatter)$
\appendix
\addcontentsline{toc}{part}{Appendices}
$endif$

% Only print title for main and back matter:
$if(frontmatter)$
% NOTE: Front matter titles are inserted via book/lua/single-file-headers.lua.
$else$
\part{$title$}
$endif$

% Include each section.
$for(sections)$
\hypertarget{$anchor$}{%
\chapter{$title$}\label{$anchor$}}
\input{$tex_path$}
$endfor$

% Close front, main, and back matter sections:
$if(frontmatter)$
$endif$
$if(mainmatter)$
\cleardoublepage%
\phantomsection%
$endif$
$if(backmatter)$
$endif$
$endfor$

\end{document}
(literate Agda file; name not shown in this view)

@@ -989,8 +989,8 @@ Remember to indent all code by two spaces.
   ext∋ _ ¬∃ ⟨ A , S _ ⊢x ⟩ = ¬∃ ⟨ A , ⊢x ⟩

   lookup : ∀ (Γ : Context) (x : Id)
-      -----------------------
-    → Dec (∃[ A ](Γ ∋ x ⦂ A))
+      ------------------------
+    → Dec (∃[ A ]( Γ ∋ x ⦂ A ))
   lookup ∅ x = no (λ ())
   lookup (Γ , y ⦂ B) x with x ≟ y
   ... | yes refl = yes ⟨ B , Z ⟩

@@ -1022,12 +1022,12 @@ Remember to indent all code by two spaces.
 ```
 synthesize : ∀ (Γ : Context) (M : Term⁺)
-    -----------------------
-  → Dec (∃[ A ](Γ ⊢ M ↑ A))
+    -----------------------
+  → Dec (∃[ A ]( Γ ⊢ M ↑ A ))

 inherit : ∀ (Γ : Context) (M : Term⁻) (A : Type)
-    ---------------
-  → Dec (Γ ⊢ M ↓ A)
+    ---------------
+  → Dec (Γ ⊢ M ↓ A)

 synthesize Γ (` x) with lookup Γ x
 ... | no ¬∃ = no (λ{ ⟨ A , ⊢` ∋x ⟩ → ¬∃ ⟨ A , ∋x ⟩ })
(literate Agda file; name not shown in this view)

@@ -631,7 +631,7 @@ module Problem3 where
 ```
 synthesize : ∀ (Γ : Context) (M : Term⁺)
     -----------------------
-  → Dec (∃[ A ](Γ ⊢ M ↑ A))
+  → Dec (∃[ A ]( Γ ⊢ M ↑ A ))

 inherit : ∀ (Γ : Context) (M : Term⁻) (A : Type)
     ---------------
css/epub.css (deleted) | 42 lines

@@ -1,42 +0,0 @@
/* This is Pandoc's default ebook CSS, modified slightly. */
/* https://github.com/jgm/pandoc/blob/master/data/epub.css */
@font-face {
  font-family: 'mononoki';
  font-weight: normal;
  font-style: normal;
  src: url('../webfonts/mononoki.woff');
}
@font-face {
  font-family: 'FreeMono';
  font-weight: normal;
  font-style: normal;
  src: url('../webfonts/FreeMono.woff');
}
@font-face {
  font-family: 'DejaVuSansMono';
  font-weight: normal;
  font-style: normal;
  src: url('../webfonts/DejaVuSansMono.woff');
}
body { margin: 5%; text-align: justify; font-size: medium; }
code { font-family: 'mononoki', 'FreeMono', 'DejaVuSansMono', monospace; }
h1, h2, h3, h4, h5, h6 { text-align: left; }
nav#toc ol, nav#landmarks ol { padding: 0; margin-left: 1em; }
nav#toc ol li, nav#landmarks ol li { list-style-type: none; margin: 0; padding: 0; }
a.footnote-ref { vertical-align: super; }
em, em em em, em em em em em { font-style: italic;}
em em, em em em em { font-style: normal; }
code{ white-space: pre-wrap; }
span.smallcaps{ font-variant: small-caps; }
span.underline{ text-decoration: underline; }
q { quotes: "“" "”" "‘" "’"; }
div.column{ display: inline-block; vertical-align: top; width: 50%; }
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}

/* Workaround for iBooks issue; see #6242 */
@media screen {
  .sourceCode {
    overflow: visible !important;
    white-space: pre-wrap !important;
  }
}
(SCSS stylesheet; file name not shown in this view)

@@ -1,3 +1,8 @@
+@font-face{
+  font-family: DejaVu mononoki Symbola Droid;
+  src:
+    url('../webfonts/DejaVu-mononoki-Symbola-Droid.woff') format('woff')
+}
 @font-face {
   font-family: 'mononoki';
   src: url('../webfonts/mononoki.woff2') format('woff2'),

@@ -19,7 +24,7 @@
 }

 @mixin code-font {
-  font-family: 'mononoki', 'DejaVu Sans Mono', 'Source Code Pro', 'Bitstream Vera Sans Mono', 'FreeMono', 'Courier New', 'Monaco', 'Menlo', monospace, serif;
+  font-family: 'DejaVu mononoki Symbola Droid', 'mononoki', 'DejaVu Sans Mono', 'Source Code Pro', 'Bitstream Vera Sans Mono', 'FreeMono', 'Courier New', 'Monaco', 'Menlo', monospace, serif;
   font-size: .85em;
 }
 @mixin code-container {
epub/include-files.lua (deleted) | 61 lines

@@ -1,61 +0,0 @@
--- include-files.lua – filter to include Markdown files
---
--- Copyright: © 2019–2020 Albert Krewinkel
--- Copyright: © 2020 Michael Reed
--- License: MIT – see LICENSE file for details
---
--- Created by Albert Krewinkel. Slightly modified by Michael Reed for use in
--- generating the EPUB for "Programming Language Foundations in Agda".
---
--- For documentation, see: https://github.com/pandoc/lua-filters/tree/master/include-files

-- pandoc's List type
local List = require 'pandoc.List'

--- Shift headings in block list by given number
function shift_headings(blocks, shift_by)
  if not shift_by then
    return blocks
  end

  local shift_headings_filter = {
    Header = function (header)
      header.level = header.level + shift_by
      return header
    end
  }

  return pandoc.walk_block(pandoc.Div(blocks), shift_headings_filter).content
end

--- Filter function for code blocks
function CodeBlock(cb)
  -- ignore code blocks which are not of class "include".
  if not cb.classes:includes 'include' then
    return
  end

  -- Markdown is used if this is nil.
  local format = cb.attributes['format']
  local shift_heading_level_by =
    tonumber(cb.attributes['shift-heading-level-by'])

  local blocks = List:new()
  for line in cb.text:gmatch('[^\n]+') do
    if line:sub(1,2) ~= '//' then
      -- Read in the document at the file path specified by `line`.
      local fh = io.open(line)
      local document = pandoc.read(fh:read '*a', format)
      -- Before shifting headings, add a title heading at the beginning of the chapter.
      local heading = pandoc.Header(1, pandoc.Str(pandoc.utils.stringify(document.meta.title)))
      document.blocks:insert(1, heading)
      -- Shift all headings by the user-specified amount, which is 0 by default.
      local chapter = shift_headings(document.blocks, shift_heading_level_by)
      -- Concatenate the chapter blocks (discarding the metadata) to the current document.
      blocks:extend(chapter)
      fh:close()
    end
  end
  return blocks
end
epub/render-liquid-template.rb (deleted) | 19 lines

@@ -1,19 +0,0 @@
#!/usr/bin/env ruby
require 'yaml'
require 'liquid'

unless ARGV.length == 3
  abort "Usage: render-liquid-template.rb [yaml_file] [markdown_in_file] [markdown_out_file]"
end

yaml_file, markdown_file_in, markdown_file_out = ARGV

[yaml_file, markdown_file_in].each do |file|
  unless File.file? file
    abort "File does not exist: '%s'" % [file]
  end
end

metadata = { 'site' => (YAML.load (File.read yaml_file)) }
template = Liquid::Template.parse (File.read markdown_file_in)
File.write markdown_file_out, (template.render metadata)
(deleted Lua filter; file name not shown in this view) | 5 lines

@@ -1,5 +0,0 @@
-- Transforms '<ul class={something}>' into '<ul>'.
function RawBlock (el)
  el.text = el.text:gsub('%s*<%s*ul%s*class=%s*"?[%w-]+"?%s*>%s*', '<ul>')
  return el
end
epub/rewrite-links.lua (deleted) | 19 lines

@@ -1,19 +0,0 @@
-- Performs the following transformations on Link targets:
--
-- Case 1:
--   [text](/chapter/#more-stuff) -> [text](#chapter-more-stuff)
--
-- Case 2:
--   [text](/chapter/) -> [text](#chapter)
--
-- All other Links are ignored.
function Link (el)
  local n
  -- Case 1:
  el.target, n = el.target:gsub("^/(%w+)/#([%w-]+)$", "#%1-%2")
  -- Case 2:
  if n == 0 then
    el.target = el.target:gsub("^/(%w+)/$", "#%1")
  end
  return el
end
hs/Hakyll/Web/Agda.hs

@@ -5,6 +5,7 @@
 module Hakyll.Web.Agda
   ( agdaCompilerWith
   , agdaVerbosityQuiet
+  , compileAgdaWith
   , CommandLineOptions(..)
   , PragmaOptions(..)
   , defaultAgdaOptions

@@ -50,8 +51,12 @@ defaultAgdaPragmaOptions = defaultPragmaOptions

 -- |Compile literate Agda to HTML
 agdaCompilerWith :: CommandLineOptions -> Compiler (Item String)
-agdaCompilerWith agdaOptions = cached "Hakyll.Web.Agda.agdaCompilerWith" $ do
-  item <- getResourceBody
+agdaCompilerWith agdaOptions =
+  getResourceBody >>= compileAgdaWith agdaOptions
+
+-- |Compile literate Agda to HTML
+compileAgdaWith :: CommandLineOptions -> Item String -> Compiler (Item String)
+compileAgdaWith agdaOptions item = cached "Hakyll.Web.Agda.agdaCompilerWith" $ do
   let agdaPath = toFilePath (itemIdentifier item)
   let moduleName = agdaModule (itemBody item)
   TmpFile tmpPath <- newTmpFile ".lock"

@@ -120,7 +125,7 @@ readStdlibVersion stdlibPath = do
   changelog <- T.readFile changelogPath
   let versionLine = head (T.lines changelog)
   case T.stripPrefix "Version " versionLine of
-    Just versionStr -> return . T.unpack $ "v" <> T.strip versionStr
+    Just versionStr -> return $ T.unpack ("v" <> T.strip versionStr)
     Nothing -> error $ printf "Could not read version from '%s'" changelogPath

 -- |Fix references to the Agda standard library.
hs/Hakyll/Web/Routes/Permalink.hs

@@ -3,7 +3,6 @@
 module Hakyll.Web.Routes.Permalink
   ( convertPermalink
   , permalinkRoute
-  , permalinkRouteWithDefault
   , stripIndexFile
   ) where

@@ -19,11 +18,12 @@ convertPermalink :: FilePath -> FilePath
 convertPermalink link = fromMaybe link (stripPrefix "/" link) </> "index.html"

 permalinkRoute :: Routes
-permalinkRoute = permalinkRouteWithDefault (error "Missing field 'permalink'.")
-
-permalinkRouteWithDefault :: Routes -> Routes
-permalinkRouteWithDefault def = metadataRoute $ \metadata ->
-  maybe def (constRoute . convertPermalink) (lookupString "permalink" metadata)
+permalinkRoute = metadataRoute $ \metadata ->
+  case lookupString "permalink" metadata of
+    Nothing ->
+      customRoute (\identifier -> error $ "missing field 'permalink' in metadata " <> toFilePath identifier)
+    Just permalink ->
+      constRoute (convertPermalink permalink)

 -- Removes "index.html" from URLs.
 stripIndexFile :: String -> String
hs/Hakyll/Web/Template/Context/Derived.hs (new file) | 33 lines

@@ -0,0 +1,33 @@
{-# LANGUAGE RankNTypes #-}

module Hakyll.Web.Template.Context.Derived where

import Hakyll
import Text.Printf

addDerivedField
  :: String
  -> (forall b. Context b -> [String] -> Item b -> Compiler ContextField)
  -> Context a
  -> Context a
addDerivedField key derive ctx = Context $ \k a i ->
  if k == key then
    derive ctx a i
  else do
    fld <- unContext ctx k a i
    return $
      case fld of
        -- If the field is a list, recursively derive the field for any list items.
        ListField itemCtx items -> ListField (addDerivedField key derive itemCtx) items
        -- Otherwise, simply return the field.
        otherFld -> otherFld


-- Retrieve a String from the context
getString :: String -> Context a -> [String] -> Item a -> Compiler String
getString key ctx a i = do
  fld <- unContext ctx key a i
  case fld of
    StringField str -> return str
    _ -> fail $ printf "Key '%s' does not return a String" key
hs/Hakyll/Web/Template/Context/Metadata.hs

@@ -1,53 +1,85 @@
-module Hakyll.Web.Template.Context.Metadata where
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hakyll.Web.Template.Context.Metadata
+  ( includeContext
+  , metadataContext
+  , objectContext
+  , restoreMetadata
+  ) where

 import Control.Monad ((<=<))
 import Data.Aeson (Object, Value(..))
+import qualified Data.ByteString.Char8 as BS
 import qualified Data.HashMap.Strict as H
 import qualified Data.Text as T
 import qualified Data.Vector as V
 import Hakyll
 import Text.Printf (printf)
+import qualified Data.Yaml as Y

--- |Create a Context based on a JSON Object.
-objectContext :: Context String -> Context Object
-objectContext ctx = Context $ \k _ i ->
-  let o = itemBody i in
-  case H.lookup (T.pack k) o of
-    Just v -> decodeValue ctx v
-    Nothing -> fail $ printf "Key '%s' undefined in context '%s'" k (show o)
-
-isObject :: Value -> Bool
-isObject (Object _) = True
-isObject _ = False
+-- |Create a |Context| from the |Metadata| associated with an |Item|.
+metadataContext
+  :: Context String -- ^ |Context| used when unfolding a JSON |Array| into a |ListField|.
+  -> Context String
+metadataContext ctx = Context $ \k a i -> do
+  let identifier = itemIdentifier i
+  metadata <- getMetadata identifier
+  item <- makeItem metadata
+  unContext (objectContext ctx) k a item

-isString :: Value -> Bool
-isString (String _) = True
-isString _ = False
+-- |Create a |Context| from a JSON |Object| which loads data from files under "include" keys.
+includeContext
+  :: Context String -- ^ |Context| used when unfolding a JSON |Array| into a |ListField|.
+  -> Context Object
+includeContext ctx = Context $ \k a i -> do
+  let o = itemBody i
+  v <- lookupObject "include" o
+  identifier <- fromFilePath <$> toString v
+  unContext (ctx <> metadataContext ctx) k a =<< load identifier
+
+-- |Create a |Context| from a JSON |Object|.
+objectContext
+  :: Context String -- ^ |Context| used when unfolding a JSON |Array| into a |ListField|.
+  -> Context Object
+objectContext ctx = Context $ \k _ i -> do
+  let o = itemBody i
+  decodeValue ctx =<< lookupObject k o
+
+-- |Decode a JSON Value to a context field.
+decodeValue :: Context String -> Value -> Compiler ContextField
+decodeValue ctx (Array a) = do
+  objs <- mapM (makeItem <=< toObject) (V.toList a)
+  return $ ListField (includeContext ctx <> objectContext ctx) objs
+decodeValue _ctx (String s) = return . StringField $ T.unpack s
+decodeValue _ctx (Number n) = return . StringField $ show n
+decodeValue _ctx (Bool b) = return . StringField $ show b
+decodeValue _ctx v = fail $ printf "Unsupported value '%s'" (show v)
+
+-- |Lookup the |Value| stored in an |Object| at the given key.
+lookupObject :: MonadFail m => String -> Object -> m Value
+lookupObject k o = maybe ifNotFound ifFound (H.lookup (T.pack k) o)
+  where
+    ifFound = return
+    ifNotFound = fail $ printf "Key '%s' undefined in context '%s'" k (show o)

 -- |Convert a |Value| to an |Object|, or fail.
 toObject :: MonadFail m => Value -> m Object
 toObject (Object o) = return o
 toObject v = fail $ printf "Not an object '%s'" (show v)

 -- |Convert a |Value| to an |String|, or fail.
 toString :: MonadFail m => Value -> m String
 toString (String s) = return (T.unpack s)
 toString v = fail $ printf "Not a string '%s'" (show v)

--- |Decode a JSON Value to a context field.
-decodeValue :: Context String -> Value -> Compiler ContextField
-decodeValue ctx (Array a)
-  | V.all isObject a = do
-      objs <- mapM (makeItem <=< toObject) (V.toList a)
-      return $ ListField (objectContext ctx) objs
-  | V.all isString a = do
-      items <- mapM (load . fromFilePath <=< toString) (V.toList a)
-      return $ ListField ctx items
-decodeValue _ (String s) = return . StringField $ T.unpack s
-decodeValue _ (Number n) = return . StringField $ show n
-decodeValue _ (Bool b) = return . StringField $ show b
-decodeValue _ v = fail $ printf "Unsupported value '%s'" (show v)
-
--- |Create a Context based on the Metadata.
-metadataContext :: Context String -> Context String
-metadataContext ctx = Context $ \k a i ->
-  unContext (objectContext ctx) k a <=< makeItem <=< getMetadata $ itemIdentifier i
+-- |Add the original |Metadata| block back to the file.
+restoreMetadata :: Item String -> Compiler (Item String)
+restoreMetadata item = do
+  metadata <- getMetadata (itemIdentifier item)
+  if H.null metadata then
+    return item
+  else do
+    let yaml = "---\n" <> BS.unpack (Y.encode metadata) <> "---\n\n"
+    withItemBody (\body -> return (yaml <> body)) item
255
hs/Main.hs
255
hs/Main.hs
|
@ -1,22 +1,26 @@
|
|||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
import Control.Monad ((<=<), forM_)
|
||||
import qualified Data.ByteString.Lazy as BL
|
||||
import Control.Monad ((<=<), (>=>), forM_)
|
||||
import Data.Char (toLower)
|
||||
import Data.Functor ((<&>))
|
||||
import Data.List (isPrefixOf, stripPrefix)
|
||||
import qualified Data.Text as T
|
||||
import qualified Data.Text.ICU as RE
|
||||
import qualified Data.Text.ICU.Replace as T
|
||||
import Hakyll
|
||||
import Hakyll.Web.Agda
|
||||
import Hakyll.Web.Routes.Permalink
|
||||
import Hakyll.Web.Template.Numeric
|
||||
import Hakyll.Web.Template.Context.Derived
|
||||
import Hakyll.Web.Template.Context.Metadata
|
||||
import Hakyll.Web.Template.Context.Title
|
||||
import Hakyll.Web.Sass
|
||||
import System.FilePath ((</>), takeDirectory)
|
||||
import System.FilePath ((</>), takeDirectory, replaceExtensions, splitPath, joinPath)
|
||||
import qualified Text.CSL as CSL
|
||||
import qualified Text.CSL.Pandoc as CSL (processCites)
|
||||
import Text.Pandoc (Pandoc(..), ReaderOptions(..), WriterOptions(..), Extension(..))
|
||||
import qualified Text.Pandoc as Pandoc
|
||||
import qualified Text.Pandoc.Filter as Pandoc (Filter(..), applyFilters)
|
||||
import Text.Printf
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Configuration
|
||||
|
@ -65,20 +69,22 @@ siteContext = mconcat
|
|||
, "authors/jsiek.metadata"
|
||||
]
|
||||
, constField "google_analytics" "UA-125055580-1"
|
||||
, defaultContext
|
||||
, addAnchor defaultContext
|
||||
]
|
||||
|
||||
siteSectionContext :: Context String
|
||||
siteSectionContext = mconcat
|
||||
[ titlerunningField
|
||||
, subtitleField
|
||||
, addShiftedBody "raw" (contentField "raw" "raw")
|
||||
, siteContext
|
||||
]
|
||||
|
||||
tableOfContentsContext :: Context String -> Context String
|
||||
tableOfContentsContext ctx = Context $ \k a _ -> do
|
||||
m <- makeItem <=< getMetadata $ "src/plfa/toc.metadata"
|
||||
unContext (objectContext ctx) k a m
|
||||
tableOfContentsContext ctx = Context $ \k a _ ->
|
||||
unContext (objectContext ctx) k a
|
||||
=<< makeItem
|
||||
=<< getMetadata "src/plfa/toc.metadata"
|
||||
|
||||
acknowledgementsContext :: Context String
|
||||
acknowledgementsContext = mconcat
|
||||
|
@ -124,29 +130,6 @@ sassOptions = defaultSassOptions
|
|||
{ sassIncludePaths = Just ["css"]
|
||||
}
|
||||
|
||||
epubSectionContext :: Context String
|
||||
epubSectionContext = mconcat
|
||||
[ contentField "content" "content"
|
||||
, titlerunningField
|
||||
, subtitleField
|
||||
]
|
||||
|
||||
epubReaderOptions :: ReaderOptions
|
||||
epubReaderOptions = siteReaderOptions
|
||||
{ readerStandalone = True
|
||||
, readerStripComments = True
|
||||
}
|
||||
|
||||
epubWriterOptions :: WriterOptions
|
||||
epubWriterOptions = siteWriterOptions
|
||||
{ writerTableOfContents = True
|
||||
, writerTOCDepth = 2
|
||||
, writerEpubFonts = [ "public/webfonts/DejaVuSansMono.woff"
|
||||
, "public/webfonts/FreeMono.woff"
|
||||
, "public/webfonts/mononoki.woff"
|
||||
]
|
||||
, writerEpubChapterLevel = 2
|
||||
}
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- Build site
|
||||
|
@ -164,32 +147,29 @@ main = do
    -- Build function to fix local URLs
    fixLocalLink <- mkFixLocalLink "src"

    -- Build compiler for Markdown pages
    let pageCompiler :: Compiler (Item String)
        pageCompiler = do
    let maybeCompileAgda
          :: Maybe CommandLineOptions -- ^ If this argument is passed, Agda compilation is used.
          -> Item String
          -> Compiler (Item String)
        maybeCompileAgda Nothing     = return
        maybeCompileAgda (Just opts) =
          compileAgdaWith opts >=>
          withItemBody (return . withUrls fixStdlibLink) >=>
          withItemBody (return . withUrls fixLocalLink)

    -- Build compiler for Markdown pages with optional Literate Agda
    let pageCompiler
          :: Maybe CommandLineOptions -- ^ If this argument is passed, Agda compilation is used.
          -> Compiler (Item String)
        pageCompiler maybeOpts = do
          csl <- load cslFileName
          bib <- load bibFileName
          getResourceBody
            >>= saveSnapshot "raw"
            >>= maybeCompileAgda maybeOpts
            >>= readMarkdownWith siteReaderOptions
            >>= processCites csl bib
            <&> writeHTML5With siteWriterOptions
            >>= saveSnapshot "content"
            >>= loadAndApplyTemplate "templates/page.html" siteSectionContext
            >>= loadAndApplyTemplate "templates/default.html" siteSectionContext
            >>= prettifyUrls

    -- Build compiler for literate Agda pages
    let pageWithAgdaCompiler :: CommandLineOptions -> Compiler (Item String)
        pageWithAgdaCompiler opts = do
          csl <- load cslFileName
          bib <- load bibFileName
          agdaCompilerWith opts
            >>= withItemBody (return . withUrls fixStdlibLink)
            >>= withItemBody (return . withUrls fixLocalLink)
            >>= readMarkdownWith siteReaderOptions
            >>= processCites csl bib
            <&> writeHTML5With siteWriterOptions
            >>= saveSnapshot "content"
            >>= loadAndApplyTemplate "templates/page.html" siteSectionContext
            >>= loadAndApplyTemplate "templates/default.html" siteSectionContext
            >>= prettifyUrls
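The `Nothing` branch of `maybeCompileAgda` is `return`, the identity of Kleisli composition (`>=>`), so when no Agda options are supplied the Agda stage drops out of the pipeline entirely. A minimal sketch of the law this relies on (not part of the commit; names are illustrative):

```
import Control.Monad ((>=>))

-- 'return' is a left identity for '>=>', so a stage defined as 'return'
-- passes the item through unchanged.
skipStage :: Monad m => a -> m a
skipStage = return

-- For any other stage: (skipStage >=> stage) x == stage x
```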
@ -198,40 +178,14 @@ main = do
--
-- NOTE: The order of the various match expressions is important:
-- Special-case compilation instructions for files such as
-- "src/plfa/epub.md" and "src/plfa/index.md" would be overwritten
-- by the general purpose compilers for "src/**.md", which would
-- "src/plfa/index.md" would be overwritten by the general
-- purpose compilers for "src/**.md", which would
-- cause them to render incorrectly. It is possible to explicitly
-- exclude such files using `complement` patterns, but this vastly
-- complicates the match patterns.
--
hakyll $ do

  -- Compile EPUB
  match "src/plfa/epub.md" $ do
    route $ constRoute "plfa.epub"
    compile $ do
      epubTemplate <- load "templates/epub.html"
                        >>= compilePandocTemplate
      epubMetadata <- load "src/plfa/epub.xml"
      let ropt = epubReaderOptions
      let wopt = epubWriterOptions
                   { writerTemplate = Just . itemBody $ epubTemplate
                   , writerEpubMetadata = Just . T.pack . itemBody $ epubMetadata
                   }
      getResourceBody
        >>= applyAsTemplate (tableOfContentsContext epubSectionContext)
        >>= readPandocWith ropt
        >>= applyPandocFilters ropt [] "epub3"
        >>= writeEPUB3With wopt

  match "templates/epub.html" $
    compile $ getResourceBody
      >>= applyAsTemplate siteContext

  match "src/plfa/epub.xml" $
    compile $ getResourceBody
      >>= applyAsTemplate siteContext

  -- Compile Table of Contents
  match "src/plfa/index.md" $ do
    route permalinkRoute
@ -251,9 +205,9 @@ main = do
    route permalinkRoute
    compile $ getResourceBody
      >>= applyAsTemplate acknowledgementsContext
      >>= saveSnapshot "raw"
      >>= readMarkdownWith siteReaderOptions
      <&> writeHTML5With siteWriterOptions
      >>= saveSnapshot "content"
      >>= loadAndApplyTemplate "templates/page.html" siteContext
      >>= loadAndApplyTemplate "templates/default.html" siteContext
      >>= prettifyUrls
@ -282,7 +236,7 @@ main = do
      >>= readMarkdownWith siteReaderOptions
      >>= processCites csl bib
      <&> writeHTML5With siteWriterOptions
      >>= saveSnapshot "content"
      >>= saveSnapshot "content" -- used for teaser
      >>= loadAndApplyTemplate "templates/post.html" postContext
      >>= loadAndApplyTemplate "templates/default.html" siteContext
      >>= prettifyUrls
@ -290,12 +244,12 @@ main = do
  -- Compile sections using literate Agda
  match "src/**.lagda.md" $ do
    route permalinkRoute
    compile $ pageWithAgdaCompiler agdaOptions
    compile $ pageCompiler (Just agdaOptions)

  -- Compile other sections and pages
  match ("README.md" .||. "src/**.md" .&&. complement "src/plfa/epub.md") $ do
  match ("README.md" .||. "src/**.md") $ do
    route permalinkRoute
    compile pageCompiler
    compile (pageCompiler Nothing)

  -- Compile course pages
  match "courses/**.lagda.md" $ do
@ -305,11 +259,11 @@ main = do
      let courseOptions = agdaOptions
            { optIncludePaths = courseDir : optIncludePaths agdaOptions
            }
      pageWithAgdaCompiler courseOptions
      pageCompiler (Just courseOptions)

  match "courses/**.md" $ do
    route permalinkRoute
    compile pageCompiler
    compile $ pageCompiler Nothing

  match "courses/**.pdf" $ do
    route idRoute
@ -344,9 +298,10 @@ main = do
  create ["public/css/style.css"] $ do
    route idRoute
    compile $ do
      csses <- loadAll ("css/*.css" .||. "css/*.scss" .&&. complement "css/epub.css")
      csses <- loadAll ("css/*.css" .||. "css/*.scss")
      makeItem $ unlines $ map itemBody csses


  -- Copy versions
  let versions = ["19.08", "20.07"]
  forM_ versions $ \v -> do
@ -363,6 +318,35 @@ main = do
    compile copyFileCompiler


  -- Raw versions used from Makefile for PDF and EPUB construction

  -- Compile raw version of acknowledgements used in constructing the PDF
  match "src/plfa/backmatter/acknowledgements.md" $ version "raw" $ do
    route $ gsubRoute "src/" (const "raw/")
    compile $ getResourceBody
      >>= applyAsTemplate acknowledgementsContext
      >>= restoreMetadata

  -- Compile raw version of index used in constructing the PDF
  match "book/pdf.tex" $ version "raw" $ do
    route $ gsubRoute "book/" (const "raw/")
    compile $ getResourceBody
      >>= applyAsTemplate (addTexPath (tableOfContentsContext siteSectionContext))

  -- Compile raw version of index used in constructing the EPUB
  match "book/epub.md" $ version "raw" $ do
    route $ gsubRoute "book/" (const "raw/")
    compile $ getResourceBody
      >>= applyAsTemplate (tableOfContentsContext siteSectionContext)
      >>= loadAndApplyTemplate "templates/metadata.md" siteContext

  -- Compile metadata XML used in constructing the EPUB
  match "book/epub.xml" $ version "raw" $ do
    route $ gsubRoute "book/" (const "raw/")
    compile $ getResourceBody
      >>= applyAsTemplate siteContext


--------------------------------------------------------------------------------
-- Custom readers and writers
--------------------------------------------------------------------------------
@ -390,7 +374,7 @@ processCites csl bib item = do
  withItemBody (return . CSL.processCites style refs) item

-- | Write a document as HTML using Pandoc, with the supplied options.
writeHTML5With :: WriterOptions -- ^ Writer options for pandoc
writeHTML5With :: WriterOptions -- ^ Writer options for Pandoc
               -> Item Pandoc   -- ^ Document to write
               -> Item String   -- ^ Resulting HTML
writeHTML5With wopt (Item itemi doc) =
@ -398,27 +382,6 @@ writeHTML5With wopt (Item itemi doc) =
    Left err    -> error $ "Hakyll.Web.Pandoc.writePandocWith: " ++ show err
    Right item' -> Item itemi $ T.unpack item'

-- | Write a document as EPUB3 using Pandoc, with the supplied options.
writeEPUB3With :: WriterOptions -> Item Pandoc -> Compiler (Item BL.ByteString)
writeEPUB3With wopt (Item itemi doc) =
  return $ case Pandoc.runPure $ Pandoc.writeEPUB3 wopt doc of
    Left err   -> error $ "Hakyll.Web.Pandoc.writeEPUB3With: " ++ show err
    Right doc' -> Item itemi doc'

-- | Apply a filter to a Pandoc document.
applyPandocFilters :: ReaderOptions -> [Pandoc.Filter] -> String -> Item Pandoc -> Compiler (Item Pandoc)
applyPandocFilters ropt filters fmt = withItemBody $
  unsafeCompiler . Pandoc.runIOorExplode . Pandoc.applyFilters ropt filters [fmt]

-- | Compile a Pandoc template (as opposed to a Hakyll template).
compilePandocTemplate :: Item String -> Compiler (Item (Pandoc.Template T.Text))
compilePandocTemplate i = do
  let templatePath = toFilePath $ itemIdentifier i
  let templateBody = T.pack $ itemBody i
  templateOrError <- unsafeCompiler $ Pandoc.compileTemplate templatePath templateBody
  template <- either fail return templateOrError
  makeItem template
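`compilePandocTemplate` is consumed by the EPUB rule earlier in this diff; a condensed sketch of that call site, assuming the definitions above:

```
-- Condensed from the EPUB rule above (not new code): load the raw template,
-- compile it with Pandoc's template engine, then hand it to the writer.
--   epubTemplate <- load "templates/epub.html" >>= compilePandocTemplate
--   let wopt = epubWriterOptions
--                { writerTemplate = Just (itemBody epubTemplate) }
```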
--------------------------------------------------------------------------------
-- Supply snapshot as a field to the template
@ -428,9 +391,83 @@ contentField :: String -> Snapshot -> Context String
contentField key snapshot = field key $ \item ->
  itemBody <$> loadSnapshot (itemIdentifier item) snapshot


--------------------------------------------------------------------------------
-- Relativise URLs and strip "index.html" suffixes
--------------------------------------------------------------------------------

prettifyUrls :: Item String -> Compiler (Item String)
prettifyUrls = relativizeUrls <=< withItemBody (return . stripIndexFile)


--------------------------------------------------------------------------------
-- Text wrangling for EPUB and PDF
--------------------------------------------------------------------------------

-- Convert MD_DIR/%.md to LAGDA_TEX_DIR/%.lagda.tex or TEX_DIR/%.tex
--
-- NOTE: This logic is partially duplicated in book/pdf.mk:TEX_PATH.
--
-- NOTE: This function assumes pdf.tex will be at TEX_DIR/.
--
addTexPath :: Context a -> Context a
addTexPath = addDerivedField "tex_path" deriveTexPath
  where
    deriveTexPath :: Context a -> [String] -> Item a -> Compiler ContextField
    deriveTexPath ctx a i = do
      includePath <- getString "include" ctx a i
      return $ StringField (texPath includePath)
texPath :: FilePath -> FilePath
texPath fnDotMd
  | fnDotMd == "README.md" = "plfa/frontmatter/README.tex"
  | any (`isPrefixOf` fnDotMd) ["src/", "book/"] = dropTopDirectory (replaceExtensions fnDotMd ".tex")
  | otherwise = error ("texPath: cannot map " <> fnDotMd)

dropTopDirectory :: FilePath -> FilePath
dropTopDirectory = joinPath . tail . splitPath
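To make the mapping concrete, here are a few worked examples of `texPath` on hypothetical inputs (derived from the guards above, not from the commit):

```
-- texPath "README.md"                        == "plfa/frontmatter/README.tex"
-- texPath "src/plfa/part1/Naturals.lagda.md" == "plfa/part1/Naturals.tex"
-- texPath "book/pdf.tex"                     == "pdf.tex"
-- texPath "notes/Draft.md"                   -- error: texPath: cannot map
```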
-- Add an anchor based on the permalink, to be used as the header id.
addAnchor :: Context a -> Context a
addAnchor = addDerivedField "anchor" deriveAnchor
  where
    deriveAnchor :: Context a -> [String] -> Item a -> Compiler ContextField
    deriveAnchor ctx a i = do
      permalink <- getString "permalink" ctx a i
      StringField <$> anchor permalink

    anchor :: String -> Compiler String
    anchor permalink =
      let maybeAnchor = map toLower <$> (stripSuffix "/" <=< stripPrefix "/") permalink
      in maybe (fail $ printf "Key 'permalink' malformed '%s'" permalink) return maybeAnchor

    stripSuffix :: String -> String -> Maybe String
    stripSuffix suf str = reverse <$> stripPrefix (reverse suf) (reverse str)
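A worked example of the anchor derivation, assuming the definitions above: a well-formed permalink carries a leading and a trailing slash, both of which are stripped before lower-casing.

```
-- anchor "/Naturals/"  succeeds with "naturals"
-- anchor "Naturals/"   fails: stripPrefix "/" yields Nothing, so the
--                      Compiler fails with "Key 'permalink' malformed 'Naturals/'"
```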
-- Add a variant of 'key' where all headers have been shifted by 1.
addShiftedBody :: String -> Context a -> Context a
addShiftedBody key = addDerivedField ("shifted_" <> key) deriveShiftedBody
  where
    deriveShiftedBody :: Context a -> [String] -> Item a -> Compiler ContextField
    deriveShiftedBody ctx a i = do
      body <- getString key ctx a i
      return $ StringField (shiftHeadersBy body)

-- Shift all headers by a given value.
--
-- NOTE: This is the /proper/ implementation of shift headers.
--       In practice, we use the fast one, which uses regular
--       expressions and only works on Markdown '#' headers.
--
-- shiftHeadersBy :: Int -> Pandoc -> Pandoc
-- shiftHeadersBy n = walk shiftHeader
--   where
--     shiftHeader :: Block -> Block
--     shiftHeader (Header level attr inlines) = Header (level + n) attr inlines
--     shiftHeader block = block
--
shiftHeadersBy :: String -> String
shiftHeadersBy body = T.unpack (T.replaceAll re "#$1" (T.pack body))
  where
    re = RE.regex [RE.Multiline] "^(#+)"
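For instance, the regex prefixes one more `#` to every run of `#` at the start of a line, i.e. a shift by exactly one (a sketch of the behaviour, not part of the commit):

```
-- shiftHeadersBy "# Title\n## Section"  ==  "## Title\n### Section"
-- Setext (underlined) headers are untouched; that limitation is why the
-- commented-out Pandoc walk above is called the /proper/ implementation.
```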
41
plfa.cabal
@ -12,30 +12,33 @@ build-type: Simple
common shared-properties
  ghc-options: -Wall
  default-language: Haskell2010
  build-depends: aeson >=1.4 && <1.5
               , base >=4.6 && <5
               , bytestring >=0.10 && <0.11
               , containers >=0.6 && <0.7
               , directory >=1.2 && <1.4
               , doctemplates >=0.8 && <0.9
               , extra >=1.7 && <1.8
               , filemanip >=0.3 && <0.4
               , filepath >=1.3 && <1.5
               , frontmatter >=0.1 && <0.2
               , hakyll >=4.10 && <4.15
               , pandoc >=2.1 && <2.11
               , pandoc-types >=1.20 && <1.21
               , pandoc-citeproc >=0.17 && <0.18
               , text >=1.2 && <1.3
               , unordered-containers >=0.2 && <0.3
               , vector >=0.12 && <0.13
               , yaml >=0.11 && <0.12
  build-depends: aeson >=1.4 && <1.6
               , base >=4.6 && <5
               , bytestring >=0.10 && <0.11
               , containers >=0.6 && <0.7
               , directory >=1.2 && <1.4
               , doctemplates >=0.8 && <1.0
               , extra >=1.7 && <1.8
               , filemanip >=0.3 && <0.4
               , filepath >=1.3 && <1.5
               , frontmatter >=0.1 && <0.2
               , hakyll >=4.10 && <4.15
               , pandoc >=2.1 && <2.11
               , pandoc-types >=1.20 && <1.23
               , pandoc-citeproc >=0.17 && <0.18
               , text >=1.2 && <1.3
               , text-icu >=0.7.1 && <0.8
               , text-regex-replace >=0.1 && <0.2
               , unordered-containers >=0.2 && <0.3
               , vector >=0.12 && <0.13
               , yaml >=0.11 && <0.12

library
  import: shared-properties
  hs-source-dirs: hs
  exposed-modules: Hakyll.Web.Agda
                 , Hakyll.Web.Template.Numeric
                 , Hakyll.Web.Template.Context.Derived
                 , Hakyll.Web.Template.Context.Metadata
                 , Hakyll.Web.Template.Context.Title
                 , Hakyll.Web.Sass
@ -43,8 +46,6 @@ library
  build-depends: Agda ==2.6.1.3
               , hsass >=0.8 && <0.9
               , regex-tdfa >=1.3 && <1.4
               , text-icu >=0.7.1 && <0.8
               , text-regex-replace >=0.1 && <0.2

executable site
  import: shared-properties
@ -4,6 +4,6 @@ title: "PLFA as EPUB"
It has been just over a year and a half since this feature was first requested in [#112][issue112]… Thanks to hard work by [Michael Reed][mreed20], and a little elbow grease on our part, it is finally here! [An EPUB version of PLFA!][EPUB]

[EPUB]: https://plfa.github.io/out/epub/plfa.epub
[EPUB]: https://plfa.github.io/epub/plfa.epub
[issue112]: https://github.com/plfa/plfa.github.io/issues/112
[mreed20]: https://github.com/mreed20
31
posts/2021-08-24-plfa-as-pdf-and-epub.md
Normal file
@ -0,0 +1,31 @@
---
title: "PLFA as PDF and EPUB"
---

We’re pleased to announce that PLFA is now available as both [a PDF][PDF] and [an EPUB][EPUB]! Thanks to hard work by [Yuanting Mao][Altariarite], who did an internship with Phil at the University of Edinburgh, and a little elbow grease on our part, we’re finally compiling to PDF!

In a double whammy, we’ve recently fixed compilation to EPUB as well! Compilation to EPUB never quite survived the move to Hakyll, and the EPUB available on the website was broken for quite a while. Fortunately, we’ve fixed the EPUB compilation, based on [Yuanting Mao][Altariarite]’s work on PDF compilation and [Michael Reed][mreed20]’s original work on EPUB compilation.

There are still several kinks to iron out in the process:

- ([#577][issue577]) EPUB generation breaks internal links;
- ([#578][issue578]) EPUB fragment identifier is not defined;
- ([#579][issue579]) EPUB font cannot be found;
- ([#580][issue580]) EPUB attribute "name" not allowed;
- ([#581][issue581]) PDF code overflows margins.

And probably tons of other little bugs we’ve missed!

As always, bug reports and pull requests are very, very welcome!

[Altariarite]: https://github.com/Altariarite
[mreed20]: https://github.com/mreed20

[PDF]: https://plfa.github.io/plfa.pdf
[EPUB]: https://plfa.github.io/plfa.epub

[issue577]: https://github.com/plfa/plfa.github.io/issues/577
[issue578]: https://github.com/plfa/plfa.github.io/issues/578
[issue579]: https://github.com/plfa/plfa.github.io/issues/579
[issue580]: https://github.com/plfa/plfa.github.io/issues/580
[issue581]: https://github.com/plfa/plfa.github.io/issues/581
BIN
public/webfonts/DejaVu-mononoki-Symbola-Droid.woff
Normal file
Binary file not shown.
@ -536,7 +536,7 @@ Show empty is the right identity of sums up to isomorphism.
-- Your code goes here
```

## Implication is function {name=implication}
## Implication is function {#implication}

Given two propositions `A` and `B`, the implication `A → B` holds if
whenever `A` holds then `B` must also hold. We formalise implication using
@ -561,7 +561,7 @@ postulate
-- Your code goes here
```

## Proof by reflection {name=proof-by-reflection}
## Proof by reflection {#proof-by-reflection}

Let's revisit our definition of monus from
Chapter [Naturals](/Naturals/).
@ -132,7 +132,7 @@ Again, a useful exercise is to carry out an interactive development,
checking how Agda's knowledge changes as each of the two arguments is
instantiated.

## Congruence and substitution {name=cong}
## Congruence and substitution {#cong}

Equality satisfies _congruence_. If two terms are equal,
they remain so after the same function is applied to both:
@ -635,7 +635,7 @@ Jesper Cockx, Dominique Devries, Andreas Nuyts, and Philip Wadler,
draft, 2017.)


## Universe polymorphism {name=unipoly}
## Universe polymorphism {#unipoly}

As we have seen, not every type belongs to `Set`, but instead every
type belongs somewhere in the hierarchy `Set₀`, `Set₁`, `Set₂`, and so on,
@ -71,7 +71,7 @@ distributes over another operator. A careful author will often call
out these properties---or their lack---for instance by pointing out
that a newly introduced operator is associative but not commutative.

#### Exercise `operators` (practice) {name=operators}
#### Exercise `operators` (practice) {#operators}

Give another example of a pair of operators that have an identity
and are associative, commutative, and distribute over one another.
@ -599,7 +599,7 @@ the main proposition first, and the equations required to do so
will suggest what lemmas to prove.


## Our first corollary: rearranging {name=sections}
## Our first corollary: rearranging {#sections}

We can apply associativity to rearrange parentheses however we like.
Here is an example:
@ -695,7 +695,7 @@ judgments where the first number is less than _m_.
There is also a completely finite approach to generating the same equations,
which is left as an exercise for the reader.

#### Exercise `finite-+-assoc` (stretch) {name=finite-plus-assoc}
#### Exercise `finite-+-assoc` (stretch) {#finite-plus-assoc}

Write out what is known about associativity of addition on each of the
first four days using a finite story of creation, as
|
+-assoc′ (suc m) n p rewrite +-assoc′ m n p = refl


#### Exercise `+-swap` (recommended) {name=plus-swap}
#### Exercise `+-swap` (recommended) {#plus-swap}

Show
@ -872,7 +872,7 @@ is associative and commutative.
```


#### Exercise `*-distrib-+` (recommended) {name=times-distrib-plus}
#### Exercise `*-distrib-+` (recommended) {#times-distrib-plus}

Show multiplication distributes over addition, that is,
@ -885,7 +885,7 @@ for all naturals `m`, `n`, and `p`.
```


#### Exercise `*-assoc` (recommended) {name=times-assoc}
#### Exercise `*-assoc` (recommended) {#times-assoc}

Show multiplication is associative, that is,
@ -898,7 +898,7 @@ for all naturals `m`, `n`, and `p`.
```


#### Exercise `*-comm` (practice) {name=times-comm}
#### Exercise `*-comm` (practice) {#times-comm}

Show multiplication is commutative, that is,
@ -912,7 +912,7 @@ you will need to formulate and prove suitable lemmas.
```


#### Exercise `0∸n≡0` (practice) {name=zero-monus}
#### Exercise `0∸n≡0` (practice) {#zero-monus}

Show
@ -925,7 +925,7 @@ for all naturals `n`. Did your proof require induction?
```


#### Exercise `∸-+-assoc` (practice) {name=monus-plus-assoc}
#### Exercise `∸-+-assoc` (practice) {#monus-plus-assoc}

Show that monus associates with addition, that is,
@ -949,7 +949,7 @@ Show the following three laws
for all `m`, `n`, and `p`.


#### Exercise `Bin-laws` (stretch) {name=Bin-laws}
#### Exercise `Bin-laws` (stretch) {#Bin-laws}

Recall that
Exercise [Bin](/Naturals/#Bin)
@ -83,7 +83,7 @@ g ∘′ f = λ x → g (f x)
```


## Extensionality {name=extensionality}
## Extensionality {#extensionality}

Extensionality asserts that the only way to distinguish functions is
by applying them; if two functions applied to the same argument always
@ -450,7 +450,7 @@ postulate
-- Your code goes here
```

#### Exercise `_⇔_` (practice) {name=iff}
#### Exercise `_⇔_` (practice) {#iff}

Define equivalence of propositions (also known as "if and only if") as follows:
```
@ -465,7 +465,7 @@ Show that equivalence is reflexive, symmetric, and transitive.
-- Your code goes here
```

#### Exercise `Bin-embedding` (stretch) {name=Bin-embedding}
#### Exercise `Bin-embedding` (stretch) {#Bin-embedding}

Recall that Exercises
[Bin](/Naturals/#Bin) and
@ -458,7 +458,7 @@ _ =
```
Now the time to reverse a list is linear in the length of the list.

## Map {name=Map}
## Map {#Map}

Map applies a function to every element of a list to generate a corresponding list.
Map is an example of a _higher-order function_, one which takes a function as an
@ -558,7 +558,7 @@ Define a suitable map operator over trees:
-- Your code goes here
```

## Fold {name=Fold}
## Fold {#Fold}

Fold takes an operator and a value, and uses the operator to combine
each of the elements of the list, taking the given value as the result
@ -833,7 +833,7 @@ Show that if `_⊗_` and `e` form a monoid, then `foldr _⊗_ e` and
```


## All {name=All}
## All {#All}

We can also define predicates over lists. Two of the most important
are `All` and `Any`.
@ -76,7 +76,7 @@ after zero; and `2` is shorthand for `suc (suc zero)`, which is the
same as `suc 1`, the successor of one; and `3` is shorthand for the
successor of two; and so on.

#### Exercise `seven` (practice) {name=seven}
#### Exercise `seven` (practice) {#seven}

Write out `7` in longhand.
@ -287,7 +287,7 @@ Parentheses and semicolons are among the few characters that cannot
appear in names, so we do not need extra spaces in the `using` list.


## Operations on naturals are recursive functions {name=plus}
## Operations on naturals are recursive functions {#plus}

Now that we have the natural numbers, what can we do with them?
For instance, can we define arithmetic operations such as
@ -425,7 +425,7 @@ is not like testimony in a court which must be weighed to determine
whether the witness is trustworthy. Rather, it is ironclad. The
other word for evidence, which we will use interchangeably, is _proof_.

#### Exercise `+-example` (practice) {name=plus-example}
#### Exercise `+-example` (practice) {#plus-example}

Compute `3 + 4`, writing out your reasoning as a chain of equations, using the equations for `+`.
@ -486,7 +486,7 @@ Here we have omitted the signature declaring `_ : 2 * 3 ≡ 6`, since
it can easily be inferred from the corresponding term.


#### Exercise `*-example` (practice) {name=times-example}
#### Exercise `*-example` (practice) {#times-example}

Compute `3 * 4`, writing out your reasoning as a chain of equations, using the equations for `*`.
(You do not need to step through the evaluation of `+`.)
@ -496,7 +496,7 @@ Compute `3 * 4`, writing out your reasoning as a chain of equations, using the e
```


#### Exercise `_^_` (recommended) {name=power}
#### Exercise `_^_` (recommended) {#power}

Define exponentiation, which is given by the following equations:
@ -566,7 +566,7 @@ _ =
  ∎
```

#### Exercise `∸-example₁` and `∸-example₂` (recommended) {name=monus-examples}
#### Exercise `∸-example₁` and `∸-example₂` (recommended) {#monus-examples}

Compute `5 ∸ 3` and `3 ∸ 5`, writing out your reasoning as a chain of equations.
@ -701,7 +701,7 @@ definitions is quite similar. They might be considered two sides of
the same coin.


## The story of creation, finitely {name=finite-creation}
## The story of creation, finitely {#finite-creation}

The above story was told in a stratified way. First, we create
the infinite set of naturals. We take that set as given when
@ -872,7 +872,7 @@ Haskell requires time proportional to the sum of the logarithms of
_m_ and _n_.


#### Exercise `Bin` (stretch) {name=Bin}
#### Exercise `Bin` (stretch) {#Bin}

A more efficient representation of natural numbers uses a binary
rather than a unary system. We represent a number as a bitstring:
@ -438,7 +438,7 @@ postulate
Does the converse hold? If so, prove; if not, explain why.


#### Exercise `Bin-isomorphism` (stretch) {name=Bin-isomorphism}
#### Exercise `Bin-isomorphism` (stretch) {#Bin-isomorphism}

Recall that Exercises
[Bin](/Naturals/#Bin),
@ -241,7 +241,7 @@ lack---for instance by saying that a newly introduced relation is a
partial order but not a total order.


#### Exercise `orderings` (practice) {name=orderings}
#### Exercise `orderings` (practice) {#orderings}

Give an example of a preorder that is not a partial order.
@ -359,7 +359,7 @@ and `suc n ≤ suc m` and must show `suc m ≡ suc n`. The inductive
hypothesis `≤-antisym m≤n n≤m` establishes that `m ≡ n`, and our goal
follows by congruence.

#### Exercise `≤-antisym-cases` (practice) {name=leq-antisym-cases}
#### Exercise `≤-antisym-cases` (practice) {#leq-antisym-cases}

The above proof omits cases where one argument is `z≤n` and one
argument is `s≤s`. Why is it ok to omit them?
@ -559,7 +559,7 @@ Show that multiplication is monotonic with regard to inequality.
```


## Strict inequality {name=strict-inequality}
## Strict inequality {#strict-inequality}

We can define strict inequality similarly to inequality:
```
@ -597,7 +597,7 @@ and conversely. One can then give an alternative derivation of the
properties of strict inequality, such as transitivity, by
exploiting the corresponding properties of inequality.

#### Exercise `<-trans` (recommended) {name=less-trans}
#### Exercise `<-trans` (recommended) {#less-trans}

Show that strict inequality is transitive.
@ -605,7 +605,7 @@ Show that strict inequality is transitive.
-- Your code goes here
```

#### Exercise `trichotomy` (practice) {name=trichotomy}
#### Exercise `trichotomy` (practice) {#trichotomy}

Show that strict inequality satisfies a weak version of trichotomy, in
the sense that for any `m` and `n` one of the following holds:
@ -623,7 +623,7 @@ similar to that used for totality.
-- Your code goes here
```

#### Exercise `+-mono-<` (practice) {name=plus-mono-less}
#### Exercise `+-mono-<` (practice) {#plus-mono-less}

Show that addition is monotonic with respect to strict inequality.
As with inequality, some additional definitions may be required.
@ -632,7 +632,7 @@ As with inequality, some additional definitions may be required.
-- Your code goes here
```

#### Exercise `≤-iff-<` (recommended) {name=leq-iff-less}
#### Exercise `≤-iff-<` (recommended) {#leq-iff-less}

Show that `suc m ≤ n` implies `m < n`, and conversely.
@ -640,7 +640,7 @@ Show that `suc m ≤ n` implies `m < n`, and conversely.
-- Your code goes here
```

#### Exercise `<-trans-revisited` (practice) {name=less-trans-revisited}
#### Exercise `<-trans-revisited` (practice) {#less-trans-revisited}

Give an alternative proof that strict inequality is transitive,
using the relation between strict inequality and inequality and
@ -749,7 +749,7 @@ evidence that the first number is odd. If it is because it is the
successor of an even number, then the result is odd because it is the
successor of the sum of two even numbers, which is even.

#### Exercise `o+o≡e` (stretch) {name=odd-plus-odd}
#### Exercise `o+o≡e` (stretch) {#odd-plus-odd}

Show that the sum of two odd numbers is even.
@ -757,7 +757,7 @@ Show that the sum of two odd numbers is even.
-- Your code goes here
```

#### Exercise `Bin-predicates` (stretch) {name=Bin-predicates}
#### Exercise `Bin-predicates` (stretch) {#Bin-predicates}

Recall that
Exercise [Bin](/Naturals/#Bin)
@ -32,7 +32,7 @@ Chapter [Lambda](/Lambda/),
and from it we compute an intrinsically-typed term, in the style of
Chapter [DeBruijn](/DeBruijn/).

## Introduction: Inference rules as algorithms {name=algorithms}
## Introduction: Inference rules as algorithms {#algorithms}

In the calculus we have considered so far, a term may have more than
one type. For example,
@ -208,11 +208,11 @@ _decidable_:
    synthesize : ∀ (Γ : Context) (M : Term⁺)
        -----------------------
      → Dec (∃[ A ](Γ ⊢ M ↑ A))
      → Dec (∃[ A ]( Γ ⊢ M ↑ A ))

    inherit : ∀ (Γ : Context) (M : Term⁻) (A : Type)
        ---------------
      → Dec (Γ ⊢ M ↓ A)
        ---------------
      → Dec (Γ ⊢ M ↓ A)

Given context `Γ` and synthesised term `M`, we must decide whether
there exists a type `A` such that `Γ ⊢ M ↑ A` holds, or its negation.
@ -470,7 +470,7 @@ the equality test in the application rule in the first
[section](/Inference/#algorithms).


#### Exercise `bidirectional-mul` (recommended) {name=bidirectional-mul}
#### Exercise `bidirectional-mul` (recommended) {#bidirectional-mul}

Rewrite your definition of multiplication from
Chapter [Lambda](/Lambda/), decorated to support inference.
@ -480,7 +480,7 @@ Chapter [Lambda](/Lambda/), decorated to support inference.
```


#### Exercise `bidirectional-products` (recommended) {name=bidirectional-products}
#### Exercise `bidirectional-products` (recommended) {#bidirectional-products}

Extend the bidirectional type rules to include products from
Chapter [More](/More/).
@ -597,8 +597,8 @@ there exists a type `A` such that `Γ ∋ x ⦂ A` holds, or its
negation:
```
lookup : ∀ (Γ : Context) (x : Id)
    -----------------------
  → Dec (∃[ A ](Γ ∋ x ⦂ A))
    -------------------------
  → Dec (∃[ A ]( Γ ∋ x ⦂ A ))
lookup ∅ x = no (λ ())
lookup (Γ , y ⦂ B) x with x ≟ y
... | yes refl = yes ⟨ B , Z ⟩
@ -686,8 +686,8 @@ and a type `A` and either returns evidence that `Γ ⊢ M ↓ A`,
or its negation:
```
synthesize : ∀ (Γ : Context) (M : Term⁺)
    -----------------------
  → Dec (∃[ A ](Γ ⊢ M ↑ A))
    ---------------------------
  → Dec (∃[ A ]( Γ ⊢ M ↑ A ))

inherit : ∀ (Γ : Context) (M : Term⁻) (A : Type)
    ---------------
@ -213,7 +213,7 @@ definition may use `plusᶜ` as defined earlier (or may not
```


#### Exercise `primed` (stretch) {name=primed}
#### Exercise `primed` (stretch) {#primed}

Some people find it annoying to write `` ` "x" `` instead of `x`.
We can make examples with lambda terms slightly easier to write
@ -1201,7 +1201,7 @@ the three places where a bound variable is introduced.
The rules are deterministic, in that at most one rule applies to every term.


### Example type derivations {name=derivation}
### Example type derivations {#derivation}

Type derivations correspond to trees. In informal notation, here
is a type derivation for the Church numeral two,
@ -147,7 +147,7 @@ Here `M †` is the translation of term `M` from a calculus with the
construct to a calculus without the construct.


## Products {name=products}
## Products {#products}

### Syntax
@ -285,7 +285,7 @@ We can also translate back the other way:
(`proj₁ L) ‡ = case× (L ‡) [⟨ x , y ⟩⇒ x ]
(`proj₂ L) ‡ = case× (L ‡) [⟨ x , y ⟩⇒ y ]

## Sums {name=sums}
## Sums {#sums}

### Syntax
@ -879,7 +879,7 @@ typed (C-suc c) = ⊢suc (typed c)
typed (C-rcd ⊢Ms dks As<:Bs) = ⊢<: (⊢rcd ⊢Ms dks) As<:Bs
```

## Progress <a name="subtyping-progress"></a>
## Progress {#subtyping-progress}

The Progress theorem states that a well-typed term may either take a
reduction step or it is already a value. The proof of Progress is like
@ -952,7 +952,7 @@ progress (⊢<: {A = A}{B} ⊢M A<:B) = progress ⊢M
* Case `⊢<:`: we invoke the induction hypothesis on sub-derivation of `Γ ⊢ M ⦂ A`.


## Preservation <a name="subtyping-preservation"></a>
## Preservation {#subtyping-preservation}

In this section we prove that when a well-typed term takes a reduction
step, the result is another well-typed term with the same type.
@ -1012,7 +1012,7 @@ all-funs∈ {⊥} f with f {⊥} refl
... | fun ()
all-funs∈ {v ↦ w} f = ⟨ v , ⟨ w , refl ⟩ ⟩
all-funs∈ {u ⊔ u′} f
    with all-funs∈ λ z → f (inj₁ z)
    with all-funs∈ (λ z → f (inj₁ z))
... | ⟨ v , ⟨ w , m ⟩ ⟩ = ⟨ v , ⟨ w , (inj₁ m) ⟩ ⟩
```
@ -1,45 +1,48 @@
---
parts:
  - title: "Front matter"
  - frontmatter: True
    title: "Front matter"
    sections:
      - src/plfa/frontmatter/dedication.md
      - src/plfa/frontmatter/preface.md
      - README.md
  - title: "Part 1: Logical Foundations"
      - include: src/plfa/frontmatter/dedication.md
      - include: src/plfa/frontmatter/preface.md
      - include: README.md
  - mainmatter: True
    title: "Part 1: Logical Foundations"
    sections:
      - src/plfa/part1/Naturals.lagda.md
      - src/plfa/part1/Induction.lagda.md
      - src/plfa/part1/Relations.lagda.md
      - src/plfa/part1/Equality.lagda.md
      - src/plfa/part1/Isomorphism.lagda.md
      - src/plfa/part1/Connectives.lagda.md
      - src/plfa/part1/Negation.lagda.md
      - src/plfa/part1/Quantifiers.lagda.md
      - src/plfa/part1/Decidable.lagda.md
      - src/plfa/part1/Lists.lagda.md
      - include: src/plfa/part1/Naturals.lagda.md
      - include: src/plfa/part1/Induction.lagda.md
      - include: src/plfa/part1/Relations.lagda.md
      - include: src/plfa/part1/Equality.lagda.md
      - include: src/plfa/part1/Isomorphism.lagda.md
      - include: src/plfa/part1/Connectives.lagda.md
      - include: src/plfa/part1/Negation.lagda.md
      - include: src/plfa/part1/Quantifiers.lagda.md
      - include: src/plfa/part1/Decidable.lagda.md
      - include: src/plfa/part1/Lists.lagda.md
  - title: "Part 2: Programming Language Foundations"
    sections:
      - src/plfa/part2/Lambda.lagda.md
      - src/plfa/part2/Properties.lagda.md
      - src/plfa/part2/DeBruijn.lagda.md
      - src/plfa/part2/More.lagda.md
      - src/plfa/part2/Bisimulation.lagda.md
      - src/plfa/part2/Inference.lagda.md
      - src/plfa/part2/Untyped.lagda.md
      - src/plfa/part2/Confluence.lagda.md
      - src/plfa/part2/BigStep.lagda.md
      - include: src/plfa/part2/Lambda.lagda.md
      - include: src/plfa/part2/Properties.lagda.md
      - include: src/plfa/part2/DeBruijn.lagda.md
      - include: src/plfa/part2/More.lagda.md
      - include: src/plfa/part2/Bisimulation.lagda.md
      - include: src/plfa/part2/Inference.lagda.md
      - include: src/plfa/part2/Untyped.lagda.md
      - include: src/plfa/part2/Confluence.lagda.md
      - include: src/plfa/part2/BigStep.lagda.md
  - title: "Part 3: Denotational Semantics"
    sections:
      - src/plfa/part3/Denotational.lagda.md
      - src/plfa/part3/Compositional.lagda.md
      - src/plfa/part3/Soundness.lagda.md
      - src/plfa/part3/Adequacy.lagda.md
      - src/plfa/part3/ContextualEquivalence.lagda.md
  - title: "Appendix"
      - include: src/plfa/part3/Denotational.lagda.md
      - include: src/plfa/part3/Compositional.lagda.md
      - include: src/plfa/part3/Soundness.lagda.md
      - include: src/plfa/part3/Adequacy.lagda.md
      - include: src/plfa/part3/ContextualEquivalence.lagda.md
  - backmatter: True
    title: "Appendix"
    sections:
      - src/plfa/part2/Substitution.lagda.md
      - include: src/plfa/part2/Substitution.lagda.md
  - title: "Back matter"
    sections:
      - src/plfa/backmatter/acknowledgements.md
      - src/plfa/backmatter/Fonts.lagda.md
---
      - include: src/plfa/backmatter/acknowledgements.md
      - include: src/plfa/backmatter/Fonts.lagda.md
---
@ -1,7 +1,7 @@
resolver: lts-17.2

# Use GHC 8.10.3
compiler: ghc-8.10.3
# Use GHC 8.10.5
compiler: ghc-8.10.5
compiler-check: match-exact

# Allow newer versions of packages
@ -1,26 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:epub="http://www.idpf.org/2007/ops" xml:lang="$language$">
  <head>
    <meta charset="utf-8" />
    <title>$pagetitle$</title>
    <link rel="stylesheet" type="text/css" href="css/agda.css" />
    <link rel="stylesheet" type="text/css" href="css/epub.css" />
  </head>
  <body>
    <section epub:type="titlepage" class="titlepage">
      <h1 class="title">$pagetitle$</h1>
      $for(authors)$
      <p class="author">$name$</p>
      $endfor$
      $if(rights)$
      <div class="rights">$rights$</div>
      $endif$
    </section>
    <!-- NOTE: body is escaped because it is filled in by the Pandoc template engine, -->
    <!-- whereas the rest is filled in by the Hakyll template engine, both of -->
    <!-- which use the same syntax. Yes, this is confusing. -->
    $$body$$
  </body>
</html>
19
templates/metadata.md
Normal file
@ -0,0 +1,19 @@
---
comment: This file exists to save the correct metadata to the generated `epub.md` file.
---

---
lang : $language$
pagetitle : $pagetitle$
title : $pagetitle$
titlepage : true
description : $description$
author :
$for(authors)$
  - $name$
$endfor$
date : $modified$
rights : $rights$
---

$body$
@ -1,7 +1,7 @@
<article class="post">
  $partial("templates/next.html")$
  <header class="post-header">
    <h1 class="post-title">$title$</h1>
    <h1 class="post-title" $if(anchor)$id="$anchor$"$endif$>$title$</h1>
  </header>
  <div class="post-content">
    $body$