New Upstream Snapshot - erlang-asciideck

Ready changes

Summary

Merged new upstream version: 0.2.0+git20230116.1.73a2e67 (was: 0.0+git20170714.48cbfe8b).

Resulting package

Built on 2023-01-21T22:23 (took 4m13s)

The resulting binary packages can be installed, once the apt repository is enabled, by running:

apt install -t fresh-snapshots erlang-asciideck
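
If the fresh-snapshots repository is not yet configured, a minimal sketch of enabling it follows; the archive URL and suite layout are assumptions modelled on the Debian Janitor's general setup instructions, not details taken from this report.

# Assumed archive location; verify against the Janitor documentation before use.
echo "deb https://janitor.debian.net/ fresh-snapshots main" | sudo tee /etc/apt/sources.list.d/debian-janitor.list
# The archive's signing key must also be trusted; see the Janitor documentation for the keyring to install.
sudo apt update

After the update, the apt install command above should pick up the snapshot build.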

Lintian Result

Diff

diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index fb569f4..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,7 +0,0 @@
-.erlang.mk/
-*.d
-deps/
-ebin/*.beam
-ebin/test
-logs/
-test/*.beam
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..b9f1042
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/Makefile b/Makefile
index bba4e2f..94a1c3c 100644
--- a/Makefile
+++ b/Makefile
@@ -2,17 +2,63 @@
 
 PROJECT = asciideck
 PROJECT_DESCRIPTION = Asciidoc for Erlang.
-PROJECT_VERSION = 0.1.0
-
-# Options.
-
-CI_OTP ?= OTP-18.0.3 OTP-18.1.5 OTP-18.2.4.1 OTP-18.3.4.4 OTP-19.0.7 OTP-19.1.5
-CI_HIPE ?= $(lastword $(CI_OTP))
-CI_ERLLVM ?= $(CI_HIPE)
+PROJECT_VERSION = 0.2.0
 
 # Dependencies.
 
-TEST_DEPS = ct_helper
+TEST_ERLC_OPTS += +'{parse_transform, eunit_autoexport}'
+TEST_DEPS = $(if $(CI_ERLANG_MK),ci.erlang.mk) ct_helper
 dep_ct_helper = git https://github.com/ninenines/ct_helper master
 
+# CI configuration.
+
+dep_ci.erlang.mk = git https://github.com/ninenines/ci.erlang.mk master
+DEP_EARLY_PLUGINS = ci.erlang.mk
+
+AUTO_CI_OTP ?= OTP-21+
+AUTO_CI_HIPE ?= OTP-LATEST
+# AUTO_CI_ERLLVM ?= OTP-LATEST
+AUTO_CI_WINDOWS ?= OTP-21+
+
 include erlang.mk
+
+# Test building documentation of projects that use Asciideck
+# and run Groff checks against the output.
+#
+# We only run against asciidoc-manual because the guide requires
+# the DocBook toolchain at this time.
+
+.PHONY: groff
+
+GROFF_PROJECTS = cowboy gun ranch
+
+tests:: groff
+
+groff: $(addprefix groff-,$(GROFF_PROJECTS))
+
+$(ERLANG_MK_TMP)/groff:
+	$(verbose) mkdir -p $@
+
+define groff_targets
+$(ERLANG_MK_TMP)/groff/$1: | $(ERLANG_MK_TMP)/groff
+	$(verbose) rm -rf $$@
+	$(verbose) git clone -q --depth 1 -- $(call dep_repo,$1) $$@
+	$(verbose) mkdir $$@/deps
+ifeq ($(PLATFORM),msys2)
+	$(verbose) cmd //c mklink $(call core_native_path,$(ERLANG_MK_TMP)/groff/$1/deps/asciideck) \
+		$(call core_native_path,$(CURDIR))
+else
+	$(verbose) ln -s $(CURDIR) $$@/deps/asciideck
+endif
+	$(verbose) touch $$@/deps/ci.erlang.mk
+	$(verbose) cp $(CURDIR)/erlang.mk $$@/
+
+groff-$1: $(ERLANG_MK_TMP)/groff/$1 app
+	$(gen_verbose) $(MAKE) -C $$< asciidoc-manual MAKEFLAGS= DEPS_DIR=$$</deps ERL_LIBS=$$</deps
+	$(verbose) for f in $$</doc/man*/*.gz; do \
+		echo " GROFF " `basename "$$$$f"`; \
+		zcat "$$$$f" | groff -man -rD1 -z -ww; \
+	done
+endef
+
+$(foreach p,$(GROFF_PROJECTS),$(eval $(call groff_targets,$p)))
diff --git a/debian/changelog b/debian/changelog
index 80cceed..acc186b 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+erlang-asciideck (0.2.0+git20230116.1.73a2e67-1) UNRELEASED; urgency=low
+
+  * New upstream snapshot.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Sat, 21 Jan 2023 22:20:44 -0000
+
 erlang-asciideck (0.0+git20170714.48cbfe8b-4) unstable; urgency=medium
 
   * Fix install path of ebin.
diff --git a/debian/patches/0001-Add-support-kfreebsd.patch b/debian/patches/0001-Add-support-kfreebsd.patch
index 10b9d6d..227d5cc 100644
--- a/debian/patches/0001-Add-support-kfreebsd.patch
+++ b/debian/patches/0001-Add-support-kfreebsd.patch
@@ -8,11 +8,11 @@ Signed-off-by: Nobuhiro Iwamatsu <iwamatsu@debian.org>
  erlang.mk | 6 ++++++
  1 file changed, 6 insertions(+)
 
-diff --git a/erlang.mk b/erlang.mk
-index 38bea99..1f9f188 100644
---- a/erlang.mk
-+++ b/erlang.mk
-@@ -81,6 +81,8 @@ else ifeq ($(UNAME_S),DragonFly)
+Index: erlang-asciideck.git/erlang.mk
+===================================================================
+--- erlang-asciideck.git.orig/erlang.mk
++++ erlang-asciideck.git/erlang.mk
+@@ -83,6 +83,8 @@ else ifeq ($(UNAME_S),DragonFly)
  PLATFORM = dragonfly
  else ifeq ($(shell uname -o),Msys)
  PLATFORM = msys2
@@ -21,7 +21,7 @@ index 38bea99..1f9f188 100644
  else
  $(error Unable to detect platform. Please open a ticket with the output of uname -a.)
  endif
-@@ -5817,6 +5819,10 @@ else ifeq ($(PLATFORM),linux)
+@@ -6021,6 +6023,10 @@ else ifeq ($(PLATFORM),linux)
  	CC ?= gcc
  	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
  	CXXFLAGS ?= -O3 -finline-functions -Wall
@@ -32,6 +32,3 @@ index 38bea99..1f9f188 100644
  endif
  
  ifneq ($(PLATFORM),msys2)
--- 
-2.14.1
-
diff --git a/debian/patches/0002-Change-optimize-from-O3-to-O2.patch b/debian/patches/0002-Change-optimize-from-O3-to-O2.patch
index 0284655..9f24bbd 100644
--- a/debian/patches/0002-Change-optimize-from-O3-to-O2.patch
+++ b/debian/patches/0002-Change-optimize-from-O3-to-O2.patch
@@ -8,11 +8,11 @@ Signed-off-by: Nobuhiro Iwamatsu <iwamatsu@debian.org>
  erlang.mk | 16 ++++++++--------
  1 file changed, 8 insertions(+), 8 deletions(-)
 
-diff --git a/erlang.mk b/erlang.mk
-index 1f9f188..cf413cf 100644
---- a/erlang.mk
-+++ b/erlang.mk
-@@ -5804,21 +5804,21 @@ ifeq ($(PLATFORM),msys2)
+Index: erlang-asciideck.git/erlang.mk
+===================================================================
+--- erlang-asciideck.git.orig/erlang.mk
++++ erlang-asciideck.git/erlang.mk
+@@ -6008,21 +6008,21 @@ ifeq ($(PLATFORM),msys2)
  # not produce working code. The "gcc" MSYS2 package also doesn't.
  	CC = /mingw64/bin/gcc
  	export CC
@@ -42,6 +42,3 @@ index 1f9f188..cf413cf 100644
  else ifeq ($(PLATFORM),kfreebsd)
  	CC ?= gcc
  	CFLAGS ?= -O2 -std=c99 -finline-functions -Wall -Wmissing-prototypes
--- 
-2.14.1
-
diff --git a/ebin/asciideck.app b/ebin/asciideck.app
index 56ab5f3..da6189e 100644
--- a/ebin/asciideck.app
+++ b/ebin/asciideck.app
@@ -1,7 +1,7 @@
-{application, asciideck, [
+{application, 'asciideck', [
 	{description, "Asciidoc for Erlang."},
-	{vsn, "0.1.0"},
-	{modules, ['asciideck','asciideck_parser','asciideck_to_manpage']},
+	{vsn, "0.2.0"},
+	{modules, ['asciideck','asciideck_attributes_parser','asciideck_attributes_pass','asciideck_block_parser','asciideck_inline_pass','asciideck_line_reader','asciideck_lists_pass','asciideck_reader','asciideck_source_highlight','asciideck_stdin_reader','asciideck_tables_pass','asciideck_to_html','asciideck_to_manpage','asciideck_transform_pass']},
 	{registered, []},
 	{applications, [kernel,stdlib]},
 	{env, []}
diff --git a/erlang.mk b/erlang.mk
index 38bea99..6e67382 100644
--- a/erlang.mk
+++ b/erlang.mk
@@ -15,8 +15,10 @@
 .PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
 
 ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+export ERLANG_MK_FILENAME
 
-ERLANG_MK_VERSION = 2016.11.03-4-g9e9b7d2
+ERLANG_MK_VERSION = 2018.06.21-6-g6c8664c-dirty
+ERLANG_MK_WITHOUT = 
 
 # Make 3.81 and 3.82 are deprecated.
 
@@ -152,9 +154,13 @@ define comma_list
 $(subst $(space),$(comma),$(strip $(1)))
 endef
 
+define escape_dquotes
+$(subst ",\",$1)
+endef
+
 # Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
 define erlang
-$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
 endef
 
 ifeq ($(PLATFORM),msys2)
@@ -183,19 +189,108 @@ ERLANG_MK_COMMIT ?=
 ERLANG_MK_BUILD_CONFIG ?= build.config
 ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
 
+erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
 erlang-mk:
-	git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
 ifdef ERLANG_MK_COMMIT
+	git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
 	cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+else
+	git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
 endif
 	if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
-	$(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+	$(MAKE) -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))'
 	cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
 	rm -rf $(ERLANG_MK_BUILD_DIR)
 
 # The erlang.mk package index is bundled in the default erlang.mk build.
 # Search for the string "copyright" to skip to the rest of the code.
 
+# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-kerl
+
+KERL_INSTALL_DIR ?= $(HOME)/erlang
+
+ifeq ($(strip $(KERL)),)
+KERL := $(ERLANG_MK_TMP)/kerl/kerl
+endif
+
+export KERL
+
+KERL_GIT ?= https://github.com/kerl/kerl
+KERL_COMMIT ?= master
+
+KERL_MAKEFLAGS ?=
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+define kerl_otp_target
+ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(1)),)
+$(KERL_INSTALL_DIR)/$(1): $(KERL)
+	MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1)
+	$(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1)
+endif
+endef
+
+define kerl_hipe_target
+ifeq ($(wildcard $(KERL_INSTALL_DIR)/$1-native),)
+$(KERL_INSTALL_DIR)/$1-native: $(KERL)
+	KERL_CONFIGURE_OPTIONS=--enable-native-libs \
+		MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native
+	$(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native
+endif
+endef
+
+$(KERL):
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
+	$(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
+	$(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
+	$(verbose) chmod +x $(KERL)
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+	$(gen_verbose) rm -rf $(KERL)
+
+# Allow users to select which version of Erlang/OTP to use for a project.
+
+ifneq ($(strip $(LATEST_ERLANG_OTP)),)
+ERLANG_OTP := $(notdir $(lastword $(sort $(filter-out $(KERL_INSTALL_DIR)/OTP_R%,\
+	$(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
+endif
+
+ERLANG_OTP ?=
+ERLANG_HIPE ?=
+
+# Use kerl to enforce a specific Erlang/OTP version for a project.
+ifneq ($(strip $(ERLANG_OTP)),)
+export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
+SHELL := env PATH=$(PATH) $(SHELL)
+$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
+
+# Build Erlang/OTP only if it doesn't already exist.
+ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
+$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
+$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
+endif
+
+else
+# Same for a HiPE enabled VM.
+ifneq ($(strip $(ERLANG_HIPE)),)
+export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
+SHELL := env PATH=$(PATH) $(SHELL)
+$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
+
+# Build Erlang/OTP only if it doesn't already exist.
+ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE))$(BUILD_ERLANG_OTP),)
+$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
+$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE) ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
+endif
+
+endif
+endif
+
 PACKAGES += aberth
 pkg_aberth_name = aberth
 pkg_aberth_description = Generic BERT-RPC server in Erlang
@@ -319,9 +414,9 @@ pkg_basho_bench_commit = master
 PACKAGES += bcrypt
 pkg_bcrypt_name = bcrypt
 pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
 pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
 pkg_bcrypt_commit = master
 
 PACKAGES += beam
@@ -404,14 +499,6 @@ pkg_bootstrap_fetch = git
 pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
 pkg_bootstrap_commit = master
 
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
 PACKAGES += boss
 pkg_boss_name = boss
 pkg_boss_description = Erlang web MVC, now featuring Comet
@@ -420,6 +507,14 @@ pkg_boss_fetch = git
 pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
 pkg_boss_commit = master
 
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
 PACKAGES += brod
 pkg_brod_name = brod
 pkg_brod_description = Kafka client in Erlang
@@ -524,13 +619,13 @@ pkg_chumak_fetch = git
 pkg_chumak_repo = https://github.com/chovencorp/chumak
 pkg_chumak_commit = master
 
-PACKAGES += classifier
-pkg_classifier_name = classifier
-pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
-pkg_classifier_homepage = https://github.com/inaka/classifier
-pkg_classifier_fetch = git
-pkg_classifier_repo = https://github.com/inaka/classifier
-pkg_classifier_commit = master
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
 
 PACKAGES += clique
 pkg_clique_name = clique
@@ -540,14 +635,6 @@ pkg_clique_fetch = git
 pkg_clique_repo = https://github.com/basho/clique
 pkg_clique_commit = develop
 
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
 PACKAGES += cloudi_core
 pkg_cloudi_core_name = cloudi_core
 pkg_cloudi_core_description = CloudI internal service runtime
@@ -564,13 +651,13 @@ pkg_cloudi_service_api_requests_fetch = git
 pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
 pkg_cloudi_service_api_requests_commit = master
 
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
 
 PACKAGES += cloudi_service_db_cassandra
 pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
@@ -580,6 +667,14 @@ pkg_cloudi_service_db_cassandra_fetch = git
 pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
 pkg_cloudi_service_db_cassandra_commit = master
 
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
 PACKAGES += cloudi_service_db_couchdb
 pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
 pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
@@ -604,14 +699,6 @@ pkg_cloudi_service_db_memcached_fetch = git
 pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
 pkg_cloudi_service_db_memcached_commit = master
 
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
 PACKAGES += cloudi_service_db_mysql
 pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
 pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
@@ -940,14 +1027,6 @@ pkg_dnssd_fetch = git
 pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
 pkg_dnssd_commit = master
 
-PACKAGES += dtl
-pkg_dtl_name = dtl
-pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
-pkg_dtl_homepage = https://github.com/oinksoft/dtl
-pkg_dtl_fetch = git
-pkg_dtl_repo = https://github.com/oinksoft/dtl
-pkg_dtl_commit = master
-
 PACKAGES += dynamic_compile
 pkg_dynamic_compile_name = dynamic_compile
 pkg_dynamic_compile_description = compile and load erlang modules from string input
@@ -1036,14 +1115,6 @@ pkg_edown_fetch = git
 pkg_edown_repo = https://github.com/uwiger/edown
 pkg_edown_commit = master
 
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
 PACKAGES += eep
 pkg_eep_name = eep
 pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
@@ -1052,6 +1123,14 @@ pkg_eep_fetch = git
 pkg_eep_repo = https://github.com/virtan/eep
 pkg_eep_commit = master
 
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
 PACKAGES += efene
 pkg_efene_name = efene
 pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
@@ -1076,14 +1155,6 @@ pkg_ehsa_fetch = hg
 pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
 pkg_ehsa_commit = default
 
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
 PACKAGES += ej
 pkg_ej_name = ej
 pkg_ej_description = Helper module for working with Erlang terms representing JSON
@@ -1092,6 +1163,14 @@ pkg_ej_fetch = git
 pkg_ej_repo = https://github.com/seth/ej
 pkg_ej_commit = master
 
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
 PACKAGES += ejwt
 pkg_ejwt_name = ejwt
 pkg_ejwt_description = erlang library for JSON Web Token
@@ -1252,6 +1331,14 @@ pkg_eredis_pool_fetch = git
 pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
 pkg_eredis_pool_commit = master
 
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
 PACKAGES += erlang_cep
 pkg_erlang_cep_name = erlang_cep
 pkg_erlang_cep_description = A basic CEP package written in erlang
@@ -1428,14 +1515,6 @@ pkg_erlport_fetch = git
 pkg_erlport_repo = https://github.com/hdima/erlport
 pkg_erlport_commit = master
 
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
 PACKAGES += erlsh
 pkg_erlsh_name = erlsh
 pkg_erlsh_description = Erlang shell tools
@@ -1444,6 +1523,14 @@ pkg_erlsh_fetch = git
 pkg_erlsh_repo = https://github.com/proger/erlsh
 pkg_erlsh_commit = master
 
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
 PACKAGES += erlsom
 pkg_erlsom_name = erlsom
 pkg_erlsom_description = XML parser for Erlang
@@ -1452,14 +1539,6 @@ pkg_erlsom_fetch = git
 pkg_erlsom_repo = https://github.com/willemdj/erlsom
 pkg_erlsom_commit = master
 
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
 PACKAGES += erlubi
 pkg_erlubi_name = erlubi
 pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
@@ -1516,6 +1595,14 @@ pkg_erwa_fetch = git
 pkg_erwa_repo = https://github.com/bwegh/erwa
 pkg_erwa_commit = master
 
+PACKAGES += escalus
+pkg_escalus_name = escalus
+pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
+pkg_escalus_homepage = https://github.com/esl/escalus
+pkg_escalus_fetch = git
+pkg_escalus_repo = https://github.com/esl/escalus
+pkg_escalus_commit = master
+
 PACKAGES += espec
 pkg_espec_name = espec
 pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
@@ -1540,14 +1627,6 @@ pkg_etap_fetch = git
 pkg_etap_repo = https://github.com/ngerakines/etap
 pkg_etap_commit = master
 
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
 PACKAGES += etest
 pkg_etest_name = etest
 pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
@@ -1556,6 +1635,14 @@ pkg_etest_fetch = git
 pkg_etest_repo = https://github.com/wooga/etest
 pkg_etest_commit = master
 
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
 PACKAGES += etoml
 pkg_etoml_name = etoml
 pkg_etoml_description = TOML language erlang parser
@@ -1564,14 +1651,6 @@ pkg_etoml_fetch = git
 pkg_etoml_repo = https://github.com/kalta/etoml
 pkg_etoml_commit = master
 
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
 PACKAGES += eunit
 pkg_eunit_name = eunit
 pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
@@ -1580,6 +1659,14 @@ pkg_eunit_fetch = git
 pkg_eunit_repo = https://github.com/richcarl/eunit
 pkg_eunit_commit = master
 
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
 PACKAGES += euthanasia
 pkg_euthanasia_name = euthanasia
 pkg_euthanasia_description = Merciful killer for your Erlang processes
@@ -1716,6 +1803,14 @@ pkg_fn_fetch = git
 pkg_fn_repo = https://github.com/reiddraper/fn
 pkg_fn_commit = master
 
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
 PACKAGES += folsom_cowboy
 pkg_folsom_cowboy_name = folsom_cowboy
 pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
@@ -1732,14 +1827,6 @@ pkg_folsomite_fetch = git
 pkg_folsomite_repo = https://github.com/campanja/folsomite
 pkg_folsomite_commit = master
 
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
 PACKAGES += fs
 pkg_fs_name = fs
 pkg_fs_description = Erlang FileSystem Listener
@@ -1908,14 +1995,6 @@ pkg_gold_fever_fetch = git
 pkg_gold_fever_repo = https://github.com/inaka/gold_fever
 pkg_gold_fever_commit = master
 
-PACKAGES += gossiperl
-pkg_gossiperl_name = gossiperl
-pkg_gossiperl_description = Gossip middleware in Erlang
-pkg_gossiperl_homepage = http://gossiperl.com/
-pkg_gossiperl_fetch = git
-pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
-pkg_gossiperl_commit = master
-
 PACKAGES += gpb
 pkg_gpb_name = gpb
 pkg_gpb_description = A Google Protobuf implementation for Erlang
@@ -1940,6 +2019,22 @@ pkg_grapherl_fetch = git
 pkg_grapherl_repo = https://github.com/eproxus/grapherl
 pkg_grapherl_commit = master
 
+PACKAGES += grpc
+pkg_grpc_name = grpc
+pkg_grpc_description = gRPC server in Erlang
+pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
+pkg_grpc_fetch = git
+pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
+pkg_grpc_commit = master
+
+PACKAGES += grpc_client
+pkg_grpc_client_name = grpc_client
+pkg_grpc_client_description = gRPC client in Erlang
+pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
+pkg_grpc_client_fetch = git
+pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
+pkg_grpc_client_commit = master
+
 PACKAGES += gun
 pkg_gun_name = gun
 pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
@@ -2020,6 +2115,14 @@ pkg_ibrowse_fetch = git
 pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
 pkg_ibrowse_commit = master
 
+PACKAGES += idna
+pkg_idna_name = idna
+pkg_idna_description = Erlang IDNA lib
+pkg_idna_homepage = https://github.com/benoitc/erlang-idna
+pkg_idna_fetch = git
+pkg_idna_repo = https://github.com/benoitc/erlang-idna
+pkg_idna_commit = master
+
 PACKAGES += ierlang
 pkg_ierlang_name = ierlang
 pkg_ierlang_description = An Erlang language kernel for IPython.
@@ -2036,14 +2139,6 @@ pkg_iota_fetch = git
 pkg_iota_repo = https://github.com/jpgneves/iota
 pkg_iota_commit = master
 
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
 PACKAGES += irc_lib
 pkg_irc_lib_name = irc_lib
 pkg_irc_lib_description = Erlang irc client library
@@ -2052,6 +2147,14 @@ pkg_irc_lib_fetch = git
 pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
 pkg_irc_lib_commit = master
 
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
 PACKAGES += iris
 pkg_iris_name = iris
 pkg_iris_description = Iris Erlang binding
@@ -2124,13 +2227,29 @@ pkg_joxa_fetch = git
 pkg_joxa_repo = https://github.com/joxa/joxa
 pkg_joxa_commit = master
 
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
 
 PACKAGES += jsonerl
 pkg_jsonerl_name = jsonerl
@@ -2140,14 +2259,6 @@ pkg_jsonerl_fetch = git
 pkg_jsonerl_repo = https://github.com/lambder/jsonerl
 pkg_jsonerl_commit = master
 
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
 PACKAGES += jsonpath
 pkg_jsonpath_name = jsonpath
 pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
@@ -2156,14 +2267,6 @@ pkg_jsonpath_fetch = git
 pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
 pkg_jsonpath_commit = master
 
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
 PACKAGES += jsonx
 pkg_jsonx_name = jsonx
 pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
@@ -2292,6 +2395,14 @@ pkg_kvs_fetch = git
 pkg_kvs_repo = https://github.com/synrc/kvs
 pkg_kvs_commit = master
 
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/erlang-lager/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/erlang-lager/lager
+pkg_lager_commit = master
+
 PACKAGES += lager_amqp_backend
 pkg_lager_amqp_backend_name = lager_amqp_backend
 pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
@@ -2300,20 +2411,12 @@ pkg_lager_amqp_backend_fetch = git
 pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
 pkg_lager_amqp_backend_commit = master
 
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/basho/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/basho/lager
-pkg_lager_commit = master
-
 PACKAGES += lager_syslog
 pkg_lager_syslog_name = lager_syslog
 pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
 pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
 pkg_lager_syslog_commit = master
 
 PACKAGES += lambdapad
@@ -2484,6 +2587,14 @@ pkg_mavg_fetch = git
 pkg_mavg_repo = https://github.com/EchoTeam/mavg
 pkg_mavg_commit = master
 
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
 PACKAGES += mcd
 pkg_mcd_name = mcd
 pkg_mcd_description = Fast memcached protocol client in pure Erlang
@@ -2500,14 +2611,6 @@ pkg_mcerlang_fetch = git
 pkg_mcerlang_repo = https://github.com/fredlund/McErlang
 pkg_mcerlang_commit = master
 
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
 PACKAGES += meck
 pkg_meck_name = meck
 pkg_meck_description = A mocking library for Erlang
@@ -2772,6 +2875,14 @@ pkg_nprocreg_fetch = git
 pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
 pkg_nprocreg_commit = master
 
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
 PACKAGES += oauth2
 pkg_oauth2_name = oauth2
 pkg_oauth2_description = Erlang Oauth2 implementation
@@ -2780,13 +2891,13 @@ pkg_oauth2_fetch = git
 pkg_oauth2_repo = https://github.com/kivra/oauth2
 pkg_oauth2_commit = master
 
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
+PACKAGES += observer_cli
+pkg_observer_cli_name = observer_cli
+pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
+pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
+pkg_observer_cli_fetch = git
+pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
+pkg_observer_cli_commit = master
 
 PACKAGES += octopus
 pkg_octopus_name = octopus
@@ -2836,6 +2947,14 @@ pkg_openpoker_fetch = git
 pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
 pkg_openpoker_commit = master
 
+PACKAGES += otpbp
+pkg_otpbp_name = otpbp
+pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
+pkg_otpbp_homepage = https://github.com/Ledest/otpbp
+pkg_otpbp_fetch = git
+pkg_otpbp_repo = https://github.com/Ledest/otpbp
+pkg_otpbp_commit = master
+
 PACKAGES += pal
 pkg_pal_name = pal
 pkg_pal_description = Pragmatic Authentication Library
@@ -2972,14 +3091,6 @@ pkg_procket_fetch = git
 pkg_procket_repo = https://github.com/msantos/procket
 pkg_procket_commit = master
 
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
 PACKAGES += prop
 pkg_prop_name = prop
 pkg_prop_description = An Erlang code scaffolding and generator system.
@@ -2988,6 +3099,14 @@ pkg_prop_fetch = git
 pkg_prop_repo = https://github.com/nuex/prop
 pkg_prop_commit = master
 
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
 PACKAGES += props
 pkg_props_name = props
 pkg_props_description = Property structure library
@@ -3060,14 +3179,6 @@ pkg_quickrand_fetch = git
 pkg_quickrand_repo = https://github.com/okeuday/quickrand
 pkg_quickrand_commit = master
 
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
 PACKAGES += rabbit
 pkg_rabbit_name = rabbit
 pkg_rabbit_description = RabbitMQ Server
@@ -3076,6 +3187,14 @@ pkg_rabbit_fetch = git
 pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
 pkg_rabbit_commit = master
 
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
 PACKAGES += rack
 pkg_rack_name = rack
 pkg_rack_description = Rack handler for erlang
@@ -3220,14 +3339,6 @@ pkg_rfc4627_jsonrpc_fetch = git
 pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
 pkg_rfc4627_jsonrpc_commit = master
 
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
 PACKAGES += riak_control
 pkg_riak_control_name = riak_control
 pkg_riak_control_description = Webmachine-based administration interface for Riak.
@@ -3260,14 +3371,6 @@ pkg_riak_ensemble_fetch = git
 pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
 pkg_riak_ensemble_commit = master
 
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
 PACKAGES += riak_kv
 pkg_riak_kv_name = riak_kv
 pkg_riak_kv_description = Riak Key/Value Store
@@ -3276,14 +3379,6 @@ pkg_riak_kv_fetch = git
 pkg_riak_kv_repo = https://github.com/basho/riak_kv
 pkg_riak_kv_commit = master
 
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
 PACKAGES += riak_pg
 pkg_riak_pg_name = riak_pg
 pkg_riak_pg_description = Distributed process groups with riak_core.
@@ -3300,14 +3395,6 @@ pkg_riak_pipe_fetch = git
 pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
 pkg_riak_pipe_commit = master
 
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
 PACKAGES += riak_sysmon
 pkg_riak_sysmon_name = riak_sysmon
 pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
@@ -3324,6 +3411,38 @@ pkg_riak_test_fetch = git
 pkg_riak_test_repo = https://github.com/basho/riak_test
 pkg_riak_test_commit = master
 
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
 PACKAGES += rivus_cep
 pkg_rivus_cep_name = rivus_cep
 pkg_rivus_cep_description = Complex event processing in Erlang
@@ -3604,6 +3723,14 @@ pkg_stripe_fetch = git
 pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
 pkg_stripe_commit = v1
 
+PACKAGES += subproc
+pkg_subproc_name = subproc
+pkg_subproc_description = unix subprocess manager with {active,once|false} modes
+pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
+pkg_subproc_fetch = git
+pkg_subproc_repo = https://github.com/dozzie/subproc
+pkg_subproc_commit = v0.1.0
+
 PACKAGES += supervisor3
 pkg_supervisor3_name = supervisor3
 pkg_supervisor3_description = OTP supervisor with additional strategies
@@ -3644,14 +3771,6 @@ pkg_switchboard_fetch = git
 pkg_switchboard_repo = https://github.com/thusfresh/switchboard
 pkg_switchboard_commit = master
 
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
 PACKAGES += syn
 pkg_syn_name = syn
 pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
@@ -3660,6 +3779,14 @@ pkg_syn_fetch = git
 pkg_syn_repo = https://github.com/ostinelli/syn
 pkg_syn_commit = master
 
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
 PACKAGES += syntaxerl
 pkg_syntaxerl_name = syntaxerl
 pkg_syntaxerl_description = Syntax checker for Erlang
@@ -3732,6 +3859,14 @@ pkg_tirerl_fetch = git
 pkg_tirerl_repo = https://github.com/inaka/tirerl
 pkg_tirerl_commit = master
 
+PACKAGES += toml
+pkg_toml_name = toml
+pkg_toml_description = TOML (0.4.0) config parser
+pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
+pkg_toml_fetch = git
+pkg_toml_repo = https://github.com/dozzie/toml
+pkg_toml_commit = v0.2.0
+
 PACKAGES += traffic_tools
 pkg_traffic_tools_name = traffic_tools
 pkg_traffic_tools_description = Simple traffic limiting library
@@ -3775,9 +3910,9 @@ pkg_trie_commit = master
 PACKAGES += triq
 pkg_triq_name = triq
 pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_homepage = https://triq.gitlab.io
 pkg_triq_fetch = git
-pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_repo = https://gitlab.com/triq/triq.git
 pkg_triq_commit = master
 
 PACKAGES += tunctl
@@ -4012,14 +4147,6 @@ pkg_yaws_fetch = git
 pkg_yaws_repo = https://github.com/klacke/yaws
 pkg_yaws_commit = master
 
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
 PACKAGES += zab_engine
 pkg_zab_engine_name = zab_engine
 pkg_zab_engine_description = zab propotocol implement by erlang
@@ -4028,6 +4155,14 @@ pkg_zab_engine_fetch = git
 pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
 pkg_zab_engine_commit = master
 
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
 PACKAGES += zeta
 pkg_zeta_name = zeta
 pkg_zeta_description = HTTP access log parser in Erlang
@@ -4098,7 +4233,7 @@ endif
 # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
 
-.PHONY: distclean-deps
+.PHONY: distclean-deps clean-tmp-deps.log
 
 # Configuration.
 
@@ -4118,11 +4253,32 @@ export DEPS_DIR
 REBAR_DEPS_DIR = $(DEPS_DIR)
 export REBAR_DEPS_DIR
 
+# External "early" plugins (see core/plugins.mk for regular plugins).
+# They both use the core_dep_plugin macro.
+
+define core_dep_plugin
+ifeq ($(2),$(PROJECT))
+-include $$(patsubst $(PROJECT)/%,%,$(1))
+else
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endif
+endef
+
+DEP_EARLY_PLUGINS ?=
+
+$(foreach p,$(DEP_EARLY_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/early-plugins.mk,$p))))
+
 dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
 dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
 	$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
 dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
 
+LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
 ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
 ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
 
@@ -4139,16 +4295,13 @@ export NO_AUTOPATCH
 
 # Verbosity.
 
-dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_0 = @echo " DEP    $1 ($(call dep_commit,$1))";
 dep_verbose_2 = set -x;
 dep_verbose = $(dep_verbose_$(V))
 
 # Core targets.
 
-ifdef IS_APP
-apps::
-else
-apps:: $(ALL_APPS_DIRS)
+apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log
 ifeq ($(IS_APP)$(IS_DEP),)
 	$(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
 endif
@@ -4156,45 +4309,47 @@ endif
 # Create ebin directory for all apps to make sure Erlang recognizes them
 # as proper OTP applications when using -include_lib. This is a temporary
 # fix, a proper fix would be to compile apps/* in the right order.
-	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
-		mkdir -p $$dep/ebin || exit $$?; \
+ifndef IS_APP
+	$(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
+		mkdir -p $$dep/ebin; \
 	done
-	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+endif
+# at the toplevel: if LOCAL_DEPS is defined with at least one local app, only
+# compile that list of apps. otherwise, compile everything.
+# within an app: compile all LOCAL_DEPS that are (uncompiled) local apps
+	$(verbose) set -e; for dep in $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS)) ; do \
 		if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
 			:; \
 		else \
 			echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
-			$(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+			$(MAKE) -C $$dep IS_APP=1; \
 		fi \
 	done
+
+clean-tmp-deps.log:
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
 endif
 
 ifneq ($(SKIP_DEPS),)
 deps::
 else
-ifeq ($(ALL_DEPS_DIRS),)
-deps:: apps
-else
-deps:: $(ALL_DEPS_DIRS) apps
-ifeq ($(IS_APP)$(IS_DEP),)
-	$(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
-endif
+deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log
 	$(verbose) mkdir -p $(ERLANG_MK_TMP)
-	$(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+	$(verbose) set -e; for dep in $(ALL_DEPS_DIRS) ; do \
 		if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
 			:; \
 		else \
 			echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
 			if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
-				$(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+				$(MAKE) -C $$dep IS_DEP=1; \
 			else \
-				echo "Error: No Makefile to build dependency $$dep."; \
+				echo "Error: No Makefile to build dependency $$dep." >&2; \
 				exit 2; \
 			fi \
 		fi \
 	done
 endif
-endif
 
 # Deps related targets.
 
@@ -4203,17 +4358,18 @@ endif
 # in practice only Makefile is needed so far.
 define dep_autopatch
 	if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+		rm -rf $(DEPS_DIR)/$1/ebin/; \
 		$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
 		$(call dep_autopatch_erlang_mk,$(1)); \
 	elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
-		if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+		if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
+			$(call dep_autopatch2,$1); \
+		elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
 			$(call dep_autopatch2,$(1)); \
-		elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+		elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
 			$(call dep_autopatch2,$(1)); \
-		elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+		elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
 			$(call dep_autopatch2,$(1)); \
-		else \
-			$(call erlang,$(call dep_autopatch_app.erl,$(1))); \
 		fi \
 	else \
 		if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
@@ -4225,11 +4381,14 @@ define dep_autopatch
 endef
 
 define dep_autopatch2
+	! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
+	mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
+	rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
 	if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
 		$(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
 	fi; \
 	$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
-	if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+	if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
 		$(call dep_autopatch_fetch_rebar); \
 		$(call dep_autopatch_rebar,$(1)); \
 	else \
@@ -4241,11 +4400,15 @@ define dep_autopatch_noop
 	printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
 endef
 
-# Overwrite erlang.mk with the current file by default.
+# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
+# if given. Do it for all 3 possible Makefile file names.
 ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
 define dep_autopatch_erlang_mk
-	echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
-		> $(DEPS_DIR)/$1/erlang.mk
+	for f in Makefile makefile GNUmakefile; do \
+		if [ -f $(DEPS_DIR)/$1/$$f ]; then \
+			sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
+		fi \
+	done
 endef
 else
 define dep_autopatch_erlang_mk
@@ -4310,6 +4473,10 @@ define dep_autopatch_rebar.erl
 	Write("C_SRC_TYPE = rebar\n"),
 	Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
 	Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+	ToList = fun
+		(V) when is_atom(V) -> atom_to_list(V);
+		(V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
+	end,
 	fun() ->
 		Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
 		case lists:keyfind(erl_opts, 1, Conf) of
@@ -4317,26 +4484,50 @@ define dep_autopatch_rebar.erl
 			{_, ErlOpts} ->
 				lists:foreach(fun
 					({d, D}) ->
-						Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+						Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
+					({d, DKey, DVal}) ->
+						Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
 					({i, I}) ->
 						Write(["ERLC_OPTS += -I ", I, "\n"]);
 					({platform_define, Regex, D}) ->
 						case rebar_utils:is_arch(Regex) of
-							true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+							true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
 							false -> ok
 						end;
 					({parse_transform, PT}) ->
-						Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+						Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
 					(_) -> ok
 				end, ErlOpts)
 		end,
 		Write("\n")
 	end(),
+	GetHexVsn = fun(N) ->
+		case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
+			{ok, Lock} ->
+				io:format("~p~n", [Lock]),
+				case lists:keyfind("1.1.0", 1, Lock) of
+					{_, LockPkgs} ->
+						io:format("~p~n", [LockPkgs]),
+						case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
+							{_, {pkg, _, Vsn}, _} ->
+								io:format("~p~n", [Vsn]),
+								{N, {hex, binary_to_list(Vsn)}};
+							_ ->
+								false
+						end;
+					_ ->
+						false
+				end;
+			_ ->
+				false
+		end
+	end,
 	fun() ->
 		File = case lists:keyfind(deps, 1, Conf) of
 			false -> [];
 			{_, Deps} ->
 				[begin case case Dep of
+							N when is_atom(N) -> GetHexVsn(N);
 							{N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
 							{N, S} when is_tuple(S) -> {N, S};
 							{N, _, S} -> {N, S};
@@ -4373,7 +4564,8 @@ define dep_autopatch_rebar.erl
 	Write("\npre-deps::\n"),
 	Write("\npre-app::\n"),
 	PatchHook = fun(Cmd) ->
-		case Cmd of
+		Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
+		case Cmd2 of
 			"make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
 			"gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
 			"make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
@@ -4488,7 +4680,7 @@ define dep_autopatch_rebar.erl
 			end,
 			[PortSpec(S) || S <- PortSpecs]
 	end,
-	Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+	Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
 	RunPlugin = fun(Plugin, Step) ->
 		case erlang:function_exported(Plugin, Step, 2) of
 			false -> ok;
@@ -4536,27 +4728,17 @@ define dep_autopatch_rebar.erl
 	halt()
 endef
 
-define dep_autopatch_app.erl
-	UpdateModules = fun(App) ->
-		case filelib:is_regular(App) of
-			false -> ok;
-			true ->
-				{ok, [{application, '$(1)', L0}]} = file:consult(App),
-				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
-					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
-				L = lists:keystore(modules, 1, L0, {modules, Mods}),
-				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
-		end
-	end,
-	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
-	halt()
-endef
-
 define dep_autopatch_appsrc_script.erl
 	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
 	AppSrcScript = AppSrc ++ ".script",
-	Bindings = erl_eval:new_bindings(),
-	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	{ok, Conf0} = file:consult(AppSrc),
+	Bindings0 = erl_eval:new_bindings(),
+	Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
+	Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
+	Conf = case file:script(AppSrcScript, Bindings) of
+		{ok, [C]} -> C;
+		{ok, C} -> C
+	end,
 	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
 	halt()
 endef
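
The rewritten dep_autopatch_appsrc_script.erl evaluates a rebar-style .app.src.script with the same bindings rebar provides, CONFIG holding the terms of the plain .app.src and SCRIPT the script path, then writes the result back as a static .app.src. A minimal sketch of a script it can now evaluate; the application name and the version command are illustrative:

    %% my_app.app.src.script
    %% The value of the last expression becomes the new .app.src contents.
    [{application, App, Props}] = CONFIG,
    Vsn = os:cmd("git describe --always") -- "\n",  %% drop the trailing newline
    [{application, App, lists:keystore(vsn, 1, Props, {vsn, Vsn})}].
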
@@ -4569,7 +4751,11 @@ define dep_autopatch_appsrc.erl
 		true ->
 			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
 			L1 = lists:keystore(modules, 1, L0, {modules, []}),
-			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L2 = case lists:keyfind(vsn, 1, L1) of
+				{_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"});
+				{_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
+				_ -> L1
+			end,
 			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
 			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
 			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
@@ -4599,11 +4785,15 @@ define dep_fetch_cp
 	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
 endef
 
+define dep_fetch_ln
+	ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
 # Hex only has a package version. No need to look in the Erlang.mk packages.
 define dep_fetch_hex
 	mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
 	$(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
-		https://s3.amazonaws.com/s3.hex.pm/tarballs/$1-$(strip $(word 2,$(dep_$1))).tar); \
+		https://repo.hex.pm/tarballs/$1-$(strip $(word 2,$(dep_$1))).tar); \
 	tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
 endef
 
@@ -4634,7 +4824,7 @@ $(DEPS_DIR)/$(call dep_name,$1):
 	$(eval DEP_NAME := $(call dep_name,$1))
 	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
 	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
-		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
 		exit 17; \
 	fi
 	$(verbose) mkdir -p $(DEPS_DIR)
@@ -4676,15 +4866,15 @@ ifndef IS_APP
 clean:: clean-apps
 
 clean-apps:
-	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
-		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	$(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1; \
 	done
 
 distclean:: distclean-apps
 
 distclean-apps:
-	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
-		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	$(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1; \
 	done
 endif
 
@@ -4704,84 +4894,6 @@ ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
 ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
 ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
 
-# External plugins.
-
-DEP_PLUGINS ?=
-
-define core_dep_plugin
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endef
-
-$(foreach p,$(DEP_PLUGINS),\
-	$(eval $(if $(findstring /,$p),\
-		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
-		$(call core_dep_plugin,$p/plugins.mk,$p))))
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES   = $(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
-	@mkdir -p $(ERLANG_MK_TMP)
-	@if test -f $@; then \
-		touch $(DTL_FILES); \
-	fi
-	@touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
-	[begin
-		Module0 = case "$(strip $(DTL_FULL_PATH))" of
-			"" ->
-				filename:basename(F, ".dtl");
-			_ ->
-				"$(DTL_PATH)/" ++ F2 = filename:rootname(F, ".dtl"),
-				re:replace(F2, "/",  "_",  [{return, list}, global])
-		end,
-		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
-		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
-			ok -> ok;
-			{ok, _} -> ok
-		end
-	end || F <- string:tokens("$(1)", " ")],
-	halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
-	$(if $(strip $?),\
-		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
-			-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
-
-endif
-
 # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
 
@@ -4801,10 +4913,9 @@ endef
 
 define compile_proto.erl
 	[begin
-		Dir = filename:dirname(filename:dirname(F)),
 		protobuffs_compile:generate_source(F,
-			[{output_include_dir, Dir ++ "/include"},
-				{output_src_dir, Dir ++ "/ebin"}])
+			[{output_include_dir, "./include"},
+				{output_src_dir, "./ebin"}])
 	end || F <- string:tokens("$(1)", " ")],
 	halt().
 endef
@@ -4828,6 +4939,8 @@ COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
 ERLC_EXCLUDE ?=
 ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
 
+ERLC_ASN1_OPTS ?=
+
 ERLC_MIB_OPTS ?=
 COMPILE_MIB_FIRST ?=
 COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
@@ -4877,25 +4990,25 @@ endif
 
 ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
 define app_file
-{application, $(PROJECT), [
+{application, '$(PROJECT)', [
 	{description, "$(PROJECT_DESCRIPTION)"},
 	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
 	{id$(comma)$(space)"$(1)"}$(comma))
 	{modules, [$(call comma_list,$(2))]},
 	{registered, []},
-	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
 	{env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
 ]}.
 endef
 else
 define app_file
-{application, $(PROJECT), [
+{application, '$(PROJECT)', [
 	{description, "$(PROJECT_DESCRIPTION)"},
 	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
 	{id$(comma)$(space)"$(1)"}$(comma))
 	{modules, [$(call comma_list,$(2))]},
 	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
-	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
 	{mod, {$(PROJECT_MOD), []}},
 	{env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
 ]}.
@@ -4920,7 +5033,7 @@ ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
 
 define compile_asn1
 	$(verbose) mkdir -p include/
-	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
 	$(verbose) mv asn1/*.erl src/
 	$(verbose) mv asn1/*.hrl include/
 	$(verbose) mv asn1/*.asn1db include/
@@ -4960,6 +5073,14 @@ define makedep.erl
 	E = ets:new(makedep, [bag]),
 	G = digraph:new([acyclic]),
 	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	DepsDir = "$(call core_native_path,$(DEPS_DIR))",
+	AppsDir = "$(call core_native_path,$(APPS_DIR))",
+	DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
+	DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
+	AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
+	AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
+	DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
+	AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
 	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
 	Add = fun (Mod, Dep) ->
 		case lists:keyfind(Dep, 1, Modules) of
@@ -4974,61 +5095,99 @@ define makedep.erl
 	end,
 	AddHd = fun (F, Mod, DepFile) ->
 		case file:open(DepFile, [read]) of
-			{error, enoent} -> ok;
+			{error, enoent} ->
+				ok;
 			{ok, Fd} ->
-				F(F, Fd, Mod),
 				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
-				ets:insert(E, {ModFile, DepFile})
+				case ets:match(E, {ModFile, DepFile}) of
+					[] ->
+						ets:insert(E, {ModFile, DepFile}),
+						F(F, Fd, Mod,0);
+					_ -> ok
+				end
 		end
 	end,
+	SearchHrl = fun
+		F(_Hrl, []) -> {error,enoent};
+		F(Hrl, [Dir|Dirs]) ->
+			HrlF = filename:join([Dir,Hrl]),
+			case filelib:is_file(HrlF) of
+				true  ->
+				{ok, HrlF};
+				false -> F(Hrl,Dirs)
+			end
+	end,
 	Attr = fun
-		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
-		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
-		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
-		(F, Mod, compile, Opts) when is_list(Opts) ->
+		(_F, Mod, behavior, Dep) ->
+			Add(Mod, Dep);
+		(_F, Mod, behaviour, Dep) ->
+			Add(Mod, Dep);
+		(_F, Mod, compile, {parse_transform, Dep}) ->
+			Add(Mod, Dep);
+		(_F, Mod, compile, Opts) when is_list(Opts) ->
 			case proplists:get_value(parse_transform, Opts) of
 				undefined -> ok;
 				Dep -> Add(Mod, Dep)
 			end;
 		(F, Mod, include, Hrl) ->
-			case filelib:is_file("include/" ++ Hrl) of
-				true -> AddHd(F, Mod, "include/" ++ Hrl);
-				false ->
-					case filelib:is_file("src/" ++ Hrl) of
-						true -> AddHd(F, Mod, "src/" ++ Hrl);
-						false -> false
-					end
+			case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
+				{ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
+				{error, _} -> false
+			end;
+		(F, Mod, include_lib, Hrl) ->
+			case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
+				{ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
+				{error, _} -> false
 			end;
-		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
-		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
 		(F, Mod, import, {Imp, _}) ->
-			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+			IsFile =
+				case lists:keyfind(Imp, 1, Modules) of
+					false -> false;
+					{_, FilePath} -> filelib:is_file(FilePath)
+				end,
+			case IsFile of
 				false -> ok;
 				true -> Add(Mod, Imp)
 			end;
 		(_, _, _, _) -> ok
 	end,
-	MakeDepend = fun(F, Fd, Mod) ->
-		case io:parse_erl_form(Fd, undefined) of
-			{ok, {attribute, _, Key, Value}, _} ->
-				Attr(F, Mod, Key, Value),
-				F(F, Fd, Mod);
-			{eof, _} ->
-				file:close(Fd);
-			_ ->
-				F(F, Fd, Mod)
-		end
+	MakeDepend = fun
+		(F, Fd, Mod, StartLocation) ->
+			{ok, Filename} = file:pid2name(Fd),
+			case io:parse_erl_form(Fd, undefined, StartLocation) of
+				{ok, AbsData, EndLocation} ->
+					case AbsData of
+						{attribute, _, Key, Value} ->
+							Attr(F, Mod, Key, Value),
+							F(F, Fd, Mod, EndLocation);
+						_ -> F(F, Fd, Mod, EndLocation)
+					end;
+				{eof, _ } -> file:close(Fd);
+				{error, ErrorDescription } ->
+					file:close(Fd);
+				{error, ErrorInfo, ErrorLocation} ->
+					F(F, Fd, Mod, ErrorLocation)
+			end,
+			ok
 	end,
 	[begin
 		Mod = list_to_atom(filename:basename(F, ".erl")),
 		{ok, Fd} = file:open(F, [read]),
-		MakeDepend(MakeDepend, Fd, Mod)
+		MakeDepend(MakeDepend, Fd, Mod,0)
 	end || F <- ErlFiles],
 	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
 	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	TargetPath = fun(Target) ->
+		case lists:keyfind(Target, 1, Modules) of
+			false -> "";
+			{_, DepFile} ->
+				DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
+				string:join(DirSubname ++ [atom_to_list(Target)], "/")
+		end
+	end,
 	ok = file:write_file("$(1)", [
 		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
-		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+		"\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
 	]),
 	halt()
 endef
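
makedep.erl now walks each source file form by form with io:parse_erl_form/3, looks for -include/-include_lib targets in the project, sibling applications and dependencies, and records behaviours, parse transforms and imports as compile-order edges. Stripped of the Makefile plumbing, the scanning loop is roughly the following sketch, with the attribute handling reduced to two kinds:

    scan(File) ->
    	{ok, Fd} = file:open(File, [read]),
    	scan(Fd, 1, []).

    scan(Fd, Location, Acc0) ->
    	case io:parse_erl_form(Fd, undefined, Location) of
    		{ok, {attribute, _, Key, Value}, Next}
    				when Key =:= behaviour; Key =:= include ->
    			scan(Fd, Next, [{Key, Value}|Acc0]);
    		{ok, _, Next} ->
    			scan(Fd, Next, Acc0);
    		{error, _, Next} ->
    			scan(Fd, Next, Acc0);
    		{eof, _} ->
    			ok = file:close(Fd),
    			lists:reverse(Acc0)
    	end.
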
@@ -5052,7 +5211,7 @@ $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):
 ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
 endif
 
--include $(PROJECT).d
+include $(wildcard $(PROJECT).d)
 
 ebin/$(PROJECT).app:: ebin/
 
@@ -5113,7 +5272,7 @@ ifneq ($(SKIP_DEPS),)
 doc-deps:
 else
 doc-deps: $(ALL_DOC_DEPS_DIRS)
-	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+	$(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
 endif
 
 # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
@@ -5133,7 +5292,7 @@ ifneq ($(SKIP_DEPS),)
 rel-deps:
 else
 rel-deps: $(ALL_REL_DEPS_DIRS)
-	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+	$(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
 endif
 
 # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
@@ -5158,7 +5317,7 @@ ifneq ($(SKIP_DEPS),)
 test-deps:
 else
 test-deps: $(ALL_TEST_DEPS_DIRS)
-	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+	$(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
 endif
 
 ifneq ($(wildcard $(TEST_DIR)),)
@@ -5170,17 +5329,17 @@ endif
 ifeq ($(wildcard src),)
 test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
 test-build:: clean deps test-deps
-	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
 else
 ifeq ($(wildcard ebin/test),)
 test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
 test-build:: clean deps test-deps $(PROJECT).d
-	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
 	$(gen_verbose) touch ebin/test
 else
 test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
 test-build:: deps test-deps $(PROJECT).d
-	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
 endif
 
 clean:: clean-test-dir
@@ -5277,6 +5436,7 @@ MAN_VERSION ?= $(PROJECT_VERSION)
 define asciidoc2man.erl
 try
 	[begin
+		io:format(" ADOC   ~s~n", [F]),
 		ok = asciideck:to_manpage(asciideck:parse_file(F), #{
 			compress => gzip,
 			outdir => filename:dirname(F),
@@ -5285,7 +5445,8 @@ try
 		})
 	end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
 	halt(0)
-catch _:_ ->
+catch C:E ->
+	io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
 	halt(1)
 end.
 endef
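
asciidoc2man.erl drives Asciideck itself: each manual source file is parsed and rendered to a gzipped man page next to the source, and failures now print the exception and stack trace instead of exiting silently. For a single file the equivalent standalone call is roughly the following; the path is illustrative and the extra header fields are omitted:

    ok = asciideck:to_manpage(asciideck:parse_file("doc/src/manual/asciideck.asciidoc"), #{
    	compress => gzip,
    	outdir => "doc/src/manual"
    }).
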
@@ -5293,15 +5454,15 @@ endef
 asciidoc-manual:: doc-deps
 
 asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
-	$(call erlang,$(call asciidoc2man.erl,$?))
-	$(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
+	$(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
+	$(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
 
 install-docs:: install-asciidoc
 
 install-asciidoc: asciidoc-manual
 	$(foreach s,$(MAN_SECTIONS),\
 		mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
-		install -g `id -u` -o `id -g` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
+		install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
 
 distclean-asciidoc-manual:
 	$(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
@@ -5562,6 +5723,51 @@ code_change(_OldVsn, StateName, StateData, _Extra) ->
 	{ok, StateName, StateData}.
 endef
 
+define tpl_gen_statem
+-module($(n)).
+-behaviour(gen_statem).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_statem.
+-export([callback_mode/0]).
+-export([init/1]).
+-export([state_name/3]).
+-export([handle_event/4]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_statem:start_link(?MODULE, [], []).
+
+%% gen_statem.
+
+callback_mode() ->
+	state_functions.
+
+init([]) ->
+	{ok, state_name, #state{}}.
+
+state_name(_EventType, _EventData, StateData) ->
+	{next_state, state_name, StateData}.
+
+handle_event(_EventType, _EventData, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+	ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+	{ok, StateName, StateData}.
+endef
+
 define tpl_cowboy_loop
 -module($(n)).
 -behaviour(cowboy_loop_handler).
@@ -5754,20 +5960,18 @@ endif
 ifndef t
 	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
 endif
-ifndef tpl_$(t)
-	$(error Unknown template)
-endif
 ifndef n
 	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
 endif
 ifdef in
-	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+	$(call render_template,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
 else
 	$(call render_template,tpl_$(t),src/$(n).erl)
 endif
 
 list-templates:
-	$(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+	$(verbose) @echo Available templates:
+	$(verbose) printf "    %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
 
 # Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -6004,10 +6208,10 @@ else
 	$(call render_template,bs_erl_nif,src/$n.erl)
 endif
 
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
 
-.PHONY: ci ci-prepare ci-setup distclean-kerl
+.PHONY: ci ci-prepare ci-setup
 
 CI_OTP ?=
 CI_HIPE ?=
@@ -6025,24 +6229,9 @@ ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
 ci::
 else
 
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-CI_INSTALL_DIR ?= $(HOME)/erlang
-
 ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
 
-ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
+ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
 
 ci-setup::
 
@@ -6052,10 +6241,10 @@ ci_verbose_0 = @echo " CI    " $(1);
 ci_verbose = $(ci_verbose_$(V))
 
 define ci_target
-ci-$1: $(CI_INSTALL_DIR)/$2
-	$(verbose) $(MAKE) --no-print-directory clean
+ci-$1: $(KERL_INSTALL_DIR)/$2
+	$(verbose) $(MAKE) --no-print-directory clean distclean-c_src-env
 	$(ci_verbose) \
-		PATH="$(CI_INSTALL_DIR)/$2/bin:$(PATH)" \
+		PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
 		CI_OTP_RELEASE="$1" \
 		CT_OPTS="-label $1" \
 		CI_VM="$3" \
@@ -6067,32 +6256,8 @@ $(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
 $(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
 $(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
 
-define ci_otp_target
-ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
-$(CI_INSTALL_DIR)/$(1): $(KERL)
-	MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1)
-	$(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
-endif
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
-
-define ci_hipe_target
-ifeq ($(wildcard $(CI_INSTALL_DIR)/$1-native),)
-$(CI_INSTALL_DIR)/$1-native: $(KERL)
-	KERL_CONFIGURE_OPTIONS=--enable-native-libs \
-		MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native
-	$(KERL) install $1-native $(CI_INSTALL_DIR)/$1-native
-endif
-endef
-
-$(foreach otp,$(sort $(CI_HIPE) $(CI_ERLLLVM)),$(eval $(call ci_hipe_target,$(otp))))
-
-$(KERL):
-	$(verbose) mkdir -p $(ERLANG_MK_TMP)
-	$(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
-	$(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
-	$(verbose) chmod +x $(KERL)
+$(foreach otp,$(CI_OTP),$(eval $(call kerl_otp_target,$(otp))))
+$(foreach otp,$(sort $(CI_HIPE) $(CI_ERLLLVM)),$(eval $(call kerl_hipe_target,$(otp))))
 
 help::
 	$(verbose) printf "%s\n" "" \
@@ -6102,10 +6267,6 @@ help::
 		"The CI_OTP variable must be defined with the Erlang versions" \
 		"that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
 
-distclean:: distclean-kerl
-
-distclean-kerl:
-	$(gen_verbose) rm -rf $(KERL)
 endif
 
 # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
@@ -6123,6 +6284,7 @@ CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*
 endif
 endif
 CT_SUITES ?=
+CT_LOGS_DIR ?= $(CURDIR)/logs
 
 # Core targets.
 
@@ -6145,15 +6307,18 @@ CT_RUN = ct_run \
 	-noinput \
 	-pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
 	-dir $(TEST_DIR) \
-	-logdir $(CURDIR)/logs
+	-logdir $(CT_LOGS_DIR)
 
 ifeq ($(CT_SUITES),)
 ct: $(if $(IS_APP),,apps-ct)
 else
+# We do not run tests if we are in an apps/* with no test directory.
+ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
 ct: test-build $(if $(IS_APP),,apps-ct)
-	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(verbose) mkdir -p $(CT_LOGS_DIR)
 	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
 endif
+endif
 
 ifneq ($(ALL_APPS_DIRS),)
 define ct_app_target
@@ -6179,14 +6344,14 @@ endif
 
 define ct_suite_target
 ct-$(1): test-build
-	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(verbose) mkdir -p $(CT_LOGS_DIR)
 	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
 endef
 
 $(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
 
 distclean-ct:
-	$(gen_verbose) rm -rf $(CURDIR)/logs/
+	$(gen_verbose) rm -rf $(CT_LOGS_DIR)
 
 # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -6201,6 +6366,7 @@ export DIALYZER_PLT
 PLT_APPS ?=
 DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
 DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+DIALYZER_PLT_OPTS ?=
 
 # Core targets.
 
@@ -6232,8 +6398,10 @@ define filter_opts.erl
 endef
 
 $(DIALYZER_PLT): deps app
-	$(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) \
-		`test -f $(ERLANG_MK_TMP)/deps.log && cat $(ERLANG_MK_TMP)/deps.log`
+	$(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
+		while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
+	$(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
+		erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
 
 plt: $(DIALYZER_PLT)
 
@@ -6245,7 +6413,7 @@ dialyze:
 else
 dialyze: $(DIALYZER_PLT)
 endif
-	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
 
 # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -6255,10 +6423,21 @@ endif
 # Configuration.
 
 EDOC_OPTS ?=
+EDOC_SRC_DIRS ?=
+EDOC_OUTPUT ?= doc
+
+define edoc.erl
+	SrcPaths = lists:foldl(fun(P, Acc) ->
+		filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
+	end, [], [$(call comma_list,$(patsubst %,'%',$(EDOC_SRC_DIRS)))]),
+	DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
+	edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
+	halt(0).
+endef
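
With the new edoc.erl macro, EDOC_SRC_DIRS can point EDoc at additional applications: every src/ and c_src/ directory underneath the listed paths is added to source_path and a single edoc:application/3 run covers them all. With, say, EDOC_SRC_DIRS = apps/my_app apps/my_lib and empty EDOC_OPTS, the generated call evaluates to roughly the following; the names are illustrative:

    SrcPaths = lists:foldl(fun(P, Acc) ->
    	filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
    end, [], ['apps/my_app', 'apps/my_lib']),
    edoc:application(my_project, ".", [{dir, "doc"}, {source_path, SrcPaths}, {subpackages, false}]).
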
 
 # Core targets.
 
-ifneq ($(wildcard doc/overview.edoc),)
+ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
 docs:: edoc
 endif
 
@@ -6267,10 +6446,73 @@ distclean:: distclean-edoc
 # Plugin-specific targets.
 
 edoc: distclean-edoc doc-deps
-	$(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+	$(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
 
 distclean-edoc:
-	$(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+	$(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_PATH := $(abspath $(DTL_PATH))
+DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+DTL_NAMES   = $(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%))
+DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
+BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)/" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
+			-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
 
 # Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
 # Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
@@ -6319,7 +6561,7 @@ escript:: escript-zip
 	$(verbose) chmod +x $(ESCRIPT_FILE)
 
 distclean-escript:
-	$(gen_verbose) rm -f $(ESCRIPT_NAME)
+	$(gen_verbose) rm -f $(ESCRIPT_FILE)
 
 # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
 # Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
@@ -6344,22 +6586,27 @@ help::
 # Plugin-specific targets.
 
 define eunit.erl
-	case "$(COVER)" of
-		"" -> ok;
+	Enabled = case "$(COVER)" of
+		"" -> false;
 		_ ->
-			case cover:compile_beam_directory("ebin") of
-				{error, _} -> halt(1);
-				_ -> ok
+			case filelib:is_dir("ebin") of
+				false -> false;
+				true ->
+					case cover:compile_beam_directory("ebin") of
+						{error, _} -> halt(1);
+						_ -> true
+					end
 			end
 	end,
 	case eunit:test($1, [$(EUNIT_OPTS)]) of
 		ok -> ok;
 		error -> halt(2)
 	end,
-	case "$(COVER)" of
-		"" -> ok;
+	case {Enabled, "$(COVER)"} of
+		{false, _} -> ok;
+		{_, ""} -> ok;
 		_ ->
-			cover:export("eunit.coverdata")
+			cover:export("$(COVER_DATA_DIR)/eunit.coverdata")
 	end,
 	halt()
 endef
@@ -6368,10 +6615,10 @@ EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR
 
 ifdef t
 ifeq (,$(findstring :,$(t)))
-eunit: test-build
+eunit: test-build cover-data-dir
 	$(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
 else
-eunit: test-build
+eunit: test-build cover-data-dir
 	$(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
 endif
 else
@@ -6381,12 +6628,72 @@ EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
 EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
 	$(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
 
-eunit: test-build $(if $(IS_APP),,apps-eunit)
+eunit: test-build $(if $(IS_APP),,apps-eunit) cover-data-dir
 	$(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
 
 ifneq ($(ALL_APPS_DIRS),)
 apps-eunit:
-	$(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+	$(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
+		[ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
+		exit $$eunit_retcode
+endif
+endif
+
+# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
+.PHONY: proper
+
+# Targets.
+
+tests:: proper
+
+define proper_check.erl
+	code:add_pathsa([
+		"$(call core_native_path,$(CURDIR)/ebin)",
+		"$(call core_native_path,$(DEPS_DIR)/*/ebin)",
+		"$(call core_native_path,$(TEST_DIR))"]),
+	Module = fun(M) ->
+		[true] =:= lists:usort([
+			case atom_to_list(F) of
+				"prop_" ++ _ ->
+					io:format("Testing ~p:~p/0~n", [M, F]),
+					proper:quickcheck(M:F(), nocolors);
+				_ ->
+					true
+			end
+		|| {F, 0} <- M:module_info(exports)])
+	end,
+	try
+		case $(1) of
+			all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
+			module -> Module($(2));
+			function -> proper:quickcheck($(2), nocolors)
+		end
+	of
+		true -> halt(0);
+		_ -> halt(1)
+	catch error:undef ->
+		io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
+		halt(0)
+	end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+proper: test-build
+	$(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
+else
+proper: test-build
+	$(verbose) echo Testing $(t)/0
+	$(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
+endif
+else
+proper: test-build
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
+	$(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
 endif
 endif
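
The new PropEr plugin hooks into tests:: whenever proper is listed in DEPS or TEST_DEPS. It scans the compiled modules in ebin/ and the test directory for exported zero-arity functions whose names start with prop_ and runs each through proper:quickcheck/2. A property module it would pick up could look like this; the module and property names are illustrative:

    -module(prop_lists_utils).
    -include_lib("proper/include/proper.hrl").

    -export([prop_reverse_twice/0]).

    %% Reversing a list twice yields the original list.
    prop_reverse_twice() ->
    	?FORALL(L, list(integer()),
    		lists:reverse(lists:reverse(L)) =:= L).

Running `make proper` checks every discovered property; `make proper t=prop_lists_utils` restricts the run to one module and `make proper t=prop_lists_utils:prop_reverse_twice` to a single property.
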
 
@@ -6400,9 +6707,15 @@ endif
 RELX ?= $(ERLANG_MK_TMP)/relx
 RELX_CONFIG ?= $(CURDIR)/relx.config
 
-RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_URL ?= https://erlang.mk/res/relx-v3.26.0
 RELX_OPTS ?=
 RELX_OUTPUT_DIR ?= _rel
+RELX_REL_EXT ?=
+RELX_TAR ?= 1
+
+ifdef SFX
+	RELX_TAR = 1
+endif
 
 ifeq ($(firstword $(RELX_OPTS)),-o)
 	RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
@@ -6425,14 +6738,15 @@ distclean:: distclean-relx-rel
 # Plugin-specific targets.
 
 $(RELX):
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
 	$(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
 	$(verbose) chmod +x $(RELX)
 
 relx-rel: $(RELX) rel-deps app
-	$(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release tar
+	$(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release $(if $(filter 1,$(RELX_TAR)),tar)
 
 relx-relup: $(RELX) rel-deps app
-	$(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release relup tar
+	$(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release relup $(if $(filter 1,$(RELX_TAR)),tar)
 
 distclean-relx-rel:
 	$(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
@@ -6440,12 +6754,18 @@ distclean-relx-rel:
 # Run target.
 
 ifeq ($(wildcard $(RELX_CONFIG)),)
-run:
+run::
 else
 
 define get_relx_release.erl
-	{ok, Config} = file:consult("$(RELX_CONFIG)"),
-	{release, {Name, Vsn}, _} = lists:keyfind(release, 1, Config),
+	{ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
+	{release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
+	Vsn = case Vsn0 of
+		{cmd, Cmd} -> os:cmd(Cmd);
+		semver -> "";
+		{semver, _} -> "";
+		VsnStr -> Vsn0
+	end,
 	io:format("~s ~s", [Name, Vsn]),
 	halt(0).
 endef
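
get_relx_release.erl reads relx.config to learn the release name and version used by the run and rel targets; with this change a version given as {cmd, Command} is resolved through os:cmd/1 and semver-style versions fall back to an empty string instead of crashing. Typical release entries it accepts, with illustrative names:

    %% relx.config
    {release, {my_release, "1.0.0"}, [my_app, sasl]}.

    %% Version computed at build time:
    {release, {my_release, {cmd, "git describe --tags --always"}}, [my_app, sasl]}.
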
@@ -6454,8 +6774,19 @@ RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
 RELX_REL_NAME := $(word 1,$(RELX_REL))
 RELX_REL_VSN := $(word 2,$(RELX_REL))
 
-run: all
-	$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME) console
+ifeq ($(PLATFORM),msys2)
+RELX_REL_EXT := .cmd
+endif
+
+run:: all
+	$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) console
+
+ifdef RELOAD
+rel::
+	$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
+	$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
+		eval "io:format(\"~p~n\", [c:lm()])"
+endif
 
 help::
 	$(verbose) printf "%s\n" "" \
@@ -6473,7 +6804,7 @@ endif
 # Configuration.
 
 SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR)
 SHELL_OPTS ?=
 
 ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
@@ -6490,11 +6821,88 @@ help::
 $(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
 
 build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
-	$(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+	$(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
 
 shell: build-shell-deps
 	$(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
 
+# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-sphinx sphinx
+
+# Configuration.
+
+SPHINX_BUILD ?= sphinx-build
+SPHINX_SOURCE ?= doc
+SPHINX_CONFDIR ?=
+SPHINX_FORMATS ?= html
+SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
+SPHINX_OPTS ?=
+
+#sphinx_html_opts =
+#sphinx_html_output = html
+#sphinx_man_opts =
+#sphinx_man_output = man
+#sphinx_latex_opts =
+#sphinx_latex_output = latex
+
+# Helpers.
+
+sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
+sphinx_build_1 = $(SPHINX_BUILD) -N
+sphinx_build_2 = set -x; $(SPHINX_BUILD)
+sphinx_build = $(sphinx_build_$(V))
+
+define sphinx.build
+$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
+
+endef
+
+define sphinx.output
+$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
+endef
+
+# Targets.
+
+ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
+docs:: sphinx
+distclean:: distclean-sphinx
+endif
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Sphinx targets:" \
+		"  sphinx      Generate Sphinx documentation." \
+		"" \
+		"ReST sources and 'conf.py' file are expected in directory pointed by" \
+		"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
+		"'html' format is generated by default); target directory can be specified by" \
+		'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
+		"Additional Sphinx options can be set in SPHINX_OPTS."
+
+# Plugin-specific targets.
+
+sphinx:
+	$(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
+
+distclean-sphinx:
+	$(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
+
+# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
+
+show-ERL_LIBS:
+	@echo $(ERL_LIBS)
+
+show-ERLC_OPTS:
+	@$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
+
+show-TEST_ERLC_OPTS:
+	@$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
+
 # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
 
@@ -6506,7 +6914,10 @@ ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
 tests:: triq
 
 define triq_check.erl
-	code:add_pathsa(["$(call core_native_path,$(CURDIR)/ebin)", "$(call core_native_path,$(DEPS_DIR)/*/ebin)"]),
+	code:add_pathsa([
+		"$(call core_native_path,$(CURDIR)/ebin)",
+		"$(call core_native_path,$(DEPS_DIR)/*/ebin)",
+		"$(call core_native_path,$(TEST_DIR))"]),
 	try
 		case $(1) of
 			all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
@@ -6517,7 +6928,7 @@ define triq_check.erl
 		true -> halt(0);
 		_ -> halt(1)
 	catch error:undef ->
-		io:format("Undefined property or module~n"),
+		io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
 		halt(0)
 	end.
 endef
@@ -6533,7 +6944,8 @@ triq: test-build
 endif
 else
 triq: test-build
-	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
 	$(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
 endif
 endif
@@ -6555,14 +6967,14 @@ endif
 XREFR ?= $(CURDIR)/xrefr
 export XREFR
 
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
 
 # Core targets.
 
 help::
-	$(verbose) printf "%s\n" "" \
-		"Xref targets:" \
-		"  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+	$(verbose) printf '%s\n' '' \
+		'Xref targets:' \
+		'  xref        Run Xrefr using $$XREF_CONFIG as config file if defined'
 
 distclean:: distclean-xref
 
@@ -6582,26 +6994,25 @@ distclean-xref:
 # Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
 
-COVER_REPORT_DIR = cover
+COVER_REPORT_DIR ?= cover
+COVER_DATA_DIR ?= $(CURDIR)
 
 # Hook in coverage to ct
 
 ifdef COVER
 ifdef CT_RUN
-# All modules in 'ebin'
-COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
-
+ifneq ($(wildcard $(TEST_DIR)),)
 test-build:: $(TEST_DIR)/ct.cover.spec
 
-$(TEST_DIR)/ct.cover.spec:
-	$(verbose) echo Cover mods: $(COVER_MODS)
+$(TEST_DIR)/ct.cover.spec: cover-data-dir
 	$(gen_verbose) printf "%s\n" \
-		'{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
-		'{export,"$(CURDIR)/ct.coverdata"}.' > $@
+		"{incl_app, '$(PROJECT)', details}." \
+		'{export,"$(abspath $(COVER_DATA_DIR))/ct.coverdata"}.' > $@
 
 CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
 endif
 endif
+endif
 
 # Core targets
 
@@ -6610,6 +7021,13 @@ ifneq ($(COVER_REPORT_DIR),)
 tests::
 	$(verbose) $(MAKE) --no-print-directory cover-report
 endif
+
+cover-data-dir: | $(COVER_DATA_DIR)
+
+$(COVER_DATA_DIR):
+	$(verbose) mkdir -p $(COVER_DATA_DIR)
+else
+cover-data-dir:
 endif
 
 clean:: coverdata-clean
@@ -6623,7 +7041,7 @@ help::
 		"Cover targets:" \
 		"  cover-report  Generate a HTML coverage report from previously collected" \
 		"                cover data." \
-		"  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+		"  all.coverdata Merge all coverdata files into all.coverdata." \
 		"" \
 		"If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
 		"target tests additionally generates a HTML coverage report from the combined" \
@@ -6632,17 +7050,20 @@ help::
 
 # Plugin specific targets
 
-COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
 
 .PHONY: coverdata-clean
 coverdata-clean:
-	$(gen_verbose) rm -f *.coverdata ct.cover.spec
+	$(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
 
 # Merge all coverdata files into one.
-all.coverdata: $(COVERDATA)
-	$(gen_verbose) $(ERL) -eval ' \
-		$(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
-		cover:export("$@"), halt(0).'
+define cover_export.erl
+	$(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+	cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
+endef
+
+all.coverdata: $(COVERDATA) cover-data-dir
+	$(gen_verbose) $(call erlang,$(cover_export.erl))
 
 # These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
 # empty if you want the coverdata files but not the HTML report.
@@ -6652,6 +7073,7 @@ ifneq ($(COVER_REPORT_DIR),)
 
 cover-report-clean:
 	$(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+	$(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
 
 ifeq ($(COVERDATA),)
 cover-report:
@@ -6660,7 +7082,7 @@ else
 # Modules which include eunit.hrl always contain one line without coverage
 # because eunit defines test/0 which is never called. We compensate for this.
 EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
-	grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+	grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
 	| sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
 
 define cover_report.erl
@@ -6695,7 +7117,7 @@ define cover_report.erl
 endef
 
 cover-report:
-	$(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+	$(verbose) mkdir -p $(COVER_REPORT_DIR)
 	$(gen_verbose) $(call erlang,$(cover_report.erl))
 
 endif
@@ -6748,6 +7170,18 @@ sfx:
 endif
 endif
 
+# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
 # Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
 # Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
 # This file is part of erlang.mk and subject to the terms of the ISC License.
@@ -6815,22 +7249,20 @@ ifeq ($(IS_APP)$(IS_DEP),)
 	$(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
 endif
 ifndef IS_APP
-	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+	$(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
 		$(MAKE) -C $$dep $@ \
 		 IS_APP=1 \
-		 ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
-		 || exit $$?; \
+		 ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
 	done
 endif
-	$(verbose) for dep in $^ ; do \
+	$(verbose) set -e; for dep in $^ ; do \
 		if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
 			echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
-			if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+			if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
 			 $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
 				$(MAKE) -C $$dep fetch-deps \
 				 IS_DEP=1 \
-				 ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
-				 || exit $$?; \
+				 ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
 			fi \
 		fi \
 	done
diff --git a/scripts/asciidoc b/scripts/asciidoc
new file mode 100755
index 0000000..3983c9c
--- /dev/null
+++ b/scripts/asciidoc
@@ -0,0 +1,102 @@
+#!/usr/bin/env sh
+
+set -e
+#set -x
+
+ARGUMENTS="$@"
+
+TEMP=$(getopt -o 'a:b:d:hso:nv' -l 'attribute:,backend:,doctype:,help,no-header-footer,out-file:,section-numbers,safe,theme:,verbose,version' -n asciidoc -- "$@")
+
+if [ $? -ne 0 ]; then
+	exit 1
+fi
+
+eval set -- "$TEMP"
+unset TEMP
+
+NO_HEADER_FOOTER=
+OUT_DIR=
+OUT_FILE=
+SAFE=0
+VERBOSE=0
+
+while true; do
+	case "$1" in
+		'-a'|'--attribute')
+			echo 'The option -a|--attribute is currently ignored.' >&2
+			shift 2 ;;
+		'-b'|'--backend')
+			echo 'The option -b|--backend is currently ignored.' >&2
+			shift 2 ;;
+		'-d'|'--doctype')
+			echo 'The option -d|--doctype is currently ignored.' >&2
+			shift 2 ;;
+		'-h'|'--help')
+			echo 'TODO'
+			exit 0 ;;
+		'-s'|'--no-header-footer')
+			NO_HEADER_FOOTER=1
+			shift ;;
+		'-o'|'--out-file')
+			OUT_DIR=`dirname $2`
+			OUT_FILE=`basename ${2%.*}`
+			shift 2 ;;
+		'-n'|'--section-numbers')
+			echo 'The option -n|--section-numbers is currently ignored.' >&2
+			shift ;;
+		'--safe')
+			SAFE=1
+			shift ;;
+		'--theme')
+			echo 'The option --theme is currently ignored.' >&2
+			shift ;;
+		'-v'|'--verbose')
+			VERBOSE=1
+			shift ;;
+		'--version')
+			echo 'Asciideck compatibility script'
+			exit 0 ;;
+		'--')
+			shift
+			break ;;
+		*)
+			echo 'Unexpected error:' $1 >&2
+			exit 1 ;;
+	esac
+done
+
+IN_FILE=
+
+case "$1" in
+	'')
+		echo 'No file name was provided. Use - for standard input.' >&2
+		exit 1 ;;
+	'-')
+		PARSE_CALL="asciideck:parse_stdin()" ;;
+	*)
+		IN_FILE=$1
+		PARSE_CALL="asciideck:parse_file(\"$IN_FILE\")" ;;
+esac
+
+if [ $IN_FILE -a -z $OUT_FILE ]; then
+	OUT_DIR=`dirname $IN_FILE`
+	OUT_FILE=`basename ${IN_FILE%.*}`
+fi
+
+# We need a dummy variable to not have to worry about commas
+# so let's pass the original command line forward.
+TRANSLATE_OPTS="#{ command_line => \"$0 $ARGUMENTS\""
+if [ $OUT_FILE ]; then
+	TRANSLATE_OPTS="$TRANSLATE_OPTS, outdir => \"$OUT_DIR\", outfile => \"$OUT_FILE\""
+fi
+if [ $NO_HEADER_FOOTER ]; then
+	TRANSLATE_OPTS="$TRANSLATE_OPTS, no_header_footer => true"
+fi
+TRANSLATE_OPTS="$TRANSLATE_OPTS }"
+
+<&0 erl +A0 -boot no_dot_erlang -noshell -pz `dirname $0`/../ebin -eval " \
+	case asciideck:to_html($PARSE_CALL, $TRANSLATE_OPTS) of \
+		ok -> ok; \
+		Output -> io:format(\"~ts~n\", [Output]) \
+	end, \
+	halt()"
diff --git a/src/asciideck.erl b/src/asciideck.erl
index 749ccec..232ff32 100644
--- a/src/asciideck.erl
+++ b/src/asciideck.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
@@ -14,14 +14,25 @@
 
 -module(asciideck).
 
+-export([parse_stdin/0]).
+-export([parse_stdin/1]).
 -export([parse_file/1]).
 -export([parse_file/2]).
 -export([parse/1]).
 -export([parse/2]).
 
+-export([to_html/1]).
+-export([to_html/2]).
 -export([to_manpage/1]).
 -export([to_manpage/2]).
 
+parse_stdin() ->
+	parse_stdin(#{}).
+
+parse_stdin(St) ->
+	{ok, ReaderPid} = asciideck_stdin_reader:start_link(),
+	parse(ReaderPid, St).
+
 parse_file(Filename) ->
 	parse_file(Filename, #{}).
 
@@ -32,10 +43,22 @@ parse_file(Filename, St) ->
 parse(Data) ->
 	parse(Data, #{}).
 
-parse(Data, St) when is_binary(Data) ->
-	asciideck_parser:parse(Data, St);
-parse(Data, St) ->
-	parse(iolist_to_binary(Data), St).
+parse(Data, _St) ->
+	Passes = [
+		asciideck_attributes_pass,
+		asciideck_transform_pass,
+		asciideck_lists_pass,
+		asciideck_tables_pass,
+		asciideck_inline_pass
+	],
+	lists:foldl(fun(M, AST) -> M:run(AST) end,
+		asciideck_block_parser:parse(Data), Passes).
+
+to_html(AST) ->
+	asciideck_to_html:translate(AST, #{}).
+
+to_html(AST, Opts) ->
+	asciideck_to_html:translate(AST, Opts).
 
 to_manpage(AST) ->
 	asciideck_to_manpage:translate(AST, #{}).
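
asciideck:parse/2 now builds the AST with asciideck_block_parser and then applies the attribute, transform, list, table and inline passes in order, so the exported functions compose directly with the new HTML back end and the existing man page one. A short usage sketch; the input document and file names are illustrative:

    %% Parse a document from memory and render it.
    AST = asciideck:parse(<<"= Hello\n\nWelcome to *Asciideck*.\n">>),
    Output = asciideck:to_html(AST),

    %% Or convert an existing manual source, as the erlang.mk asciidoc plugin does:
    ok = asciideck:to_manpage(asciideck:parse_file("doc/src/manual/my_app.asciidoc"),
    	#{compress => gzip, outdir => "doc/src/manual"}).
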
diff --git a/src/asciideck_attributes_parser.erl b/src/asciideck_attributes_parser.erl
new file mode 100644
index 0000000..b89c3f4
--- /dev/null
+++ b/src/asciideck_attributes_parser.erl
@@ -0,0 +1,120 @@
+%% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% Asciidoc User Guide 29
+-module(asciideck_attributes_parser).
+
+-export([parse/1]).
+
+-type attributes() :: #{
+	%% The raw attribute list.
+	0 := binary(),
+	%% Positional attributes.
+	pos_integer() => binary(),
+	%% Named attributes.
+	binary() => binary()
+}.
+-export_type([attributes/0]).
+
+-define(IS_WS(C), (C =:= $\s) or (C =:= $\t)).
+
+-spec parse(binary()) -> attributes().
+parse(Data) ->
+	parse(Data, #{0 => Data}, 1).
+
+parse(<<>>, Attrs, _) ->
+	Attrs;
+parse(Data, Attrs, Nth) ->
+	case parse_attr(Data, <<>>) of
+		{Value, Rest} when Nth =/= undefined ->
+			parse(Rest, Attrs#{Nth => Value}, Nth + 1);
+		{Name, Value, Rest} ->
+			parse(Rest, Attrs#{Name => Value}, undefined)
+	end.
+
+parse_attr(<<>>, Acc) ->
+	{Acc, <<>>};
+%% Skip preceding whitespace.
+parse_attr(<<C, R/bits>>, <<>>) when ?IS_WS(C) ->
+	parse_attr(R, <<>>);
+%% Parse quoted positional attributes in their own function.
+parse_attr(<<$", R/bits>>, <<>>) ->
+	parse_quoted_attr(R, <<>>);
+%% We have a named attribute, parse the value.
+parse_attr(<<$=, R/bits>>, Name) when Name =/= <<>> ->
+	parse_attr_value(R, asciideck_block_parser:trim(Name, trailing), <<>>);
+%% We have a positional attribute.
+parse_attr(<<$,, R/bits>>, Value) ->
+	{asciideck_block_parser:trim(Value, trailing), R};
+%% Continue.
+parse_attr(<<C, R/bits>>, Acc) when C =/= $= ->
+	parse_attr(R, <<Acc/binary, C>>).
+
+%% Get everything until the next double quote.
+parse_quoted_attr(<<$", R/bits>>, Acc) ->
+	parse_quoted_attr_end(R, Acc);
+parse_quoted_attr(<<$\\, $", R/bits>>, Acc) ->
+	parse_quoted_attr(R, <<Acc/binary, $">>);
+parse_quoted_attr(<<C, R/bits>>, Acc) ->
+	parse_quoted_attr(R, <<Acc/binary, C>>).
+
+%% Skip the whitespace until the next comma or eof.
+parse_quoted_attr_end(<<>>, Value) ->
+	{Value, <<>>};
+parse_quoted_attr_end(<<$,, R/bits>>, Value) ->
+	{Value, R};
+parse_quoted_attr_end(<<C, R/bits>>, Value) when ?IS_WS(C) ->
+	parse_quoted_attr_end(R, Value).
+
+parse_attr_value(<<>>, Name, Acc) ->
+	{Name, Acc, <<>>};
+%% Skip preceding whitespace.
+parse_attr_value(<<C, R/bits>>, Name, <<>>) when ?IS_WS(C) ->
+	parse_attr_value(R, Name, <<>>);
+%% Parse quoted positional attributes in their own function.
+parse_attr_value(<<$", R/bits>>, Name, <<>>) ->
+	{Value, Rest} = parse_quoted_attr(R, <<>>),
+	{Name, Value, Rest};
+%% Done.
+parse_attr_value(<<$,, R/bits>>, Name, Value) ->
+	{Name, asciideck_block_parser:trim(Value, trailing), R};
+%% Continue.
+parse_attr_value(<<C, R/bits>>, Name, Acc) ->
+	parse_attr_value(R, Name, <<Acc/binary, C>>).
+
+-ifdef(TEST).
+attribute_0_test() ->
+	#{0 := <<"Hello,world,width=\"50\"">>} = parse(<<"Hello,world,width=\"50\"">>),
+	ok.
+
+parse_test() ->
+	#{} = parse(<<>>),
+	#{
+		1 := <<"Hello">>
+	} = parse(<<"Hello">>),
+	#{
+		1 := <<"quote">>,
+		2 := <<"Bertrand Russell">>,
+		3 := <<"The World of Mathematics (1956)">>
+	} = parse(<<"quote, Bertrand Russell, The World of Mathematics (1956)">>),
+	#{
+		1 := <<"22 times">>,
+		<<"backcolor">> := <<"#0e0e0e">>,
+		<<"options">> := <<"noborders,wide">>
+	} = parse(<<"\"22 times\", backcolor=\"#0e0e0e\", options=\"noborders,wide\"">>),
+	#{
+		1 := <<"A footnote&#44; &#34;with an image&#34; image:smallnew.png[]">>
+	} = parse(<<"A footnote&#44; &#34;with an image&#34; image:smallnew.png[]">>),
+	ok.
+-endif.
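
The attribute list parser receives only the text between the square brackets of an attribute list line and returns a map: key 0 holds the raw list, positional attributes get integer keys, named attributes get binary keys, and double quotes protect embedded commas. For instance the attribute list of a [source,erlang] listing block parses as:

    #{0 := <<"source,erlang">>,
      1 := <<"source">>,
      2 := <<"erlang">>} = asciideck_attributes_parser:parse(<<"source,erlang">>).
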
diff --git a/src/asciideck_attributes_pass.erl b/src/asciideck_attributes_pass.erl
new file mode 100644
index 0000000..393b57d
--- /dev/null
+++ b/src/asciideck_attributes_pass.erl
@@ -0,0 +1,112 @@
+%% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The purpose of this pass is to apply attributes to
+%% their corresponding blocks. For macros the attributes
+%% are already applied. For inline elements the inline
+%% pass is taking care of it.
+-module(asciideck_attributes_pass).
+
+-export([run/1]).
+
+run([]) ->
+	[];
+%% A block identifier is an alternative way of specifying
+%% the id attribute for a block.
+run([{block_id, #{id := ID}, <<>>, _}|Tail0]) ->
+	Tail = apply_attributes(Tail0, #{<<"id">> => ID}),
+	run(Tail);
+%% A block title is ultimately treated as an attribute
+%% for the following block.
+run([{block_title, _, Title, _}|Tail0]) ->
+	Tail = apply_attributes(Tail0, #{<<"title">> => Title}),
+	run(Tail);
+run([{attribute_list, Attrs, <<>>, _}|Tail0]) ->
+	Tail = apply_attributes(Tail0, Attrs),
+	run(Tail);
+run([Block|Tail]) ->
+	[Block|run(Tail)].
+
+%% Find the next block to apply the attributes.
+apply_attributes([], _) ->
+	[];
+apply_attributes(AST=[Element0={Type, Attrs0, Content, Ann}|Tail], Attrs) ->
+	case can_apply(Type) of
+		drop ->
+			AST;
+		skip ->
+			[Element0|apply_attributes(Tail, Attrs)];
+		apply ->
+			Element = {Type, maps:merge(Attrs0, Attrs), Content, Ann},
+			[Element|Tail]
+	end.
+
+%% Block macros already come with a mandatory attribute list.
+%% Just to play it safe we drop the attributes for now.
+can_apply(block_macro) -> drop;
+%% If we hit a list item continuation, drop the attributes for now.
+can_apply(list_item_continuation) -> drop;
+%% We skip attribute lists and alike and let it sort itself out.
+can_apply(block_id) -> skip;
+can_apply(attribute_list) -> skip;
+can_apply(block_title) -> skip;
+%% Everything else is a block.
+can_apply(_) -> apply.
+
+-ifdef(TEST).
+attribute_list_test() ->
+	AST0 = [
+		{attribute_list, #{
+			0 => <<"width=400">>,
+			<<"width">> => <<"400">>
+		}, <<>>, #{line => 1}},
+		{listing_block, #{}, <<"Hello!">>, #{line => 2}}
+	],
+	AST = [
+		{listing_block, #{
+			0 => <<"width=400">>,
+			<<"width">> => <<"400">>
+		}, <<"Hello!">>, #{line => 2}}
+	],
+	AST = run(AST0),
+	ok.
+
+block_id_test() ->
+	AST0 = [
+		{block_id, #{
+			id => <<"cowboy_req">>
+		}, <<>>, #{line => 1}},
+		{listing_block, #{}, <<"Hello!">>, #{line => 2}}
+	],
+	AST = [
+		{listing_block, #{
+			<<"id">> => <<"cowboy_req">>
+		}, <<"Hello!">>, #{line => 2}}
+	],
+	AST = run(AST0),
+	ok.
+
+block_title_test() ->
+	AST0 = [
+		{block_title, #{}, <<"Title">>, #{line => 1}},
+		{listing_block, #{}, <<"Hello!">>, #{line => 2}}
+	],
+	AST = [
+		{listing_block, #{
+			<<"title">> => <<"Title">>
+		}, <<"Hello!">>, #{line => 2}}
+	],
+	AST = run(AST0),
+	ok.
+-endif.
diff --git a/src/asciideck_block_parser.erl b/src/asciideck_block_parser.erl
new file mode 100644
index 0000000..5cea5f9
--- /dev/null
+++ b/src/asciideck_block_parser.erl
@@ -0,0 +1,1127 @@
+%% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The block parser is the first pass in the parsing of Asciidoc
+%% files. It only isolates the different top-level blocks and
+%% produces a representation that can then be manipulated.
+%%
+%% Further passes are necessary to propagate the parsed lists
+%% of attributes to their respective blocks, to create actual
+%% lists from the parsed list items and to parse the contents
+%% of tables. A final pass then parses inline elements.
+%%
+%% This module may be called again for parsing the content
+%% of individual table cells.
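+%%
+%% Each block produced by this pass is a {Type, Attrs, Content, Ann}
+%% tuple, with Ann currently carrying the line annotation. As a rough
+%% sketch (the tests below are authoritative):
+%%
+%%   parse(<<"== Hello\n\nWorld.">>)
+%%     -> [{section_title, #{level => 1}, <<"Hello">>, Ann1},
+%%         {paragraph, #{}, <<"World.">>, Ann2}]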
+-module(asciideck_block_parser).
+
+-export([parse/1]).
+
+%% @todo Temporary export. Move somewhere else.
+-export([trim/1]).
+-export([trim/2]).
+-export([while/2]).
+
+-type ast() :: list(). %% @todo
+
+-record(state, {
+	reader :: pid()
+}).
+
+-define(IS_WS(C), (C =:= $\s) or (C =:= $\t)).
+
+-ifdef(TEST).
+-define(NOT(Type, Value), true = Type =/= element(1, hd(Value))).
+
+define_NOT_test() ->
+	%% This succeeds.
+	?NOT(block_id, parse(<<"[[block,id]]">>)),
+	%% This fails.
+	{'EXIT', _} = (catch ?NOT(block_id, parse(<<"[[block_id]]">>))),
+	ok.
+-endif.
+
+-spec parse(binary() | pid()) -> ast().
+parse(Data) when is_binary(Data) ->
+	%% @todo Might want to start it supervised.
+	%% @todo Might want to stop it also.
+	{ok, ReaderPid} = asciideck_line_reader:start_link(Data),
+	parse(ReaderPid);
+parse(Data) when is_list(Data) ->
+	parse(iolist_to_binary(Data));
+parse(ReaderPid) when is_pid(ReaderPid) ->
+	blocks(#state{reader=ReaderPid}).
+
+blocks(St) ->
+	case block(St) of
+		eof -> [];
+		Block -> [Block|blocks(St)]
+	end.
+
+%% Asciidoc parsing never fails. If a block is not
+%% formatted properly, it will be treated as a paragraph.
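+%% The candidate parsers below are therefore tried in order,
+%% with para/1 last as the catch-all.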
+block(St) ->
+	skip(fun empty_line/1, St),
+	oneof([
+		fun eof/1,
+		%% Section titles.
+		fun section_title/1,
+		fun long_section_title/1,
+		%% Block macros.
+		fun block_id/1,
+		fun block_macro/1,
+		%% Lists.
+		fun bulleted_list/1,
+		fun numbered_list/1,
+		fun labeled_list/1,
+		fun callout_list/1,
+		fun list_item_continuation/1,
+		%% Delimited blocks.
+		fun listing_block/1,
+		fun literal_block/1,
+		fun sidebar_block/1,
+		fun comment_block/1,
+		fun passthrough_block/1,
+		fun quote_block/1,
+		fun example_block/1,
+		fun open_block/1,
+		%% Table.
+		fun table/1,
+		%% Attributes.
+		fun attribute_entry/1,
+		fun attribute_list/1,
+		%% Block title.
+		fun block_title/1,
+		%% Comment lines.
+		fun comment_line/1,
+		%% Paragraphs.
+		fun literal_para/1,
+		fun admonition_para/1,
+		fun para/1
+	], St).
+
+eof(St) ->
+	eof = read_line(St).
+
+-ifdef(TEST).
+eof_test() ->
+	[] = parse(<<>>).
+-endif.
+
+empty_line(St) ->
+	<<>> = trim(read_line(St)).
+
+-ifdef(TEST).
+empty_line_test() ->
+	[] = parse(<<
+		"\n"
+		"           \n"
+		"			\n"
+		"\n"
+	>>).
+-endif.
+
+%% Asciidoc User Guide 11.2
+section_title(St) ->
+	{Level, Title0} = case read_line(St) of
+		<<"=", C, R/bits>> when ?IS_WS(C) -> {0, R};
+		<<"==", C, R/bits>> when ?IS_WS(C) -> {1, R};
+		<<"===", C, R/bits>> when ?IS_WS(C) -> {2, R};
+		<<"====", C, R/bits>> when ?IS_WS(C) -> {3, R};
+		<<"=====", C, R/bits>> when ?IS_WS(C) -> {4, R}
+	end,
+	Ann = ann(St),
+	Title1 = trim(Title0),
+	%% Optional: trailing title delimiter.
+	Trailer = case Level of
+		0 -> <<"=">>;
+		1 -> <<"==">>;
+		2 -> <<"===">>;
+		3 -> <<"====">>;
+		4 -> <<"=====">>
+	end,
+	Len = byte_size(Title1) - Level - 2,
+	Title = case Title1 of
+		<<Title2:Len/binary, WS, Trailer/binary>> when ?IS_WS(WS) -> trim(Title2);
+		_ -> trim(Title1)
+	end,
+	%% Section titles must be followed by at least one empty line.
+	_ = empty_line(St),
+	%% Good!
+	{section_title, #{level => Level}, Title, Ann}.
+
+-ifdef(TEST).
+section_title_test() ->
+	%% With trailing title delimiter.
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
+		= parse(<<"= Document Title (level 0) =">>),
+	[{section_title, #{level := 1}, <<"Section Title (level 1)">>, _}]
+		= parse(<<"== Section Title (level 1) ==">>),
+	[{section_title, #{level := 2}, <<"Section Title (level 2)">>, _}]
+		= parse(<<"=== Section Title (level 2) ===">>),
+	[{section_title, #{level := 3}, <<"Section Title (level 3)">>, _}]
+		= parse(<<"==== Section Title (level 3) ====">>),
+	[{section_title, #{level := 4}, <<"Section Title (level 4)">>, _}]
+		= parse(<<"===== Section Title (level 4) =====">>),
+	%% Without trailing title delimiter.
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
+		= parse(<<"= Document Title (level 0)">>),
+	[{section_title, #{level := 1}, <<"Section Title (level 1)">>, _}]
+		= parse(<<"== Section Title (level 1)">>),
+	[{section_title, #{level := 2}, <<"Section Title (level 2)">>, _}]
+		= parse(<<"=== Section Title (level 2)">>),
+	[{section_title, #{level := 3}, <<"Section Title (level 3)">>, _}]
+		= parse(<<"==== Section Title (level 3)">>),
+	[{section_title, #{level := 4}, <<"Section Title (level 4)">>, _}]
+		= parse(<<"===== Section Title (level 4)">>),
+	%% Accept more spaces before/after delimiters.
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
+		= parse(<<"=   Document Title (level 0)">>),
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
+		= parse(<<"=   Document Title (level 0) =">>),
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
+		= parse(<<"= Document Title (level 0)   =">>),
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
+		= parse(<<"= Document Title (level 0) =  ">>),
+	%% A space before the first delimiter is not a title.
+	?NOT(section_title, parse(<<" = Document Title (level 0)">>)),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 11.1
+long_section_title(St) ->
+	%% Title must be hard against the left margin.
+	<<C, _/bits>> = Title0 = read_line(St),
+	Ann = ann(St),
+	false = ?IS_WS(C),
+	Title = trim(Title0),
+	%% Read the underline.
+	{Level, Char, Underline0} = case read_line(St) of
+		U = <<"=", _/bits >> -> {0, $=, U};
+		U = <<"-", _/bits >> -> {1, $-, U};
+		U = <<"~", _/bits >> -> {2, $~, U};
+		U = <<"^", _/bits >> -> {3, $^, U};
+		U = <<"+", _/bits >> -> {4, $+, U}
+	end,
+	Underline = trim(Underline0, trailing),
+	%% Underline must be the same character repeated over the entire line.
+	repeats(Underline, Char),
+	%% Underline must be the same size as the title, +/- 2 characters.
+	TLen = byte_size(Title),
+	ULen = byte_size(Underline),
+	true = (TLen >= ULen - 2) andalso (TLen =< ULen + 2),
+	%% Good!
+	{section_title, #{level => Level}, Title, Ann}.
+
+-ifdef(TEST).
+long_section_title_test() ->
+	%% Same amount of characters for the underline.
+	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}] = parse(<<
+		"Document Title (level 0)\n"
+		"========================">>),
+	[{section_title, #{level := 1}, <<"Section Title (level 1)">>, _}] = parse(<<
+		"Section Title (level 1)\n"
+		"-----------------------">>),
+	[{section_title, #{level := 2}, <<"Section Title (level 2)">>, _}] = parse(<<
+		"Section Title (level 2)\n"
+		"~~~~~~~~~~~~~~~~~~~~~~~">>),
+	[{section_title, #{level := 3}, <<"Section Title (level 3)">>, _}] = parse(<<
+		"Section Title (level 3)\n"
+		"^^^^^^^^^^^^^^^^^^^^^^^">>),
+	[{section_title, #{level := 4}, <<"Section Title (level 4)">>, _}] = parse(<<
+		"Section Title (level 4)\n"
+		"+++++++++++++++++++++++">>),
+	%% A shorter title to confirm we are not cheating.
+	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
+		"Hello!\n"
+		"======">>),
+	%% Underline can be +/- 2 characters.
+	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
+		"Hello!\n"
+		"====">>),
+	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
+		"Hello!\n"
+		"=====">>),
+	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
+		"Hello!\n"
+		"=======">>),
+	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
+		"Hello!\n"
+		"========">>),
+	%% Underline too short/long results in a different block.
+	?NOT(section_title, parse(<<
+		"Hello!\n"
+		"===">>)),
+	?NOT(section_title, parse(<<
+		"Hello!\n"
+		"=========">>)),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.2.1
+%%
+%% We currently do not implement the <xreflabel> value.
+%% I am also not sure what characters are allowed,
+%% so what is here is a best guess.
+block_id(St) ->
+	<<"[[", Line0/bits>> = read_line(St),
+	Line = trim(Line0),
+	Len = byte_size(Line) - 2,
+	<<BlockID:Len/binary, "]]">> = Line,
+	%% Make sure there are only valid characters.
+	{BlockID, <<>>} = while(fun(C) ->
+		(C =/= $,) andalso (C =/= $[) andalso (C =/= $])
+		andalso (C =/= $\s) andalso (C =/= $\t)
+	end, BlockID),
+	%% Good!
+	{block_id, #{id => BlockID}, <<>>, ann(St)}.
+
+-ifdef(TEST).
+block_id_test() ->
+	%% Valid.
+	[{block_id, #{id := <<"X30">>}, <<>>, _}] = parse(<<"[[X30]]">>),
+	%% Invalid.
+	?NOT(block_id, parse(<<"[[block,id]]">>)),
+	?NOT(block_id, parse(<<"[[block[id]]">>)),
+	?NOT(block_id, parse(<<"[[block]id]]">>)),
+	?NOT(block_id, parse(<<"[[block id]]">>)),
+	?NOT(block_id, parse(<<"[[block\tid]]">>)),
+	%% Must be hard on the left of the line.
+	?NOT(block_id, parse(<<" [[block_id]]">>)),
+	?NOT(block_id, parse(<<"\t[[block_id]]">>)),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.2.3
+comment_line(St) ->
+	<<"//", Comment0/bits>> = read_line(St),
+	Comment = trim(Comment0),
+	%% Good!
+	{comment_line, #{<<"subs">> => <<"verbatim">>}, Comment, ann(St)}.
+
+-ifdef(TEST).
+comment_line_test() ->
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment.">>),
+	%% We trim the whitespace around the comment.
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"//   This is a comment.">>),
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment.   ">>),
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"//\tThis is a comment.">>),
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment.\t">>),
+	[
+		{comment_line, _, <<"First line.">>, _},
+		{comment_line, _, <<"Second line.">>, _}
+	] = parse(<<
+		"// First line.\n"
+		"// Second line.\n">>),
+	%% Must be hard on the left of the line.
+	?NOT(comment_line, parse(<<" // This is a comment.">>)),
+	?NOT(comment_line, parse(<<"\t// This is a comment.">>)),
+	ok.
+-endif.
+
+%% We currently implement the following block macros
+%% from the Asciidoc User Guide:
+%%
+%% - image (21.2.2)
+%% - include (21.3.1)
+%% - ifdef (21.3.2)
+%% - ifndef (21.3.2)
+%% - endif (21.3.2)
+block_macro(St) ->
+	Line0 = read_line(St),
+	Ann = ann(St),
+	%% Name must contain letters, digits or dash characters.
+	{Name, <<"::", Line1/bits>>} = while(fun(C) ->
+		((C >= $a) andalso (C =< $z))
+		orelse ((C >= $A) andalso (C =< $Z))
+		orelse ((C >= $0) andalso (C =< $9))
+		orelse (C =:= $-)
+	end, Line0),
+	%% Name must not begin with a dash.
+	true = binary:at(Name, 0) =/= $-,
+	%% Target must not contain whitespace characters.
+	%% It is followed by an [attribute list].
+	{Target, AttrList0 = <<"[", _/bits>>} = while(fun(C) ->
+		(C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
+	end, Line1),
+	AttrList1 = trim(AttrList0),
+	{attribute_list, AttrList, <<>>, _} = attribute_list(St, AttrList1),
+	%% Block macros must be followed by at least one empty line.
+	_ = empty_line(St),
+	{block_macro, AttrList#{
+		name => Name,
+		target => Target
+	}, <<>>, Ann}.
+
+-ifdef(TEST).
+block_macro_image_test() ->
+	[{block_macro, #{
+		name := <<"image">>,
+		target := <<"images/layout.png">>,
+		1 := <<"J14P main circuit board">>
+	}, <<>>, _}] = parse(<<"image::images/layout.png[J14P main circuit board]">>),
+	[{block_macro, #{
+		name := <<"image">>,
+		target := <<"images/layout.png">>,
+		1 := <<"J14P main circuit board">>,
+		<<"title">> := <<"Main circuit board">>
+	}, <<>>, _}] = parse(
+		<<"image::images/layout.png[\"J14P main circuit board\", "
+			"title=\"Main circuit board\"]">>),
+	ok.
+
+block_macro_include_test() ->
+	[{block_macro, #{
+		name := <<"include">>,
+		target := <<"chapter1.txt">>,
+		<<"tabsize">> := <<"4">>
+	}, <<>>, _}] = parse(<<"include::chapter1.txt[tabsize=4]">>),
+	ok.
+
+block_macro_ifdef_test() ->
+	[{block_macro, #{
+		name := <<"ifdef">>,
+		target := <<"revnumber">>,
+		0 := <<>>
+	}, <<>>, _}] = parse(<<"ifdef::revnumber[]">>),
+	[{block_macro, #{
+		name := <<"ifdef">>,
+		target := <<"revnumber">>,
+		1 := <<"Version number 42">>
+	}, <<>>, _}] = parse(<<"ifdef::revnumber[Version number 42]">>),
+	ok.
+
+block_macro_ifndef_test() ->
+	[{block_macro, #{
+		name := <<"ifndef">>,
+		target := <<"revnumber">>,
+		0 := <<>>
+	}, <<>>, _}] = parse(<<"ifndef::revnumber[]">>),
+	ok.
+
+block_macro_endif_test() ->
+	[{block_macro, #{
+		name := <<"endif">>,
+		target := <<"revnumber">>,
+		0 := <<>>
+	}, <<>>, _}] = parse(<<"endif::revnumber[]">>),
+	%% Some macros accept an empty target.
+	[{block_macro, #{
+		name := <<"endif">>,
+		target := <<>>,
+		0 := <<>>
+	}, <<>>, _}] = parse(<<"endif::[]">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 17.1
+bulleted_list(St) ->
+	Line0 = read_line(St),
+	Line1 = trim(Line0),
+	{Type0, Level, ListItem} = case Line1 of
+		<<"-", C, R/bits>> when ?IS_WS(C) -> {dash, 1, R};
+		<<"*", C, R/bits>> when ?IS_WS(C) -> {star, 1, R};
+		<<"**", C, R/bits>> when ?IS_WS(C) -> {star, 2, R};
+		<<"***", C, R/bits>> when ?IS_WS(C) -> {star, 3, R};
+		<<"****", C, R/bits>> when ?IS_WS(C) -> {star, 4, R};
+		<<"*****", C, R/bits>> when ?IS_WS(C) -> {star, 5, R}
+	end,
+	Type = case Type0 of
+		dash -> bulleted_alt;
+		star -> bulleted
+	end,
+	list_item(St, #{
+		type => Type,
+		level => Level
+	}, ListItem).
+
+-ifdef(TEST).
+bulleted_list_test() ->
+	[{list_item, #{
+		type := bulleted_alt,
+		level := 1
+	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"- List item.">>),
+	[{list_item, #{
+		type := bulleted,
+		level := 1
+	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"* List item.">>),
+	[{list_item, #{
+		type := bulleted,
+		level := 2
+	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"** List item.">>),
+	[{list_item, #{
+		type := bulleted,
+		level := 3
+	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"*** List item.">>),
+	[{list_item, #{
+		type := bulleted,
+		level := 4
+	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"**** List item.">>),
+	[{list_item, #{
+		type := bulleted,
+		level := 5
+	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"***** List item.">>),
+	%% Two list items one after the other.
+	[
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, _, <<"List item 1.">>, _}], _},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, _, <<"List item 2.">>, _}], _}
+	] = parse(<<"* List item 1.\n* List item 2.">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 17.2
+%%
+%% We currently only implement implicit numbering.
+numbered_list(St) ->
+	Line0 = read_line(St),
+	Line1 = trim(Line0),
+	{Level, ListItem} = case Line1 of
+		<<".", C, R/bits>> when ?IS_WS(C) -> {1, R};
+		<<"..", C, R/bits>> when ?IS_WS(C) -> {2, R};
+		<<"...", C, R/bits>> when ?IS_WS(C) -> {3, R};
+		<<"....", C, R/bits>> when ?IS_WS(C) -> {4, R};
+		<<".....", C, R/bits>> when ?IS_WS(C) -> {5, R}
+	end,
+	list_item(St, #{
+		type => numbered,
+		level => Level
+	}, ListItem).
+
+-ifdef(TEST).
+numbered_list_test() ->
+	[{list_item, #{
+		type := numbered,
+		level := 1
+	}, [{paragraph, _, <<"Arabic (decimal) numbered list item.">>, _}], _}]
+		= parse(<<". Arabic (decimal) numbered list item.">>),
+	[{list_item, #{
+		type := numbered,
+		level := 2
+	}, [{paragraph, _, <<"Lower case alpha (letter) numbered list item.">>, _}], _}]
+		= parse(<<".. Lower case alpha (letter) numbered list item.">>),
+	[{list_item, #{
+		type := numbered,
+		level := 3
+	}, [{paragraph, _, <<"Lower case roman numbered list item.">>, _}], _}]
+		= parse(<<"... Lower case roman numbered list item.">>),
+	[{list_item, #{
+		type := numbered,
+		level := 4
+	}, [{paragraph, _, <<"Upper case alpha (letter) numbered list item.">>, _}], _}]
+		= parse(<<".... Upper case alpha (letter) numbered list item.">>),
+	[{list_item, #{
+		type := numbered,
+		level := 5
+	}, [{paragraph, _, <<"Upper case roman numbered list item.">>, _}], _}]
+		= parse(<<"..... Upper case roman numbered list item.">>),
+	%% Two list items one after the other.
+	[
+		{list_item, #{type := numbered, level := 1},
+			[{paragraph, _, <<"List item 1.">>, _}], _},
+		{list_item, #{type := numbered, level := 1},
+			[{paragraph, _, <<"List item 2.">>, _}], _}
+	] = parse(<<". List item 1.\n. List item 2.">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 17.3
+%%
+%% The Asciidoc User Guide makes it sound like the
+%% label must be hard against the left margin, but we
+%% don't enforce that, to keep the implementation simple.
+labeled_list(St) ->
+	Line0 = read_line(St),
+	%% We can't match directly to find the list separator;
+	%% we have to search for it.
+	{Label0, Sep, ListItem0} = find_labeled_list(Line0),
+	Label = trim(Label0),
+	ListItem = trim(ListItem0),
+	%% The label must not be empty.
+	true = trim(Label) =/= <<>>,
+	list_item(St, #{
+		type => labeled,
+		separator => Sep,
+		label => Label
+	}, ListItem).
+
+find_labeled_list(Line) ->
+	find_labeled_list(Line, <<>>).
+
+%% We don't have a final clause with an empty binary because
+%% we want to crash if we don't find a labeled list.
+find_labeled_list(<<"::">>, Acc) -> {Acc, <<"::">>, <<>>};
+find_labeled_list(<<":::">>, Acc) -> {Acc, <<":::">>, <<>>};
+find_labeled_list(<<"::::">>, Acc) -> {Acc, <<"::::">>, <<>>};
+find_labeled_list(<<";;">>, Acc) -> {Acc, <<";;">>, <<>>};
+find_labeled_list(<<"::", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<"::">>, R};
+find_labeled_list(<<":::", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<":::">>, R};
+find_labeled_list(<<"::::", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<"::::">>, R};
+find_labeled_list(<<";;", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<";;">>, R};
+find_labeled_list(<<C, R/bits>>, Acc) -> find_labeled_list(R, <<Acc/binary, C>>).
+
+-ifdef(TEST).
+labeled_list_test() ->
+	[{list_item, #{type := labeled, separator := <<"::">>, label := <<"Question">>},
+		[{paragraph, _, <<"Answer!">>, _}], _}] = parse(<<"Question:: Answer!">>),
+	[{list_item, #{type := labeled, separator := <<"::">>, label := <<"Question">>},
+		[{paragraph, _, <<"Answer!">>, _}], _}] = parse(<<"Question::\n  Answer!">>),
+	%% Long snippet from the Asciidoc User Guide, minus literal paragraph.
+	%% @todo Add the literal paragraph back once they are implemented.
+	[
+		{list_item, #{type := labeled, separator := <<"::">>, label := <<"In">>},
+			[{paragraph, _, <<>>, _}], _},
+		{list_item, #{type := labeled, separator := <<"::">>, label := <<"Lorem">>},
+			[{paragraph, _, <<"Fusce euismod commodo velit.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<"::">>, label := <<"Ipsum">>},
+			[{paragraph, _, <<"Vivamus fringilla mi eu lacus.">>, _}], _},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, _, <<"Vivamus fringilla mi eu lacus.">>, _}], _},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, _, <<"Donec eget arcu bibendum nunc consequat lobortis.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<"::">>, label := <<"Dolor">>},
+			[{paragraph, _, <<"Donec eget arcu bibendum nunc consequat lobortis.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<";;">>, label := <<"Suspendisse">>},
+			[{paragraph, _, <<"A massa id sem aliquam auctor.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<";;">>, label := <<"Morbi">>},
+			[{paragraph, _, <<"Pretium nulla vel lorem.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<";;">>, label := <<"In">>},
+			[{paragraph, _, <<"Dictum mauris in urna.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<":::">>, label := <<"Vivamus">>},
+			[{paragraph, _, <<"Fringilla mi eu lacus.">>, _}], _},
+		{list_item, #{type := labeled, separator := <<":::">>, label := <<"Donec">>},
+			[{paragraph, _, <<"Eget arcu bibendum nunc consequat lobortis.">>, _}], _}
+	] = parse(<<
+		"In::\n"
+		"Lorem::\n"
+		"  Fusce euismod commodo velit.\n"
+		%% @todo Add literal paragraph back here.
+		"Ipsum:: Vivamus fringilla mi eu lacus.\n"
+		"  * Vivamus fringilla mi eu lacus.\n"
+		"  * Donec eget arcu bibendum nunc consequat lobortis.\n"
+		"Dolor::\n"
+		"  Donec eget arcu bibendum nunc consequat lobortis.\n"
+		"  Suspendisse;;\n"
+		"    A massa id sem aliquam auctor.\n"
+		"  Morbi;;\n"
+		"    Pretium nulla vel lorem.\n"
+		"  In;;\n"
+		"    Dictum mauris in urna.\n"
+		"    Vivamus::: Fringilla mi eu lacus.\n"
+		"    Donec:::   Eget arcu bibendum nunc consequat lobortis.\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 20
+-spec callout_list(_) -> no_return().
+callout_list(St) -> throw({not_implemented, St}). %% @todo
+
+%% Asciidoc User Guide 17
+%%
+%% We do not apply rules about blocks being contained in
+%% the list item at this stage of parsing. We only concern
+%% ourselves with identifying blocks; a later pass
+%% (asciideck_lists_pass) builds a tree from the result.
+list_item(St, Attrs, ListItem0) ->
+	ListItem1 = trim(ListItem0),
+	Ann = ann(St),
+	%% For labeled lists, we may need to skip empty lines
+	%% until the start of the list item contents, since
+	%% it can begin on a separate line from the label.
+	_ = case {ListItem1, Attrs} of
+		{<<>>, #{type := labeled}} ->
+			read_while(St, fun skip_empty_lines/1, <<>>);
+		_ ->
+			ok
+	end,
+	%% A list item ends at end of file, on an empty line or when a new list starts.
+	%% Any indentation is optional and therefore removed.
+	ListItem = read_while(St, fun fold_list_item/1, ListItem1),
+	{list_item, Attrs, [{paragraph, #{}, ListItem, Ann}], Ann}.
+
+skip_empty_lines(eof) ->
+	done;
+skip_empty_lines(Line) ->
+	case trim(Line) of
+		<<>> -> {more, <<>>};
+		_ -> done
+	end.
+
+fold_list_item(eof) ->
+	done;
+fold_list_item(Line0) ->
+	case trim(Line0) of
+		<<>> -> done;
+		<<"+">> -> done;
+		<<"//", _/bits >> -> done;
+		<<"-", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"*", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"**", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"***", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"****", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"*****", C, _/bits>> when ?IS_WS(C) -> done;
+		<<".", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"..", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"...", C, _/bits>> when ?IS_WS(C) -> done;
+		<<"....", C, _/bits>> when ?IS_WS(C) -> done;
+		<<".....", C, _/bits>> when ?IS_WS(C) -> done;
+		Line ->
+			try find_labeled_list(Line) of
+				{_, _, _} -> done
+			catch _:_ ->
+				{more, Line}
+			end
+	end.
+
+-ifdef(TEST).
+list_item_test() ->
+	[
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"List item.">>, _}], _},
+		{list_item, #{type := bulleted, level := 2},
+			[{paragraph, #{}, <<"List item.">>, _}], _},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"List item.">>, _}], _},
+		{list_item, #{type := numbered, level := 1},
+			[{paragraph, #{}, <<"List item.">>, _}], _},
+		{list_item, #{type := numbered, level := 1},
+			[{paragraph, #{}, <<"List item.">>, _}], _},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"List item.">>, _}], _}
+	] = parse(<<
+		"* List item.\n"
+		"** List item.\n"
+		"* List item.\n"
+		"  . List item.\n"
+		"  . List item.\n"
+		"* List item.\n">>),
+	%% Properly detect a labeled list.
+	[
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"List item.\nMultiline.">>, _}], _},
+		{list_item, #{type := labeled, label := <<"Question">>},
+			[{paragraph, #{}, <<"Answer!">>, _}], _}
+	] = parse(<<
+		"* List item.\n"
+		"Multiline.\n"
+		"Question:: Answer!\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 17.7
+list_item_continuation(St) ->
+	%% Continuations are a single + hard against the left margin.
+	<<$+, Whitespace/bits>> = read_line(St),
+	<<>> = trim(Whitespace),
+	{list_item_continuation, #{}, <<>>, ann(St)}.
+
+-ifdef(TEST).
+list_item_continuation_test() ->
+	[{list_item_continuation, _, _, _}] = parse(<<"+">>),
+	[{list_item_continuation, _, _, _}] = parse(<<"+   ">>),
+	[{list_item_continuation, _, _, _}] = parse(<<"+\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.2
+listing_block(St) ->
+	delimited_block(St, listing_block, $-, #{<<"subs">> => <<"verbatim">>}).
+
+-ifdef(TEST).
+listing_block_test() ->
+	Block = <<
+		"#include <stdio.h>\n"
+		"\n"
+		"int main() {\n"
+		"   printf(\"Hello World!\n\");\n"
+		"   exit(0);\n"
+		"}">>,
+	[{listing_block, _, Block, _}] = parse(<<
+		"--------------------------------------\n",
+		Block/binary, "\n"
+		"--------------------------------------\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.3
+literal_block(St) ->
+	delimited_block(St, literal_block, $., #{<<"subs">> => <<"verbatim">>}).
+
+-ifdef(TEST).
+literal_block_test() ->
+	Block = <<
+		"Consul *necessitatibus* per id,\n"
+		"consetetur, eu pro everti postulant\n"
+		"homero verear ea mea, qui.">>,
+	[{literal_block, _, Block, _}] = parse(<<
+		"...................................\n",
+		Block/binary, "\n"
+		"...................................\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.4
+sidebar_block(St) ->
+	delimited_block(St, sidebar_block, $*).
+
+-ifdef(TEST).
+sidebar_block_test() ->
+	Block = <<
+		"Any AsciiDoc SectionBody element (apart from\n"
+		"SidebarBlocks) can be placed inside a sidebar.">>,
+	[{sidebar_block, _, Block, _}] = parse(<<
+		"************************************************\n",
+		Block/binary, "\n"
+		"************************************************\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.5
+comment_block(St) ->
+	delimited_block(St, comment_block, $/).
+
+-ifdef(TEST).
+comment_block_test() ->
+	Block = <<
+		"CommentBlock contents are not processed by\n"
+		"asciidoc(1).">>,
+	[{comment_block, _, Block, _}] = parse(<<
+		"//////////////////////////////////////////\n",
+		Block/binary, "\n"
+		"//////////////////////////////////////////\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.6
+passthrough_block(St) ->
+	delimited_block(St, passthrough_block, $+, #{<<"subs">> => <<"verbatim">>}).
+
+-ifdef(TEST).
+passthrough_block_test() ->
+	Block = <<
+		"<table border=\"1\"><tr>\n"
+		"  <td>*Cell 1*</td>\n"
+		"  <td>*Cell 2*</td>\n"
+		"</tr></table>">>,
+	[{passthrough_block, _, Block, _}] = parse(<<
+		"++++++++++++++++++++++++++++++++++++++\n",
+		Block/binary, "\n"
+		"++++++++++++++++++++++++++++++++++++++\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.7
+quote_block(St) ->
+	delimited_block(St, quote_block, $_).
+
+-ifdef(TEST).
+quote_block_test() ->
+	Block = <<
+		"As he spoke there was the sharp sound of horses' hoofs and\n"
+		"grating wheels against the curb, followed by a sharp pull at the\n"
+		"bell. Holmes whistled.\n"
+		"\n"
+		"\"A pair, by the sound,\" said he. \"Yes,\" he continued, glancing\n"
+		"out of the window. \"A nice little brougham and a pair of\n"
+		"beauties. A hundred and fifty guineas apiece. There's money in\n"
+		"this case, Watson, if there is nothing else.\"">>,
+	[{quote_block, _, Block, _}] = parse(<<
+		"____________________________________________________________________\n",
+		Block/binary, "\n"
+		"____________________________________________________________________\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.8
+example_block(St) ->
+	delimited_block(St, example_block, $=).
+
+-ifdef(TEST).
+example_block_test() ->
+	Block = <<
+		"Qui in magna commodo, est labitur dolorum an. Est ne magna primis\n"
+		"adolescens.">>,
+	[{example_block, _, Block, _}] = parse(<<
+		"=====================================================================\n",
+		Block/binary, "\n"
+		"=====================================================================\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16
+delimited_block(St, Name, Char) ->
+	delimited_block(St, Name, Char, #{}, <<Char, Char, Char, Char>>).
+
+delimited_block(St, Name, Char, Attrs) ->
+	delimited_block(St, Name, Char, Attrs, <<Char, Char, Char, Char>>).
+
+delimited_block(St, Name, Char, Attrs, Four) ->
+	%% A delimited block begins with a series of four or more repeated characters.
+	<<Four:4/binary, Line0/bits>> = read_line(St),
+	Ann = ann(St),
+	Line = trim(Line0, trailing),
+	repeats(Line, Char),
+	%% Get the content of the block as-is.
+	Block = read_while(St, fun(L) -> fold_delimited_block(L, Four, Char) end, <<>>),
+	%% Skip the trailing delimiter line.
+	_ = read_line(St),
+	{Name, Attrs, Block, Ann}.
+
+%% Accept eof as a closing delimiter.
+fold_delimited_block(eof, _, _) ->
+	done;
+fold_delimited_block(Line0, Four, Char) ->
+	case Line0 of
+		<<Four:4/binary, Line1/bits>> ->
+			try
+				Line = trim(Line1, trailing),
+				repeats(Line, Char),
+				done
+			catch _:_ ->
+				{more, Line0}
+			end;
+		_ ->
+			{more, Line0}
+	end.
+
+-ifdef(TEST).
+delimited_block_test() ->
+	%% Confirm that the block ends at eof.
+	%%
+	%% We see an extra line break because asciideck_line_reader adds
+	%% one at the end of every file to ease processing.
+	[{listing_block, _, <<"Hello!\n\n">>, _}] = parse(<<
+		"----\n"
+		"Hello!\n">>),
+	%% Same without a trailing line break.
+	%%
+	%% We also see an extra line break for the aforementioned reasons.
+	[{listing_block, _, <<"Hello!\n">>, _}] = parse(<<
+		"----\n"
+		"Hello!">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 16.10
+-spec open_block(_) -> no_return().
+open_block(St) -> throw({not_implemented, St}). %% @todo
+
+%% Asciidoc User Guide 23
+%%
+%% We do not parse the table in this pass. Instead we
+%% treat it like any other delimited block.
+table(St) ->
+	delimited_block(St, table, $=, #{}, <<"|===">>).
+
+-ifdef(TEST).
+table_test() ->
+	Block = <<
+		"|1 |2 |A\n"
+		"|3 |4 |B\n"
+		"|5 |6 |C">>,
+	[{table, _, Block, _}] = parse(<<
+		"|=======\n",
+		Block/binary, "\n"
+		"|=======\n">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 28
+-spec attribute_entry(_) -> no_return().
+attribute_entry(St) -> throw({not_implemented, St}). %% @todo
+
+%% Asciidoc User Guide 14, 29
+attribute_list(St) ->
+	AttrList = read_line(St),
+	attribute_list(St, AttrList).
+
+attribute_list(St, AttrList0) ->
+	%% First we remove the enclosing square brackets.
+	<<$[, AttrList1/bits>> = AttrList0,
+	AttrList2 = trim(AttrList1),
+	Len = byte_size(AttrList2) - 1,
+	<<AttrList3:Len/binary, $]>> = AttrList2,
+	AttrList = asciideck_attributes_parser:parse(AttrList3),
+	{attribute_list, AttrList, <<>>, ann(St)}.
+
+-ifdef(TEST).
+attribute_list_test() ->
+	[{attribute_list, #{0 := <<"Hello">>, 1 := <<"Hello">>}, <<>>, _}]
+		= parse(<<"[Hello]">>),
+	[{attribute_list, #{
+		1 := <<"quote">>,
+		2 := <<"Bertrand Russell">>,
+		3 := <<"The World of Mathematics (1956)">>
+	}, <<>>, _}]
+		= parse(<<"[quote, Bertrand Russell, The World of Mathematics (1956)]">>),
+	[{attribute_list, #{
+		1 := <<"22 times">>,
+		<<"backcolor">> := <<"#0e0e0e">>,
+		<<"options">> := <<"noborders,wide">>
+	}, <<>>, _}]
+		= parse(<<"[\"22 times\", backcolor=\"#0e0e0e\", options=\"noborders,wide\"]">>),
+	[{attribute_list, #{
+		1 := <<"A footnote&#44; &#34;with an image&#34; image:smallnew.png[]">>
+	}, <<>>, _}]
+		= parse(<<"[A footnote&#44; &#34;with an image&#34; image:smallnew.png[]]">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 12
+block_title(St) ->
+	%% A block title line begins with a period and is followed by the title text.
+	<<$., Title0/bits>> = read_line(St),
+	Ann = ann(St),
+	Title = trim(Title0),
+	{block_title, #{}, Title, Ann}.
+
+-ifdef(TEST).
+block_title_test() ->
+	%% Valid.
+	[{block_title, _, <<"Notes">>, _}] = parse(<<".Notes">>),
+	[{block_title, _, <<"Notes">>, _}] = parse(<<".Notes   ">>),
+	%% Invalid.
+	?NOT(block_title, parse(<<". Notes">>)),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 15.2
+-spec literal_para(_) -> no_return().
+literal_para(St) -> throw({not_implemented, St}). %% @todo
+
+%% Asciidoc User Guide 15.4
+-spec admonition_para(_) -> no_return().
+admonition_para(St) -> throw({not_implemented, St}). %% @todo
+
+%% Asciidoc User Guide 15.1
+para(St) ->
+	%% Paragraph must be hard against the left margin.
+	<<C, _/bits>> = Para0 = read_line(St),
+	Ann = ann(St),
+	%% @todo Uncomment this line once everything else has been implemented.
+	_ = ?IS_WS(C), % false = ?IS_WS(C),
+	Para1 = trim(Para0),
+	%% Paragraph ends at blank line, end of file or start of delimited block or list.
+	Para = read_while(St, fun fold_para/1, Para1),
+	{paragraph, #{}, Para, Ann}.
+
+fold_para(eof) ->
+	done;
+fold_para(Line) ->
+	case trim(Line) of
+		<<>> -> done;
+		<<"+">> -> done;
+		<<"//", _/bits>> -> done;
+		%% @todo Detect delimited block or list.
+		_ -> {more, Line}
+	end.
+
+-ifdef(TEST).
+para_test() ->
+	LoremIpsum = <<
+		"Lorem ipsum dolor sit amet, consectetur adipiscing elit,\n"
+		"sed do eiusmod tempor incididunt ut labore et dolore\n"
+		"magna aliqua. Ut enim ad minim veniam, quis nostrud\n"
+		"exercitation ullamco laboris nisi ut aliquip ex ea\n"
+		"commodo consequat. Duis aute irure dolor in reprehenderit\n"
+		"in voluptate velit esse cillum dolore eu fugiat nulla\n"
+		"pariatur. Excepteur sint occaecat cupidatat non proident,\n"
+		"sunt in culpa qui officia deserunt mollit anim id est laborum."
+	>>,
+	%% Paragraph followed by end of file.
+	[{paragraph, _, LoremIpsum, _}] = parse(<< LoremIpsum/binary, "\n">>),
+	%% Paragraph followed by end of file with no trailing line break.
+	[{paragraph, _, LoremIpsum, _}] = parse(LoremIpsum),
+	%% Paragraph followed by list continuation.
+	[{paragraph, _, LoremIpsum, _}, {list_item_continuation, _, _, _}]
+		= parse(<<LoremIpsum/binary, "\n+">>),
+	%% Paragraph followed by comment.
+	[{paragraph, _, LoremIpsum, _}, {comment_line, _, <<"@todo Double check.">>, _}]
+		= parse(<<LoremIpsum/binary, "\n// @todo Double check.">>),
+	%% Two paragraphs.
+	[{paragraph, _, LoremIpsum, _}, {paragraph, _, LoremIpsum, _}]
+		= parse(<<
+			LoremIpsum/binary,
+			"\n\n",
+			LoremIpsum/binary >>),
+	ok.
+-endif.
+
+%% Control functions.
+
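+%% Try each parse fun in order, restoring the reader position when
+%% one crashes so that the next candidate starts from the same line.
+%% The first fun that succeeds wins.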
+oneof([], St=#state{reader=ReaderPid}) ->
+	throw({error, St, sys:get_state(ReaderPid)});
+oneof([Parse|Tail], St=#state{reader=ReaderPid}) ->
+	Ln = asciideck_reader:get_position(ReaderPid),
+	try
+		Parse(St)
+	catch _:_ ->
+		asciideck_reader:set_position(ReaderPid, Ln),
+		oneof(Tail, St)
+	end.
+
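+%% Apply Parse repeatedly until it crashes, then restore the reader
+%% position to just after the last successful parse.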
+skip(Parse, St=#state{reader=ReaderPid}) ->
+	Ln = asciideck_reader:get_position(ReaderPid),
+	try
+		_ = Parse(St),
+		skip(Parse, St)
+	catch _:_ ->
+		asciideck_reader:set_position(ReaderPid, Ln),
+		ok
+	end.
+
+%% Line functions.
+
+read_line(#state{reader=ReaderPid}) ->
+	asciideck_reader:read_line(ReaderPid).
+
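+%% Accumulate lines while F returns {more, Line}. The line that made
+%% F return done is not consumed: the reader position is restored so
+%% the caller can parse it as the next block.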
+read_while(St=#state{reader=ReaderPid}, F, Acc) ->
+	Ln = asciideck_reader:get_position(ReaderPid),
+	case F(read_line(St)) of
+		done ->
+			asciideck_reader:set_position(ReaderPid, Ln),
+			Acc;
+		{more, Line} ->
+			case Acc of
+				<<>> -> read_while(St, F, Line);
+				_ -> read_while(St, F, <<Acc/binary, $\n, Line/binary>>)
+			end
+	end.
+
+ann(#state{reader=ReaderPid}) ->
+	#{line => asciideck_reader:get_position(ReaderPid)}.
+
+trim(Line) ->
+	trim(Line, both).
+
+trim(Line, Direction) ->
+	Regex = case Direction of
+		both -> "^[ \\t\\r\\n]+|[ \\t\\r\\n]+$";
+		trailing -> "[ \\t\\r\\n]+$"
+	end,
+	iolist_to_binary(re:replace(Line, Regex, <<>>, [global])).
+
+repeats(<<>>, _) -> ok;
+repeats(<<C, Rest/bits>>, C) -> repeats(Rest, C).
+
+while(F, Bin) ->
+	while(Bin, F, <<>>).
+
+while(<<>>, _, Acc) ->
+	{Acc, <<>>};
+while(<<C, R/bits>>, F, Acc) ->
+	case F(C) of
+		true -> while(R, F, <<Acc/binary, C>>);
+		false -> {Acc, <<C, R/bits>>}
+	end.
diff --git a/src/asciideck_inline_pass.erl b/src/asciideck_inline_pass.erl
new file mode 100644
index 0000000..d190641
--- /dev/null
+++ b/src/asciideck_inline_pass.erl
@@ -0,0 +1,393 @@
+%% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% This pass walks over the tree and parses inline elements.
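+%%
+%% As a rough illustration (the tests below are authoritative),
+%% plain text stays as binaries while inline elements become tuples:
+%%
+%%   run([{paragraph, #{}, <<"See *this* link:x.zip[x].">>, Ann}])
+%%     -> [{paragraph, #{}, [<<"See ">>,
+%%             {strong, #{left => $*, right => $*}, <<"this">>, inline},
+%%             <<" ">>,
+%%             {link, #{target => <<"x.zip">>}, <<"x">>, inline},
+%%             <<".">>], Ann}]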
+-module(asciideck_inline_pass).
+
+-export([run/1]).
+
+-import(asciideck_block_parser, [trim/1, while/2]).
+
+-type inline_ast() :: list(). %% @todo
+-export_type([inline_ast/0]).
+
+run([]) ->
+	[];
+run([Data|Tail]) when is_binary(Data) ->
+	[inline(Data)|run(Tail)];
+%% We do not do any inline formatting for verbatim blocks,
+%% for example listing blocks.
+%%
+%% @todo subs is a list of values.
+run([Item={_, #{<<"subs">> := <<"verbatim">>}, _, _}|Tail]) ->
+	[Item|run(Tail)];
+%% Labeled lists' labels can also have inline formatting.
+run([{Type, Attrs=#{label := Label}, Items, Ann}|Tail]) when is_list(Items) ->
+	[{Type, Attrs#{label => inline(Label)}, run(Items), Ann}|run(Tail)];
+run([{Type, Attrs, Items, Ann}|Tail]) when is_list(Items) ->
+	[{Type, Attrs, run(Items), Ann}|run(Tail)];
+run([{Type, Attrs, Data, Ann}|Tail]) ->
+	[{Type, Attrs, inline(Data), Ann}|run(Tail)].
+
+%% Inline content that consists of a single text element
+%% with no formatting is reduced to a simple binary.
+inline(<<>>) ->
+	<<>>;
+inline(Data) ->
+	case inline(Data, <<>>, []) of
+		[] -> <<>>;
+		[Text] when is_binary(Text) -> Text;
+		AST -> AST
+	end.
+
+-spec inline(binary(), binary(), inline_ast()) -> inline_ast().
+inline(<<>>, <<>>, Acc) ->
+	lists:reverse(Acc);
+inline(<<>>, BinAcc, Acc) ->
+	lists:reverse([BinAcc|Acc]);
+inline(Data, BinAcc, Acc) ->
+	oneof(Data, BinAcc, Acc, [
+		%% Links.
+		fun xref/2,
+		fun link/2,
+		fun http_link/2,
+		fun https_link/2,
+		fun mailto_link/2,
+		%% Quoted text.
+		fun emphasized_single_quote/2,
+		fun emphasized_underline/2,
+		fun strong/2,
+		%% Passthrough macros.
+		fun inline_literal_passthrough/2,
+		%% Line breaks.
+		fun line_break/2
+	]).
+
+%% The inline pass replaces \r\n and \n with a simple space
+%% when they occur within normal text.
+oneof(<<$\r, $\n, Rest/bits>>, BinAcc, Acc, []) ->
+	inline(Rest, <<BinAcc/binary, $\s>>, Acc);
+oneof(<<$\n, Rest/bits>>, BinAcc, Acc, []) ->
+	inline(Rest, <<BinAcc/binary, $\s>>, Acc);
+oneof(<<C, Rest/bits>>, BinAcc, Acc, []) ->
+	inline(Rest, <<BinAcc/binary, C>>, Acc);
+oneof(Data, BinAcc, Acc, [Parse|Tail]) ->
+	Prev = case BinAcc of
+		<<>> -> undefined;
+		_ -> binary:last(BinAcc)
+	end,
+	try Parse(Data, Prev) of
+		{ok, Inline, Rest} when BinAcc =:= <<>> ->
+			inline(Rest, BinAcc, [Inline|Acc]);
+		{ok, Inline, Rest} ->
+			inline(Rest, <<>>, [Inline, BinAcc|Acc]);
+		{skip, Text, Rest} ->
+			oneof(Rest, <<BinAcc/binary, Text/binary>>, Acc, Tail)
+	catch _:_ ->
+		oneof(Data, BinAcc, Acc, Tail)
+	end.
+
+-ifdef(TEST).
+text_test() ->
+	<<>> = inline(<<>>),
+	<<"Hello, Robert">> = inline(<<"Hello, Robert">>),
+	ok.
+-endif.
+
+-define(IS_BOUNDARY(C), C =:= undefined; C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n; C =:= $().
+
+%% Asciidoc User Guide 21.2.1
+%%
+%% We currently do not implement the <<...>> form.
+xref(<<"xref:", IDAndCaption/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
+	%% ID must not contain whitespace characters.
+	{ID, <<"[", Caption0/bits>>} = while(fun(C) ->
+		(C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
+	end, IDAndCaption),
+	%% It is followed by a caption.
+	{Caption1, <<"]", Rest/bits>>} = while(fun(C) ->
+		C =/= $]
+	end, Caption0),
+	Caption = trim(Caption1),
+	{ok, {xref, #{
+		id => ID
+	}, Caption, inline}, Rest}.
+
+-ifdef(TEST).
+xref_test() ->
+	[{xref, #{
+		id := <<"tiger_image">>
+	}, <<"face of a tiger">>, _}] = inline(<<"xref:tiger_image[face of a tiger]">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.1.3
+link(<<"link:", TargetAndCaption/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
+	%% Target must not contain whitespace characters.
+	{Target, <<"[", Caption0/bits>>} = while(fun(C) ->
+		(C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
+			andalso (C =/= $\r) andalso (C =/= $\n)
+	end, TargetAndCaption),
+	%% It is followed by a caption.
+	{Caption1, <<"]", Rest/bits>>} = while(fun(C) ->
+		C =/= $]
+	end, Caption0),
+	Caption = trim(Caption1),
+	{ok, {link, #{
+		target => Target
+	}, Caption, inline}, Rest}.
+
+-ifdef(TEST).
+link_test() ->
+	[{link, #{
+		target := <<"downloads/foo.zip">>
+	}, <<"download foo.zip">>, _}] = inline(<<"link:downloads/foo.zip[download foo.zip]">>),
+	[{link, #{
+		target := <<"chapter1.asciidoc#fragment">>
+	}, <<"Chapter 1.">>, _}] = inline(<<"link:chapter1.asciidoc#fragment[Chapter 1.]">>),
+	[
+		{link, #{target := <<"first.zip">>}, <<"first">>, _},
+		<<", ">>,
+		{link, #{target := <<"second.zip">>}, <<"second">>, _}
+	] = inline(<<"link:first.zip[first],\nlink:second.zip[second]">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.1.1
+http_link(<<"http:", Rest/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
+	direct_link(Rest, <<"http:">>).
+
+direct_link(Data, Prefix) ->
+	%% Target must not contain whitespace characters.
+	{Target0, Rest0} = while(fun(C) ->
+		(C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
+			andalso (C =/= $\r) andalso (C =/= $\n)
+			andalso (C =/= $,)
+	end, Data),
+	%% The link must be more than just the prefix.
+	false = Target0 =:= <<>>,
+	Target = <<Prefix/binary, Target0/binary>>,
+	%% It is optionally followed by a caption. Otherwise
+	%% the link itself is the caption.
+	case Rest0 of
+		<<"[", Caption0/bits>> ->
+			{Caption1, <<"]", Rest/bits>>} = while(fun(C) ->
+				C =/= $]
+			end, Caption0),
+			Caption = trim(Caption1),
+			case Caption of
+				<<>> ->
+					{ok, {link, #{
+						target => Target
+					}, Target, inline}, Rest};
+				_ ->
+					{ok, {link, #{
+						target => Target
+					}, Caption, inline}, Rest}
+			end;
+		_ ->
+			{ok, {link, #{
+				target => Target
+			}, Target, inline}, Rest0}
+	end.
+
+-ifdef(TEST).
+http_link_test() ->
+	<<"Incomplete http: link">> = inline(<<"Incomplete http: link">>),
+	[
+		{link, #{
+			target := <<"http://example.org:8080">>
+		}, <<"http://example.org:8080">>, _},
+		<<", continued">>
+	] = inline(<<"http://example.org:8080, continued">>),
+	[
+		<<"If you have ">>,
+		{link, #{
+			target := <<"http://example.org/hello#fragment">>
+		}, <<"http://example.org/hello#fragment">>, _},
+		<<" then:">>
+	] = inline(<<"If you have http://example.org/hello#fragment then:">>),
+	[
+		<<"If you have ">>,
+		{link, #{
+			target := <<"http://example.org/hello#fragment">>
+		}, <<"http://example.org/hello#fragment">>, _},
+		<<" then:">>
+	] = inline(<<"If you have http://example.org/hello#fragment\nthen:">>),
+	[
+		<<"Oh, ">>,
+		{link, #{
+			target := <<"http://example.org/hello#fragment">>
+		}, <<"hello there">>, _},
+		<<", young lad.">>
+	] = inline(<<"Oh, http://example.org/hello#fragment[hello there], young lad.">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.1.1
+https_link(<<"https:", Rest/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
+	direct_link(Rest, <<"https:">>).
+
+-ifdef(TEST).
+https_link_test() ->
+	<<"Incomplete https: link">> = inline(<<"Incomplete https: link">>),
+	[
+		{link, #{
+			target := <<"https://example.org:8080">>
+		}, <<"https://example.org:8080">>, _},
+		<<", continued">>
+	] = inline(<<"https://example.org:8080, continued">>),
+	[
+		<<"If you have ">>,
+		{link, #{
+			target := <<"https://example.org/hello#fragment">>
+		}, <<"https://example.org/hello#fragment">>, _},
+		<<" then:">>
+	] = inline(<<"If you have https://example.org/hello#fragment then:">>),
+	[
+		<<"If you have ">>,
+		{link, #{
+			target := <<"https://example.org/hello#fragment">>
+		}, <<"https://example.org/hello#fragment">>, _},
+		<<" then:">>
+	] = inline(<<"If you have https://example.org/hello#fragment\nthen:">>),
+	[
+		<<"Oh, ">>,
+		{link, #{
+			target := <<"https://example.org/hello#fragment">>
+		}, <<"hello there">>, _},
+		<<", young lad.">>
+	] = inline(<<"Oh, https://example.org/hello#fragment[hello there], young lad.">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.1.1
+mailto_link(<<"mailto:", Rest0/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
+	{ok, {link, Attrs, Caption0, Ann}, Rest} = direct_link(Rest0, <<"mailto:">>),
+	Caption = case Caption0 of
+		<<"mailto:", Caption1/bits>> -> Caption1;
+		_ -> Caption0
+	end,
+	{ok, {link, Attrs, Caption, Ann}, Rest}.
+
+-ifdef(TEST).
+mailto_link_test() ->
+	[
+		{link, #{
+			target := <<"mailto:joe.bloggs@foobar.com">>
+		}, <<"email Joe Bloggs">>, _}
+	] = inline(<<"mailto:joe.bloggs@foobar.com[email Joe Bloggs]">>),
+	[
+		{link, #{
+			target := <<"mailto:srackham@gmail.com">>
+		}, <<"srackham@gmail.com">>, _}
+	] = inline(<<"mailto:srackham@gmail.com[]">>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 10.1
+%% @todo <<"\\**"
+%% @todo <<"\\*"
+%% @todo <<"**"
+emphasized_single_quote(Data, Prev) ->
+	quoted_text(Data, Prev, emphasized, $', $').
+emphasized_underline(Data, Prev) ->
+	quoted_text(Data, Prev, emphasized, $_, $_).
+strong(Data, Prev) ->
+	quoted_text(Data, Prev, strong, $*, $*).
+
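+%% Quoted text must start at a word boundary and runs until the
+%% matching right character; its content is itself parsed for
+%% inline elements.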
+quoted_text(<<Left, Rest0/bits>>, Prev, Type, Left, Right) when ?IS_BOUNDARY(Prev) ->
+	{Content, <<Right, Rest/bits>>} = while(fun(C) -> C =/= Right end, Rest0),
+	{ok, {Type, #{
+		left => Left,
+		right => Right
+	}, inline(Content), inline}, Rest}.
+
+-ifdef(TEST).
+emphasized_test() ->
+	[
+		<<"Word phrases ">>,
+		{emphasized, #{left := $', right := $'},
+			<<"enclosed in single quote characters">>, _},
+		<<" (acute accents) or ">>,
+		{emphasized, #{left := $_, right := $_},
+			<<"underline characters">>, _},
+		<<" are emphasized.">>
+	] = inline(<<
+		"Word phrases 'enclosed in single quote characters' (acute accents) "
+		"or _underline characters_ are emphasized."
+	>>),
+	ok.
+
+strong_test() ->
+	[
+		<<"Word phrases ">>,
+		{strong, #{left := $*, right := $*},
+			<<"enclosed in asterisk characters">>, _},
+		<<" are rendered in a strong font (usually bold).">>
+	] = inline(<<
+		"Word phrases *enclosed in asterisk characters* "
+		"are rendered in a strong font (usually bold)."
+	>>),
+	ok.
+-endif.
+
+%% Asciidoc User Guide 21.4
+inline_literal_passthrough(<<"`", Rest0/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
+	{Content, <<"`", Rest/bits>>} = while(fun(C) -> C =/= $` end, Rest0),
+	{ok, {inline_literal_passthrough, #{}, Content, inline}, Rest}.
+
+-ifdef(TEST).
+inline_literal_passthrough_test() ->
+	[
+		<<"Word phrases ">>,
+		{inline_literal_passthrough, #{}, <<"enclosed in backtick characters">>, _},
+		<<" (grave accents)...">>
+	] = inline(<<"Word phrases `enclosed in backtick characters` (grave accents)...">>),
+	ok.
+-endif.
+
+-define(IS_WS(C), (C =:= $\s) or (C =:= $\t)).
+
+%% Asciidoc User Guide 10.3
+line_break(<<WS, "+", Rest0/bits>>, _) when ?IS_WS(WS) ->
+	{Eol, Rest} = case while(fun(C) -> (C =/= $\r) andalso (C =/= $\n) end, Rest0) of
+		{Eol0, <<"\r\n", Rest1/bits>>} -> {Eol0, Rest1};
+		{Eol0, <<"\n", Rest1/bits>>} -> {Eol0, Rest1};
+		Tuple -> Tuple
+	end,
+	<<>> = trim(Eol),
+	{ok, {line_break, #{}, <<>>, inline}, Rest}.
+
+-ifdef(TEST).
+line_break_test() ->
+	[
+		<<"Plus at the end of the line">>,
+		{line_break, #{}, <<>>, inline},
+		<<"should work">>
+	] = inline(<<"Plus at the end of the line +\nshould work">>),
+	[
+		<<"Plus at the end of the line   ">>,
+		{line_break, #{}, <<>>, inline},
+		<<"should work">>
+	] = inline(<<"Plus at the end of the line    +\nshould work">>),
+	[
+		<<"Plus at the end of the line">>,
+		{line_break, #{}, <<>>, inline},
+		<<"should work">>
+	] = inline(<<"Plus at the end of the line +\r\nshould work">>),
+	<<"Plus in the middle + should not.">>
+		= inline(<<"Plus in the middle + should not.">>),
+	ok.
+-endif.
diff --git a/src/asciideck_line_reader.erl b/src/asciideck_line_reader.erl
new file mode 100644
index 0000000..c469692
--- /dev/null
+++ b/src/asciideck_line_reader.erl
@@ -0,0 +1,77 @@
+%% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(asciideck_line_reader).
+-behaviour(gen_server).
+
+%% The API is defined in asciideck_reader.
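+%% (read_line/1, get_position/1 and set_position/2 are the calls
+%% the block parser relies on; positions are 1-based line numbers.)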
+-export([start_link/1]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+	lines :: [binary()],
+	length :: non_neg_integer(),
+	pos = 1 :: non_neg_integer()
+}).
+
+%% API.
+
+-spec start_link(binary()) -> {ok, pid()}.
+start_link(Data) ->
+	gen_server:start_link(?MODULE, [Data], []).
+
+%% gen_server.
+
+init([Data]) ->
+	Lines0 = binary:split(Data, <<"\n">>, [global]),
+	%% We add an empty line at the end to simplify parsing.
+	%% This has the inconvenience that, when parsing blocks,
+	%% this empty line will be included in the result if
+	%% the block is not properly closed.
+	Lines = lists:append(Lines0, [<<>>]),
+	{ok, #state{lines=Lines, length=length(Lines)}}.
+
+handle_call(read_line, _From, State=#state{length=Length, pos=Pos})
+		when Pos > Length ->
+	{reply, eof, State};
+%% @todo I know this isn't the most efficient. We could keep
+%% the lines read separately and roll back when set_position
+%% wants us to. But it works fine for now.
+handle_call(read_line, _From, State=#state{lines=Lines, pos=Pos}) ->
+	{reply, lists:nth(Pos, Lines), State#state{pos=Pos + 1}};
+handle_call(get_position, _From, State=#state{pos=Pos}) ->
+	{reply, Pos, State};
+handle_call(_Request, _From, State) ->
+	{reply, ignored, State}.
+
+handle_cast({set_position, Pos}, State) ->
+	{noreply, State#state{pos=Pos}};
+handle_cast(_Msg, State) ->
+	{noreply, State}.
+
+handle_info(_Info, State) ->
+	{noreply, State}.
+
+terminate(_Reason, _State) ->
+	ok.
+
+code_change(_OldVsn, State, _Extra) ->
+	{ok, State}.
diff --git a/src/asciideck_lists_pass.erl b/src/asciideck_lists_pass.erl
new file mode 100644
index 0000000..efb8e87
--- /dev/null
+++ b/src/asciideck_lists_pass.erl
@@ -0,0 +1,155 @@
+%% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The purpose of this pass is to aggregate list_item
+%% blocks into proper lists. This involves building a
+%% tree based on the rules for list items.
+%%
+%% The general rules are:
+%%
+%% - Any list item of different type/level than the
+%%   current list item is a child of the latter.
+%%
+%% - The level ultimately does not matter when building
+%%   the tree: * then **** then ** is accepted just fine.
+%%
+%% - Lists of the same type as a parent are not allowed.
+%%   On the other hand, reusing a type in different parts
+%%   of the tree is not a problem.
+%%
+%% - Any literal paragraph following a list item is a
+%%   child of that list item. @todo
+%%
+%% - Any other block can be included as a child by using
+%%   list continuations.
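+%%
+%% As a rough sketch (the tests below are authoritative), two
+%% consecutive list items of the same type and level:
+%%
+%%   [{list_item, #{type => bulleted, level => 1}, Blocks1, Ann1},
+%%    {list_item, #{type => bulleted, level => 1}, Blocks2, Ann2}]
+%%
+%% are aggregated into a single list block whose attributes and
+%% annotation come from the first item:
+%%
+%%   [{list, #{type => bulleted, level => 1},
+%%       [{list_item, ...}, {list_item, ...}], Ann1}]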
+-module(asciideck_lists_pass).
+
+-export([run/1]).
+
+run(AST) ->
+	list(AST, []).
+
+list([], Acc) ->
+	lists:reverse(Acc);
+%% Any trailing block continuation is ignored.
+list([{list_item_continuation, _, _, _}], Acc) ->
+	lists:reverse(Acc);
+%% The first list item contains the attributes for the list.
+list([LI={list_item, Attrs, _, Ann}|Tail0], Acc) ->
+	{Items, Tail} = item(Tail0, LI, [type(Attrs)], []),
+	list(Tail, [{list, Attrs, Items, Ann}|Acc]);
+list([Block|Tail], Acc) ->
+	list(Tail, [Block|Acc]).
+
+%% Bulleted/numbered list item of the same type.
+item([NextLI={list_item, #{type := T, level := L}, _, _}|Tail],
+		CurrentLI={list_item, #{type := T, level := L}, _, _}, Parents, Acc) ->
+	item(Tail, NextLI, Parents, [reverse_children(CurrentLI)|Acc]);
+%% Labeled list item of the same type.
+item([NextLI={list_item, #{type := T, separator := S}, _, _}|Tail],
+		CurrentLI={list_item, #{type := T, separator := S}, _, _}, Parents, Acc) ->
+	item(Tail, NextLI, Parents, [reverse_children(CurrentLI)|Acc]);
+%% Other list items are either parent or children lists.
+item(FullTail=[NextLI={list_item, Attrs, _, Ann}|Tail0], CurrentLI, Parents, Acc) ->
+	case lists:member(type(Attrs), Parents) of
+		%% We have a parent list item. This is the end of this child list.
+		true ->
+			{lists:reverse([reverse_children(CurrentLI)|Acc]), FullTail};
+		%% We have a child list item. This is the beginning of a new list.
+		false ->
+			{Items, Tail} = item(Tail0, NextLI, [type(Attrs)|Parents], []),
+			item(Tail, add_child(CurrentLI, {list, Attrs, Items, Ann}), Parents, Acc)
+	end;
+%% Ignore multiple contiguous list continuations.
+item([LIC={list_item_continuation, _, _, _},
+		{list_item_continuation, _, _, _}|Tail], CurrentLI, Parents, Acc) ->
+	item([LIC|Tail], CurrentLI, Parents, Acc);
+%% Blocks that immediately follow list_item_continuation are children,
+%% unless they are list_item themselves, in which case it depends on the
+%% type and level of the list item.
+item([{list_item_continuation, _, _, _}, LI={list_item, _, _, _}|Tail], CurrentLI, Parents, Acc) ->
+	item([LI|Tail], CurrentLI, Parents, Acc);
+item([{list_item_continuation, _, _, _}, Block|Tail], CurrentLI, Parents, Acc) ->
+	item(Tail, add_child(CurrentLI, Block), Parents, Acc);
+%% Anything else is the end of the list.
+item(Tail, CurrentLI, _, Acc) ->
+	{lists:reverse([reverse_children(CurrentLI)|Acc]), Tail}.
+
+type(Attrs) ->
+	maps:with([type, level, separator], Attrs).
+
+add_child({list_item, Attrs, Children, Ann}, Child) ->
+	{list_item, Attrs, [Child|Children], Ann}.
+
+reverse_children({list_item, Attrs, Children, Ann}) ->
+	{list_item, Attrs, lists:reverse(Children), Ann}.
+
+-ifdef(TEST).
+list_test() ->
+	[{list, #{type := bulleted, level := 1}, [
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"Hello!">>, _}], #{line := 1}},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"World!">>, _}], #{line := 2}}
+	], #{line := 1}}] = run([
+		{list_item, #{type => bulleted, level => 1},
+			[{paragraph, #{}, <<"Hello!">>, #{line => 1}}], #{line => 1}},
+		{list_item, #{type => bulleted, level => 1},
+			[{paragraph, #{}, <<"World!">>, #{line => 2}}], #{line => 2}}
+	]),
+	ok.
+
+list_of_list_test() ->
+	[{list, #{type := bulleted, level := 1}, [
+		{list_item, #{type := bulleted, level := 1}, [
+			{paragraph, #{}, <<"Hello!">>, _},
+			{list, #{type := bulleted, level := 2}, [
+				{list_item, #{type := bulleted, level := 2},
+					[{paragraph, #{}, <<"Cat!">>, _}], #{line := 2}},
+				{list_item, #{type := bulleted, level := 2},
+					[{paragraph, #{}, <<"Dog!">>, _}], #{line := 3}}
+			], #{line := 2}}
+		], #{line := 1}},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"World!">>, _}], #{line := 4}}
+	], #{line := 1}}] = run([
+		{list_item, #{type => bulleted, level => 1},
+			[{paragraph, #{}, <<"Hello!">>, #{line => 1}}], #{line => 1}},
+		{list_item, #{type => bulleted, level => 2},
+			[{paragraph, #{}, <<"Cat!">>, #{line => 2}}], #{line => 2}},
+		{list_item, #{type => bulleted, level => 2},
+			[{paragraph, #{}, <<"Dog!">>, #{line => 3}}], #{line => 3}},
+		{list_item, #{type => bulleted, level => 1},
+			[{paragraph, #{}, <<"World!">>, #{line => 4}}], #{line => 4}}
+	]),
+	ok.
+
+list_continuation_test() ->
+	[{list, #{type := bulleted, level := 1}, [
+		{list_item, #{type := bulleted, level := 1}, [
+			{paragraph, #{}, <<"Hello!">>, _},
+			{listing_block, #{}, <<"hello() -> world.">>, #{line := 3}}
+		], #{line := 1}},
+		{list_item, #{type := bulleted, level := 1},
+			[{paragraph, #{}, <<"World!">>, _}], #{line := 6}}
+	], #{line := 1}}] = run([
+		{list_item, #{type => bulleted, level => 1},
+			[{paragraph, #{}, <<"Hello!">>, #{line => 1}}], #{line => 1}},
+		{list_item_continuation, #{}, <<>>, #{line => 2}},
+		{listing_block, #{}, <<"hello() -> world.">>, #{line => 3}},
+		{list_item, #{type => bulleted, level => 1},
+			[{paragraph, #{}, <<"World!">>, #{line => 6}}], #{line => 6}}
+	]),
+	ok.
+-endif.
diff --git a/src/asciideck_parser.erl b/src/asciideck_parser.erl
deleted file mode 100644
index 8016395..0000000
--- a/src/asciideck_parser.erl
+++ /dev/null
@@ -1,388 +0,0 @@
-%% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--module(asciideck_parser).
-
--export([parse/2]).
-
-%% @todo
-%% All nodes in the AST are of type {Type, Attrs, Text | Nodes, Ann}
-%% except for text formatting nodes at the moment. Text formatting
-%% nodes will be converted to this form in a future change.
-
-%% Parsing occurs in a few passes:
-%%
-%% * p1: Line-based parsing of the raw Asciidoc document
-%% * p2: Deal with more compp1 structures like lists and tables
-
-parse(Data, St) ->
-	Lines0 = binary:split(Data, <<"\n">>, [global]),
-	%% Ensure there's an empty line at the end, to simplify parsing.
-	Lines1 = lists:append(Lines0, [<<>>]),
-	LineNumbers = lists:seq(1, length(Lines1)),
-	Lines = lists:zip(LineNumbers, Lines1),
-	%% @todo Document header, if any. Recognized by the author info/doc attributes?
-	%% Alternatively, don't recognize it, and only use attribute entries for the same info.
-	p2(p1(Lines, [], St), []).
-
-%% First pass.
-
-%% @todo When a block element is encountered asciidoc(1) determines the type of block by checking in the following order (first to last): (section) Titles, BlockMacros, Lists, DelimitedBlocks, Tables, AttributeEntrys, AttributeLists, BlockTitles, Paragraphs.
-
-%% @todo And this function is parsing, not p1ing.
-p1([], AST, _St) ->
-	lists:reverse(AST);
-%% Extra empty lines.
-p1([{_, <<>>}|Tail], AST, St) ->
-	p1(Tail, AST, St);
-%% Comments.
-p1([{LN, <<"//", Comment/bits >>}|Tail], AST, St) ->
-	p1(Tail, [comment(trim_ws(Comment), ann(LN, St))|AST], St);
-%% Section titles.
-p1([{LN, <<"= ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
-	p1_title_short(Tail, AST, St, LN, Title, 0);
-p1([{LN, <<"== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
-	p1_title_short(Tail, AST, St, LN, Title, 1);
-p1([{LN, <<"=== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
-	p1_title_short(Tail, AST, St, LN, Title, 2);
-p1([{LN, <<"==== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
-	p1_title_short(Tail, AST, St, LN, Title, 3);
-p1([{LN, <<"===== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
-	p1_title_short(Tail, AST, St, LN, Title, 4);
-%% Block titles.
-p1([{_LN, <<".", Title/bits >>}|Tail], AST, St) ->
-	p1(Tail, [{block_title, Title}|AST], St);
-%% Attribute lists.
-p1([{_LN, <<"[", Attrs/bits >>}|Tail], AST, St) ->
-	p1(Tail, [{attribute_list, p1_attr_list(Attrs)}|AST], St);
-%% Listing blocks.
-p1([{LN, <<"----", _/bits >>}|Tail], AST, St) ->
-	p1_listing(Tail, AST, St, LN, []);
-%% Lists.
-p1([{LN, <<"* ", Text/bits >>}|Tail], AST, St) ->
-	p1_li(Tail, AST, St, uli1, {LN, Text});
-p1([{LN, <<"** ", Text/bits >>}|Tail], AST, St) ->
-	p1_li(Tail, AST, St, uli2, {LN, Text});
-p1([{LN, <<"*** ", Text/bits >>}|Tail], AST, St) ->
-	p1_li(Tail, AST, St, uli3, {LN, Text});
-p1([{LN, <<"**** ", Text/bits >>}|Tail], AST, St) ->
-	p1_li(Tail, AST, St, uli4, {LN, Text});
-p1([{LN, <<"***** ", Text/bits >>}|Tail], AST, St) ->
-	p1_li(Tail, AST, St, uli5, {LN, Text});
-%% Tables.
-p1([{LN, <<"|===", _/bits >>}|Tail], AST, St) ->
-	p1_table(Tail, AST, St, LN);
-p1([{LN, <<"|", Text/bits >>}|Tail], AST, St) ->
-	p1_cell(Tail, AST, St, LN, Text);
-%% Prefix-based or paragraph.
-p1(Lines, AST, St) ->
-	p1_text(Lines, AST, St).
-
-p1_title_short(Tail, AST, St, LN, Text0, Level) ->
-	%% Remove the trailer, if any.
-	Text1 = trim_ws(Text0),
-	Trailer = case Level of
-		0 -> <<" =">>;
-		1 -> <<" ==">>;
-		2 -> <<" ===">>;
-		3 -> <<" ====">>;
-		4 -> <<" =====">>
-	end,
-	TrailerSize = byte_size(Trailer),
-	Size = byte_size(Text1) - TrailerSize,
-	Text3 = case Text1 of
-		<< Text2:Size/binary, Trailer:TrailerSize/binary >> -> Text2;
-		_ -> Text1
-	end,
-	Text = trim_ws(Text3),
-	p1(Tail, [title(Text, #{level => Level}, ann(LN, St))|AST], St).
-
-p1_attr_list(AttrList0) ->
-	[AttrList|_] = binary:split(AttrList0, <<"]">>),
-	binary:split(AttrList, <<",">>).
-
-%% @todo Parse attributes properly.
-p1_table(Tail, [{attribute_list, Attrs}, {block_title, Title}|AST], St, LN) ->
-	p1(Tail, [{begin_table, #{title => Title, todo => Attrs}, ann(LN, St)}|AST], St);
-p1_table(Tail, [{attribute_list, Attrs}|AST], St, LN) ->
-	p1(Tail, [{begin_table, #{todo => Attrs}, ann(LN, St)}|AST], St);
-p1_table(Tail, AST=[nl, {cell, _, _, _}|_], St, _) ->
-	p1(Tail, [end_table|AST], St);
-p1_table(Tail, AST=[{cell, _, _, _}|_], St, _) ->
-	p1(Tail, [end_table|AST], St);
-p1_table(Tail, AST, St, LN) ->
-	p1(Tail, [{begin_table, #{}, ann(LN, St)}|AST], St).
-
-%% @todo Multiline cells.
-%% @todo Styled cells.
-%% @todo Strip whitespace at the beginning of the cell if on the same line.
-p1_cell(Tail=[{_, NextLine}|_], AST0, St, LN, Text) ->
-	case p1_cell_split(Text, <<>>) of
-		[Cell] ->
-			AST1 = [nl, cell(p1([{LN, trim_ws(Cell)}, {LN, <<>>}], [], St), ann(LN, St))|AST0],
-			AST = case NextLine of
-				<<>> -> [nl|AST1];
-				_ -> AST1
-			end,
-			p1(Tail, AST, St);
-		[Cell, Rest] ->
-			p1_cell(Tail, [cell(p1([{LN, trim_ws(Cell)}, {LN, <<>>}], [], St), ann(LN, St))|AST0], St, LN, Rest)
-	end.
-
-p1_cell_split(<<>>, Acc) ->
-	[Acc];
-p1_cell_split(<< $\\, $|, Rest/bits >>, Acc) ->
-	p1_cell_split(Rest, << Acc/binary, $| >>);
-p1_cell_split(<< $|, Rest/bits >>, Acc) ->
-	[Acc, Rest];
-p1_cell_split(<< C, Rest/bits >>, Acc) ->
-	p1_cell_split(Rest, << Acc/binary, C >>).
-
-p1_listing([{_, <<"----", _/bits >>}, {_, <<>>}|Tail], AST0, St, LN, [_|Acc]) ->
-	Text = iolist_to_binary(lists:reverse(Acc)),
-	case AST0 of
-		[{attribute_list, [<<"source">>, Lang]}, {block_title, Title}|AST] ->
-			p1(Tail, [listing(Text, #{title => Title, language => Lang}, ann(LN, St))|AST], St);
-		[{block_title, Title}, {attribute_list, [<<"source">>, Lang]}|AST] ->
-			p1(Tail, [listing(Text, #{title => Title, language => Lang}, ann(LN, St))|AST], St);
-		[{attribute_list, [<<"source">>, Lang]}|AST] ->
-			p1(Tail, [listing(Text, #{language => Lang}, ann(LN, St))|AST], St);
-		[{block_title, Title}|AST] ->
-			p1(Tail, [listing(Text, #{title => Title}, ann(LN, St))|AST], St);
-		AST ->
-			p1(Tail, [listing(Text, #{}, ann(LN, St))|AST], St)
-	end;
-p1_listing([{_, Line}|Tail], AST, St, LN, Acc) ->
-	p1_listing(Tail, AST, St, LN, [<<"\n">>, Line|Acc]).
-
-p1_li(Lines, AST, St, Type, FirstLine = {LN, _}) ->
-	{Tail, Glob} = p1_li_glob(Lines, []),
-	p1(Tail, [{Type, p1([FirstLine|Glob], [], St), ann(LN, St)}|AST], St).
-
-%% Glob everything until next list or empty line.
-p1_li_glob(Tail = [{LN, << "*", _/bits >>}|_], Acc) ->
-	{Tail, lists:reverse([{LN, <<>>}|Acc])};
-p1_li_glob(Tail = [{LN, <<>>}|_], Acc) ->
-	{Tail, lists:reverse([{LN, <<>>}|Acc])};
-p1_li_glob([{LN, <<"+">>}|Tail], Acc) ->
-	p1_li_glob(Tail, [{LN, <<>>}|Acc]);
-p1_li_glob([Line|Tail], Acc) ->
-	p1_li_glob(Tail, [Line|Acc]).
-
-%% Skip initial empty lines and then glob like normal lists.
-p1_ll_glob(Lines=[{_, Line}|Tail]) ->
-	case trim_ws(Line) of
-		<<>> -> p1_ll_glob(Tail);
-		_ -> p1_ll_glob(Lines, [])
-	end.
-
-%% Glob everything until empty line.
-%% @todo Detect next list.
-p1_ll_glob(Tail = [{LN, <<>>}|_], Acc) ->
-	{Tail, lists:reverse([{LN, <<>>}|Acc])};
-p1_ll_glob([{LN, <<"+">>}|Tail], Acc) ->
-	p1_ll_glob(Tail, [{LN, <<>>}|Acc]);
-p1_ll_glob([{LN, <<" ", Line/bits>>}|Tail], Acc) ->
-	p1_ll_glob([{LN, trim_ws(Line)}|Tail], Acc);
-p1_ll_glob(Lines=[Line={LN, Text}|Tail], Acc) ->
-	case binary:split(<< Text/binary, $\s >>, <<":: ">>) of
-		[_, _] ->
-			{Lines, lists:reverse([{LN, <<>>}|Acc])};
-		_ ->
-			p1_ll_glob(Tail, [Line|Acc])
-	end.
-
-p1_text(Lines=[{LN, Line}|Tail], AST, St) ->
-	case binary:split(<< Line/binary, $\s >>, <<":: ">>) of
-		%% Nothing else on the line.
-		[Label, <<>>] ->
-			{Tail1, Glob} = p1_ll_glob(Tail),
-			p1(Tail1, [{label, Label, p1(Glob, [], St), ann(LN, St)}|AST], St);
-		%% Text on the same line.
-		[Label, Text0] ->
-			Size = byte_size(Text0) - 1,
-			<< Text:Size/binary, _ >> = Text0,
-			{Tail1, Glob} = p1_ll_glob([{LN, Text}|Tail]),
-			%% Text on the same line is necessarily a paragraph I believe.
-			p1_p(Tail1, [{label, Label, p1(Glob, [], St), ann(LN, St)}|AST], St, LN, []);
-		%% Not a labeled list.
-		_ ->
-			p1_maybe_p(Lines, AST, St)
-	end.
-
-%% @todo Literal paragraphs.
-p1_maybe_p([{_LN, << " ", Line/bits >>}|Tail], AST, St) ->
-	<<>> = trim_ws(Line),
-	p1(Tail, AST, St);
-p1_maybe_p(Lines=[{LN, _}|_], AST, St) ->
-	p1_p(Lines, AST, St, LN, []).
-
-p1_p([{_, <<>>}|Tail], AST0, St, LN, [_|Acc]) ->
-	Text = format(iolist_to_binary(lists:reverse(Acc)), LN, St),
-	case AST0 of
-		[{block_title, Title}|AST] ->
-			p1(Tail, [paragraph(Text, #{title => Title}, ann(LN, St))|AST], St);
-		AST ->
-			p1(Tail, [paragraph(Text, #{}, ann(LN, St))|AST], St)
-	end;
-%% Ignore comments inside paragraphs.
-%% @todo Keep in the AST.
-p1_p([{_, <<"//", _/bits>>}|Tail], AST, St, LN, Acc) ->
-	p1_p(Tail, AST, St, LN, Acc);
-p1_p([{_, Line}|Tail], AST, St, LN, Acc) ->
-	%% @todo We need to keep line/col information. To do this
-	%% we probably should keep an index of character number -> line/col
-	%% that we pass to the format function. Otherwise the line/col
-	%% information on text will point to the paragraph start.
-	p1_p(Tail, AST, St, LN, [<<" ">>, Line|Acc]).
-
-%% Inline formatting.
-
-%% @todo Probably do it as part of the node functions that require it.
-format(Text, LN, St) ->
-	case format(Text, LN, St, [], <<>>, $\s) of
-		[Bin] when is_binary(Bin) -> Bin;
-		Formatted -> Formatted
-	end.
-
-format(<<>>, _, _, Acc, <<>>, _) ->
-	lists:reverse(Acc);
-format(<<>>, _, _, Acc, BinAcc, _) ->
-	lists:reverse([BinAcc|Acc]);
-format(<< "link:", Rest0/bits >>, LN, St, Acc0, BinAcc, Prev) when Prev =:= $\s ->
-	case re:run(Rest0, "^([^[]*)\\[([^]]*)\\](.*)", [{capture, all, binary}]) of
-		nomatch ->
-			format(Rest0, LN, St, Acc0, << BinAcc/binary, "link:" >>, $:);
-		{match, [_, Link, Text, Rest]} ->
-			Acc = case BinAcc of
-				<<>> -> Acc0;
-				_ -> [BinAcc|Acc0]
-			end,
-			format(Rest, LN, St, [rel_link(Text, Link, ann(LN, St))|Acc], <<>>, $])
-	end;
-format(<< C, Rest0/bits >>, LN, St, Acc0, BinAcc, Prev) when Prev =:= $\s ->
-	%% @todo In some cases we must format inside the quoted text too.
-	%% Therefore we need to have some information about what to do here.
-	Quotes = #{
-		$* => {strong, text},
-		$` => {mono, literal}
-	},
-	case maps:get(C, Quotes, undefined) of
-		undefined ->
-			format(Rest0, LN, St, Acc0, << BinAcc/binary, C >>, C);
-		{NodeType, QuotedType} ->
-			case binary:split(Rest0, << C >>) of
-				[_] ->
-					format(Rest0, LN, St, Acc0, << BinAcc/binary, $* >>, $*);
-				[QuotedText0, Rest] ->
-					Acc = case BinAcc of
-						<<>> -> Acc0;
-						_ -> [BinAcc|Acc0]
-					end,
-					QuotedText = case QuotedType of
-						text -> format(QuotedText0, LN, St);
-						literal -> QuotedText0
-					end,
-					format(Rest, LN, St, [quoted(NodeType, QuotedText, ann(LN, St))|Acc], <<>>, $*)
-			end
-	end;
-format(<< C, Rest/bits >>, LN, St, Acc, BinAcc, _) ->
-	format(Rest, LN, St, Acc, << BinAcc/binary, C >>, C).
-
-%% Second pass.
-
-p2([], Acc) ->
-	lists:reverse(Acc);
-p2([{label, Label, Items, Ann}|Tail], Acc) ->
-	%% @todo Handle this like other lists.
-	p2(Tail, [ll([li(p2(Items, []), #{label => Label}, Ann)], #{}, Ann)|Acc]);
-p2(Tail0=[{uli1, _, UlAnn}|_], Acc) ->
-	{LIs0, Tail} = lists:splitwith(fun({uli1, _, _}) -> true; (_) -> false end, Tail0),
-	LIs = [li(I, LiAnn) || {uli1, I, LiAnn} <- LIs0],
-	p2(Tail, [ul(LIs, #{}, UlAnn)|Acc]);
-p2([{begin_table, Attrs, Ann}|Tail0], Acc) ->
-	%% @todo Can also get them from Attrs?
-	N = count_table_columns(Tail0),
-	{Rows, Tail} = p2_rows(Tail0, [], [], N, 1),
-	p2(Tail, [table(Rows, Attrs, Ann)|Acc]);
-p2([Item|Tail], Acc) ->
-	p2(Tail, [Item|Acc]).
-
-%% @todo One cell per line version.
-count_table_columns(Cells) ->
-	length(lists:takewhile(fun({cell, _, _, _}) -> true; (_) -> false end, Cells)).
-
-p2_rows([nl|Tail], Rows, Cols, NumCols, N) ->
-	p2_rows(Tail, Rows, Cols, NumCols, N);
-p2_rows([Cell = {cell, _, _, Ann}|Tail], Rows, Cols, NumCols, NumCols) ->
-	p2_rows(Tail, [row(lists:reverse([Cell|Cols]), Ann)|Rows], [], NumCols, 1);
-p2_rows([Cell = {cell, _, _, _}|Tail], Rows, Cols, NumCols, N) ->
-	p2_rows(Tail, Rows, [Cell|Cols], NumCols, N + 1);
-p2_rows([end_table|Tail], Rows, [], _, _) ->
-	{lists:reverse(Rows), Tail}.
-
-%% Annotations.
-
-ann(Line, St) ->
-	ann(Line, 1, St).
-
-%% @todo Take filename too, if any.
-ann(Line, Col, _St) ->
-	#{line => Line, col => Col}.
-
-%% Nodes.
-
-cell(Nodes, Ann) ->
-	{cell, #{}, Nodes, Ann}.
-
-comment(Text, Ann) ->
-	{comment, #{}, Text, Ann}.
-
-li(Nodes, Ann) ->
-	li(Nodes, #{}, Ann).
-
-li(Nodes, Attrs, Ann) ->
-	{li, Attrs, Nodes, Ann}.
-
-listing(Text, Attrs, Ann) ->
-	{listing, Attrs, Text, Ann}.
-
-ll(Nodes, Attrs, Ann) ->
-	{ll, Attrs, Nodes, Ann}.
-
-paragraph(Text, Attrs, Ann) ->
-	{p, Attrs, Text, Ann}.
-
-quoted(NodeType, Text, Ann) ->
-	{NodeType, #{}, Text, Ann}.
-
-rel_link(Text, Link, Ann) ->
-	{rel_link, #{target => Link}, Text, Ann}.
-
-row(Nodes, Ann) ->
-	{row, #{}, Nodes, Ann}.
-
-table(Nodes, Attrs, Ann) ->
-	{table, Attrs, Nodes, Ann}.
-
-title(Text, Attrs, Ann) ->
-	{title, Attrs, Text, Ann}.
-
-ul(Nodes, Attrs, Ann) ->
-	{ul, Attrs, Nodes, Ann}.
-
-%% Utility functions.
-
-trim_ws(Text) ->
-	iolist_to_binary(re:replace(Text, "^[ \\t]+|[ \\t]+$", <<>>, [global])).
diff --git a/src/asciideck_reader.erl b/src/asciideck_reader.erl
new file mode 100644
index 0000000..d098417
--- /dev/null
+++ b/src/asciideck_reader.erl
@@ -0,0 +1,33 @@
+%% Copyright (c) 2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(asciideck_reader).
+
+-export([read_line/1]).
+-export([get_position/1]).
+-export([set_position/2]).
+
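+%% Illustrative use, assuming a reader process started elsewhere
+%% (for example via asciideck_stdin_reader:start_link/0):
+%%   {ok, Pid} = asciideck_stdin_reader:start_link(),
+%%   Line = asciideck_reader:read_line(Pid),
+%%   Pos = asciideck_reader:get_position(Pid).
+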
+-spec read_line(pid()) -> binary() | eof.
+read_line(Pid) ->
+	gen_server:call(Pid, read_line).
+
+%% @todo peek_line
+
+-spec get_position(pid()) -> pos_integer().
+get_position(Pid) ->
+	gen_server:call(Pid, get_position).
+
+-spec set_position(pid(), pos_integer()) -> ok.
+set_position(Pid, Pos) ->
+	gen_server:cast(Pid, {set_position, Pos}).
diff --git a/src/asciideck_source_highlight.erl b/src/asciideck_source_highlight.erl
new file mode 100644
index 0000000..b672784
--- /dev/null
+++ b/src/asciideck_source_highlight.erl
@@ -0,0 +1,27 @@
+%% Copyright (c) 2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% https://www.gnu.org/software/src-highlite/source-highlight.html
+-module(asciideck_source_highlight).
+
+-export([filter/2]).
+
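+%% The attribute map comes from the listing block; as used by the HTML
+%% translator, positional attribute 2 carries the language, for example
+%% #{1 => <<"source">>, 2 => <<"erlang">>} for a "[source,erlang]" block.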
+filter(Input, #{2 := Lang}) ->
+	TmpFile = "/tmp/asciideck-" ++ integer_to_list(erlang:phash2(make_ref())),
+	ok = file:write_file(TmpFile, Input),
+	Output = os:cmd(io_lib:format(
+		"source-highlight -i ~s -s ~s",
+		[TmpFile, Lang])),
+	_ = file:delete(TmpFile),
+	unicode:characters_to_binary(Output).
diff --git a/src/asciideck_stdin_reader.erl b/src/asciideck_stdin_reader.erl
new file mode 100644
index 0000000..9ea9dc8
--- /dev/null
+++ b/src/asciideck_stdin_reader.erl
@@ -0,0 +1,74 @@
+%% Copyright (c) 2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(asciideck_stdin_reader).
+-behaviour(gen_server).
+
+%% The API is defined in asciideck_reader.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+	lines = [] :: [binary()],
+	pos = 1 :: non_neg_integer()
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+	{ok, #state{}}.
+
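+%% Lines already read from standard input are kept in the state so that
+%% a caller can move back with set_position/2 and read them again.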
+handle_call(read_line, _From, State=#state{lines=Lines, pos=Pos})
+		when length(Lines) >= Pos ->
+	{reply, lists:nth(Pos, lists:reverse(Lines)), State#state{pos=Pos + 1}};
+handle_call(read_line, _From, State=#state{lines=Lines, pos=Pos}) ->
+	case io:get_line('') of
+		eof ->
+			{reply, eof, State};
+		Line0 ->
+			Line1 = string:strip(Line0, right, $\n),
+			Line = unicode:characters_to_binary(Line1),
+			{reply, Line, State#state{lines=[Line|Lines], pos=Pos + 1}}
+	end;
+handle_call(get_position, _From, State=#state{pos=Pos}) ->
+	{reply, Pos, State};
+handle_call(_Request, _From, State) ->
+	{reply, ignored, State}.
+
+handle_cast({set_position, Pos}, State) ->
+	{noreply, State#state{pos=Pos}};
+handle_cast(_Msg, State) ->
+	{noreply, State}.
+
+handle_info(_Info, State) ->
+	{noreply, State}.
+
+terminate(_Reason, _State) ->
+	ok.
+
+code_change(_OldVsn, State, _Extra) ->
+	{ok, State}.
diff --git a/src/asciideck_tables_pass.erl b/src/asciideck_tables_pass.erl
new file mode 100644
index 0000000..e20d7d5
--- /dev/null
+++ b/src/asciideck_tables_pass.erl
@@ -0,0 +1,320 @@
+%% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% This pass parses and builds a table from the contents
+%% of a table block.
+%%
+%% Asciidoc User Guide 23
+%%
+%% @todo Rows and cells are currently not annotated.
+-module(asciideck_tables_pass).
+
+-export([run/1]).
+
+-define(IS_WS(C), (C =:= $\s) or (C =:= $\t) or (C =:= $\n)).
+
+run([]) ->
+	[];
+run([Table={table, _, _, _}|Tail]) ->
+	[table(Table)|run(Tail)];
+run([Block|Tail]) ->
+	[Block|run(Tail)].
+
+table({table, Attrs, Contents, Ann}) ->
+	{Cells, NumCols} = parse_table(Contents, Attrs),
+	Children = rows(Cells, NumCols),
+	{table, Attrs, Children, Ann}.
+
+-ifdef(TEST).
+table_test() ->
+	{table, _, [
+		{row, _, [
+			{cell, _, [{paragraph, _, <<"1">>, _}], _},
+			{cell, _, [{paragraph, _, <<"2">>, _}], _},
+			{cell, _, [{paragraph, _, <<"A">>, _}], _}
+		], _},
+		{row, _, [
+			{cell, _, [{paragraph, _, <<"3">>, _}], _},
+			{cell, _, [{paragraph, _, <<"4">>, _}], _},
+			{cell, _, [{paragraph, _, <<"B">>, _}], _}
+		], _},
+		{row, _, [
+			{cell, _, [{paragraph, _, <<"5">>, _}], _},
+			{cell, _, [{paragraph, _, <<"6">>, _}], _},
+			{cell, _, [{paragraph, _, <<"C">>, _}], _}
+		], _}
+	], _} = table({table, #{}, <<
+		"|1 |2 |A\n"
+		"|3 |4 |B\n"
+		"|5 |6 |C">>, #{line => 1}}),
+	ok.
+-endif.
+
+%% If the cols attribute is not specified, the number of
+%% columns is the number of cells on the first line.
+parse_table(Contents, #{<<"cols">> := Cols}) ->
+	{parse_cells(Contents, []), num_cols(Cols)};
+%% We get the first line, parse the cells in it then
+%% count the number of columns in the table. Finally
+%% we parse all the remaining cells.
+parse_table(Contents, _) ->
+	case binary:split(Contents, <<$\n>>) of
+		%% We only have the one line. Who writes tables like this?
+		[Line] ->
+			Cells = parse_cells(Line, []),
+			{Cells, length(Cells)};
+		%% We have a useful table with more than one line. Good user!
+		[Line, Rest] ->
+			Cells0 = parse_cells(Line, []),
+			Cells = parse_cells(Rest, lists:reverse(Cells0)),
+			{Cells, length(Cells0)}
+	end.
+
+%% @todo Don't discard Specs.
+num_cols(Cols) ->
+	try binary_to_integer(Cols) of
+		Int -> Int
+	catch _:_ ->
+		Specs0 = binary:split(Cols, <<$,>>, [global]),
+		Specs = [parse_specs(Spec) || Spec <- Specs0],
+		lists:sum([M || #{multiplier := M} <- Specs])
+	end.
+
+-ifdef(TEST).
+num_cols_test_() ->
+	Tests = [
+		{<<"4">>, 4},
+		{<<">s,^m,e">>, 3},
+		{<<"3,^2,^2,10">>, 4},
+		{<<"^1,4*2">>, 5},
+		{<<"e,m,^,>s">>, 4},
+		{<<"2<d,2*,4d,>">>, 5},
+		{<<"4*<">>, 4},
+		{<<"3*.^">>, 3},
+		{<<"2*,.>">>, 3},
+		{<<".<,.^,.>">>, 3},
+		{<<".<,.^,^.>">>, 3}
+	],
+	[{V, fun() -> R = num_cols(V) end} || {V, R} <- Tests].
+-endif.
+
+%% Asciidoc User Guide 23.4
+%%
+%% [<multiplier>*][<horizontal>][.<vertical>][<width>][<style>]
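+%%
+%% For example (see the tests below): ">s" is right-aligned strong text,
+%% "4*2" is four columns of proportional width 2, and "3*.^" is three
+%% columns with middle vertical alignment.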
+parse_specs(Bin0) ->
+	{ok, Bin1, Spec1} = parse_specs_multiplier(Bin0, #{}),
+	%% Width and alignment positions may be switched.
+	{ok, Bin4, Spec4} = case Bin1 of
+		<<C, _/bits>> when C >= $0, C =< $9 ->
+			{ok, Bin2, Spec2} = parse_specs_width(Bin1, Spec1),
+			{ok, Bin3, Spec3} = parse_specs_horizontal(Bin2, Spec2),
+			parse_specs_vertical(Bin3, Spec3);
+		_ ->
+			{ok, Bin2, Spec2} = parse_specs_horizontal(Bin1, Spec1),
+			{ok, Bin3, Spec3} = parse_specs_vertical(Bin2, Spec2),
+			parse_specs_width(Bin3, Spec3)
+	end,
+	parse_specs_style(Bin4, Spec4).
+
+parse_specs_multiplier(Bin, Spec) ->
+	case binary:split(Bin, <<"*">>) of
+		[_] ->
+			{ok, Bin, Spec#{multiplier => 1}};
+		[Multiplier, Rest] ->
+			{ok, Rest, Spec#{multiplier => binary_to_integer(Multiplier)}}
+	end.
+
+parse_specs_horizontal(Bin, Spec) ->
+	case Bin of
+		<<"<", Rest/bits>> -> {ok, Rest, Spec#{horizontal => left}};
+		<<"^", Rest/bits>> -> {ok, Rest, Spec#{horizontal => center}};
+		<<">", Rest/bits>> -> {ok, Rest, Spec#{horizontal => right}};
+		_ -> {ok, Bin, Spec#{horizontal => left}}
+	end.
+
+parse_specs_vertical(Bin, Spec) ->
+	case Bin of
+		<<".<", Rest/bits>> -> {ok, Rest, Spec#{vertical => top}};
+		<<".^", Rest/bits>> -> {ok, Rest, Spec#{vertical => middle}};
+		<<".>", Rest/bits>> -> {ok, Rest, Spec#{vertical => bottom}};
+		_ -> {ok, Bin, Spec#{vertical => top}}
+	end.
+
+parse_specs_width(Bin, Spec) ->
+	case binary:split(Bin, <<"%">>) of
+		[_] ->
+			case binary_take_while_integer(Bin, <<>>) of
+				{<<>>, _} ->
+					{ok, Bin, Spec#{width => 1, width_unit => proportional}};
+				{Width, Rest} ->
+					{ok, Rest, Spec#{width => binary_to_integer(Width), width_unit => proportional}}
+			end;
+		[Percent, Rest] ->
+			{ok, Rest, Spec#{width => binary_to_integer(Percent), width_unit => percent}}
+	end.
+
+binary_take_while_integer(<<C, R/bits>>, Acc) when C >= $0, C =< $9 ->
+	binary_take_while_integer(R, <<Acc/binary, C>>);
+binary_take_while_integer(Rest, Acc) ->
+	{Acc, Rest}.
+
+parse_specs_style(<<>>, Spec) ->
+	Spec#{style => default};
+parse_specs_style(Bin, Spec) ->
+	Style = parse_specs_match_style(Bin, [
+		<<"default">>, <<"emphasis">>, <<"monospaced">>, <<"strong">>,
+		<<"header">>, <<"asciidoc">>, <<"literal">>, <<"verse">>
+	]),
+	Spec#{style => Style}.
+
+parse_specs_match_style(Prefix, [Style|Tail]) ->
+	case binary:longest_common_prefix([Prefix, Style]) of
+		0 -> parse_specs_match_style(Prefix, Tail);
+		_ -> binary_to_atom(Style, latin1)
+	end.
+
+-ifdef(TEST).
+parse_specs_test_() ->
+	Res = fun(Override) ->
+		maps:merge(#{
+			multiplier => 1,
+			horizontal => left,
+			vertical => top,
+			width => 1,
+			width_unit => proportional,
+			style => default
+		}, Override)
+	end,
+	Tests = [
+		{<<"3">>, Res(#{width => 3})},
+		{<<"10">>, Res(#{width => 10})},
+		{<<">s">>, Res(#{horizontal => right, style => strong})},
+		{<<"^m">>, Res(#{horizontal => center, style => monospaced})},
+		{<<"e">>, Res(#{style => emphasis})},
+		{<<"^2">>, Res(#{horizontal => center, width => 2})},
+		{<<"4*2">>, Res(#{multiplier => 4, width => 2})},
+		{<<"^">>, Res(#{horizontal => center})},
+		{<<">">>, Res(#{horizontal => right})},
+		{<<"2<h">>, Res(#{width => 2, horizontal => left, style => header})},
+		{<<"2*">>, Res(#{multiplier => 2})},
+		{<<"4*<">>, Res(#{multiplier => 4, horizontal => left})},
+		{<<"3*.^">>, Res(#{multiplier => 3, vertical => middle})},
+		{<<".>">>, Res(#{vertical => bottom})}
+	],
+	[{V, fun() -> R = parse_specs(V) end} || {V, R} <- Tests].
+-endif.
+
+parse_cells(Contents, Acc) ->
+	Cells = split_cells(Contents),%binary:split(Contents, [<<$|>>], [global]),
+	do_parse_cells(Cells, Acc).
+	%% Split on |
+	%% Look at the end of each element see if there's a cell specifier
+	%% Add it as an attribute to the cell for now and consolidate
+	%% when processing rows.
+
+split_cells(Contents) ->
+	split_cells(Contents, <<>>, []).
+
+split_cells(<<>>, Cell, Acc) ->
+	lists:reverse([Cell|Acc]);
+split_cells(<<$\\, $|, R/bits>>, Cell, Acc) ->
+	split_cells(R, <<Cell/binary, $|>>, Acc);
+split_cells(<<$|, R/bits>>, Cell, Acc) ->
+	split_cells(R, <<>>, [Cell|Acc]);
+split_cells(<<C, R/bits>>, Cell, Acc) ->
+	split_cells(R, <<Cell/binary, C>>, Acc).
+
+%% Malformed table (no pipe before cell). Process it like it is a single cell.
+do_parse_cells([Contents], Acc) ->
+	%% @todo Annotations.
+	lists:reverse([{cell, #{specifiers => <<>>}, Contents, #{}}|Acc]);
+%% Last cell. There are no further cell specifiers.
+do_parse_cells([Specs, Contents0], Acc) ->
+	Contents = asciideck_block_parser:parse(Contents0),
+	%% @todo Annotations.
+	Cell = {cell, #{specifiers => Specs}, Contents, #{}},
+	lists:reverse([Cell|Acc]);
+%% If there are cell specifiers we need to extract them from the cell
+%% contents. Cell specifiers are everything from the last whitespace
+%% until the end of the binary.
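+%%
+%% For illustration: split_cells(<<"2*|one |two">>) gives
+%% [<<"2*">>, <<"one ">>, <<"two">>], so <<"2*">> holds the specifiers of
+%% the first cell and the tail of <<"one ">> would hold the second cell's.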
+do_parse_cells([Specs, Contents0|Tail], Acc) ->
+	NextSpecs = <<>>, %% @todo find_r(Contents0, <<>>),
+	Len = byte_size(Contents0) - byte_size(NextSpecs),
+	<<Contents1:Len/binary, _/bits>> = Contents0,
+	Contents = asciideck_block_parser:parse(Contents1),
+	%% @todo Annotations.
+	Cell = {cell, #{specifiers => Specs}, Contents, #{}},
+	do_parse_cells([NextSpecs|Tail], [Cell|Acc]).
+
+%% @todo This is not correct. Not all remaining data is specifiers.
+%% In addition, for columns at the end of the line this doesn't apply.
+%% Find the remaining data after the last whitespace character.
+%find_r(<<>>, Acc) ->
+%	Acc;
+%find_r(<<C, Rest/bits>>, _) when ?IS_WS(C) ->
+%	find_r(Rest, Rest);
+%find_r(<<_, Rest/bits>>, Acc) ->
+%	find_r(Rest, Acc).
+
+-ifdef(TEST).
+parse_table_test() ->
+	{[
+		{cell, _, [{paragraph, _, <<"1">>, _}], _},
+		{cell, _, [{paragraph, _, <<"2">>, _}], _},
+		{cell, _, [{paragraph, _, <<"A">>, _}], _},
+		{cell, _, [{paragraph, _, <<"3">>, _}], _},
+		{cell, _, [{paragraph, _, <<"4">>, _}], _},
+		{cell, _, [{paragraph, _, <<"B">>, _}], _},
+		{cell, _, [{paragraph, _, <<"5">>, _}], _},
+		{cell, _, [{paragraph, _, <<"6">>, _}], _},
+		{cell, _, [{paragraph, _, <<"C">>, _}], _}
+	], 3} = parse_table(<<
+		"|1 |2 |A\n"
+		"|3 |4 |B\n"
+		"|5 |6 |C">>, #{}),
+	ok.
+
+parse_table_escape_pipe_test() ->
+	{[
+		{cell, _, [{paragraph, _, <<"1">>, _}], _},
+		{cell, _, [{paragraph, _, <<"2">>, _}], _},
+		{cell, _, [{paragraph, _, <<"3 |4">>, _}], _},
+		{cell, _, [{paragraph, _, <<"5">>, _}], _}
+	], 2} = parse_table(<<
+		"|1 |2\n"
+		"|3 \\|4 |5">>, #{}),
+	ok.
+-endif.
+
+%% @todo We currently don't handle colspans and rowspans.
+rows(Cells, NumCols) ->
+	rows(Cells, [], NumCols, [], NumCols).
+
+%% End of row.
+rows(Tail, Acc, NumCols, RowAcc, CurCol) when CurCol =< 0 ->
+	%% @todo Annotations.
+	Row = {row, #{}, lists:reverse(RowAcc), #{}},
+	rows(Tail, [Row|Acc], NumCols, [], NumCols);
+%% Add a cell to the row.
+rows([Cell|Tail], Acc, NumCols, RowAcc, CurCol) ->
+	rows(Tail, Acc, NumCols, [Cell|RowAcc], CurCol - 1);
+%% End of a properly formed table.
+rows([], Acc, _, [], _) ->
+	lists:reverse(Acc);
+%% Malformed table. Even if we expect more columns,
+%% if there are no more cells there's nothing we can do.
+rows([], Acc, _, RowAcc, _) ->
+	%% @todo Annotations.
+	Row = {row, #{}, lists:reverse(RowAcc), #{}},
+	lists:reverse([Row|Acc]).
diff --git a/src/asciideck_to_html.erl b/src/asciideck_to_html.erl
new file mode 100644
index 0000000..228c043
--- /dev/null
+++ b/src/asciideck_to_html.erl
@@ -0,0 +1,246 @@
+%% Copyright (c) 2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(asciideck_to_html).
+
+-export([translate/2]).
+
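+%% Options handled below: compress => gzip, outdir, outfile and
+%% no_header_footer; without outdir the HTML iolist is returned directly.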
+translate(AST, Opts) ->
+	Output0 = ast(AST),
+	Output1 = maybe_header_footer(Output0, Opts),
+	{CompressExt, Output} = case Opts of
+		#{compress := gzip} -> {".gz", zlib:gzip(Output1)};
+		_ -> {"", Output1}
+	end,
+	case Opts of
+		#{outdir := Path, outfile := Filename} ->
+			file:write_file(binary_to_list(iolist_to_binary(
+				[Path, "/", Filename, ".html", CompressExt])), Output);
+		#{outdir := Path} ->
+			Filename = filename_from_ast(AST),
+			file:write_file(binary_to_list(iolist_to_binary(
+				[Path, "/", Filename, ".html", CompressExt])), Output);
+		_ ->
+			Output
+	end.
+
+maybe_header_footer(Body, #{no_header_footer := _}) ->
+	Body;
+maybe_header_footer(Body, _Opts) ->
+	[
+		"<!DOCTYPE html>\n"
+		"<html lang=\"en\">\n"
+		"<head>\n"
+		"<meta charset=\"utf-8\"/>\n"
+		"<title>TODO title</title>\n" %% @todo
+		"</head>\n"
+		"<body>\n",
+		Body,
+		"</body>\n"
+		"</html>\n"
+	].
+
+filename_from_ast([{section_title, #{level := 0}, Filename, _}|_]) ->
+	Filename.
+
+%% Loop over all types of AST nodes.
+
+ast(AST) ->
+	fold(AST, fun ast_node/1).
+
+fold(AST, Fun) ->
+	lists:reverse(lists:foldl(
+		fun(Node, Acc) -> [Fun(Node)|Acc] end,
+		[], AST)).
+
+ast_node(Node={Type, _, _, _}) ->
+	try
+		case Type of
+			section_title -> section_title(Node);
+			paragraph -> paragraph(Node);
+			listing_block -> listing_block(Node);
+			passthrough_block -> passthrough_block(Node);
+			list -> list(Node);
+			table -> table(Node);
+			block_macro -> block_macro(Node);
+			comment_line -> comment_line(Node);
+			_ -> ast_error({unknown_type, Node})
+		end
+	catch C:E:S ->
+		ast_error({crash, C, E, S, Node})
+	end.
+
+ast_error(Error) ->
+	[
+		"<p class=\"asciideck-error\">",
+		html_encode(unicode:characters_to_binary(io_lib:format("~p", [Error]))),
+		"</p>"
+	].
+
+%% Section titles.
+
+section_title({section_title, Attrs=#{level := Level}, Title, _}) ->
+	LevelC = $1 + Level,
+	ID = case Attrs of
+		#{<<"id">> := ID0} -> ID0;
+		_ -> id_from_title(Title)
+	end,
+	["<h", LevelC, " id=\"", ID, "\">", inline(Title), "</h", LevelC, ">\n"].
+
+%% Asciidoc User Guide 8.4.2
+%% @todo Handle cases where the title is repeated in the same document.
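+%%
+%% For example <<"Hello world!">> becomes the id "_hello_world".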
+id_from_title(Title) ->
+	ID0 = unicode:characters_to_binary(string:to_lower(unicode:characters_to_list(Title))),
+	ID1 = <<if
+		C >= $a, C =< $z -> <<C/utf8>>;
+		C >= $0, C =< $9 -> <<C/utf8>>;
+		true -> <<$_>>
+	end || <<C/utf8>> <= ID0>>,
+	ID = string:strip(unicode:characters_to_list(ID1), both, $_),
+	[$_, unicode:characters_to_binary(ID)].
+
+%% Paragraphs.
+
+paragraph({paragraph, _, Text, _}) ->
+	["<p>", inline(Text), "</p>\n"].
+
+%% Listing blocks.
+
+listing_block({listing_block, Attrs, Listing0, _}) ->
+	Listing = case Attrs of
+		#{1 := <<"source">>, 2 := _} ->
+			try asciideck_source_highlight:filter(Listing0, Attrs) catch C:E:S -> io:format("~p ~p ~p~n", [C, E, S]), exit(bad) end;
+		_ ->
+			["<pre>", html_encode(Listing0), "</pre>"]
+	end,
+	[
+		"<div class=\"listingblock\">",
+		case Attrs of
+			#{<<"title">> := Title} ->
+				["<div class=\"title\">", inline(Title), "</div>\n"];
+			_ ->
+				[]
+		end,
+		"<div class=\"content\">",
+		Listing,
+		"</div></div>\n"
+	].
+
+%% Passthrough blocks.
+
+passthrough_block({passthrough_block, _, HTML, _}) ->
+	HTML.
+
+%% Lists.
+
+list({list, #{type := bulleted}, Items, _}) ->
+	["<ul>", fold(Items, fun list_item/1), "</ul>\n"];
+list({list, #{type := numbered}, Items, _}) ->
+	["<ol>", fold(Items, fun list_item/1), "</ol>\n"];
+list({list, #{type := labeled}, Items, _}) ->
+	["<dl>", fold(Items, fun labeled_list_item/1), "</dl>\n"].
+
+list_item({list_item, _, [{paragraph, _, Text, _}|AST], _}) ->
+	[
+		"<li>",
+		inline(Text), "\n",
+		ast(AST),
+		"</li>\n"
+	].
+
+labeled_list_item({list_item, #{label := Label}, AST, _}) ->
+	[
+		"<dt>", inline(Label), "</dt>\n",
+		"<dd>",
+		ast(AST),
+		"</dd>\n"
+	].
+
+%% Tables.
+
+table({table, Attrs, [{row, _, Head, _}|Rows], _}) ->
+	[
+		"<table rules=\"all\" width=\"100%\" frame=\"border\"
+			cellspacing=\"0\" cellpadding=\"4\">\n",
+		case Attrs of
+			#{<<"title">> := Caption} -> ["<caption>", inline(Caption), "</caption>"];
+			_ -> []
+		end,
+		"<thead><tr>", table_head(Head), "</tr></thead>"
+		"<tbody>", table_body(Rows), "</tbody>"
+		"</table>\n"
+	].
+
+table_head(Cells) ->
+	[["<th>", table_cell(AST), "</th>\n"]
+		|| {cell, _, AST, _} <- Cells].
+
+table_body(Rows) ->
+	[["<tr>", table_body_cells(Cells), "</tr>\n"]
+		|| {row, _, Cells, _} <- Rows].
+
+table_body_cells(Cells) ->
+	[["<td>", table_cell(AST), "</td>\n"]
+		|| {cell, _, AST, _} <- Cells].
+
+table_cell(AST0) ->
+	AST = [Node || Node={Type, _, _, _} <- AST0, Type =/= comment_line],
+	case AST of
+		[{paragraph, _, Text, _}] ->
+			inline(Text);
+		_ ->
+			ast(AST)
+	end.
+
+%% Block macros.
+
+block_macro({block_macro, #{name := <<"image">>,
+		target := Target, 1 := Caption}, _, _}) ->
+	["<img src=\"", html_encode(Target), "\" "
+		"alt=\"", html_encode(Caption), "\"/>"].
+
+%% Comment lines are printed in the generated file
+%% but are not visible in viewers.
+
+comment_line({comment_line, _, Text, _}) ->
+	["<!-- ", html_encode(Text), "-->\n"].
+
+%% Inline formatting.
+
+inline(Text) when is_binary(Text) ->
+	html_encode(Text);
+inline({link, #{target := Target}, Text, _}) ->
+	["<a href=\"", html_encode(Target), "\">", html_encode(Text), "</a>"];
+inline({xref, #{id := ID}, Text, _}) ->
+	["<a href=\"#", html_encode(ID), "\">", html_encode(Text), "</a>"];
+inline({emphasized, _, Text, _}) ->
+	["<em>", inline(Text), "</em>"];
+inline({strong, _, Text, _}) ->
+	["<strong>", inline(Text), "</strong>"];
+inline({inline_literal_passthrough, _, Text, _}) ->
+	["<code>", inline(Text), "</code>"];
+inline({line_break, _, _, _}) ->
+	"<br/>";
+inline(Text) when is_list(Text) ->
+	[inline(T) || T <- Text].
+
+html_encode(Text) ->
+	<<case C of
+		$& -> <<"&amp;">>;
+		$< -> <<"&lt;">>;
+		$> -> <<"&gt;">>;
+		$" -> <<"&quot;">>;
+		$' -> <<"&apos;">>;
+		_ -> <<C/utf8>>
+	end || <<C/utf8>> <= Text>>.
diff --git a/src/asciideck_to_manpage.erl b/src/asciideck_to_manpage.erl
index bdff90e..8e1a19e 100644
--- a/src/asciideck_to_manpage.erl
+++ b/src/asciideck_to_manpage.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
@@ -19,7 +19,7 @@
 -export([translate/2]).
 
 translate(AST, Opts) ->
-	{Man, Section, Output0} = translate_man(AST, Opts),
+	{Man, Section, Output0} = man(AST, Opts),
 	{CompressExt, Output} = case Opts of
 		#{compress := gzip} -> {".gz", zlib:gzip(Output0)};
 		_ -> {"", Output0}
@@ -32,7 +32,9 @@ translate(AST, Opts) ->
 			Output
 	end.
 
-translate_man([{title, #{level := 0}, Title0, _Ann}|AST], Opts) ->
+%% Header of the man page file.
+
+man([{section_title, #{level := 0}, Title0, _Ann}|AST], Opts) ->
 	ensure_name_section(AST),
 	[Title, << Section:1/binary, _/bits >>] = binary:split(Title0, <<"(">>),
 	Extra1 = maps:get(extra1, Opts, today()),
@@ -42,10 +44,10 @@ translate_man([{title, #{level := 0}, Title0, _Ann}|AST], Opts) ->
 		".TH \"", Title, "\" \"", Section, "\" \"",
 			Extra1, "\" \"", Extra2, "\" \"", Extra3, "\"\n"
 		".ta T 4n\n\\&\n",
-		man(AST, [])
+		ast(AST)
 	]}.
 
-ensure_name_section([{title, #{level := 1}, Title, _}|_]) ->
+ensure_name_section([{section_title, #{level := 1}, Title, _}|_]) ->
 	case string:to_lower(string:strip(binary_to_list(Title))) of
 		"name" -> ok;
 		_ -> error(badarg)
@@ -57,80 +59,78 @@ today() ->
 	{{Y, M, D}, _} = calendar:universal_time(),
 	io_lib:format("~b-~2.10.0b-~2.10.0b", [Y, M, D]).
 
-man([], Acc) ->
-	lists:reverse(Acc);
-man([{title, #{level := 1}, Title, _Ann}|Tail], Acc) ->
-	man(Tail, [[".SH ", string:to_upper(binary_to_list(Title)), "\n"]|Acc]);
-man([{title, #{level := 2}, Title, _Ann}|Tail], Acc) ->
-	man(Tail, [[".SS ", Title, "\n"]|Acc]);
-man([{p, _Attrs, Text, _Ann}|Tail], Acc) ->
-	man(Tail, [[".LP\n", man_format(Text), "\n.sp\n"]|Acc]);
-man([{listing, Attrs, Listing, _Ann}|Tail], Acc0) ->
-	Acc1 = case Attrs of
-		#{title := Title} ->
-			[[".PP\n\\fB", Title, "\\fR\n"]|Acc0];
-		_ ->
-			Acc0
-	end,
-	Acc = [[
+%% Loop over all types of AST nodes.
+
+ast(AST) ->
+	fold(AST, fun ast_node/1).
+
+fold(AST, Fun) ->
+	lists:reverse(lists:foldl(
+		fun(Node, Acc) -> [Fun(Node)|Acc] end,
+		[], AST)).
+
+ast_node(Node={Type, _, _, _}) ->
+	try
+		case Type of
+			section_title -> section_title(Node);
+			paragraph -> paragraph(Node);
+			listing_block -> listing_block(Node);
+			list -> list(Node);
+			table -> table(Node);
+			comment_line -> comment_line(Node);
+			_ ->
+				io:format("Ignored AST node ~p~n", [Node]),
+				[]
+		end
+	catch C:E:S ->
+		io:format("Ignored AST node ~p~nReason: ~p:~p~nStacktrace: ~p~n",
+			[Node, C, E, S]),
+		[]
+	end.
+
+%% Section titles.
+
+section_title({section_title, #{level := 1}, Title, _}) ->
+	[".SH ", string:to_upper(binary_to_list(Title)), "\n"];
+section_title({section_title, #{level := 2}, Title, _}) ->
+	[".SS ", Title, "\n"].
+
+%% Paragraphs.
+
+paragraph({paragraph, _, Text, _}) ->
+	[".LP\n", inline(Text), "\n.sp\n"].
+
+%% Listing blocks.
+
+listing_block({listing_block, Attrs, Listing, _}) ->
+	[
+		case Attrs of
+			#{<<"title">> := Title} ->
+				[".PP\n\\fB", Title, "\\fR\n"];
+			_ ->
+				[]
+		end,
 		".if n \\{\\\n"
 		".RS 4\n"
 		".\\}\n"
 		".nf\n",
-		Listing,
+		escape(Listing),
 		"\n"
 		".fi\n"
 		".if n \\{\\\n"
 		".RE\n"
-		".\\}\n"]|Acc1],
-	man(Tail, Acc);
-man([{ul, _Attrs, Items, _Ann}|Tail], Acc0) ->
-	Acc = man_ul(Items, Acc0),
-	man(Tail, Acc);
-man([{ll, _Attrs, Items, _Ann}|Tail], Acc0) ->
-	Acc = man_ll(Items, Acc0),
-	man(Tail, Acc);
-%% @todo Attributes.
-%% Currently acts as if options="headers" was always set.
-man([{table, _TAttrs, [{row, RowAttrs, Headers0, RowAnn}|Rows0], _TAnn}|Tail], Acc0) ->
-	Headers = [{cell, CAttrs, [{p, Attrs, [{strong, #{}, P, CAnn}], Ann}], CAnn}
-		|| {cell, CAttrs, [{p, Attrs, P, Ann}], CAnn} <- Headers0],
-	Rows = [{row, RowAttrs, Headers, RowAnn}|Rows0],
-	Acc = [[
-		".TS\n"
-		"allbox tab(:);\n",
-		man_table_style(Rows, []),
-		man_table_contents(Rows),
-		".TE\n"
-		".sp 1\n"]|Acc0],
-	man(Tail, Acc);
-%% Skip everything we don't understand.
-man([_Ignore|Tail], Acc) ->
-	io:format("Ignore ~p~n", [_Ignore]), %% @todo lol io:format
-	man(Tail, Acc).
-
-man_ll([], Acc) ->
-	Acc;
-man_ll([{li, #{label := Label}, Item, _LiAnn}|Tail], Acc0) ->
-	Acc = [[
-		".PP\n"
-		"\\fB", Label, "\\fR\n",
-		".RS 4\n",
-		man_ll_item(Item),
-		".RE\n"]|Acc0],
-	man_ll(Tail, Acc).
-
-man_ll_item([{ul, _Attrs, Items, _Ann}]) ->
-	[man_ul(Items, []), "\n"];
-man_ll_item([{p, _PAttrs, Text, _PAnn}]) ->
-	[man_format(Text), "\n"];
-man_ll_item([{p, _PAttrs, Text, _PAnn}|Tail]) ->
-	[man_format(Text), "\n\n", man_ll_item(Tail)].
-
-man_ul([], Acc) ->
-	Acc;
-man_ul([{li, _LiAttrs, [{p, _PAttrs, Text, _PAnn}], _LiAnn}|Tail], Acc0) ->
-	Acc = [[
+		".\\}\n"
+	].
+
+%% Lists.
+
+list({list, #{type := bulleted}, Items, _}) ->
+	fold(Items, fun bulleted_list_item/1);
+list({list, #{type := labeled}, Items, _}) ->
+	fold(Items, fun labeled_list_item/1).
+
+bulleted_list_item({list_item, _, [{paragraph, _, Text, _}|AST], _}) ->
+	[
 		".ie n \\{\\\n"
 		".RS 2\n"
 		"\\h'-02'\\(bu\\h'+01'\\c\n"
@@ -140,40 +140,93 @@ man_ul([{li, _LiAttrs, [{p, _PAttrs, Text, _PAnn}], _LiAnn}|Tail], Acc0) ->
 		".sp -1\n"
 		".IP \\(bu 2.3\n"
 		".\\}\n",
-		man_format(Text), "\n"
-		".RE\n"]|Acc0],
-	man_ul(Tail, Acc).
-
-man_table_style([], [_|Acc]) ->
-	lists:reverse([".\n"|Acc]);
-man_table_style([{row, _, Cols, _}|Tail], Acc) ->
-	man_table_style(Tail, [$\n, man_table_style_cols(Cols, [])|Acc]).
-
-man_table_style_cols([], [_|Acc]) ->
-	lists:reverse(Acc);
-man_table_style_cols([{cell, _, _, _}|Tail], Acc) ->
-	man_table_style_cols(Tail, [$\s, "lt"|Acc]).
-
-man_table_contents(Rows) ->
-	[man_table_contents_cols(Cols, []) || {row, _, Cols, _} <- Rows].
-
-man_table_contents_cols([], [_|Acc]) ->
-	lists:reverse(["\n"|Acc]);
-man_table_contents_cols([{cell, _CAttrs, [{p, _PAttrs, Text, _PAnn}], _CAnn}|Tail], Acc) ->
-	man_table_contents_cols(Tail, [$:, "\nT}", man_format(Text), "T{\n"|Acc]).
-
-man_format(Text) when is_binary(Text) ->
-	Text;
-man_format({rel_link, #{target := Link}, Text, _}) ->
+		inline(Text), "\n",
+		ast(AST),
+		".RE\n"
+	].
+
+labeled_list_item({list_item, #{label := Label}, [{paragraph, _, Text, _}|AST], _}) ->
+	[
+		".PP\n"
+		"\\fB", inline(Label), "\\fR\n",
+		".RS 4\n",
+		inline(Text), "\n",
+		ast(AST),
+		".RE\n"
+	].
+
+%% Tables.
+
+table({table, _, Rows0, _}) ->
+	Rows = table_apply_options(Rows0),
+	[
+		".TS\n"
+		"allbox tab(:);\n",
+		table_style(Rows), ".\n",
+		table_contents(Rows),
+		".TE\n"
+		".sp 1\n"
+	].
+
+%% @todo Currently acts as if options="headers" was always set.
+table_apply_options([{row, RAttrs, Headers0, RAnn}|Tail]) ->
+	Headers = [{cell, CAttrs#{style => <<"strong">>}, CText, CAnn}
+		|| {cell, CAttrs, CText, CAnn} <- Headers0],
+	[{row, RAttrs, Headers, RAnn}|Tail].
+
+table_style(Rows) ->
+	[[table_style_cells(Cells), "\n"]
+		|| {row, _, Cells, _} <- Rows].
+
+table_style_cells(Cells) ->
+	[case CAttrs of
+		#{style := <<"strong">>} -> "ltb ";
+		_ -> "lt "
+	end || {cell, CAttrs, _, _} <- Cells].
+
+table_contents(Rows) ->
+	[[table_contents_cells(Cells), "\n"]
+		|| {row, _, Cells, _} <- Rows].
+
+table_contents_cells([FirstCell|Cells]) ->
+	[table_contents_cell(FirstCell),
+		[[":", table_contents_cell(Cell)] || Cell <- Cells]].
+
+table_contents_cell({cell, _, [{paragraph, _, Text, _}], _}) ->
+	["T{\n", inline(Text), "\nT}"].
+
+%% Comment lines are printed in the generated file
+%% but are not visible in viewers.
+
+comment_line({comment_line, _, Text, _}) ->
+	["\\# ", Text, "\n"].
+
+%% Inline formatting.
+
+inline(Text) when is_binary(Text) ->
+	escape(Text);
+%% When the link is the text we only print it once.
+inline({link, #{target := Link}, Link, _}) ->
+	Link;
+inline({link, #{target := Link}, Text, _}) ->
 	case re:run(Text, "^([-_:.a-zA-Z0-9]*)(\\([0-9]\\))$", [{capture, all, binary}]) of
 		nomatch -> [Text, " (", Link, ")"];
 		{match, [_, ManPage, ManSection]} -> ["\\fB", ManPage, "\\fR", ManSection]
 	end;
-man_format({strong, _, Text, _}) ->
-	["\\fB", man_format(Text), "\\fR"];
+inline({emphasized, _, Text, _}) ->
+	["\\fI", inline(Text), "\\fR"];
+inline({strong, _, Text, _}) ->
+	["\\fB", inline(Text), "\\fR"];
 %% We are already using a monospace font.
-%% @todo Maybe there's a readable formatting we could use to differentiate from normal text?
-man_format({mono, _, Text, _}) ->
-	man_format(Text);
-man_format(Text) when is_list(Text) ->
-	[man_format(T) || T <- Text].
+inline({inline_literal_passthrough, _, Text, _}) ->
+	inline(Text);
+%% Xref links appear as plain text in manuals.
+inline({xref, _, Text, _}) ->
+	inline(Text);
+inline({line_break, _, _, _}) ->
+	"\n.br\n";
+inline(Text) when is_list(Text) ->
+	[inline(T) || T <- Text].
+
+escape(Text) ->
+	binary:replace(iolist_to_binary(Text), <<$\\>>, <<$\\, $\\>>, [global]).
diff --git a/src/asciideck_transform_pass.erl b/src/asciideck_transform_pass.erl
new file mode 100644
index 0000000..a1cf37c
--- /dev/null
+++ b/src/asciideck_transform_pass.erl
@@ -0,0 +1,33 @@
+%% Copyright (c) 2018, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The purpose of this pass is to transform elements based
+%% on the style given in their attributes.
+-module(asciideck_transform_pass).
+
+-export([run/1]).
+
+run([]) ->
+	[];
+%% The following syntax gets converted in the corresponding
+%% listing_block element:
+%%
+%% [source,erlang]
+%% f() -> ok.
+%%
+%% @todo We should not totally overwrite subs.
+run([{paragraph, Attrs=#{1 := <<"source">>}, Text, Ann}|Tail]) ->
+	[{listing_block, Attrs#{<<"subs">> => <<"verbatim">>}, Text, Ann}|run(Tail)];
+run([Block|Tail]) ->
+	[Block|run(Tail)].
diff --git a/test/man_SUITE.erl b/test/man_SUITE.erl
index 30eec4f..384bf68 100644
--- a/test/man_SUITE.erl
+++ b/test/man_SUITE.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
@@ -14,6 +14,7 @@
 
 -module(man_SUITE).
 -compile(export_all).
+-compile(nowarn_export_all).
 
 -import(ct_helper, [doc/1]).
 
diff --git a/test/parser_SUITE.erl b/test/parser_SUITE.erl
index 0f7b393..f3b4b27 100644
--- a/test/parser_SUITE.erl
+++ b/test/parser_SUITE.erl
@@ -1,4 +1,4 @@
-%% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
@@ -14,14 +14,17 @@
 
 -module(parser_SUITE).
 -compile(export_all).
+-compile(nowarn_export_all).
 
 -import(asciideck, [parse/1]).
 -import(ct_helper, [doc/1]).
 
 all() ->
-	ct_helper:all(?MODULE).
+	[{group, blocks}].
 
 %% @todo Test formatting too!
+groups() ->
+	[{blocks, [parallel], ct_helper:all(?MODULE)}].
 
 %% Empty lines.
 
@@ -43,20 +46,20 @@ empty_line_spaces(_) ->
 
 quoted_text_strong(_) ->
 	doc("Strong text formatting. (10.1)"),
-	[{p, _, [{strong, _, <<"Hello beautiful world!">>, _}], _}] =
+	[{paragraph, _, [{strong, _, <<"Hello beautiful world!">>, _}], _}] =
 		parse("*Hello beautiful world!*"),
-	[{p, _, [{strong, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] =
+	[{paragraph, _, [{strong, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] =
 		parse("*Hello* beautiful world!"),
-	[{p, _, [<<"Hello ">>, {strong, _, <<"beautiful">>, _}, <<" world!">>], _}] =
+	[{paragraph, _, [<<"Hello ">>, {strong, _, <<"beautiful">>, _}, <<" world!">>], _}] =
 		parse("Hello *beautiful* world!"),
-	[{p, _, [<<"Hello beautiful ">>, {strong, _, <<"world!">>, _}], _}] =
+	[{paragraph, _, [<<"Hello beautiful ">>, {strong, _, <<"world!">>, _}], _}] =
 		parse("Hello beautiful *world!*"),
-	[{p, _, [<<"Hello beautiful ">>, {strong, _, <<"multiline world!">>, _}, <<" lol">>], _}] =
+	[{paragraph, _, [<<"Hello beautiful ">>, {strong, _, <<"multiline world!">>, _}, <<" lol">>], _}] =
 		parse("Hello beautiful *multiline\nworld!* lol"),
 	%% Nested formatting.
-	[{p, _, [{strong, _, [
+	[{paragraph, _, [{strong, _, [
 		<<"Hello ">>,
-		{rel_link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _},
+		{link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _},
 		<<" world!">>
 	], _}], _}] =
 		parse("*Hello link:downloads/cowboy-2.0.tgz[2.0] world!*"),
@@ -64,18 +67,18 @@ quoted_text_strong(_) ->
 
 quoted_text_literal_mono(_) ->
 	doc("Literal monospace text formatting. (10.1)"),
-	[{p, _, [{mono, _, <<"Hello beautiful world!">>, _}], _}] =
+	[{paragraph, _, [{inline_literal_passthrough, _, <<"Hello beautiful world!">>, _}], _}] =
 		parse("`Hello beautiful world!`"),
-	[{p, _, [{mono, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] =
+	[{paragraph, _, [{inline_literal_passthrough, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] =
 		parse("`Hello` beautiful world!"),
-	[{p, _, [<<"Hello ">>, {mono, _, <<"beautiful">>, _}, <<" world!">>], _}] =
+	[{paragraph, _, [<<"Hello ">>, {inline_literal_passthrough, _, <<"beautiful">>, _}, <<" world!">>], _}] =
 		parse("Hello `beautiful` world!"),
-	[{p, _, [<<"Hello beautiful ">>, {mono, _, <<"world!">>, _}], _}] =
+	[{paragraph, _, [<<"Hello beautiful ">>, {inline_literal_passthrough, _, <<"world!">>, _}], _}] =
 		parse("Hello beautiful `world!`"),
-	[{p, _, [<<"Hello beautiful ">>, {mono, _, <<"multiline world!">>, _}, <<" lol">>], _}] =
+	[{paragraph, _, [<<"Hello beautiful ">>, {inline_literal_passthrough, _, <<"multiline\nworld!">>, _}, <<" lol">>], _}] =
 		parse("Hello beautiful `multiline\nworld!` lol"),
 	%% No text formatting must occur inside backticks.
-	[{p, _, [{mono, _, <<"Hello *beautiful* world!">>, _}], _}] =
+	[{paragraph, _, [{inline_literal_passthrough, _, <<"Hello *beautiful* world!">>, _}], _}] =
 		parse("`Hello *beautiful* world!`"),
 	ok.
 
@@ -86,110 +89,110 @@ quoted_text_literal_mono(_) ->
 
 title_short(_) ->
 	doc("The trailing title delimiter is optional. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!"),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!"),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!"),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!"),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!"),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!"),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!"),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!"),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!"),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!"),
 	ok.
 
 title_short_no_spaces(_) ->
 	doc("One or more spaces must fall between the title and the delimiter. (11.2)"),
-	[{p, _, <<"=Hello world!">>, _}] = parse("=Hello world!"),
-	[{p, _, <<"==Hello world!">>, _}] = parse("==Hello world!"),
-	[{p, _, <<"===Hello world!">>, _}] = parse("===Hello world!"),
-	[{p, _, <<"====Hello world!">>, _}] = parse("====Hello world!"),
-	[{p, _, <<"=====Hello world!">>, _}] = parse("=====Hello world!"),
+	[{paragraph, _, <<"=Hello world!">>, _}] = parse("=Hello world!"),
+	[{paragraph, _, <<"==Hello world!">>, _}] = parse("==Hello world!"),
+	[{paragraph, _, <<"===Hello world!">>, _}] = parse("===Hello world!"),
+	[{paragraph, _, <<"====Hello world!">>, _}] = parse("====Hello world!"),
+	[{paragraph, _, <<"=====Hello world!">>, _}] = parse("=====Hello world!"),
 	ok.
 
 title_short_trim_spaces_before(_) ->
 	doc("Spaces between the title and delimiter must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("=      Hello world!"),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("==     Hello world!"),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("===    Hello world!"),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("====   Hello world!"),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("=====  Hello world!"),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("=      Hello world!"),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("==     Hello world!"),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("===    Hello world!"),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("====   Hello world!"),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("=====  Hello world!"),
 	ok.
 
 title_short_trim_spaces_after(_) ->
 	doc("Spaces after the title must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!     "),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!    "),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!   "),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!  "),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! "),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!     "),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!    "),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!   "),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!  "),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! "),
 	ok.
 
 title_short_trim_spaces_before_after(_) ->
 	doc("Spaces before and after the title must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("=      Hello world!     "),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("==     Hello world!    "),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("===    Hello world!   "),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("====   Hello world!  "),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("=====  Hello world! "),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("=      Hello world!     "),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("==     Hello world!    "),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("===    Hello world!   "),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("====   Hello world!  "),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("=====  Hello world! "),
 	ok.
 
 title_short_trailer(_) ->
 	doc("The trailing title delimiter is optional. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! ="),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! =="),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ==="),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ===="),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ====="),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! ="),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! =="),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ==="),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ===="),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ====="),
 	ok.
 
 title_short_trailer_no_spaces(_) ->
 	doc("One or more spaces must fall between the title and the trailer. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!=">>, _}] = parse("= Hello world!="),
-	[{title, #{level := 1}, <<"Hello world!==">>, _}] = parse("== Hello world!=="),
-	[{title, #{level := 2}, <<"Hello world!===">>, _}] = parse("=== Hello world!==="),
-	[{title, #{level := 3}, <<"Hello world!====">>, _}] = parse("==== Hello world!===="),
-	[{title, #{level := 4}, <<"Hello world!=====">>, _}] = parse("===== Hello world!====="),
+	[{section_title, #{level := 0}, <<"Hello world!=">>, _}] = parse("= Hello world!="),
+	[{section_title, #{level := 1}, <<"Hello world!==">>, _}] = parse("== Hello world!=="),
+	[{section_title, #{level := 2}, <<"Hello world!===">>, _}] = parse("=== Hello world!==="),
+	[{section_title, #{level := 3}, <<"Hello world!====">>, _}] = parse("==== Hello world!===="),
+	[{section_title, #{level := 4}, <<"Hello world!=====">>, _}] = parse("===== Hello world!====="),
 	ok.
 
 title_short_trim_spaces_before_trailer(_) ->
 	doc("Spaces between the title and trailer must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!          ="),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!        =="),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!      ==="),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!    ===="),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!  ====="),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!          ="),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!        =="),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!      ==="),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!    ===="),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!  ====="),
 	ok.
 
 title_short_trim_spaces_after_trailer(_) ->
 	doc("Spaces after the trailer must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! =         "),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! ==       "),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ===     "),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ====   "),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! =         "),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! ==       "),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ===     "),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ====   "),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "),
 	ok.
 
 title_short_trim_spaces_before_after_trailer(_) ->
 	doc("Spaces before and after the trailer must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!     =     "),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!    ==    "),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!   ===   "),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!  ====  "),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!     =     "),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!    ==    "),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!   ===   "),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!  ====  "),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "),
 	ok.
 
 title_short_trim_spaces_before_after_title_trailer(_) ->
 	doc("Spaces before and after both the title and the trailer must be ignored. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world!">>, _}] = parse("=      Hello world!     =     "),
-	[{title, #{level := 1}, <<"Hello world!">>, _}] = parse("==     Hello world!    ==    "),
-	[{title, #{level := 2}, <<"Hello world!">>, _}] = parse("===    Hello world!   ===   "),
-	[{title, #{level := 3}, <<"Hello world!">>, _}] = parse("====   Hello world!  ====  "),
-	[{title, #{level := 4}, <<"Hello world!">>, _}] = parse("=====  Hello world! ===== "),
+	[{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("=      Hello world!     =     "),
+	[{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("==     Hello world!    ==    "),
+	[{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("===    Hello world!   ===   "),
+	[{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("====   Hello world!  ====  "),
+	[{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("=====  Hello world! ===== "),
 	ok.
 
 title_short_wrong_trailer(_) ->
 	doc("The delimiters must be the same size when a trailer is present. (11.2)"),
-	[{title, #{level := 0}, <<"Hello world! ===">>, _}] = parse("= Hello world! ==="),
-	[{title, #{level := 1}, <<"Hello world! ====">>, _}] = parse("== Hello world! ===="),
-	[{title, #{level := 2}, <<"Hello world! =====">>, _}] = parse("=== Hello world! ====="),
-	[{title, #{level := 3}, <<"Hello world! =">>, _}] = parse("==== Hello world! ="),
-	[{title, #{level := 4}, <<"Hello world! ==">>, _}] = parse("===== Hello world! =="),
+	[{section_title, #{level := 0}, <<"Hello world! ===">>, _}] = parse("= Hello world! ==="),
+	[{section_title, #{level := 1}, <<"Hello world! ====">>, _}] = parse("== Hello world! ===="),
+	[{section_title, #{level := 2}, <<"Hello world! =====">>, _}] = parse("=== Hello world! ====="),
+	[{section_title, #{level := 3}, <<"Hello world! =">>, _}] = parse("==== Hello world! ="),
+	[{section_title, #{level := 4}, <<"Hello world! ==">>, _}] = parse("===== Hello world! =="),
 	ok.
 
 %% Normal paragraphs.
@@ -198,13 +201,13 @@ title_short_wrong_trailer(_) ->
 
 paragraph(_) ->
 	doc("Normal paragraph. (15.1)"),
-	[{p, _, <<"Hello world this is a paragraph peace.">>, _}] = parse(
+	[{paragraph, _, <<"Hello world this is a paragraph peace.">>, _}] = parse(
 		"Hello world\n"
 		"this is a paragraph\n"
 		"peace.\n"),
 	[
-		{p, _, <<"Hello world this is a paragraph peace.">>, _},
-		{p, _, <<"This is another paragraph.">>, _}
+		{paragraph, _, <<"Hello world this is a paragraph peace.">>, _},
+		{paragraph, _, <<"This is another paragraph.">>, _}
 	] = parse(
 		"Hello world\n"
 		"this is a paragraph\n"
@@ -215,7 +218,7 @@ paragraph(_) ->
 
 paragraph_title(_) ->
 	doc("Paragraph preceded by a block title. (12, 15.1)"),
-	[{p, #{title := <<"Block title!">>}, <<"Hello world this is a paragraph peace.">>, _}] = parse(
+	[{paragraph, #{<<"title">> := <<"Block title!">>}, <<"Hello world this is a paragraph peace.">>, _}] = parse(
 		".Block title!\n"
 		"Hello world\n"
 		"this is a paragraph\n"
@@ -229,7 +232,7 @@ listing(_) ->
 	Source = <<
 		"init(Req, State) ->\n"
 		"    {ok, Req, State}.">>,
-	[{listing, _, Source, _}] = parse(iolist_to_binary([
+	[{listing_block, _, Source, _}] = parse(iolist_to_binary([
 		"----\n",
 		Source, "\n"
 		"----\n"])),
@@ -237,7 +240,7 @@ listing(_) ->
 
 listing_title(_) ->
 	doc("Listing block with title. (12, 16.2)"),
-	[{listing, #{title := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse(
+	[{listing_block, #{<<"title">> := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse(
 		".Block title!\n"
 		"----\n"
 		"1 = 2.\n"
@@ -249,7 +252,7 @@ listing_filter_source(_) ->
 	Source = <<
 		"init(Req, State) ->\n"
 		"    {ok, Req, State}.">>,
-	[{listing, #{language := <<"erlang">>}, Source, _}] = parse(iolist_to_binary([
+	[{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>}, Source, _}] = parse(iolist_to_binary([
 		"[source,erlang]\n"
 		"----\n",
 		Source, "\n"
@@ -258,13 +261,13 @@ listing_filter_source(_) ->
 
 listing_filter_source_title(_) ->
 	doc("Source code listing filter with title. (12, source-highlight-filter)"),
-	[{listing, #{language := <<"erlang">>, title := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse(
+	[{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>, <<"title">> := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse(
 		".Block title!\n"
 		"[source,erlang]\n"
 		"----\n"
 		"1 = 2.\n"
 		"----\n"),
-	[{listing, #{language := <<"erlang">>, title := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse(
+	[{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>, <<"title">> := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse(
 		"[source,erlang]\n"
 		".Block title!\n"
 		"----\n"
@@ -272,17 +275,27 @@ listing_filter_source_title(_) ->
 		"----\n"),
 	ok.
 
+paragraph_filter_source(_) ->
+	doc("Source code listing filter as a paragraph. (source-highlight-filter)"),
+	Source = <<
+		"init(Req, State) ->\n"
+		"    {ok, Req, State}.">>,
+	[{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>}, Source, _}] = parse(iolist_to_binary([
+		"[source,erlang]\n",
+		Source, "\n"])),
+	ok.
+
 %% Bulleted lists.
 
 unordered_list(_) ->
 	doc("Unoredered lists. (17.1)"),
-	[{ul, _, [
-		{li, _, [{p, _, <<"Hello!">>, _}], _}
+	[{list, #{type := bulleted}, [
+		{list_item, _, [{paragraph, #{}, <<"Hello!">>, _}], _}
 	], _}] = parse("* Hello!"),
-	[{ul, _, [
-		{li, _, [{p, _, <<"Hello!">>, _}], _},
-		{li, _, [{p, _, <<"World!">>, _}], _},
-		{li, _, [{p, _, <<"Hehe.">>, _}], _}
+	[{list, #{type := bulleted}, [
+		{list_item, _, [{paragraph, #{}, <<"Hello!">>, _}], _},
+		{list_item, _, [{paragraph, #{}, <<"World!">>, _}], _},
+		{list_item, _, [{paragraph, #{}, <<"Hehe.">>, _}], _}
 	], _}] = parse(
 		"* Hello!\n"
 		"* World!\n"
@@ -300,68 +313,112 @@ unordered_list(_) ->
 
 labeled_list(_) ->
 	doc("Labeled lists. (17.3)"),
-	[{ll, _, [
-		{li, #{label := <<"The label">>}, [{p, _, <<"The value!">>, _}], _}
+	[{list, #{type := labeled}, [
+		{list_item, #{label := <<"The label">>},
+			[{paragraph, #{}, <<"The value!">>, _}], _}
 	], _}] = parse("The label:: The value!"),
-	%% @todo Currently this returns two ll. This is a bug but it gives
-	%% me the result I want, or close enough, for now.
-	[{ll, _, [
-		{li, #{label := <<"The label">>}, [{p, _, <<"The value!">>, _}], _}
-	], _},
-	{ll, _, [
-		{li, #{label := <<"More labels">>}, [{p, _, <<"More values!">>, _}], _}
+	[{list, #{type := labeled}, [
+		{list_item, #{label := <<"The label">>},
+			[{paragraph, #{}, <<"The value!">>, _}], _},
+		{list_item, #{label := <<"More labels">>},
+			[{paragraph, #{}, <<"More values!">>, _}], _}
 	], _}] = parse(
 		"The label:: The value!\n"
 		"More labels:: More values!\n"),
-	[{ll, _, [
-		{li, #{label := <<"The label">>}, [{p, _, <<"The value!">>, _}], _}
+	[{list, #{type := labeled}, [
+		{list_item, #{label := <<"The label">>},
+			[{paragraph, #{}, <<"The value!">>, _}], _}
 	], _}] = parse(
 		"The label::\n"
 		"\n"
 		"The value!"),
+	[{list, #{type := labeled}, [
+		{list_item, #{label := <<"The label">>},
+			[{paragraph, #{}, <<"The value!">>, _}], _}
+	], _}] = parse(
+		"The label::\n"
+		"    The value!"),
+	[{list, #{type := labeled}, [
+		{list_item, #{label := <<"The label">>}, [
+			{paragraph, _, <<"The value!">>, _},
+			{paragraph, _, <<"With continuations!">>, _},
+			{paragraph, _, <<"OK good.">>, _}
+		], _}
+	], _}] = parse(
+		"The label::\n"
+		"\n"
+		"The value!\n"
+		"+\n"
+		"With continuations!\n"
+		"+\n"
+		"OK good."),
+	[{list, #{type := labeled}, [
+		{list_item, #{label := <<"The label">>}, [
+			{paragraph, #{}, <<"The value!">>, _},
+			{list, #{type := bulleted}, [
+				{list_item, _, [{paragraph, #{}, <<"first list item">>, _}], _},
+				{list_item, _, [{paragraph, #{}, <<"second list item">>, _}], _},
+				{list_item, _, [{paragraph, #{}, <<"third list item">>, _}], _}
+			], _}
+		], _}
+	], _}] = parse(
+		"The label::\n"
+		"\n"
+		"The value!\n"
+		"+\n"
+		"    * first list item\n"
+		"    * second list\n"
+		"      item\n"
+		"    * third list\n"
+		"      item\n"
+		"\n"),
 	ok.
 
-%% @todo Very little was implemented from labeled lists. They need more work.
-
 %% Macros.
 
 rel_link(_) ->
-	doc("Relative links are built using the link:<target>[<caption>] macro. (21.1.3)"),
-	[{p, _, [
-		{rel_link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _}
+	doc("Relative links are built using the link:Target[Caption] macro. (21.1.3)"),
+	[{paragraph, _, [
+		{link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _}
 	], _}] = parse("link:downloads/cowboy-2.0.tgz[2.0]"),
-	[{p, _, [
+	[{paragraph, _, [
 		<<"Download ">>,
-		{rel_link, #{target := <<"downloads/cowboy-2.0.zip">>}, <<"Cowboy 2.0">>, _},
+		{link, #{target := <<"downloads/cowboy-2.0.zip">>}, <<"Cowboy 2.0">>, _},
 		<<" as zip">>
 	], _}] = parse("Download link:downloads/cowboy-2.0.zip[Cowboy 2.0] as zip"),
 	ok.
 
 comment_line(_) ->
 	doc("Lines starting with two slashes are treated as comments. (21.2.3)"),
-	[{comment, _, <<"This is a comment.">>, _}] = parse("// This is a comment."),
-	[{comment, _, <<"This is a comment.">>, _}] = parse("//   This is a comment.  "),
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse("//This is a comment."),
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse("// This is a comment."),
+	[{comment_line, _, <<"This is a comment.">>, _}] = parse("//   This is a comment.  "),
+	[
+		{comment_line, _, <<"First line.">>, _},
+		{comment_line, _, <<"Second line.">>, _}
+	] = parse(
+		"// First line.\n"
+		"// Second line.\n"),
 	ok.
 
 %% Tables. (23)
 
 table(_) ->
-	%% @todo I think I read somewhere that paragraphs are not allowed in cells... Double check.
 	[{table, _, [
 		{row, _, [
-			{cell, _, [{p, _, <<"1">>, _}], _},
-			{cell, _, [{p, _, <<"2">>, _}], _},
-			{cell, _, [{p, _, <<"A">>, _}], _}
+			{cell, _, [{paragraph, _, <<"1">>, _}], _},
+			{cell, _, [{paragraph, _, <<"2">>, _}], _},
+			{cell, _, [{paragraph, _, <<"A">>, _}], _}
 		], _},
 		{row, _, [
-			{cell, _, [{p, _, <<"3">>, _}], _},
-			{cell, _, [{p, _, <<"4">>, _}], _},
-			{cell, _, [{p, _, <<"B">>, _}], _}
+			{cell, _, [{paragraph, _, <<"3">>, _}], _},
+			{cell, _, [{paragraph, _, <<"4">>, _}], _},
+			{cell, _, [{paragraph, _, <<"B">>, _}], _}
 		], _},
 		{row, _, [
-			{cell, _, [{p, _, <<"5">>, _}], _},
-			{cell, _, [{p, _, <<"6">>, _}], _},
-			{cell, _, [{p, _, <<"C">>, _}], _}
+			{cell, _, [{paragraph, _, <<"5">>, _}], _},
+			{cell, _, [{paragraph, _, <<"6">>, _}], _},
+			{cell, _, [{paragraph, _, <<"C">>, _}], _}
 		], _}
 	], _}]= parse(
 		"|=======\n"

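For orientation, the hunks above track a rename of the parser's AST tags in upstream 0.2.0: title becomes section_title, p becomes paragraph, ul/ll/li become list (with a type of bulleted or labeled) and list_item, listing becomes listing_block, rel_link becomes link, and comment becomes comment_line. Block attributes also change shape: the block title moves to a binary key (<<"title">>) and positional attributes such as [source,erlang] are exposed under integer keys. Below is a minimal sketch of the shapes the updated expectations match; it is illustrative only, and the Parse fun is a stand-in for the suite's local parse/1 helper, which is not shown in this excerpt of the diff.

%% Illustrative sketch, not part of the upstream change. Parse stands in
%% for the test suite's parse/1 helper (not shown in this excerpt).
-module(ast_rename_sketch).
-export([check/1]).

check(Parse) ->
	%% Section titles now use the section_title tag and a level attribute.
	[{section_title, #{level := 1}, <<"Hello world!">>, _}] =
		Parse("== Hello world!"),
	%% Plain text becomes a paragraph block.
	[{paragraph, _, <<"Just a paragraph.">>, _}] =
		Parse("Just a paragraph."),
	%% [source,erlang] listings are listing_block with positional attributes.
	[{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>}, <<"1 = 2.">>, _}] =
		Parse("[source,erlang]\n----\n1 = 2.\n----\n"),
	ok.

As in the common_test cases above, a shape mismatch fails with badmatch, so each clause doubles as an assertion on the new node names.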